/* Exception handling semantics and decomposition for trees.
   Copyright (C) 2003, 2004, 2005, 2006, 2007, 2008, 2009
   Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 3, or (at your option)
any later version.

GCC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "tree.h"
#include "rtl.h"
#include "tm_p.h"
#include "flags.h"
#include "function.h"
#include "except.h"
#include "tree-flow.h"
#include "tree-dump.h"
#include "tree-inline.h"
#include "tree-iterator.h"
#include "tree-pass.h"
#include "timevar.h"
#include "langhooks.h"
#include "ggc.h"
#include "toplev.h"
#include "gimple.h"

/* In some instances a tree and a gimple statement need to be stored in
   the same table, i.e. in hash tables.  This is a structure to do this.  */
typedef union {tree *tp; tree t; gimple g;} treemple;

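/* For illustration only (names invented for exposition): the same
   treemple-keyed entry can hold a LABEL_DECL one moment and a
   GIMPLE_TRY statement the next, e.g.

     treemple key;
     key.t = label_decl;     key used as a tree
     key.g = try_stmt;       key used as a gimple statement

   Every member overlays the same pointer value, which is what the
   struct_ptr_hash/struct_ptr_eq routines below rely on.  */
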
/* Nonzero if we are using EH to handle cleanups.  */
static int using_eh_for_cleanups_p = 0;

void
using_eh_for_cleanups (void)
{
  using_eh_for_cleanups_p = 1;
}

/* Misc functions used in this file.  */

/* Compare and hash for any structure which begins with a canonical
   pointer.  Assumes all pointers are interchangeable, which is sort
   of already assumed by gcc elsewhere IIRC.  */

static int
struct_ptr_eq (const void *a, const void *b)
{
  const void * const * x = (const void * const *) a;
  const void * const * y = (const void * const *) b;
  return *x == *y;
}

static hashval_t
struct_ptr_hash (const void *a)
{
  const void * const * x = (const void * const *) a;
  return (size_t)*x >> 4;
}


/* Remember and lookup EH region data for arbitrary statements.
   Really this means any statement that could_throw_p.  We could
   stuff this information into the stmt_ann data structure, but:

   (1) We absolutely rely on this information being kept until
   we get to rtl.  Once we're done with lowering here, if we lose
   the information there's no way to recover it!

   (2) There are many more statements that *cannot* throw as
   compared to those that can.  We should be saving some amount
   of space by only allocating memory for those that can throw.  */

static void
record_stmt_eh_region (struct eh_region_d *region, gimple t)
{
  if (!region)
    return;

  add_stmt_to_eh_region (t, get_eh_region_number (region));
}

/* Add statement T in function IFUN to EH region NUM.  */

void
add_stmt_to_eh_region_fn (struct function *ifun, gimple t, int num)
{
  struct throw_stmt_node *n;
  void **slot;

  gcc_assert (num >= 0);
  gcc_assert (gimple_code (t) != GIMPLE_RESX);

  n = GGC_NEW (struct throw_stmt_node);
  n->stmt = t;
  n->region_nr = num;

  if (!get_eh_throw_stmt_table (ifun))
    set_eh_throw_stmt_table (ifun, htab_create_ggc (31, struct_ptr_hash,
                                                    struct_ptr_eq,
                                                    ggc_free));

  slot = htab_find_slot (get_eh_throw_stmt_table (ifun), n, INSERT);
  gcc_assert (!*slot);
  *slot = n;
}

/* Add statement T in the current function (cfun) to EH region number
   NUM.  */

void
add_stmt_to_eh_region (gimple t, int num)
{
  add_stmt_to_eh_region_fn (cfun, t, num);
}

/* Remove statement T in function IFUN from the EH region holding it.  */

bool
remove_stmt_from_eh_region_fn (struct function *ifun, gimple t)
{
  struct throw_stmt_node dummy;
  void **slot;

  if (!get_eh_throw_stmt_table (ifun))
    return false;

  dummy.stmt = t;
  slot = htab_find_slot (get_eh_throw_stmt_table (ifun), &dummy,
                         NO_INSERT);
  if (slot)
    {
      htab_clear_slot (get_eh_throw_stmt_table (ifun), slot);
      return true;
    }
  else
    return false;
}

/* Remove statement T in the current function (cfun) from the EH
   region holding it.  */

bool
remove_stmt_from_eh_region (gimple t)
{
  return remove_stmt_from_eh_region_fn (cfun, t);
}

/* Determine if statement T is inside an EH region in function IFUN.
   Return the EH region number if found, return -2 if IFUN does not
   have an EH table and -1 if T could not be found in IFUN's EH region
   table.  */

int
lookup_stmt_eh_region_fn (struct function *ifun, gimple t)
{
  struct throw_stmt_node *p, n;

  if (!get_eh_throw_stmt_table (ifun))
    return -2;

  n.stmt = t;
  p = (struct throw_stmt_node *) htab_find (get_eh_throw_stmt_table (ifun), &n);
  return (p ? p->region_nr : -1);
}

/* Determine if statement T is inside an EH region in the current
   function (cfun).  Return the EH region number if found, return -2
   if cfun does not have an EH table and -1 if T could not be found in
   cfun's EH region table.  */

int
lookup_stmt_eh_region (gimple t)
{
  /* We can get called from initialized data when -fnon-call-exceptions
     is on; prevent crash.  */
  if (!cfun)
    return -1;

  return lookup_stmt_eh_region_fn (cfun, t);
}

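/* For illustration only (STMT is a stand-in name): a typical caller
   checks the sign of the result to distinguish the three outcomes,
   e.g.

     int rn = lookup_stmt_eh_region (stmt);
     if (rn >= 0)
       ... stmt lies in EH region RN ...
     else
       ... not in a region (-1), or no EH table at all (-2) ...  */
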
/* Determine if expression T is inside an EH region in the current
   function (cfun).  Return the EH region number if found, return -2
   if cfun does not have an EH table and -1 if T could not be found in
   cfun's EH region table.  */

int
lookup_expr_eh_region (tree t)
{
  /* We can get called from initialized data when -fnon-call-exceptions
     is on; prevent crash.  */
  if (!cfun)
    return -1;

  if (!get_eh_throw_stmt_table (cfun))
    return -2;

  if (t && EXPR_P (t))
    {
      tree_ann_common_t ann = tree_common_ann (t);
      if (ann)
        return (int) ann->rn;
    }

  return -1;
}

/* First pass of EH node decomposition.  Build up a tree of GIMPLE_TRY_FINALLY
   nodes and LABEL_DECL nodes.  We will use this during the second phase to
   determine if a goto leaves the body of a TRY_FINALLY_EXPR node.  */

struct finally_tree_node
{
  /* When storing a GIMPLE_TRY, we have to record a gimple.  However
     when deciding whether a GOTO to a certain LABEL_DECL (which is a
     tree) leaves the TRY block, it is necessary to record a tree in
     this field.  Thus a treemple is used.  */
  treemple child;
  gimple parent;
};

/* Note that this table is *not* marked GTY.  It is short-lived.  */
static htab_t finally_tree;

static void
record_in_finally_tree (treemple child, gimple parent)
{
  struct finally_tree_node *n;
  void **slot;

  n = XNEW (struct finally_tree_node);
  n->child = child;
  n->parent = parent;

  slot = htab_find_slot (finally_tree, n, INSERT);
  gcc_assert (!*slot);
  *slot = n;
}

static void
collect_finally_tree (gimple stmt, gimple region);

/* Go through the gimple sequence.  Works with collect_finally_tree to
   record all GIMPLE_LABEL and GIMPLE_TRY statements.  */

static void
collect_finally_tree_1 (gimple_seq seq, gimple region)
{
  gimple_stmt_iterator gsi;

  for (gsi = gsi_start (seq); !gsi_end_p (gsi); gsi_next (&gsi))
    collect_finally_tree (gsi_stmt (gsi), region);
}

static void
collect_finally_tree (gimple stmt, gimple region)
{
  treemple temp;

  switch (gimple_code (stmt))
    {
    case GIMPLE_LABEL:
      temp.t = gimple_label_label (stmt);
      record_in_finally_tree (temp, region);
      break;

    case GIMPLE_TRY:
      if (gimple_try_kind (stmt) == GIMPLE_TRY_FINALLY)
        {
          temp.g = stmt;
          record_in_finally_tree (temp, region);
          collect_finally_tree_1 (gimple_try_eval (stmt), stmt);
          collect_finally_tree_1 (gimple_try_cleanup (stmt), region);
        }
      else if (gimple_try_kind (stmt) == GIMPLE_TRY_CATCH)
        {
          collect_finally_tree_1 (gimple_try_eval (stmt), region);
          collect_finally_tree_1 (gimple_try_cleanup (stmt), region);
        }
      break;

    case GIMPLE_CATCH:
      collect_finally_tree_1 (gimple_catch_handler (stmt), region);
      break;

    case GIMPLE_EH_FILTER:
      collect_finally_tree_1 (gimple_eh_filter_failure (stmt), region);
      break;

    default:
      /* A type, a decl, or some kind of statement that we're not
         interested in.  Don't walk them.  */
      break;
    }
}

726a989a | 320 | |
6de9cd9a DN |
321 | /* Use the finally tree to determine if a jump from START to TARGET |
322 | would leave the try_finally node that START lives in. */ | |
323 | ||
324 | static bool | |
726a989a | 325 | outside_finally_tree (treemple start, gimple target) |
6de9cd9a DN |
326 | { |
327 | struct finally_tree_node n, *p; | |
328 | ||
329 | do | |
330 | { | |
331 | n.child = start; | |
858904db | 332 | p = (struct finally_tree_node *) htab_find (finally_tree, &n); |
6de9cd9a DN |
333 | if (!p) |
334 | return true; | |
726a989a | 335 | start.g = p->parent; |
6de9cd9a | 336 | } |
726a989a | 337 | while (start.g != target); |
6de9cd9a DN |
338 | |
339 | return false; | |
340 | } | |
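
/* For illustration only (an invented nesting): given

     try {                          T1, a GIMPLE_TRY_FINALLY
       L1:
       try {                        T2, a GIMPLE_TRY_FINALLY
         goto L1;
       } finally { ... }
     } finally { ... }

   the first pass records L1 and T2 with parent T1.  For the goto,
   outside_finally_tree starts at L1 and climbs parents (L1 -> T1 ->
   nothing) without ever reaching T2, so the jump is known to leave
   T2 and must be routed through T2's finally block.  */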

/* Second pass of EH node decomposition.  Actually transform the GIMPLE_TRY
   nodes into a set of gotos, magic labels, and eh regions.
   The eh region creation is straightforward, but frobbing all the gotos
   and such into shape isn't.  */

/* The GOTO_QUEUE is an array of GIMPLE_GOTO and GIMPLE_RETURN
   statements that are seen to escape this GIMPLE_TRY_FINALLY node.
   The idea is to record a gimple statement for everything except for
   the conditionals, which get their labels recorded.  Since labels are
   of type 'tree', we need this node to store both gimple and tree
   objects.  REPL_STMT is the sequence used to replace the goto/return
   statement.  CONT_STMT is used to store the statement that allows
   the return/goto to jump to the original destination.  */

struct goto_queue_node
{
  treemple stmt;
  gimple_seq repl_stmt;
  gimple cont_stmt;
  int index;
  /* This is used when index >= 0 to indicate that stmt is a label (as
     opposed to a goto stmt).  */
  int is_label;
};

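/* For illustration only (invented values): a "return;" escaping a
   try/finally is queued roughly as

     q->stmt.g    = the GIMPLE_RETURN statement
     q->index     = -1    (returns do not use a dest_array slot)
     q->is_label  = 0
     q->repl_stmt = later becomes "<mod stmts>; goto <finally label>;"
     q->cont_stmt = the original return, re-emitted past the finally

   whereas a goto escaping to label D gets index >= 0, the position
   of D in dest_array, and is_label = 1.  */
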
/* State of the world while lowering.  */

struct leh_state
{
  /* What's "current" while constructing the eh region tree.  These
     correspond to variables of the same name in cfun->eh, which we
     don't have easy access to.  */
  struct eh_region_d *cur_region;

  /* Processing of TRY_FINALLY requires a bit more state.  This is
     split out into a separate structure so that we don't have to
     copy so much when processing other nodes.  */
  struct leh_tf_state *tf;
};

struct leh_tf_state
{
  /* Pointer to the GIMPLE_TRY_FINALLY node under discussion.  The
     try_finally_expr is the original GIMPLE_TRY_FINALLY.  We need to retain
     this so that outside_finally_tree can reliably reference the tree used
     in the collect_finally_tree data structures.  */
  gimple try_finally_expr;
  gimple top_p;
  /* While lowering, top_p is usually expanded into multiple statements,
     so we need the following field to store them.  */
  gimple_seq top_p_seq;

  /* The state outside this try_finally node.  */
  struct leh_state *outer;

  /* The exception region created for it.  */
  struct eh_region_d *region;

  /* The goto queue.  */
  struct goto_queue_node *goto_queue;
  size_t goto_queue_size;
  size_t goto_queue_active;

  /* Pointer map to help in searching goto_queue when it is large.  */
  struct pointer_map_t *goto_queue_map;

  /* The set of unique labels seen as entries in the goto queue.  */
  VEC(tree,heap) *dest_array;

  /* A label to be added at the end of the completed transformed
     sequence.  It will be set if may_fallthru was true *at one time*,
     though subsequent transformations may have cleared that flag.  */
  tree fallthru_label;

  /* A label that has been registered with except.c to be the
     landing pad for this try block.  */
  tree eh_label;

  /* True if it is possible to fall out the bottom of the try block.
     Cleared if the fallthru is converted to a goto.  */
  bool may_fallthru;

  /* True if any entry in goto_queue is a GIMPLE_RETURN.  */
  bool may_return;

  /* True if the finally block can receive an exception edge.
     Cleared if the exception case is handled by code duplication.  */
  bool may_throw;
};

static gimple_seq lower_eh_filter (struct leh_state *, gimple);

/* Search for STMT in the goto queue.  Return the replacement,
   or null if the statement isn't in the queue.  */

#define LARGE_GOTO_QUEUE 20

static void lower_eh_constructs_1 (struct leh_state *state, gimple_seq seq);

static gimple_seq
find_goto_replacement (struct leh_tf_state *tf, treemple stmt)
{
  unsigned int i;
  void **slot;

  if (tf->goto_queue_active < LARGE_GOTO_QUEUE)
    {
      for (i = 0; i < tf->goto_queue_active; i++)
        if (tf->goto_queue[i].stmt.g == stmt.g)
          return tf->goto_queue[i].repl_stmt;
      return NULL;
    }

  /* If we have a large number of entries in the goto_queue, create a
     pointer map and use that for searching.  */

  if (!tf->goto_queue_map)
    {
      tf->goto_queue_map = pointer_map_create ();
      for (i = 0; i < tf->goto_queue_active; i++)
        {
          slot = pointer_map_insert (tf->goto_queue_map,
                                     tf->goto_queue[i].stmt.g);
          gcc_assert (*slot == NULL);
          *slot = &tf->goto_queue[i];
        }
    }

  slot = pointer_map_contains (tf->goto_queue_map, stmt.g);
  if (slot != NULL)
    return ((struct goto_queue_node *) *slot)->repl_stmt;

  return NULL;
}

/* A subroutine of replace_goto_queue_1.  Handles the sub-clauses of a
   lowered GIMPLE_COND.  If, by chance, the replacement is a simple goto,
   then we can just splat it in, otherwise we add the new stmts immediately
   after the GIMPLE_COND and redirect.  */

static void
replace_goto_queue_cond_clause (tree *tp, struct leh_tf_state *tf,
                                gimple_stmt_iterator *gsi)
{
  tree label;
  gimple_seq new_seq;
  treemple temp;
  location_t loc = gimple_location (gsi_stmt (*gsi));

  temp.tp = tp;
  new_seq = find_goto_replacement (tf, temp);
  if (!new_seq)
    return;

  if (gimple_seq_singleton_p (new_seq)
      && gimple_code (gimple_seq_first_stmt (new_seq)) == GIMPLE_GOTO)
    {
      *tp = gimple_goto_dest (gimple_seq_first_stmt (new_seq));
      return;
    }

  label = create_artificial_label (loc);
  /* Set the new label for the GIMPLE_COND.  */
  *tp = label;

  gsi_insert_after (gsi, gimple_build_label (label), GSI_CONTINUE_LINKING);
  gsi_insert_seq_after (gsi, gimple_seq_copy (new_seq), GSI_CONTINUE_LINKING);
}

/* The real work of replace_goto_queue.  Returns with GSI updated to
   point to the next statement.  */

static void replace_goto_queue_stmt_list (gimple_seq, struct leh_tf_state *);

static void
replace_goto_queue_1 (gimple stmt, struct leh_tf_state *tf,
                      gimple_stmt_iterator *gsi)
{
  gimple_seq seq;
  treemple temp;
  temp.g = NULL;

  switch (gimple_code (stmt))
    {
    case GIMPLE_GOTO:
    case GIMPLE_RETURN:
      temp.g = stmt;
      seq = find_goto_replacement (tf, temp);
      if (seq)
        {
          gsi_insert_seq_before (gsi, gimple_seq_copy (seq), GSI_SAME_STMT);
          gsi_remove (gsi, false);
          return;
        }
      break;

    case GIMPLE_COND:
      replace_goto_queue_cond_clause (gimple_op_ptr (stmt, 2), tf, gsi);
      replace_goto_queue_cond_clause (gimple_op_ptr (stmt, 3), tf, gsi);
      break;

    case GIMPLE_TRY:
      replace_goto_queue_stmt_list (gimple_try_eval (stmt), tf);
      replace_goto_queue_stmt_list (gimple_try_cleanup (stmt), tf);
      break;
    case GIMPLE_CATCH:
      replace_goto_queue_stmt_list (gimple_catch_handler (stmt), tf);
      break;
    case GIMPLE_EH_FILTER:
      replace_goto_queue_stmt_list (gimple_eh_filter_failure (stmt), tf);
      break;

    default:
      /* These won't have gotos in them.  */
      break;
    }

  gsi_next (gsi);
}

/* A subroutine of replace_goto_queue.  Handles GIMPLE_SEQ.  */

static void
replace_goto_queue_stmt_list (gimple_seq seq, struct leh_tf_state *tf)
{
  gimple_stmt_iterator gsi = gsi_start (seq);

  while (!gsi_end_p (gsi))
    replace_goto_queue_1 (gsi_stmt (gsi), tf, &gsi);
}

/* Replace all goto queue members.  */

static void
replace_goto_queue (struct leh_tf_state *tf)
{
  if (tf->goto_queue_active == 0)
    return;
  replace_goto_queue_stmt_list (tf->top_p_seq, tf);
}

/* Add a new record to the goto queue contained in TF.  NEW_STMT is the
   data to be added, IS_LABEL indicates whether NEW_STMT is a label or
   a gimple return.  */

static void
record_in_goto_queue (struct leh_tf_state *tf,
                      treemple new_stmt,
                      int index,
                      bool is_label)
{
  size_t active, size;
  struct goto_queue_node *q;

  gcc_assert (!tf->goto_queue_map);

  active = tf->goto_queue_active;
  size = tf->goto_queue_size;
  if (active >= size)
    {
      size = (size ? size * 2 : 32);
      tf->goto_queue_size = size;
      tf->goto_queue
        = XRESIZEVEC (struct goto_queue_node, tf->goto_queue, size);
    }

  q = &tf->goto_queue[active];
  tf->goto_queue_active = active + 1;

  memset (q, 0, sizeof (*q));
  q->stmt = new_stmt;
  q->index = index;
  q->is_label = is_label;
}

/* Record the LABEL label in the goto queue contained in TF.
   TF is not null.  */

static void
record_in_goto_queue_label (struct leh_tf_state *tf, treemple stmt, tree label)
{
  int index;
  treemple temp, new_stmt;

  if (!label)
    return;

  /* Computed and non-local gotos do not get processed.  Given
     their nature we can neither tell whether we've escaped the
     finally block nor redirect them if we knew.  */
  if (TREE_CODE (label) != LABEL_DECL)
    return;

  /* No need to record gotos that don't leave the try block.  */
  temp.t = label;
  if (!outside_finally_tree (temp, tf->try_finally_expr))
    return;

  if (! tf->dest_array)
    {
      tf->dest_array = VEC_alloc (tree, heap, 10);
      VEC_quick_push (tree, tf->dest_array, label);
      index = 0;
    }
  else
    {
      int n = VEC_length (tree, tf->dest_array);
      for (index = 0; index < n; ++index)
        if (VEC_index (tree, tf->dest_array, index) == label)
          break;
      if (index == n)
        VEC_safe_push (tree, heap, tf->dest_array, label);
    }

  /* In the case of a GOTO we want to record the destination label,
     since with a GIMPLE_COND we have easy access to the then/else
     labels.  */
  new_stmt = stmt;
  record_in_goto_queue (tf, new_stmt, index, true);
}

/* For any GIMPLE_GOTO or GIMPLE_RETURN, decide whether it leaves a try_finally
   node, and if so record that fact in the goto queue associated with that
   try_finally node.  */

static void
maybe_record_in_goto_queue (struct leh_state *state, gimple stmt)
{
  struct leh_tf_state *tf = state->tf;
  treemple new_stmt;

  if (!tf)
    return;

  switch (gimple_code (stmt))
    {
    case GIMPLE_COND:
      new_stmt.tp = gimple_op_ptr (stmt, 2);
      record_in_goto_queue_label (tf, new_stmt, gimple_cond_true_label (stmt));
      new_stmt.tp = gimple_op_ptr (stmt, 3);
      record_in_goto_queue_label (tf, new_stmt, gimple_cond_false_label (stmt));
      break;
    case GIMPLE_GOTO:
      new_stmt.g = stmt;
      record_in_goto_queue_label (tf, new_stmt, gimple_goto_dest (stmt));
      break;

    case GIMPLE_RETURN:
      tf->may_return = true;
      new_stmt.g = stmt;
      record_in_goto_queue (tf, new_stmt, -1, false);
      break;

    default:
      gcc_unreachable ();
    }
}

#ifdef ENABLE_CHECKING
/* We do not process GIMPLE_SWITCHes for now.  As long as the original source
   was in fact structured, and we've not yet done jump threading, then none
   of the labels will leave outer GIMPLE_TRY_FINALLY nodes.  Verify this.  */

static void
verify_norecord_switch_expr (struct leh_state *state, gimple switch_expr)
{
  struct leh_tf_state *tf = state->tf;
  size_t i, n;

  if (!tf)
    return;

  n = gimple_switch_num_labels (switch_expr);

  for (i = 0; i < n; ++i)
    {
      treemple temp;
      tree lab = CASE_LABEL (gimple_switch_label (switch_expr, i));
      temp.t = lab;
      gcc_assert (!outside_finally_tree (temp, tf->try_finally_expr));
    }
}
#else
#define verify_norecord_switch_expr(state, switch_expr)
#endif

/* Redirect a RETURN_EXPR pointed to by Q to FINLAB.  Place in Q->CONT_STMT
   whatever is needed to finish the return.  If MOD is non-null, insert it
   before the new branch.  RETURN_VALUE_P is a cache containing a temporary
   variable to be used in manipulating the value returned from the function.  */

static void
do_return_redirection (struct goto_queue_node *q, tree finlab, gimple_seq mod,
                       tree *return_value_p)
{
  tree ret_expr;
  gimple x;

  /* In the case of a return, the queue node must be a gimple statement.  */
  gcc_assert (!q->is_label);

  ret_expr = gimple_return_retval (q->stmt.g);

  if (ret_expr)
    {
      if (!*return_value_p)
        *return_value_p = ret_expr;
      else
        gcc_assert (*return_value_p == ret_expr);
      q->cont_stmt = q->stmt.g;
      /* The nasty part about redirecting the return value is that the
         return value itself is to be computed before the FINALLY block
         is executed.  e.g.

            int x;
            int foo (void)
            {
              x = 0;
              try {
                return x;
              } finally {
                x++;
              }
            }

         should return 0, not 1.  Arrange for this to happen by copying
         the computed return value into a local temporary.  This also
         allows us to redirect multiple return statements through the
         same destination block; whether this is a net win or not really
         depends, I guess, but it does make generation of the switch in
         lower_try_finally_switch easier.  */

      if (TREE_CODE (ret_expr) == RESULT_DECL)
        {
          if (!*return_value_p)
            *return_value_p = ret_expr;
          else
            gcc_assert (*return_value_p == ret_expr);
          q->cont_stmt = q->stmt.g;
        }
      else
        gcc_unreachable ();
    }
  else
    /* If we don't return a value, all return statements are the same.  */
    q->cont_stmt = q->stmt.g;

  if (!q->repl_stmt)
    q->repl_stmt = gimple_seq_alloc ();

  if (mod)
    gimple_seq_add_seq (&q->repl_stmt, mod);

  x = gimple_build_goto (finlab);
  gimple_seq_add_stmt (&q->repl_stmt, x);
}

/* Similar, but easier, for GIMPLE_GOTO.  */

static void
do_goto_redirection (struct goto_queue_node *q, tree finlab, gimple_seq mod,
                     struct leh_tf_state *tf)
{
  gimple x;

  gcc_assert (q->is_label);
  if (!q->repl_stmt)
    q->repl_stmt = gimple_seq_alloc ();

  q->cont_stmt = gimple_build_goto (VEC_index (tree, tf->dest_array, q->index));

  if (mod)
    gimple_seq_add_seq (&q->repl_stmt, mod);

  x = gimple_build_goto (finlab);
  gimple_seq_add_stmt (&q->repl_stmt, x);
}

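/* For illustration only (invented labels): after redirection, a queued
   "goto D;" has, roughly,

     q->repl_stmt:  <mod stmts, if any>; goto finlab;
     q->cont_stmt:  goto D;

   replace_goto_queue later splices repl_stmt in place of the original
   goto, and the lower_try_finally_* routines decide where cont_stmt is
   re-emitted after the finally block runs.  */
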
/* We want to transform
        try { body; } catch { stuff; }
   to
        body; goto over; lab: stuff; over:

   TP is a GIMPLE_TRY node.  LAB is the label that
   should be placed before the second operand, or NULL.  OVER is
   an existing label that should be put at the exit, or NULL.  */

static gimple_seq
frob_into_branch_around (gimple tp, tree lab, tree over)
{
  gimple x;
  gimple_seq cleanup, result;
  location_t loc = gimple_location (tp);

  cleanup = gimple_try_cleanup (tp);
  result = gimple_try_eval (tp);

  if (gimple_seq_may_fallthru (result))
    {
      if (!over)
        over = create_artificial_label (loc);
      x = gimple_build_goto (over);
      gimple_seq_add_stmt (&result, x);
    }

  if (lab)
    {
      x = gimple_build_label (lab);
      gimple_seq_add_stmt (&result, x);
    }

  gimple_seq_add_seq (&result, cleanup);

  if (over)
    {
      x = gimple_build_label (over);
      gimple_seq_add_stmt (&result, x);
    }
  return result;
}

/* A subroutine of lower_try_finally.  Duplicate the gimple sequence SEQ.
   Make sure to record all new labels found.  */

static gimple_seq
lower_try_finally_dup_block (gimple_seq seq, struct leh_state *outer_state)
{
  gimple region = NULL;
  gimple_seq new_seq;

  new_seq = copy_gimple_seq_and_replace_locals (seq);

  if (outer_state->tf)
    region = outer_state->tf->try_finally_expr;
  collect_finally_tree_1 (new_seq, region);

  return new_seq;
}

/* A subroutine of lower_try_finally.  Create a fallthru label for
   the given try_finally state.  The only tricky bit here is that
   we have to make sure to record the label in our outer context.  */

static tree
lower_try_finally_fallthru_label (struct leh_tf_state *tf)
{
  tree label = tf->fallthru_label;
  treemple temp;

  if (!label)
    {
      label = create_artificial_label (gimple_location (tf->try_finally_expr));
      tf->fallthru_label = label;
      if (tf->outer->tf)
        {
          temp.t = label;
          record_in_finally_tree (temp, tf->outer->tf->try_finally_expr);
        }
    }
  return label;
}

/* A subroutine of lower_try_finally.  If lang_protect_cleanup_actions
   returns non-null, then the language requires that the exception path out
   of a try_finally be treated specially.  To wit: the code within the
   finally block may not itself throw an exception.  We have two choices here.
   First we can duplicate the finally block and wrap it in a must_not_throw
   region.  Second, we can generate code like

        try {
          finally_block;
        } catch {
          if (fintmp == eh_edge)
            protect_cleanup_actions;
        }

   where "fintmp" is the temporary used in the switch statement generation
   alternative considered below.  For the nonce, we always choose the first
   option.

   THIS_STATE may be null if this is a try-cleanup, not a try-finally.  */

static void
honor_protect_cleanup_actions (struct leh_state *outer_state,
                               struct leh_state *this_state,
                               struct leh_tf_state *tf)
{
  gimple protect_cleanup_actions;
  gimple_stmt_iterator gsi;
  bool finally_may_fallthru;
  gimple_seq finally;
  gimple x;

  /* First check for nothing to do.  */
  if (lang_protect_cleanup_actions)
    protect_cleanup_actions = lang_protect_cleanup_actions ();
  else
    protect_cleanup_actions = NULL;

  finally = gimple_try_cleanup (tf->top_p);

  /* If the EH case of the finally block can fall through, this may be a
     structure of the form
        try {
          try {
            throw ...;
          } cleanup {
            try {
              throw ...;
            } catch (...) {
            }
          }
        } catch (...) {
          yyy;
        }
     E.g. with an inline destructor with an embedded try block.  In this
     case we must save the runtime EH data around the nested exception.

     This complication means that any time the previous runtime data might
     be used (via fallthru from the finally) we handle the eh case here,
     whether or not protect_cleanup_actions is active.  */

  finally_may_fallthru = gimple_seq_may_fallthru (finally);
  if (!finally_may_fallthru && !protect_cleanup_actions)
    return;

  /* Duplicate the FINALLY block.  Only need to do this for try-finally,
     and not for cleanups.  */
  if (this_state)
    finally = lower_try_finally_dup_block (finally, outer_state);

  /* If this cleanup consists of a TRY_CATCH_EXPR with TRY_CATCH_IS_CLEANUP
     set, the handler of the TRY_CATCH_EXPR is another cleanup which ought
     to be in an enclosing scope, but needs to be implemented at this level
     to avoid a nesting violation (see wrap_temporary_cleanups in
     cp/decl.c).  Since it's logically at an outer level, we should call
     terminate before we get to it, so strip it away before adding the
     MUST_NOT_THROW filter.  */
  gsi = gsi_start (finally);
  x = gsi_stmt (gsi);
  if (protect_cleanup_actions
      && gimple_code (x) == GIMPLE_TRY
      && gimple_try_kind (x) == GIMPLE_TRY_CATCH
      && gimple_try_catch_is_cleanup (x))
    {
      gsi_insert_seq_before (&gsi, gimple_try_eval (x), GSI_SAME_STMT);
      gsi_remove (&gsi, false);
    }

  /* Resume execution after the exception.  Adding this now lets
     lower_eh_filter not add unnecessary gotos, as it is clear that
     we never fallthru from this copy of the finally block.  */
  if (finally_may_fallthru)
    {
      tree save_eptr, save_filt;
      tree tmp;

      save_eptr = create_tmp_var (ptr_type_node, "save_eptr");
      save_filt = create_tmp_var (integer_type_node, "save_filt");

      gsi = gsi_start (finally);
      tmp = build0 (EXC_PTR_EXPR, ptr_type_node);
      x = gimple_build_assign (save_eptr, tmp);
      gsi_insert_before (&gsi, x, GSI_CONTINUE_LINKING);

      tmp = build0 (FILTER_EXPR, integer_type_node);
      x = gimple_build_assign (save_filt, tmp);
      gsi_insert_before (&gsi, x, GSI_CONTINUE_LINKING);

      gsi = gsi_last (finally);
      tmp = build0 (EXC_PTR_EXPR, ptr_type_node);
      x = gimple_build_assign (tmp, save_eptr);
      gsi_insert_after (&gsi, x, GSI_CONTINUE_LINKING);

      tmp = build0 (FILTER_EXPR, integer_type_node);
      x = gimple_build_assign (tmp, save_filt);
      gsi_insert_after (&gsi, x, GSI_CONTINUE_LINKING);

      x = gimple_build_resx (get_eh_region_number (tf->region));
      gsi_insert_after (&gsi, x, GSI_CONTINUE_LINKING);
    }

  /* Wrap the block with protect_cleanup_actions as the action.  */
  if (protect_cleanup_actions)
    {
      gimple_seq seq = NULL, failure = NULL;

      gimple_seq_add_stmt (&failure, protect_cleanup_actions);
      x = gimple_build_eh_filter (NULL, failure);
      gimple_eh_filter_set_must_not_throw (x, 1);

      gimple_seq_add_stmt (&seq, x);
      x = gimple_build_try (finally, seq, GIMPLE_TRY_CATCH);
      finally = lower_eh_filter (outer_state, x);
    }
  else
    lower_eh_constructs_1 (outer_state, finally);

  /* Hook this up to the end of the existing try block.  If we
     previously fell through the end, we'll have to branch around.
     This means adding a new goto, and adding it to the queue.  */

  gsi = gsi_last (gimple_try_eval (tf->top_p));

  if (tf->may_fallthru)
    {
      tree tmp;
      tmp = lower_try_finally_fallthru_label (tf);
      x = gimple_build_goto (tmp);
      gsi_insert_after (&gsi, x, GSI_CONTINUE_LINKING);

      if (this_state)
        maybe_record_in_goto_queue (this_state, x);

      tf->may_fallthru = false;
    }

  x = gimple_build_label (tf->eh_label);
  gsi_insert_after (&gsi, x, GSI_CONTINUE_LINKING);
  gsi_insert_seq_after (&gsi, finally, GSI_CONTINUE_LINKING);

  /* Having now been handled, EH isn't to be considered with
     the rest of the outgoing edges.  */
  tf->may_throw = false;
}

/* A subroutine of lower_try_finally.  We have determined that there is
   no fallthru edge out of the finally block.  This means that there is
   no outgoing edge corresponding to any incoming edge.  Restructure the
   try_finally node for this special case.  */

static void
lower_try_finally_nofallthru (struct leh_state *state,
                              struct leh_tf_state *tf)
{
  tree lab, return_val;
  gimple x;
  gimple_seq finally;
  struct goto_queue_node *q, *qe;

  if (tf->may_throw)
    lab = tf->eh_label;
  else
    lab = create_artificial_label (gimple_location (tf->try_finally_expr));

  /* We expect that tf->top_p is a GIMPLE_TRY.  */
  finally = gimple_try_cleanup (tf->top_p);
  tf->top_p_seq = gimple_try_eval (tf->top_p);

  x = gimple_build_label (lab);
  gimple_seq_add_stmt (&tf->top_p_seq, x);

  return_val = NULL;
  q = tf->goto_queue;
  qe = q + tf->goto_queue_active;
  for (; q < qe; ++q)
    if (q->index < 0)
      do_return_redirection (q, lab, NULL, &return_val);
    else
      do_goto_redirection (q, lab, NULL, tf);

  replace_goto_queue (tf);

  lower_eh_constructs_1 (state, finally);
  gimple_seq_add_seq (&tf->top_p_seq, finally);
}

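/* For illustration only (a sketch, not compiler output): with no
   fallthru, every escaping edge is funneled to one label at the head
   of the single finally copy, roughly

     <try body>
     lab:
     <finally block>

   where each escaping goto/return was rewritten to "goto lab" and the
   finally block itself never falls through.  */
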
/* A subroutine of lower_try_finally.  We have determined that there is
   exactly one destination of the finally block.  Restructure the
   try_finally node for this special case.  */

static void
lower_try_finally_onedest (struct leh_state *state, struct leh_tf_state *tf)
{
  struct goto_queue_node *q, *qe;
  gimple x;
  gimple_seq finally;
  tree finally_label;
  location_t loc = gimple_location (tf->try_finally_expr);

  finally = gimple_try_cleanup (tf->top_p);
  tf->top_p_seq = gimple_try_eval (tf->top_p);

  lower_eh_constructs_1 (state, finally);

  if (tf->may_throw)
    {
      /* Only reachable via the exception edge.  Add the given label to
         the head of the FINALLY block.  Append a RESX at the end.  */

      x = gimple_build_label (tf->eh_label);
      gimple_seq_add_stmt (&tf->top_p_seq, x);

      gimple_seq_add_seq (&tf->top_p_seq, finally);

      x = gimple_build_resx (get_eh_region_number (tf->region));

      gimple_seq_add_stmt (&tf->top_p_seq, x);

      return;
    }

  if (tf->may_fallthru)
    {
      /* Only reachable via the fallthru edge.  Do nothing but let
         the two blocks run together; we'll fall out the bottom.  */
      gimple_seq_add_seq (&tf->top_p_seq, finally);
      return;
    }

  finally_label = create_artificial_label (loc);
  x = gimple_build_label (finally_label);
  gimple_seq_add_stmt (&tf->top_p_seq, x);

  gimple_seq_add_seq (&tf->top_p_seq, finally);

  q = tf->goto_queue;
  qe = q + tf->goto_queue_active;

  if (tf->may_return)
    {
      /* Reachable by return expressions only.  Redirect them.  */
      tree return_val = NULL;
      for (; q < qe; ++q)
        do_return_redirection (q, finally_label, NULL, &return_val);
      replace_goto_queue (tf);
    }
  else
    {
      /* Reachable by goto expressions only.  Redirect them.  */
      for (; q < qe; ++q)
        do_goto_redirection (q, finally_label, NULL, tf);
      replace_goto_queue (tf);

      if (VEC_index (tree, tf->dest_array, 0) == tf->fallthru_label)
        {
          /* Reachable by goto to fallthru label only.  Redirect it
             to the new label (already created, sadly), and do not
             emit the final branch out, or the fallthru label.  */
          tf->fallthru_label = NULL;
          return;
        }
    }

  /* Place the original return/goto to the original destination
     immediately after the finally block.  */
  x = tf->goto_queue[0].cont_stmt;
  gimple_seq_add_stmt (&tf->top_p_seq, x);
  maybe_record_in_goto_queue (state, x);
}

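/* For illustration only (a sketch): when all escaping edges lead to a
   single destination D, the finally block is emitted once, roughly

     <try body>
     finally_label:
     <finally block>
     goto D;                  (the queued cont_stmt)

   with every escaping goto/return rewritten to "goto finally_label".  */
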
/* A subroutine of lower_try_finally.  There are multiple edges incoming
   and outgoing from the finally block.  Implement this by duplicating the
   finally block for every destination.  */

static void
lower_try_finally_copy (struct leh_state *state, struct leh_tf_state *tf)
{
  gimple_seq finally;
  gimple_seq new_stmt;
  gimple_seq seq;
  gimple x;
  tree tmp;
  location_t tf_loc = gimple_location (tf->try_finally_expr);

  finally = gimple_try_cleanup (tf->top_p);
  tf->top_p_seq = gimple_try_eval (tf->top_p);
  new_stmt = NULL;

  if (tf->may_fallthru)
    {
      seq = lower_try_finally_dup_block (finally, state);
      lower_eh_constructs_1 (state, seq);
      gimple_seq_add_seq (&new_stmt, seq);

      tmp = lower_try_finally_fallthru_label (tf);
      x = gimple_build_goto (tmp);
      gimple_seq_add_stmt (&new_stmt, x);
    }

  if (tf->may_throw)
    {
      x = gimple_build_label (tf->eh_label);
      gimple_seq_add_stmt (&new_stmt, x);

      seq = lower_try_finally_dup_block (finally, state);
      lower_eh_constructs_1 (state, seq);
      gimple_seq_add_seq (&new_stmt, seq);

      x = gimple_build_resx (get_eh_region_number (tf->region));
      gimple_seq_add_stmt (&new_stmt, x);
    }

  if (tf->goto_queue)
    {
      struct goto_queue_node *q, *qe;
      tree return_val = NULL;
      int return_index, index;
      struct labels_s
      {
        struct goto_queue_node *q;
        tree label;
      } *labels;

      return_index = VEC_length (tree, tf->dest_array);
      labels = XCNEWVEC (struct labels_s, return_index + 1);

      q = tf->goto_queue;
      qe = q + tf->goto_queue_active;
      for (; q < qe; q++)
        {
          index = q->index < 0 ? return_index : q->index;

          if (!labels[index].q)
            labels[index].q = q;
        }

      for (index = 0; index < return_index + 1; index++)
        {
          tree lab;

          q = labels[index].q;
          if (! q)
            continue;

          lab = labels[index].label
            = create_artificial_label (tf_loc);

          if (index == return_index)
            do_return_redirection (q, lab, NULL, &return_val);
          else
            do_goto_redirection (q, lab, NULL, tf);

          x = gimple_build_label (lab);
          gimple_seq_add_stmt (&new_stmt, x);

          seq = lower_try_finally_dup_block (finally, state);
          lower_eh_constructs_1 (state, seq);
          gimple_seq_add_seq (&new_stmt, seq);

          gimple_seq_add_stmt (&new_stmt, q->cont_stmt);
          maybe_record_in_goto_queue (state, q->cont_stmt);
        }

      for (q = tf->goto_queue; q < qe; q++)
        {
          tree lab;

          index = q->index < 0 ? return_index : q->index;

          if (labels[index].q == q)
            continue;

          lab = labels[index].label;

          if (index == return_index)
            do_return_redirection (q, lab, NULL, &return_val);
          else
            do_goto_redirection (q, lab, NULL, tf);
        }

      replace_goto_queue (tf);
      free (labels);
    }

  /* Need to link new stmts after running replace_goto_queue due
     to not wanting to process the same goto stmts twice.  */
  gimple_seq_add_seq (&tf->top_p_seq, new_stmt);
}

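/* For illustration only (a sketch): with a fallthru edge, an EH edge
   and one escaping goto to D, the copy strategy emits roughly

     <try body>
     <finally copy #1>  goto fallthru;      (fallthru edge)
     eh_label:
     <finally copy #2>  resx;               (exception edge)
     lab_D:
     <finally copy #3>  goto D;             (escaping goto)

   trading code size for straight-line control flow.  */
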
1314 | /* A subroutine of lower_try_finally. There are multiple edges incoming | |
1315 | and outgoing from the finally block. Implement this by instrumenting | |
1316 | each incoming edge and creating a switch statement at the end of the | |
1317 | finally block that branches to the appropriate destination. */ | |
1318 | ||
1319 | static void | |
1320 | lower_try_finally_switch (struct leh_state *state, struct leh_tf_state *tf) | |
1321 | { | |
1322 | struct goto_queue_node *q, *qe; | |
1323 | tree return_val = NULL; | |
726a989a | 1324 | tree finally_tmp, finally_label; |
6de9cd9a DN |
1325 | int return_index, eh_index, fallthru_index; |
1326 | int nlabels, ndests, j, last_case_index; | |
726a989a RB |
1327 | tree last_case; |
1328 | VEC (tree,heap) *case_label_vec; | |
1329 | gimple_seq switch_body; | |
1330 | gimple x; | |
1331 | tree tmp; | |
1332 | gimple switch_stmt; | |
1333 | gimple_seq finally; | |
1334 | struct pointer_map_t *cont_map = NULL; | |
c2255bc4 | 1335 | /* The location of the TRY_FINALLY stmt. */ |
d40eb158 | 1336 | location_t tf_loc = gimple_location (tf->try_finally_expr); |
c2255bc4 AH |
1337 | /* The location of the finally block. */ |
1338 | location_t finally_loc; | |
726a989a RB |
1339 | |
1340 | switch_body = gimple_seq_alloc (); | |
6de9cd9a DN |
1341 | |
1342 | /* Mash the TRY block to the head of the chain. */ | |
726a989a RB |
1343 | finally = gimple_try_cleanup (tf->top_p); |
1344 | tf->top_p_seq = gimple_try_eval (tf->top_p); | |
6de9cd9a | 1345 | |
c2255bc4 AH |
1346 | /* The location of the finally is either the last stmt in the finally |
1347 | block or the location of the TRY_FINALLY itself. */ | |
1348 | finally_loc = gimple_seq_last_stmt (tf->top_p_seq) != NULL ? | |
1349 | gimple_location (gimple_seq_last_stmt (tf->top_p_seq)) | |
1350 | : tf_loc; | |
1351 | ||
6de9cd9a | 1352 | /* Lower the finally block itself. */ |
726a989a | 1353 | lower_eh_constructs_1 (state, finally); |
6de9cd9a DN |
1354 | |
1355 | /* Prepare for switch statement generation. */ | |
59ebc704 | 1356 | nlabels = VEC_length (tree, tf->dest_array); |
6de9cd9a DN |
1357 | return_index = nlabels; |
1358 | eh_index = return_index + tf->may_return; | |
1359 | fallthru_index = eh_index + tf->may_throw; | |
1360 | ndests = fallthru_index + tf->may_fallthru; | |
1361 | ||
1362 | finally_tmp = create_tmp_var (integer_type_node, "finally_tmp"); | |
c2255bc4 | 1363 | finally_label = create_artificial_label (finally_loc); |
6de9cd9a | 1364 | |
726a989a RB |
1365 | /* We use VEC_quick_push on case_label_vec throughout this function, |
1366 | since we know the size in advance and allocate precisely as muce | |
1367 | space as needed. */ | |
1368 | case_label_vec = VEC_alloc (tree, heap, ndests); | |
6de9cd9a DN |
1369 | last_case = NULL; |
1370 | last_case_index = 0; | |
1371 | ||
1372 | /* Begin inserting code for getting to the finally block. Things | |
1373 | are done in this order to correspond to the sequence the code is | |
1374 | layed out. */ | |
1375 | ||
1376 | if (tf->may_fallthru) | |
1377 | { | |
726a989a RB |
1378 | x = gimple_build_assign (finally_tmp, build_int_cst (integer_type_node, |
1379 | fallthru_index)); | |
1380 | gimple_seq_add_stmt (&tf->top_p_seq, x); | |
6de9cd9a DN |
1381 | |
1382 | if (tf->may_throw) | |
1383 | { | |
726a989a RB |
1384 | x = gimple_build_goto (finally_label); |
1385 | gimple_seq_add_stmt (&tf->top_p_seq, x); | |
6de9cd9a DN |
1386 | } |
1387 | ||
1388 | ||
b4257cfc RG |
1389 | last_case = build3 (CASE_LABEL_EXPR, void_type_node, |
1390 | build_int_cst (NULL_TREE, fallthru_index), NULL, | |
c2255bc4 | 1391 | create_artificial_label (tf_loc)); |
726a989a | 1392 | VEC_quick_push (tree, case_label_vec, last_case); |
6de9cd9a DN |
1393 | last_case_index++; |
1394 | ||
726a989a RB |
1395 | x = gimple_build_label (CASE_LABEL (last_case)); |
1396 | gimple_seq_add_stmt (&switch_body, x); | |
6de9cd9a | 1397 | |
726a989a RB |
1398 | tmp = lower_try_finally_fallthru_label (tf); |
1399 | x = gimple_build_goto (tmp); | |
1400 | gimple_seq_add_stmt (&switch_body, x); | |
6de9cd9a DN |
1401 | } |
1402 | ||
1403 | if (tf->may_throw) | |
1404 | { | |
726a989a RB |
1405 | x = gimple_build_label (tf->eh_label); |
1406 | gimple_seq_add_stmt (&tf->top_p_seq, x); | |
6de9cd9a | 1407 | |
726a989a RB |
1408 | x = gimple_build_assign (finally_tmp, build_int_cst (integer_type_node, |
1409 | eh_index)); | |
1410 | gimple_seq_add_stmt (&tf->top_p_seq, x); | |
6de9cd9a | 1411 | |
b4257cfc RG |
1412 | last_case = build3 (CASE_LABEL_EXPR, void_type_node, |
1413 | build_int_cst (NULL_TREE, eh_index), NULL, | |
c2255bc4 | 1414 | create_artificial_label (tf_loc)); |
726a989a | 1415 | VEC_quick_push (tree, case_label_vec, last_case); |
6de9cd9a DN |
1416 | last_case_index++; |
1417 | ||
726a989a RB |
1418 | x = gimple_build_label (CASE_LABEL (last_case)); |
1419 | gimple_seq_add_stmt (&switch_body, x); | |
1420 | x = gimple_build_resx (get_eh_region_number (tf->region)); | |
1421 | gimple_seq_add_stmt (&switch_body, x); | |
6de9cd9a DN |
1422 | } |
1423 | ||
726a989a RB |
1424 | x = gimple_build_label (finally_label); |
1425 | gimple_seq_add_stmt (&tf->top_p_seq, x); | |
6de9cd9a | 1426 | |
726a989a | 1427 | gimple_seq_add_seq (&tf->top_p_seq, finally); |
6de9cd9a DN |
1428 | |
1429 | /* Redirect each incoming goto edge. */ | |
1430 | q = tf->goto_queue; | |
1431 | qe = q + tf->goto_queue_active; | |
1432 | j = last_case_index + tf->may_return; | |
726a989a RB |
1433 | /* Prepare the assignments to finally_tmp that are executed upon the |
1434 | entrance through a particular edge. */ | |
6de9cd9a DN |
1435 | for (; q < qe; ++q) |
1436 | { | |
726a989a RB |
1437 | gimple_seq mod; |
1438 | int switch_id; | |
1439 | unsigned int case_index; | |
1440 | ||
1441 | mod = gimple_seq_alloc (); | |
6de9cd9a DN |
1442 | |
1443 | if (q->index < 0) | |
1444 | { | |
726a989a RB |
1445 | x = gimple_build_assign (finally_tmp, |
1446 | build_int_cst (integer_type_node, | |
1447 | return_index)); | |
1448 | gimple_seq_add_stmt (&mod, x); | |
6de9cd9a DN |
1449 | do_return_redirection (q, finally_label, mod, &return_val); |
1450 | switch_id = return_index; | |
1451 | } | |
1452 | else | |
1453 | { | |
726a989a RB |
1454 | x = gimple_build_assign (finally_tmp, |
1455 | build_int_cst (integer_type_node, q->index)); | |
1456 | gimple_seq_add_stmt (&mod, x); | |
1457 | do_goto_redirection (q, finally_label, mod, tf); | |
6de9cd9a DN |
1458 | switch_id = q->index; |
1459 | } | |
1460 | ||
1461 | case_index = j + q->index; | |
726a989a RB |
1462 | if (VEC_length (tree, case_label_vec) <= case_index |
1463 | || !VEC_index (tree, case_label_vec, case_index)) | |
1464 | { | |
1465 | tree case_lab; | |
1466 | void **slot; | |
1467 | case_lab = build3 (CASE_LABEL_EXPR, void_type_node, | |
1468 | build_int_cst (NULL_TREE, switch_id), NULL, | |
1469 | NULL); | |
1470 | /* We store the cont_stmt in the pointer map, so that we can recover | |
1471 | it in the loop below. We don't create the new label while | |
1472 | walking the goto_queue because pointers don't offer a stable | |
1473 | order. */ | |
1474 | if (!cont_map) | |
1475 | cont_map = pointer_map_create (); | |
1476 | slot = pointer_map_insert (cont_map, case_lab); | |
1477 | *slot = q->cont_stmt; | |
1478 | VEC_quick_push (tree, case_label_vec, case_lab); | |
1479 | } | |
dd58eb5a AO |
1480 | } |
1481 | for (j = last_case_index; j < last_case_index + nlabels; j++) | |
1482 | { | |
1483 | tree label; | |
726a989a RB |
1484 | gimple cont_stmt; |
1485 | void **slot; | |
dd58eb5a | 1486 | |
726a989a | 1487 | last_case = VEC_index (tree, case_label_vec, j); |
dd58eb5a AO |
1488 | |
1489 | gcc_assert (last_case); | |
726a989a | 1490 | gcc_assert (cont_map); |
dd58eb5a | 1491 | |
726a989a RB |
1492 | slot = pointer_map_contains (cont_map, last_case); |
1493 | /* As the comment above suggests, CASE_LABEL (last_case) was just a | |
1494 | placeholder; it does not store an actual label yet. */ | |
1495 | gcc_assert (slot); | |
1496 | cont_stmt = *(gimple *) slot; | |
dd58eb5a | 1497 | |
c2255bc4 | 1498 | label = create_artificial_label (tf_loc); |
dd58eb5a AO |
1499 | CASE_LABEL (last_case) = label; |
1500 | ||
726a989a RB |
1501 | x = gimple_build_label (label); |
1502 | gimple_seq_add_stmt (&switch_body, x); | |
1503 | gimple_seq_add_stmt (&switch_body, cont_stmt); | |
dd58eb5a | 1504 | maybe_record_in_goto_queue (state, cont_stmt); |
6de9cd9a | 1505 | } |
726a989a RB |
1506 | if (cont_map) |
1507 | pointer_map_destroy (cont_map); | |
1508 | ||
6de9cd9a | 1509 | replace_goto_queue (tf); |
6de9cd9a | 1510 | |
0f1f6967 SB |
1511 | /* Make sure that the last case is the default label, as one is required. |
1512 | Then sort the labels, which is also required in GIMPLE. */ | |
6de9cd9a | 1513 | CASE_LOW (last_case) = NULL; |
0f1f6967 | 1514 | sort_case_labels (case_label_vec); |
6de9cd9a | 1515 | |
726a989a RB |
1516 | /* Build the switch statement, setting last_case to be the default |
1517 | label. */ | |
1518 | switch_stmt = gimple_build_switch_vec (finally_tmp, last_case, | |
1519 | case_label_vec); | |
c2255bc4 | 1520 | gimple_set_location (switch_stmt, finally_loc); |
726a989a RB |
1521 | |
1522 | /* Need to link SWITCH_STMT after running replace_goto_queue | |
1523 | due to not wanting to process the same goto stmts twice. */ | |
1524 | gimple_seq_add_stmt (&tf->top_p_seq, switch_stmt); | |
1525 | gimple_seq_add_seq (&tf->top_p_seq, switch_body); | |
6de9cd9a DN |
1526 | } |
1527 | ||
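/* A rough sketch of the shape built above (illustrative only, not part
   of the original sources): a try/finally with a fallthru exit and a
   throwing body is lowered to approximately

       finally_tmp = 0;                <- fallthru entry
       goto finally_label;
     eh_label:
       finally_tmp = 1;                <- exception entry
     finally_label:
       <finally block>
       switch (finally_tmp)
         {
           case 0: goto fallthru_label;
           default: resx;              <- the last case becomes the default
         }

   with one extra case and finally_tmp value for every goto or return
   destination recorded in the goto queue.  */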
1528 | /* Decide whether or not we are going to duplicate the finally block. | |
1529 | There are several considerations. | |
1530 | ||
1531 | First, if this is Java, then the finally block contains code | |
1532 | written by the user. It has line numbers associated with it, | |
1533 | so duplicating the block means it's difficult to set a breakpoint. | |
1534 | Since controlling code generation via -g is verboten, we simply | |
1535 | never duplicate code without optimization. | |
1536 | ||
1537 | Second, we'd like to prevent egregious code growth. One way to | |
1538 | do this is to estimate the size of the finally block, multiply | |
1539 | that by the number of copies we'd need to make, and compare against | |
1540 | the estimate of the size of the switch machinery we'd have to add. */ | |
1541 | ||
1542 | static bool | |
726a989a | 1543 | decide_copy_try_finally (int ndests, gimple_seq finally) |
6de9cd9a DN |
1544 | { |
1545 | int f_estimate, sw_estimate; | |
1546 | ||
1547 | if (!optimize) | |
1548 | return false; | |
1549 | ||
1550 | /* Finally estimate N times, plus N gotos. */ | |
726a989a | 1551 | f_estimate = count_insns_seq (finally, &eni_size_weights); |
6de9cd9a DN |
1552 | f_estimate = (f_estimate + 1) * ndests; |
1553 | ||
1554 | /* Switch statement (cost 10), N variable assignments, N gotos. */ | |
1555 | sw_estimate = 10 + 2 * ndests; | |
1556 | ||
1557 | /* Optimize for size clearly wants our best guess. */ | |
efd8f750 | 1558 | if (optimize_function_for_size_p (cfun)) |
6de9cd9a DN |
1559 | return f_estimate < sw_estimate; |
1560 | ||
1561 | /* ??? These numbers are completely made up so far. */ | |
1562 | if (optimize > 1) | |
7465ed07 | 1563 | return f_estimate < 100 || f_estimate < sw_estimate * 2; |
6de9cd9a | 1564 | else |
7465ed07 | 1565 | return f_estimate < 40 || f_estimate * 2 < sw_estimate * 3; |
6de9cd9a DN |
1566 | } |
1567 | ||
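/* Worked example of the heuristic above (illustrative numbers): for a
   finally block counted at 5 instructions and ndests == 3,
   f_estimate = (5 + 1) * 3 = 18 and sw_estimate = 10 + 2 * 3 = 16, so
   when optimizing for size the switch machinery wins, while at -O2 the
   copies are still chosen because f_estimate < 100.  */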
726a989a RB |
1568 | |
1569 | /* A subroutine of lower_eh_constructs_1. Lower a GIMPLE_TRY_FINALLY node |
6de9cd9a | 1570 | to a sequence of labels and blocks, plus the exception region trees |
19114537 | 1571 | that record all the magic. This is complicated by the need to |
6de9cd9a DN |
1572 | arrange for the FINALLY block to be executed on all exits. */ |
1573 | ||
726a989a RB |
1574 | static gimple_seq |
1575 | lower_try_finally (struct leh_state *state, gimple tp) | |
6de9cd9a DN |
1576 | { |
1577 | struct leh_tf_state this_tf; | |
1578 | struct leh_state this_state; | |
1579 | int ndests; | |
c2255bc4 | 1580 | location_t tf_loc = gimple_location (tp); |
6de9cd9a DN |
1581 | |
1582 | /* Process the try block. */ | |
1583 | ||
1584 | memset (&this_tf, 0, sizeof (this_tf)); | |
726a989a | 1585 | this_tf.try_finally_expr = tp; |
6de9cd9a DN |
1586 | this_tf.top_p = tp; |
1587 | this_tf.outer = state; | |
1588 | if (using_eh_for_cleanups_p) | |
1589 | this_tf.region | |
f08a18d0 | 1590 | = gen_eh_region_cleanup (state->cur_region); |
6de9cd9a DN |
1591 | else |
1592 | this_tf.region = NULL; | |
1593 | ||
1594 | this_state.cur_region = this_tf.region; | |
6de9cd9a DN |
1595 | this_state.tf = &this_tf; |
1596 | ||
726a989a | 1597 | lower_eh_constructs_1 (&this_state, gimple_try_eval(tp)); |
6de9cd9a DN |
1598 | |
1599 | /* Determine if the try block is escaped through the bottom. */ | |
726a989a | 1600 | this_tf.may_fallthru = gimple_seq_may_fallthru (gimple_try_eval (tp)); |
6de9cd9a DN |
1601 | |
1602 | /* Determine if any exceptions are possible within the try block. */ | |
1603 | if (using_eh_for_cleanups_p) | |
1604 | this_tf.may_throw = get_eh_region_may_contain_throw (this_tf.region); | |
1605 | if (this_tf.may_throw) | |
1606 | { | |
c2255bc4 | 1607 | this_tf.eh_label = create_artificial_label (tf_loc); |
6de9cd9a DN |
1608 | set_eh_region_tree_label (this_tf.region, this_tf.eh_label); |
1609 | honor_protect_cleanup_actions (state, &this_state, &this_tf); | |
1610 | } | |
1611 | ||
6de9cd9a DN |
1612 | /* Determine how many edges (still) reach the finally block. Or rather, |
1613 | how many destinations are reached by the finally block. Use this to | |
1614 | determine how we process the finally block itself. */ | |
1615 | ||
59ebc704 | 1616 | ndests = VEC_length (tree, this_tf.dest_array); |
6de9cd9a DN |
1617 | ndests += this_tf.may_fallthru; |
1618 | ndests += this_tf.may_return; | |
1619 | ndests += this_tf.may_throw; | |
1620 | ||
1621 | /* If the FINALLY block is not reachable, dike it out. */ | |
1622 | if (ndests == 0) | |
726a989a RB |
1623 | { |
1624 | gimple_seq_add_seq (&this_tf.top_p_seq, gimple_try_eval (tp)); | |
1625 | gimple_try_set_cleanup (tp, NULL); | |
1626 | } | |
6de9cd9a DN |
1627 | /* If the finally block doesn't fall through, then any destination |
1628 | we might try to impose there isn't reached either. There may be | |
1629 | some minor amount of cleanup and redirection still needed. */ | |
726a989a | 1630 | else if (!gimple_seq_may_fallthru (gimple_try_cleanup (tp))) |
6de9cd9a DN |
1631 | lower_try_finally_nofallthru (state, &this_tf); |
1632 | ||
1633 | /* We can easily special-case redirection to a single destination. */ | |
1634 | else if (ndests == 1) | |
1635 | lower_try_finally_onedest (state, &this_tf); | |
726a989a | 1636 | else if (decide_copy_try_finally (ndests, gimple_try_cleanup (tp))) |
6de9cd9a DN |
1637 | lower_try_finally_copy (state, &this_tf); |
1638 | else | |
1639 | lower_try_finally_switch (state, &this_tf); | |
1640 | ||
1641 | /* If someone requested we add a label at the end of the transformed | |
1642 | block, do so. */ | |
1643 | if (this_tf.fallthru_label) | |
1644 | { | |
726a989a RB |
1645 | /* This must be reached only if ndests == 0. */ |
1646 | gimple x = gimple_build_label (this_tf.fallthru_label); | |
1647 | gimple_seq_add_stmt (&this_tf.top_p_seq, x); | |
6de9cd9a DN |
1648 | } |
1649 | ||
59ebc704 | 1650 | VEC_free (tree, heap, this_tf.dest_array); |
6de9cd9a DN |
1651 | if (this_tf.goto_queue) |
1652 | free (this_tf.goto_queue); | |
0f547d3d SE |
1653 | if (this_tf.goto_queue_map) |
1654 | pointer_map_destroy (this_tf.goto_queue_map); | |
726a989a RB |
1655 | |
1656 | return this_tf.top_p_seq; | |
6de9cd9a DN |
1657 | } |
1658 | ||
726a989a RB |
1659 | /* A subroutine of lower_eh_constructs_1. Lower a GIMPLE_TRY_CATCH with a |
1660 | list of GIMPLE_CATCH to a sequence of labels and blocks, plus the | |
1661 | exception region trees that record all the magic. */ |
6de9cd9a | 1662 | |
726a989a RB |
1663 | static gimple_seq |
1664 | lower_catch (struct leh_state *state, gimple tp) | |
6de9cd9a | 1665 | { |
7e5487a2 | 1666 | struct eh_region_d *try_region; |
6de9cd9a | 1667 | struct leh_state this_state; |
726a989a | 1668 | gimple_stmt_iterator gsi; |
6de9cd9a | 1669 | tree out_label; |
c2255bc4 | 1670 | location_t try_catch_loc = gimple_location (tp); |
6de9cd9a DN |
1671 | |
1672 | try_region = gen_eh_region_try (state->cur_region); | |
1673 | this_state.cur_region = try_region; | |
6de9cd9a DN |
1674 | this_state.tf = state->tf; |
1675 | ||
726a989a | 1676 | lower_eh_constructs_1 (&this_state, gimple_try_eval (tp)); |
6de9cd9a DN |
1677 | |
1678 | if (!get_eh_region_may_contain_throw (try_region)) | |
1679 | { | |
726a989a | 1680 | return gimple_try_eval (tp); |
6de9cd9a DN |
1681 | } |
1682 | ||
1683 | out_label = NULL; | |
726a989a | 1684 | for (gsi = gsi_start (gimple_try_cleanup (tp)); !gsi_end_p (gsi); ) |
6de9cd9a | 1685 | { |
7e5487a2 | 1686 | struct eh_region_d *catch_region; |
726a989a | 1687 | tree eh_label; |
82d6e6fc | 1688 | gimple x, gcatch; |
6de9cd9a | 1689 | |
82d6e6fc | 1690 | gcatch = gsi_stmt (gsi); |
726a989a | 1691 | catch_region = gen_eh_region_catch (try_region, |
82d6e6fc | 1692 | gimple_catch_types (gcatch)); |
6de9cd9a DN |
1693 | |
1694 | this_state.cur_region = catch_region; | |
82d6e6fc | 1695 | lower_eh_constructs_1 (&this_state, gimple_catch_handler (gcatch)); |
6de9cd9a | 1696 | |
c2255bc4 | 1697 | eh_label = create_artificial_label (try_catch_loc); |
6de9cd9a DN |
1698 | set_eh_region_tree_label (catch_region, eh_label); |
1699 | ||
726a989a RB |
1700 | x = gimple_build_label (eh_label); |
1701 | gsi_insert_before (&gsi, x, GSI_SAME_STMT); | |
6de9cd9a | 1702 | |
82d6e6fc | 1703 | if (gimple_seq_may_fallthru (gimple_catch_handler (gcatch))) |
6de9cd9a DN |
1704 | { |
1705 | if (!out_label) | |
c2255bc4 | 1706 | out_label = create_artificial_label (try_catch_loc); |
6de9cd9a | 1707 | |
726a989a | 1708 | x = gimple_build_goto (out_label); |
82d6e6fc | 1709 | gimple_seq_add_stmt (gimple_catch_handler_ptr (gcatch), x); |
6de9cd9a DN |
1710 | } |
1711 | ||
82d6e6fc | 1712 | gsi_insert_seq_before (&gsi, gimple_catch_handler (gcatch), |
726a989a RB |
1713 | GSI_SAME_STMT); |
1714 | gsi_remove (&gsi, false); | |
6de9cd9a DN |
1715 | } |
1716 | ||
726a989a | 1717 | return frob_into_branch_around (tp, NULL, out_label); |
6de9cd9a DN |
1718 | } |
1719 | ||
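/* An illustrative sketch (simplified, not from the original sources) of
   what lower_catch produces: for

       try { <body> } catch (T) { <handler> }

   the result is approximately

       <body>
       goto over;                  <- only if the body may fall through
     eh_label_T:
       <handler>
       goto over;                  <- only if the handler may fall through
     over:

   where the catch region records that exceptions of type T raised in
   the body dispatch to eh_label_T.  */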
726a989a RB |
1720 | /* A subroutine of lower_eh_constructs_1. Lower a GIMPLE_TRY with a |
1721 | GIMPLE_EH_FILTER to a sequence of labels and blocks, plus the exception | |
6de9cd9a DN |
1722 | region trees that record all the magic. */ |
1723 | ||
726a989a RB |
1724 | static gimple_seq |
1725 | lower_eh_filter (struct leh_state *state, gimple tp) | |
6de9cd9a DN |
1726 | { |
1727 | struct leh_state this_state; | |
7e5487a2 | 1728 | struct eh_region_d *this_region; |
726a989a | 1729 | gimple inner; |
6de9cd9a | 1730 | tree eh_label; |
19114537 | 1731 | |
726a989a RB |
1732 | inner = gimple_seq_first_stmt (gimple_try_cleanup (tp)); |
1733 | ||
1734 | if (gimple_eh_filter_must_not_throw (inner)) | |
6de9cd9a DN |
1735 | this_region = gen_eh_region_must_not_throw (state->cur_region); |
1736 | else | |
1737 | this_region = gen_eh_region_allowed (state->cur_region, | |
726a989a | 1738 | gimple_eh_filter_types (inner)); |
6de9cd9a DN |
1739 | this_state = *state; |
1740 | this_state.cur_region = this_region; | |
19114537 | 1741 | |
726a989a | 1742 | lower_eh_constructs_1 (&this_state, gimple_try_eval (tp)); |
6de9cd9a DN |
1743 | |
1744 | if (!get_eh_region_may_contain_throw (this_region)) | |
1745 | { | |
726a989a | 1746 | return gimple_try_eval (tp); |
6de9cd9a DN |
1747 | } |
1748 | ||
726a989a RB |
1749 | lower_eh_constructs_1 (state, gimple_eh_filter_failure (inner)); |
1750 | gimple_try_set_cleanup (tp, gimple_eh_filter_failure (inner)); | |
6de9cd9a | 1751 | |
c2255bc4 | 1752 | eh_label = create_artificial_label (gimple_location (inner)); |
6de9cd9a DN |
1753 | set_eh_region_tree_label (this_region, eh_label); |
1754 | ||
726a989a | 1755 | return frob_into_branch_around (tp, eh_label, NULL); |
6de9cd9a DN |
1756 | } |
1757 | ||
1758 | /* Implement a cleanup expression. This is similar to try-finally, | |
1759 | except that we only execute the cleanup block for exception edges. */ | |
1760 | ||
726a989a RB |
1761 | static gimple_seq |
1762 | lower_cleanup (struct leh_state *state, gimple tp) | |
6de9cd9a DN |
1763 | { |
1764 | struct leh_state this_state; | |
7e5487a2 | 1765 | struct eh_region_d *this_region; |
6de9cd9a | 1766 | struct leh_tf_state fake_tf; |
726a989a | 1767 | gimple_seq result; |
6de9cd9a DN |
1768 | |
1769 | /* If not using eh, then exception-only cleanups are no-ops. */ | |
1770 | if (!flag_exceptions) | |
1771 | { | |
726a989a RB |
1772 | result = gimple_try_eval (tp); |
1773 | lower_eh_constructs_1 (state, result); | |
1774 | return result; | |
6de9cd9a DN |
1775 | } |
1776 | ||
f08a18d0 | 1777 | this_region = gen_eh_region_cleanup (state->cur_region); |
6de9cd9a DN |
1778 | this_state = *state; |
1779 | this_state.cur_region = this_region; | |
1780 | ||
726a989a | 1781 | lower_eh_constructs_1 (&this_state, gimple_try_eval (tp)); |
6de9cd9a DN |
1782 | |
1783 | if (!get_eh_region_may_contain_throw (this_region)) | |
1784 | { | |
726a989a | 1785 | return gimple_try_eval (tp); |
6de9cd9a DN |
1786 | } |
1787 | ||
1788 | /* Build enough of a try-finally state so that we can reuse | |
1789 | honor_protect_cleanup_actions. */ | |
1790 | memset (&fake_tf, 0, sizeof (fake_tf)); | |
c2255bc4 | 1791 | fake_tf.top_p = fake_tf.try_finally_expr = tp; |
6de9cd9a DN |
1792 | fake_tf.outer = state; |
1793 | fake_tf.region = this_region; | |
726a989a | 1794 | fake_tf.may_fallthru = gimple_seq_may_fallthru (gimple_try_eval (tp)); |
6de9cd9a DN |
1795 | fake_tf.may_throw = true; |
1796 | ||
c2255bc4 | 1797 | fake_tf.eh_label = create_artificial_label (gimple_location (tp)); |
6de9cd9a DN |
1798 | set_eh_region_tree_label (this_region, fake_tf.eh_label); |
1799 | ||
1800 | honor_protect_cleanup_actions (state, NULL, &fake_tf); | |
1801 | ||
1802 | if (fake_tf.may_throw) | |
1803 | { | |
1804 | /* In this case honor_protect_cleanup_actions had nothing to do, | |
1805 | and we should process this normally. */ | |
726a989a RB |
1806 | lower_eh_constructs_1 (state, gimple_try_cleanup (tp)); |
1807 | result = frob_into_branch_around (tp, fake_tf.eh_label, | |
1808 | fake_tf.fallthru_label); | |
6de9cd9a DN |
1809 | } |
1810 | else | |
1811 | { | |
1812 | /* In this case honor_protect_cleanup_actions did nearly all of | |
1813 | the work. All we have left is to append the fallthru_label. */ | |
1814 | ||
726a989a | 1815 | result = gimple_try_eval (tp); |
6de9cd9a DN |
1816 | if (fake_tf.fallthru_label) |
1817 | { | |
726a989a RB |
1818 | gimple x = gimple_build_label (fake_tf.fallthru_label); |
1819 | gimple_seq_add_stmt (&result, x); | |
6de9cd9a DN |
1820 | } |
1821 | } | |
726a989a | 1822 | return result; |
6de9cd9a DN |
1823 | } |
1824 | ||
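/* Illustrative behavior of lower_cleanup (a sketch, not authoritative):
   for a cleanup-only construct

       try { <body> } cleanup { dtor (); }

   the result is just <body> when -fexceptions is off or nothing in the
   body can throw; otherwise a cleanup region is created whose handler
   runs dtor () and resumes propagation, while the normal fallthru path
   branches around the handler.  */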
726a989a RB |
1825 | |
1826 | ||
1827 | /* Main loop for lowering eh constructs. Also moves gsi to the next | |
1828 | statement. */ | |
6de9cd9a DN |
1829 | |
1830 | static void | |
726a989a | 1831 | lower_eh_constructs_2 (struct leh_state *state, gimple_stmt_iterator *gsi) |
6de9cd9a | 1832 | { |
726a989a RB |
1833 | gimple_seq replace; |
1834 | gimple x; | |
1835 | gimple stmt = gsi_stmt (*gsi); | |
6de9cd9a | 1836 | |
726a989a | 1837 | switch (gimple_code (stmt)) |
6de9cd9a | 1838 | { |
726a989a RB |
1839 | case GIMPLE_CALL: |
1840 | case GIMPLE_ASSIGN: | |
ba4d8f9d RG |
1841 | /* If the stmt can throw use a new temporary for the assignment |
1842 | to a LHS. This makes sure the old value of the LHS is | |
1843 | available on the EH edge. */ | |
1844 | if (stmt_could_throw_p (stmt) | |
1845 | && gimple_has_lhs (stmt) | |
1846 | && !tree_could_throw_p (gimple_get_lhs (stmt)) | |
1847 | && is_gimple_reg_type (TREE_TYPE (gimple_get_lhs (stmt)))) | |
1848 | { | |
1849 | tree lhs = gimple_get_lhs (stmt); | |
1850 | tree tmp = create_tmp_var (TREE_TYPE (lhs), NULL); | |
1851 | gimple s = gimple_build_assign (lhs, tmp); | |
1852 | gimple_set_location (s, gimple_location (stmt)); | |
1853 | gimple_set_block (s, gimple_block (stmt)); | |
1854 | gimple_set_lhs (stmt, tmp); | |
1855 | if (TREE_CODE (TREE_TYPE (tmp)) == COMPLEX_TYPE | |
1856 | || TREE_CODE (TREE_TYPE (tmp)) == VECTOR_TYPE) | |
1857 | DECL_GIMPLE_REG_P (tmp) = 1; | |
1858 | gsi_insert_after (gsi, s, GSI_SAME_STMT); | |
1859 | } | |
6de9cd9a | 1860 | /* Look for things that can throw exceptions, and record them. */ |
726a989a | 1861 | if (state->cur_region && stmt_could_throw_p (stmt)) |
6de9cd9a | 1862 | { |
726a989a | 1863 | record_stmt_eh_region (state->cur_region, stmt); |
6de9cd9a | 1864 | note_eh_region_may_contain_throw (state->cur_region); |
6de9cd9a DN |
1865 | } |
1866 | break; | |
1867 | ||
726a989a RB |
1868 | case GIMPLE_COND: |
1869 | case GIMPLE_GOTO: | |
1870 | case GIMPLE_RETURN: | |
1871 | maybe_record_in_goto_queue (state, stmt); | |
6de9cd9a DN |
1872 | break; |
1873 | ||
726a989a RB |
1874 | case GIMPLE_SWITCH: |
1875 | verify_norecord_switch_expr (state, stmt); | |
6de9cd9a DN |
1876 | break; |
1877 | ||
726a989a RB |
1878 | case GIMPLE_TRY: |
1879 | if (gimple_try_kind (stmt) == GIMPLE_TRY_FINALLY) | |
1880 | replace = lower_try_finally (state, stmt); | |
1881 | else | |
6de9cd9a | 1882 | { |
726a989a RB |
1883 | x = gimple_seq_first_stmt (gimple_try_cleanup (stmt)); |
1884 | switch (gimple_code (x)) | |
6de9cd9a | 1885 | { |
726a989a RB |
1886 | case GIMPLE_CATCH: |
1887 | replace = lower_catch (state, stmt); | |
1888 | break; | |
1889 | case GIMPLE_EH_FILTER: | |
1890 | replace = lower_eh_filter (state, stmt); | |
1891 | break; | |
1892 | default: | |
1893 | replace = lower_cleanup (state, stmt); | |
1894 | break; | |
6de9cd9a | 1895 | } |
6de9cd9a | 1896 | } |
726a989a RB |
1897 | |
1898 | /* Remove the old stmt and insert the transformed sequence | |
1899 | instead. */ | |
1900 | gsi_insert_seq_before (gsi, replace, GSI_SAME_STMT); | |
1901 | gsi_remove (gsi, true); | |
1902 | ||
1903 | /* Return since we don't want gsi_next (). */ |
1904 | return; | |
6de9cd9a DN |
1905 | |
1906 | default: | |
1907 | /* A type, a decl, or some kind of statement that we're not | |
1908 | interested in. Don't walk them. */ | |
1909 | break; | |
1910 | } | |
726a989a RB |
1911 | |
1912 | gsi_next (gsi); | |
1913 | } | |
1914 | ||
1915 | /* A helper to unwrap a gimple_seq and feed stmts to lower_eh_constructs_2. */ | |
1916 | ||
1917 | static void | |
1918 | lower_eh_constructs_1 (struct leh_state *state, gimple_seq seq) | |
1919 | { | |
1920 | gimple_stmt_iterator gsi; | |
1921 | for (gsi = gsi_start (seq); !gsi_end_p (gsi);) | |
1922 | lower_eh_constructs_2 (state, &gsi); | |
6de9cd9a DN |
1923 | } |
1924 | ||
c2924966 | 1925 | static unsigned int |
6de9cd9a DN |
1926 | lower_eh_constructs (void) |
1927 | { | |
1928 | struct leh_state null_state; | |
726a989a RB |
1929 | |
1930 | gimple_seq bodyp = gimple_body (current_function_decl); | |
6de9cd9a DN |
1931 | |
1932 | finally_tree = htab_create (31, struct_ptr_hash, struct_ptr_eq, free); | |
6de9cd9a | 1933 | |
726a989a | 1934 | collect_finally_tree_1 (bodyp, NULL); |
6de9cd9a DN |
1935 | |
1936 | memset (&null_state, 0, sizeof (null_state)); | |
726a989a | 1937 | lower_eh_constructs_1 (&null_state, bodyp); |
6de9cd9a DN |
1938 | |
1939 | htab_delete (finally_tree); | |
1940 | ||
1941 | collect_eh_region_array (); | |
c2924966 | 1942 | return 0; |
6de9cd9a DN |
1943 | } |
1944 | ||
8ddbbcae | 1945 | struct gimple_opt_pass pass_lower_eh = |
6de9cd9a | 1946 | { |
8ddbbcae JH |
1947 | { |
1948 | GIMPLE_PASS, | |
6de9cd9a DN |
1949 | "eh", /* name */ |
1950 | NULL, /* gate */ | |
1951 | lower_eh_constructs, /* execute */ | |
1952 | NULL, /* sub */ | |
1953 | NULL, /* next */ | |
1954 | 0, /* static_pass_number */ | |
1955 | TV_TREE_EH, /* tv_id */ | |
1956 | PROP_gimple_lcf, /* properties_required */ | |
1957 | PROP_gimple_leh, /* properties_provided */ | |
bbbe4e7b | 1958 | 0, /* properties_destroyed */ |
6de9cd9a | 1959 | 0, /* todo_flags_start */ |
8ddbbcae JH |
1960 | TODO_dump_func /* todo_flags_finish */ |
1961 | } | |
6de9cd9a DN |
1962 | }; |
1963 | ||
1964 | \f | |
1965 | /* Construct EH edges for STMT. */ | |
1966 | ||
1967 | static void | |
7e5487a2 | 1968 | make_eh_edge (struct eh_region_d *region, void *data) |
6de9cd9a | 1969 | { |
726a989a RB |
1970 | gimple stmt; |
1971 | tree lab; | |
6de9cd9a DN |
1972 | basic_block src, dst; |
1973 | ||
726a989a | 1974 | stmt = (gimple) data; |
6de9cd9a DN |
1975 | lab = get_eh_region_tree_label (region); |
1976 | ||
726a989a | 1977 | src = gimple_bb (stmt); |
6de9cd9a DN |
1978 | dst = label_to_block (lab); |
1979 | ||
a3710436 | 1980 | make_edge (src, dst, EDGE_EH); |
6de9cd9a | 1981 | } |
19114537 | 1982 | |
a8ee227c JH |
1983 | /* See if STMT is a call that might be inlined. */ |
1984 | ||
1985 | static bool | |
1986 | inlinable_call_p (gimple stmt) | |
1987 | { | |
1988 | tree decl; | |
1989 | if (gimple_code (stmt) != GIMPLE_CALL) | |
1990 | return false; | |
1991 | if (cfun->after_inlining) | |
1992 | return false; | |
1993 | /* Indirect calls can be propagated to direct call | |
1994 | and inlined. */ | |
1995 | decl = gimple_call_fndecl (stmt); | |
1996 | if (!decl) | |
1997 | return true; | |
1998 | if (cgraph_function_flags_ready | |
1999 | && cgraph_function_body_availability (cgraph_node (decl)) | |
2000 | < AVAIL_OVERWRITABLE) | |
2001 | return false; | |
2002 | return !DECL_UNINLINABLE (decl); | |
2003 | } | |
2004 | ||
6de9cd9a | 2005 | void |
726a989a | 2006 | make_eh_edges (gimple stmt) |
6de9cd9a DN |
2007 | { |
2008 | int region_nr; | |
2009 | bool is_resx; | |
a8ee227c | 2010 | bool inlinable = false; |
a203a221 | 2011 | basic_block bb; |
6de9cd9a | 2012 | |
726a989a | 2013 | if (gimple_code (stmt) == GIMPLE_RESX) |
6de9cd9a | 2014 | { |
726a989a | 2015 | region_nr = gimple_resx_region (stmt); |
6de9cd9a DN |
2016 | is_resx = true; |
2017 | } | |
2018 | else | |
2019 | { | |
2020 | region_nr = lookup_stmt_eh_region (stmt); | |
2021 | if (region_nr < 0) | |
2022 | return; | |
2023 | is_resx = false; | |
a8ee227c | 2024 | inlinable = inlinable_call_p (stmt); |
6de9cd9a DN |
2025 | } |
2026 | ||
a8ee227c | 2027 | foreach_reachable_handler (region_nr, is_resx, inlinable, make_eh_edge, stmt); |
a203a221 JH |
2028 | |
2029 | /* Make the CFG profile more consistent by assuming that the exception will |
2030 | resume to the first available EH handler. In practice this makes little |
2031 | difference, but we get fewer consistency errors in the dumps. */ |
2032 | bb = gimple_bb (stmt); | |
2033 | if (is_resx && EDGE_COUNT (bb->succs)) | |
2034 | EDGE_SUCC (bb, 0)->probability = REG_BR_PROB_BASE; | |
6de9cd9a DN |
2035 | } |
2036 | ||
a3710436 JH |
2037 | /* Redirect EH edge E to NEW_BB. */ |
2038 | ||
2039 | edge | |
2040 | redirect_eh_edge (edge e, basic_block new_bb) | |
2041 | { | |
2042 | gimple stmt = gsi_stmt (gsi_last_bb (e->src)); | |
2043 | int region_nr, new_region_nr; | |
2044 | bool is_resx; | |
2045 | bool inlinable = false; | |
2046 | tree label = gimple_block_label (new_bb); | |
7e5487a2 | 2047 | struct eh_region_d *r; |
a3710436 JH |
2048 | |
2049 | if (gimple_code (stmt) == GIMPLE_RESX) | |
2050 | { | |
2051 | region_nr = gimple_resx_region (stmt); | |
2052 | is_resx = true; | |
2053 | } | |
2054 | else | |
2055 | { | |
2056 | region_nr = lookup_stmt_eh_region (stmt); | |
2057 | gcc_assert (region_nr >= 0); | |
2058 | is_resx = false; | |
2059 | inlinable = inlinable_call_p (stmt); | |
2060 | } | |
2061 | ||
2062 | if (dump_file && (dump_flags & TDF_DETAILS)) | |
2063 | fprintf (dump_file, "Redirecting EH edge %i->%i to %i, region %i, resx %i\n", | |
2064 | e->src->index, e->dest->index, new_bb->index, region_nr, is_resx); | |
2065 | r = redirect_eh_edge_to_label (e, label, is_resx, inlinable, region_nr); | |
2066 | new_region_nr = get_eh_region_number (r); | |
2067 | if (new_region_nr != region_nr) | |
2068 | { | |
2069 | if (is_resx) | |
2070 | gimple_resx_set_region (stmt, new_region_nr); | |
2071 | else | |
2072 | { | |
2073 | remove_stmt_from_eh_region (stmt); | |
2074 | add_stmt_to_eh_region (stmt, new_region_nr); | |
2075 | } | |
2076 | } | |
2077 | e = ssa_redirect_edge (e, new_bb); | |
2078 | return e; | |
2079 | } | |
2080 | ||
cc7220fd JH |
2081 | static bool mark_eh_edge_found_error; |
2082 | ||
2083 | /* Mark the edge that make_eh_edge would create for the given region by |
2084 | setting its aux field; output an error if something goes wrong. */ |
726a989a | 2085 | |
cc7220fd | 2086 | static void |
7e5487a2 | 2087 | mark_eh_edge (struct eh_region_d *region, void *data) |
cc7220fd | 2088 | { |
726a989a RB |
2089 | gimple stmt; |
2090 | tree lab; | |
cc7220fd JH |
2091 | basic_block src, dst; |
2092 | edge e; | |
2093 | ||
726a989a | 2094 | stmt = (gimple) data; |
cc7220fd JH |
2095 | lab = get_eh_region_tree_label (region); |
2096 | ||
726a989a | 2097 | src = gimple_bb (stmt); |
cc7220fd JH |
2098 | dst = label_to_block (lab); |
2099 | ||
2100 | e = find_edge (src, dst); | |
2101 | if (!e) | |
2102 | { | |
ab532386 | 2103 | error ("EH edge %i->%i is missing", src->index, dst->index); |
cc7220fd JH |
2104 | mark_eh_edge_found_error = true; |
2105 | } | |
2106 | else if (!(e->flags & EDGE_EH)) | |
2107 | { | |
ab532386 | 2108 | error ("EH edge %i->%i misses EH flag", src->index, dst->index); |
cc7220fd JH |
2109 | mark_eh_edge_found_error = true; |
2110 | } | |
2111 | else if (e->aux) | |
2112 | { | |
2113 | /* ??? This might not be a mistake. */ |
ab532386 | 2114 | error ("EH edge %i->%i has duplicated regions", src->index, dst->index); |
cc7220fd JH |
2115 | mark_eh_edge_found_error = true; |
2116 | } | |
2117 | else | |
2118 | e->aux = (void *)1; | |
2119 | } | |
2120 | ||
726a989a RB |
2121 | /* Verify that the BB containing STMT as its last statement has precisely the |
2122 | edges that make_eh_edges would create. */ | |
2123 | ||
cc7220fd | 2124 | bool |
726a989a | 2125 | verify_eh_edges (gimple stmt) |
cc7220fd JH |
2126 | { |
2127 | int region_nr; | |
2128 | bool is_resx; | |
726a989a | 2129 | basic_block bb = gimple_bb (stmt); |
cc7220fd JH |
2130 | edge_iterator ei; |
2131 | edge e; | |
a8ee227c | 2132 | bool inlinable = false; |
cc7220fd JH |
2133 | |
2134 | FOR_EACH_EDGE (e, ei, bb->succs) | |
2135 | gcc_assert (!e->aux); | |
2136 | mark_eh_edge_found_error = false; | |
726a989a | 2137 | if (gimple_code (stmt) == GIMPLE_RESX) |
cc7220fd | 2138 | { |
726a989a | 2139 | region_nr = gimple_resx_region (stmt); |
cc7220fd JH |
2140 | is_resx = true; |
2141 | } | |
2142 | else | |
2143 | { | |
2144 | region_nr = lookup_stmt_eh_region (stmt); | |
2145 | if (region_nr < 0) | |
2146 | { | |
2147 | FOR_EACH_EDGE (e, ei, bb->succs) | |
2148 | if (e->flags & EDGE_EH) | |
2149 | { | |
2150 | error ("BB %i cannot throw but has EH edges", bb->index); |
2151 | return true; | |
2152 | } | |
2153 | return false; | |
2154 | } | |
726a989a | 2155 | if (!stmt_could_throw_p (stmt)) |
cc7220fd JH |
2156 | { |
2157 | error ("BB %i last statement has incorrectly set region", bb->index); | |
2158 | return true; | |
2159 | } | |
a8ee227c | 2160 | inlinable = inlinable_call_p (stmt); |
cc7220fd JH |
2161 | is_resx = false; |
2162 | } | |
2163 | ||
a8ee227c | 2164 | foreach_reachable_handler (region_nr, is_resx, inlinable, mark_eh_edge, stmt); |
cc7220fd JH |
2165 | FOR_EACH_EDGE (e, ei, bb->succs) |
2166 | { | |
2167 | if ((e->flags & EDGE_EH) && !e->aux) | |
2168 | { | |
ab532386 | 2169 | error ("unnecessary EH edge %i->%i", bb->index, e->dest->index); |
cc7220fd JH |
2170 | mark_eh_edge_found_error = true; |
2171 | return true; | |
2172 | } | |
2173 | e->aux = NULL; | |
2174 | } | |
726a989a | 2175 | |
cc7220fd JH |
2176 | return mark_eh_edge_found_error; |
2177 | } | |
6de9cd9a DN |
2178 | |
2179 | \f | |
726a989a RB |
2180 | /* Helper function for operation_could_trap_p and stmt_could_throw_p. */ |
2181 | ||
890065bf | 2182 | bool |
726a989a RB |
2183 | operation_could_trap_helper_p (enum tree_code op, |
2184 | bool fp_operation, | |
2185 | bool honor_trapv, | |
2186 | bool honor_nans, | |
2187 | bool honor_snans, | |
2188 | tree divisor, | |
2189 | bool *handled) | |
2190 | { | |
2191 | *handled = true; | |
2192 | switch (op) | |
2193 | { | |
2194 | case TRUNC_DIV_EXPR: | |
2195 | case CEIL_DIV_EXPR: | |
2196 | case FLOOR_DIV_EXPR: | |
2197 | case ROUND_DIV_EXPR: | |
2198 | case EXACT_DIV_EXPR: | |
2199 | case CEIL_MOD_EXPR: | |
2200 | case FLOOR_MOD_EXPR: | |
2201 | case ROUND_MOD_EXPR: | |
2202 | case TRUNC_MOD_EXPR: | |
2203 | case RDIV_EXPR: | |
2204 | if (honor_snans || honor_trapv) | |
2205 | return true; | |
2206 | if (fp_operation) | |
2207 | return flag_trapping_math; | |
2208 | if (!TREE_CONSTANT (divisor) || integer_zerop (divisor)) | |
2209 | return true; | |
2210 | return false; | |
2211 | ||
2212 | case LT_EXPR: | |
2213 | case LE_EXPR: | |
2214 | case GT_EXPR: | |
2215 | case GE_EXPR: | |
2216 | case LTGT_EXPR: | |
2217 | /* Some floating point comparisons may trap. */ | |
2218 | return honor_nans; | |
2219 | ||
2220 | case EQ_EXPR: | |
2221 | case NE_EXPR: | |
2222 | case UNORDERED_EXPR: | |
2223 | case ORDERED_EXPR: | |
2224 | case UNLT_EXPR: | |
2225 | case UNLE_EXPR: | |
2226 | case UNGT_EXPR: | |
2227 | case UNGE_EXPR: | |
2228 | case UNEQ_EXPR: | |
2229 | return honor_snans; | |
2230 | ||
2231 | case CONVERT_EXPR: | |
2232 | case FIX_TRUNC_EXPR: | |
2233 | /* Conversion of floating point might trap. */ | |
2234 | return honor_nans; | |
2235 | ||
2236 | case NEGATE_EXPR: | |
2237 | case ABS_EXPR: | |
2238 | case CONJ_EXPR: | |
2239 | /* These operations don't trap with floating point. */ | |
2240 | if (honor_trapv) | |
2241 | return true; | |
2242 | return false; | |
2243 | ||
2244 | case PLUS_EXPR: | |
2245 | case MINUS_EXPR: | |
2246 | case MULT_EXPR: | |
2247 | /* Any floating arithmetic may trap. */ | |
2248 | if (fp_operation && flag_trapping_math) | |
2249 | return true; | |
2250 | if (honor_trapv) | |
2251 | return true; | |
2252 | return false; | |
2253 | ||
2254 | default: | |
2255 | /* Any floating arithmetic may trap. */ | |
2256 | if (fp_operation && flag_trapping_math) | |
2257 | return true; | |
2258 | ||
2259 | *handled = false; | |
2260 | return false; | |
2261 | } | |
2262 | } | |
2263 | ||
2264 | /* Return true if operation OP may trap. FP_OPERATION is true if OP is applied | |
2265 | on floating-point values. HONOR_TRAPV is true if OP is applied on integer | |
2266 | type operands that may trap. If OP is a division operator, DIVISOR contains | |
2267 | the value of the divisor. */ | |
2268 | ||
2269 | bool | |
2270 | operation_could_trap_p (enum tree_code op, bool fp_operation, bool honor_trapv, | |
2271 | tree divisor) | |
2272 | { | |
2273 | bool honor_nans = (fp_operation && flag_trapping_math | |
2274 | && !flag_finite_math_only); | |
2275 | bool honor_snans = fp_operation && flag_signaling_nans != 0; | |
2276 | bool handled; | |
2277 | ||
2278 | if (TREE_CODE_CLASS (op) != tcc_comparison | |
2279 | && TREE_CODE_CLASS (op) != tcc_unary | |
2280 | && TREE_CODE_CLASS (op) != tcc_binary) | |
2281 | return false; | |
2282 | ||
2283 | return operation_could_trap_helper_p (op, fp_operation, honor_trapv, | |
2284 | honor_nans, honor_snans, divisor, | |
2285 | &handled); | |
2286 | } | |
2287 | ||
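/* Example uses (illustrative, not from the original sources): a
   floating-point comparison traps only when NaNs are honored, so under
   -ffinite-math-only

     operation_could_trap_p (LT_EXPR, true, false, NULL_TREE)

   is false, while an integer division whose divisor is not a nonzero
   constant (some_divisor standing in for any non-constant tree)

     operation_could_trap_p (TRUNC_DIV_EXPR, false, false, some_divisor)

   is true, per the divisor check in the helper above.  */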
2288 | /* Return true if EXPR can trap, as in dereferencing an invalid pointer | |
1eaba2f2 RH |
2289 | location or floating point arithmetic. C.f. the rtl version, may_trap_p. |
2290 | This routine expects only GIMPLE lhs or rhs input. */ | |
6de9cd9a DN |
2291 | |
2292 | bool | |
2293 | tree_could_trap_p (tree expr) | |
2294 | { | |
726a989a | 2295 | enum tree_code code; |
1eaba2f2 | 2296 | bool fp_operation = false; |
9675412f | 2297 | bool honor_trapv = false; |
726a989a | 2298 | tree t, base, div = NULL_TREE; |
6de9cd9a | 2299 | |
726a989a RB |
2300 | if (!expr) |
2301 | return false; | |
2302 | ||
2303 | code = TREE_CODE (expr); | |
2304 | t = TREE_TYPE (expr); | |
2305 | ||
2306 | if (t) | |
1eaba2f2 | 2307 | { |
04b03edb RAE |
2308 | if (COMPARISON_CLASS_P (expr)) |
2309 | fp_operation = FLOAT_TYPE_P (TREE_TYPE (TREE_OPERAND (expr, 0))); | |
2310 | else | |
2311 | fp_operation = FLOAT_TYPE_P (t); | |
726a989a | 2312 | honor_trapv = INTEGRAL_TYPE_P (t) && TYPE_OVERFLOW_TRAPS (t); |
1eaba2f2 RH |
2313 | } |
2314 | ||
726a989a RB |
2315 | if (TREE_CODE_CLASS (code) == tcc_binary) |
2316 | div = TREE_OPERAND (expr, 1); | |
2317 | if (operation_could_trap_p (code, fp_operation, honor_trapv, div)) | |
2318 | return true; | |
2319 | ||
d25cee4d | 2320 | restart: |
6de9cd9a DN |
2321 | switch (code) |
2322 | { | |
ac182688 ZD |
2323 | case TARGET_MEM_REF: |
2324 | /* For TARGET_MEM_REFs use the information based on the original | |
2325 | reference. */ | |
2326 | expr = TMR_ORIGINAL (expr); | |
2327 | code = TREE_CODE (expr); | |
2328 | goto restart; | |
2329 | ||
6de9cd9a DN |
2330 | case COMPONENT_REF: |
2331 | case REALPART_EXPR: | |
2332 | case IMAGPART_EXPR: | |
2333 | case BIT_FIELD_REF: | |
483edb92 | 2334 | case VIEW_CONVERT_EXPR: |
d25cee4d RH |
2335 | case WITH_SIZE_EXPR: |
2336 | expr = TREE_OPERAND (expr, 0); | |
2337 | code = TREE_CODE (expr); | |
2338 | goto restart; | |
a7e5372d ZD |
2339 | |
2340 | case ARRAY_RANGE_REF: | |
11fc4275 EB |
2341 | base = TREE_OPERAND (expr, 0); |
2342 | if (tree_could_trap_p (base)) | |
a7e5372d ZD |
2343 | return true; |
2344 | ||
11fc4275 EB |
2345 | if (TREE_THIS_NOTRAP (expr)) |
2346 | return false; | |
2347 | ||
2348 | return !range_in_array_bounds_p (expr); | |
a7e5372d ZD |
2349 | |
2350 | case ARRAY_REF: | |
2351 | base = TREE_OPERAND (expr, 0); | |
a7e5372d ZD |
2352 | if (tree_could_trap_p (base)) |
2353 | return true; | |
2354 | ||
2355 | if (TREE_THIS_NOTRAP (expr)) | |
2356 | return false; | |
2357 | ||
2358 | return !in_array_bounds_p (expr); | |
6de9cd9a DN |
2359 | |
2360 | case INDIRECT_REF: | |
7ccf35ed DN |
2361 | case ALIGN_INDIRECT_REF: |
2362 | case MISALIGNED_INDIRECT_REF: | |
1eaba2f2 RH |
2363 | return !TREE_THIS_NOTRAP (expr); |
2364 | ||
2365 | case ASM_EXPR: | |
2366 | return TREE_THIS_VOLATILE (expr); | |
5cb2183e | 2367 | |
726a989a RB |
2368 | |
2369 | case CALL_EXPR: | |
2370 | t = get_callee_fndecl (expr); | |
2371 | /* Assume that calls to weak functions may trap. */ | |
2372 | if (!t || !DECL_P (t) || DECL_WEAK (t)) | |
1eaba2f2 | 2373 | return true; |
1eaba2f2 RH |
2374 | return false; |
2375 | ||
726a989a RB |
2376 | default: |
2377 | return false; | |
2378 | } | |
2379 | } | |
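/* Illustrative cases for the function above (a summary, not
   exhaustive): an INDIRECT_REF traps unless TREE_THIS_NOTRAP is set; an
   ARRAY_REF traps when its base does or when the index may be out of
   bounds; and a CALL_EXPR is assumed to trap when the callee is unknown
   or weak, since a weak symbol may resolve to a null address.  */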
1eaba2f2 | 2380 | |
1eaba2f2 | 2381 | |
726a989a RB |
2382 | /* Helper for stmt_could_throw_p. Return true if STMT (assumed to be |
2383 | an assignment or a conditional) may throw. */ |
1eaba2f2 | 2384 | |
726a989a RB |
2385 | static bool |
2386 | stmt_could_throw_1_p (gimple stmt) | |
2387 | { | |
2388 | enum tree_code code = gimple_expr_code (stmt); | |
2389 | bool honor_nans = false; | |
2390 | bool honor_snans = false; | |
2391 | bool fp_operation = false; | |
2392 | bool honor_trapv = false; | |
2393 | tree t; | |
2394 | size_t i; | |
2395 | bool handled, ret; | |
9675412f | 2396 | |
726a989a RB |
2397 | if (TREE_CODE_CLASS (code) == tcc_comparison |
2398 | || TREE_CODE_CLASS (code) == tcc_unary | |
2399 | || TREE_CODE_CLASS (code) == tcc_binary) | |
2400 | { | |
2401 | t = gimple_expr_type (stmt); | |
2402 | fp_operation = FLOAT_TYPE_P (t); | |
2403 | if (fp_operation) | |
2404 | { | |
2405 | honor_nans = flag_trapping_math && !flag_finite_math_only; | |
2406 | honor_snans = flag_signaling_nans != 0; | |
2407 | } | |
2408 | else if (INTEGRAL_TYPE_P (t) && TYPE_OVERFLOW_TRAPS (t)) | |
2409 | honor_trapv = true; | |
2410 | } | |
2411 | ||
2412 | /* Check if the main expression may trap. */ | |
2413 | t = is_gimple_assign (stmt) ? gimple_assign_rhs2 (stmt) : NULL; | |
2414 | ret = operation_could_trap_helper_p (code, fp_operation, honor_trapv, | |
2415 | honor_nans, honor_snans, t, | |
2416 | &handled); | |
2417 | if (handled) | |
2418 | return ret; | |
2419 | ||
2420 | /* If the expression does not trap, see if any of the individual operands may | |
2421 | trap. */ | |
2422 | for (i = 0; i < gimple_num_ops (stmt); i++) | |
2423 | if (tree_could_trap_p (gimple_op (stmt, i))) | |
2424 | return true; | |
2425 | ||
2426 | return false; | |
2427 | } | |
2428 | ||
2429 | ||
2430 | /* Return true if statement STMT could throw an exception. */ | |
2431 | ||
2432 | bool | |
2433 | stmt_could_throw_p (gimple stmt) | |
2434 | { | |
2435 | enum gimple_code code; | |
2436 | ||
2437 | if (!flag_exceptions) | |
2438 | return false; | |
2439 | ||
2440 | /* The only statements that can throw an exception are assignments, | |
2441 | conditionals, calls and asms. */ | |
2442 | code = gimple_code (stmt); | |
2443 | if (code != GIMPLE_ASSIGN | |
2444 | && code != GIMPLE_COND | |
2445 | && code != GIMPLE_CALL | |
2446 | && code != GIMPLE_ASM) | |
2447 | return false; | |
2448 | ||
2449 | /* If exceptions can only be thrown by function calls and STMT is not a | |
2450 | GIMPLE_CALL, the statement cannot throw. */ | |
2451 | if (!flag_non_call_exceptions && code != GIMPLE_CALL) | |
2452 | return false; | |
2453 | ||
2454 | if (code == GIMPLE_ASSIGN || code == GIMPLE_COND) | |
2455 | return stmt_could_throw_1_p (stmt); | |
2456 | else if (is_gimple_call (stmt)) | |
fe89fbc5 | 2457 | return (gimple_call_flags (stmt) & ECF_NOTHROW) == 0; |
726a989a RB |
2458 | else if (gimple_code (stmt) == GIMPLE_ASM) |
2459 | return (gimple_asm_volatile_p (stmt)); | |
2460 | else | |
2461 | gcc_unreachable (); | |
2462 | ||
2463 | return false; | |
6de9cd9a DN |
2464 | } |
2465 | ||
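/* Example (illustrative): with plain -fexceptions only calls and
   volatile asms can throw, so an assignment such as x_1 = *p_2 cannot;
   with -fnon-call-exceptions the same assignment can throw, because
   stmt_could_throw_1_p finds that the load through p_2 may trap.  */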
726a989a RB |
2466 | |
2467 | /* Return true if expression T could throw an exception. */ | |
2468 | ||
6de9cd9a DN |
2469 | bool |
2470 | tree_could_throw_p (tree t) | |
2471 | { | |
2472 | if (!flag_exceptions) | |
2473 | return false; | |
726a989a | 2474 | if (TREE_CODE (t) == MODIFY_EXPR) |
6de9cd9a | 2475 | { |
67c605a5 | 2476 | if (flag_non_call_exceptions |
726a989a | 2477 | && tree_could_trap_p (TREE_OPERAND (t, 0))) |
67c605a5 | 2478 | return true; |
726a989a | 2479 | t = TREE_OPERAND (t, 1); |
6de9cd9a DN |
2480 | } |
2481 | ||
d25cee4d RH |
2482 | if (TREE_CODE (t) == WITH_SIZE_EXPR) |
2483 | t = TREE_OPERAND (t, 0); | |
6de9cd9a DN |
2484 | if (TREE_CODE (t) == CALL_EXPR) |
2485 | return (call_expr_flags (t) & ECF_NOTHROW) == 0; | |
67c605a5 JM |
2486 | if (flag_non_call_exceptions) |
2487 | return tree_could_trap_p (t); | |
6de9cd9a DN |
2488 | return false; |
2489 | } | |
2490 | ||
33977f81 JH |
2491 | /* Return true if STMT can throw an exception that is not caught within |
2492 | the current function (CFUN). */ | |
2493 | ||
2494 | bool | |
2495 | stmt_can_throw_external (gimple stmt) | |
2496 | { | |
2497 | int region_nr; | |
2498 | bool is_resx = false; | |
2499 | bool inlinable_call = false; | |
2500 | ||
2501 | if (!stmt_could_throw_p (stmt)) | |
2502 | return false; | |
2503 | ||
2504 | if (gimple_code (stmt) == GIMPLE_RESX) | |
2505 | { | |
2506 | region_nr = gimple_resx_region (stmt); | |
2507 | is_resx = true; | |
2508 | } | |
2509 | else | |
2510 | region_nr = lookup_stmt_eh_region (stmt); | |
2511 | ||
2512 | if (region_nr < 0) | |
2513 | return true; | |
2514 | ||
2515 | return can_throw_external_1 (region_nr, is_resx, inlinable_call); | |
2516 | } | |
726a989a RB |
2517 | |
2518 | /* Return true if STMT can throw an exception that is caught within | |
2519 | the current function (CFUN). */ | |
2520 | ||
6de9cd9a | 2521 | bool |
726a989a | 2522 | stmt_can_throw_internal (gimple stmt) |
6de9cd9a | 2523 | { |
a8b3ba8b | 2524 | int region_nr; |
b59fa6cf | 2525 | bool is_resx = false; |
a8ee227c | 2526 | bool inlinable_call = false; |
a8b3ba8b | 2527 | |
726a989a RB |
2528 | if (gimple_code (stmt) == GIMPLE_RESX) |
2529 | { | |
2530 | region_nr = gimple_resx_region (stmt); | |
2531 | is_resx = true; | |
2532 | } | |
a8b3ba8b | 2533 | else |
a8ee227c JH |
2534 | { |
2535 | region_nr = lookup_stmt_eh_region (stmt); | |
2536 | inlinable_call = inlinable_call_p (stmt); | |
2537 | } | |
726a989a | 2538 | |
6de9cd9a DN |
2539 | if (region_nr < 0) |
2540 | return false; | |
726a989a | 2541 | |
a8ee227c | 2542 | return can_throw_internal_1 (region_nr, is_resx, inlinable_call); |
6de9cd9a DN |
2543 | } |
2544 | ||
6de9cd9a | 2545 | |
af47810a RH |
2546 | /* Given a statement OLD_STMT and a new statement NEW_STMT that has replaced |
2547 | OLD_STMT in the function, remove OLD_STMT from the EH table and put NEW_STMT | |
2548 | in the table if it should be in there. Return TRUE if a replacement was | |
2549 | done that may require an EH edge purge. */ |
2550 | ||
2551 | bool | |
726a989a | 2552 | maybe_clean_or_replace_eh_stmt (gimple old_stmt, gimple new_stmt) |
1eaba2f2 | 2553 | { |
af47810a RH |
2554 | int region_nr = lookup_stmt_eh_region (old_stmt); |
2555 | ||
2556 | if (region_nr >= 0) | |
2557 | { | |
726a989a | 2558 | bool new_stmt_could_throw = stmt_could_throw_p (new_stmt); |
af47810a RH |
2559 | |
2560 | if (new_stmt == old_stmt && new_stmt_could_throw) | |
2561 | return false; | |
2562 | ||
2563 | remove_stmt_from_eh_region (old_stmt); | |
2564 | if (new_stmt_could_throw) | |
2565 | { | |
2566 | add_stmt_to_eh_region (new_stmt, region_nr); | |
2567 | return false; | |
2568 | } | |
2569 | else | |
2570 | return true; | |
2571 | } | |
2572 | ||
1eaba2f2 RH |
2573 | return false; |
2574 | } | |
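/* Usage sketch (illustrative): when folding replaces a throwing call
   with a plain assignment, e.g.

     x = foo ();   -->   x = 0;

   the new statement cannot throw, so this function drops the EH table
   entry and returns true, telling the caller that the now-dead EH edges
   out of the block may need purging.  */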
a24549d4 | 2575 | \f |
726a989a RB |
2576 | /* Returns TRUE if oneh and twoh are exception handlers (gimple_try_cleanup of |
2577 | GIMPLE_TRY) that are similar enough to be considered the same. Currently | |
2578 | this only handles handlers consisting of a single call, as that's the | |
2579 | important case for C++: a destructor call for a particular object showing | |
2580 | up in multiple handlers. */ | |
a24549d4 JM |
2581 | |
2582 | static bool | |
726a989a | 2583 | same_handler_p (gimple_seq oneh, gimple_seq twoh) |
a24549d4 | 2584 | { |
726a989a RB |
2585 | gimple_stmt_iterator gsi; |
2586 | gimple ones, twos; | |
2587 | unsigned int ai; | |
a24549d4 | 2588 | |
726a989a RB |
2589 | gsi = gsi_start (oneh); |
2590 | if (!gsi_one_before_end_p (gsi)) | |
a24549d4 | 2591 | return false; |
726a989a | 2592 | ones = gsi_stmt (gsi); |
a24549d4 | 2593 | |
726a989a RB |
2594 | gsi = gsi_start (twoh); |
2595 | if (!gsi_one_before_end_p (gsi)) | |
a24549d4 | 2596 | return false; |
726a989a RB |
2597 | twos = gsi_stmt (gsi); |
2598 | ||
2599 | if (!is_gimple_call (ones) | |
2600 | || !is_gimple_call (twos) | |
2601 | || gimple_call_lhs (ones) | |
2602 | || gimple_call_lhs (twos) | |
2603 | || gimple_call_chain (ones) | |
2604 | || gimple_call_chain (twos) | |
2605 | || !operand_equal_p (gimple_call_fn (ones), gimple_call_fn (twos), 0) | |
2606 | || gimple_call_num_args (ones) != gimple_call_num_args (twos)) | |
a24549d4 JM |
2607 | return false; |
2608 | ||
726a989a RB |
2609 | for (ai = 0; ai < gimple_call_num_args (ones); ++ai) |
2610 | if (!operand_equal_p (gimple_call_arg (ones, ai), | |
2611 | gimple_call_arg (twos, ai), 0)) | |
a24549d4 JM |
2612 | return false; |
2613 | ||
2614 | return true; | |
2615 | } | |
2616 | ||
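/* Example (illustrative): two cleanups that each consist of the single
   call

     Base::~Base (&b);

   compare equal here, which is exactly the C++ destructor pattern this
   targets; handlers differing in callee, arguments, lhs, or static
   chain do not.  */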
2617 | /* Optimize | |
2618 | try { A() } finally { try { ~B() } catch { ~A() } } | |
2619 | try { ... } finally { ~A() } | |
2620 | into | |
2621 | try { A() } catch { ~B() } | |
2622 | try { ~B() ... } finally { ~A() } | |
2623 | ||
2624 | This occurs frequently in C++, where A is a local variable and B is a | |
2625 | temporary used in the initializer for A. */ | |
2626 | ||
2627 | static void | |
726a989a | 2628 | optimize_double_finally (gimple one, gimple two) |
a24549d4 | 2629 | { |
726a989a RB |
2630 | gimple oneh; |
2631 | gimple_stmt_iterator gsi; | |
a24549d4 | 2632 | |
726a989a RB |
2633 | gsi = gsi_start (gimple_try_cleanup (one)); |
2634 | if (!gsi_one_before_end_p (gsi)) | |
a24549d4 JM |
2635 | return; |
2636 | ||
726a989a RB |
2637 | oneh = gsi_stmt (gsi); |
2638 | if (gimple_code (oneh) != GIMPLE_TRY | |
2639 | || gimple_try_kind (oneh) != GIMPLE_TRY_CATCH) | |
a24549d4 JM |
2640 | return; |
2641 | ||
726a989a | 2642 | if (same_handler_p (gimple_try_cleanup (oneh), gimple_try_cleanup (two))) |
a24549d4 | 2643 | { |
726a989a | 2644 | gimple_seq seq = gimple_try_eval (oneh); |
a24549d4 | 2645 | |
726a989a RB |
2646 | gimple_try_set_cleanup (one, seq); |
2647 | gimple_try_set_kind (one, GIMPLE_TRY_CATCH); | |
2648 | seq = copy_gimple_seq_and_replace_locals (seq); | |
2649 | gimple_seq_add_seq (&seq, gimple_try_eval (two)); | |
2650 | gimple_try_set_eval (two, seq); | |
a24549d4 JM |
2651 | } |
2652 | } | |
2653 | ||
2654 | /* Perform EH refactoring optimizations that are simpler to do when code | |
84fbffb2 | 2655 | flow has been lowered but EH structures haven't. */ |
a24549d4 JM |
2656 | |
2657 | static void | |
726a989a | 2658 | refactor_eh_r (gimple_seq seq) |
a24549d4 | 2659 | { |
726a989a RB |
2660 | gimple_stmt_iterator gsi; |
2661 | gimple one, two; | |
a24549d4 | 2662 | |
726a989a RB |
2663 | one = NULL; |
2664 | two = NULL; | |
2665 | gsi = gsi_start (seq); | |
2666 | while (1) | |
2667 | { | |
2668 | one = two; | |
2669 | if (gsi_end_p (gsi)) | |
2670 | two = NULL; | |
2671 | else | |
2672 | two = gsi_stmt (gsi); | |
2673 | if (one | |
2674 | && two | |
2675 | && gimple_code (one) == GIMPLE_TRY | |
2676 | && gimple_code (two) == GIMPLE_TRY | |
2677 | && gimple_try_kind (one) == GIMPLE_TRY_FINALLY | |
2678 | && gimple_try_kind (two) == GIMPLE_TRY_FINALLY) | |
2679 | optimize_double_finally (one, two); | |
2680 | if (one) | |
2681 | switch (gimple_code (one)) | |
a24549d4 | 2682 | { |
726a989a RB |
2683 | case GIMPLE_TRY: |
2684 | refactor_eh_r (gimple_try_eval (one)); | |
2685 | refactor_eh_r (gimple_try_cleanup (one)); | |
2686 | break; | |
2687 | case GIMPLE_CATCH: | |
2688 | refactor_eh_r (gimple_catch_handler (one)); | |
2689 | break; | |
2690 | case GIMPLE_EH_FILTER: | |
2691 | refactor_eh_r (gimple_eh_filter_failure (one)); | |
2692 | break; | |
2693 | default: | |
2694 | break; | |
a24549d4 | 2695 | } |
726a989a RB |
2696 | if (two) |
2697 | gsi_next (&gsi); | |
2698 | else | |
2699 | break; | |
a24549d4 JM |
2700 | } |
2701 | } | |
2702 | ||
2703 | static unsigned | |
2704 | refactor_eh (void) | |
2705 | { | |
726a989a | 2706 | refactor_eh_r (gimple_body (current_function_decl)); |
a24549d4 JM |
2707 | return 0; |
2708 | } | |
2709 | ||
8ddbbcae | 2710 | struct gimple_opt_pass pass_refactor_eh = |
a24549d4 | 2711 | { |
8ddbbcae JH |
2712 | { |
2713 | GIMPLE_PASS, | |
a24549d4 JM |
2714 | "ehopt", /* name */ |
2715 | NULL, /* gate */ | |
2716 | refactor_eh, /* execute */ | |
2717 | NULL, /* sub */ | |
2718 | NULL, /* next */ | |
2719 | 0, /* static_pass_number */ | |
2720 | TV_TREE_EH, /* tv_id */ | |
2721 | PROP_gimple_lcf, /* properties_required */ | |
2722 | 0, /* properties_provided */ | |
2723 | 0, /* properties_destroyed */ | |
2724 | 0, /* todo_flags_start */ | |
8ddbbcae JH |
2725 | TODO_dump_func /* todo_flags_finish */ |
2726 | } | |
a24549d4 | 2727 | }; |
a8da523f JH |
2728 | |
2729 | /* Walk statements, see which regions are really referenced and remove the unreachable ones. */ |
2730 | ||
2731 | static void | |
2732 | tree_remove_unreachable_handlers (void) | |
2733 | { | |
2734 | sbitmap reachable, contains_stmt; | |
2735 | VEC(int,heap) * label_to_region; | |
2736 | basic_block bb; | |
2737 | ||
2738 | label_to_region = label_to_region_map (); | |
2739 | reachable = sbitmap_alloc (num_eh_regions ()); | |
2740 | sbitmap_zero (reachable); | |
2741 | contains_stmt = sbitmap_alloc (num_eh_regions ()); | |
2742 | sbitmap_zero (contains_stmt); | |
2743 | ||
2744 | FOR_EACH_BB (bb) | |
2745 | { | |
2746 | gimple_stmt_iterator gsi; | |
2747 | int region; | |
a203a221 | 2748 | bool has_eh_preds = bb_has_eh_pred (bb); |
a8da523f JH |
2749 | |
2750 | for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi)) | |
2751 | { | |
2752 | gimple stmt = gsi_stmt (gsi); | |
2753 | ||
2754 | if (gimple_code (stmt) == GIMPLE_LABEL && has_eh_preds) | |
2755 | { | |
2756 | int uid = LABEL_DECL_UID (gimple_label_label (stmt)); | |
496a4ef5 JH |
2757 | int region; |
2758 | ||
2759 | for (region = VEC_index (int, label_to_region, uid); | |
2760 | region; region = get_next_region_sharing_label (region)) | |
2761 | SET_BIT (reachable, region); | |
a8da523f | 2762 | } |
d3e37a9e | 2763 | if (gimple_code (stmt) == GIMPLE_RESX) |
a3710436 JH |
2764 | SET_BIT (reachable, |
2765 | VEC_index (eh_region, cfun->eh->region_array, | |
2766 | gimple_resx_region (stmt))->region_number); | |
a8da523f JH |
2767 | if ((region = lookup_stmt_eh_region (stmt)) >= 0) |
2768 | SET_BIT (contains_stmt, region); | |
2769 | } | |
2770 | } | |
2771 | ||
2772 | if (dump_file) | |
2773 | { | |
2774 | fprintf (dump_file, "Before removal of unreachable regions:\n"); | |
2775 | dump_eh_tree (dump_file, cfun); | |
2776 | fprintf (dump_file, "Reachable regions: "); | |
2777 | dump_sbitmap_file (dump_file, reachable); | |
2778 | fprintf (dump_file, "Regions containing insns: "); | |
2779 | dump_sbitmap_file (dump_file, contains_stmt); | |
2780 | } | |
2781 | ||
2782 | remove_unreachable_regions (reachable, contains_stmt); | |
2783 | sbitmap_free (reachable); | |
2784 | sbitmap_free (contains_stmt); | |
2785 | VEC_free (int, heap, label_to_region); | |
2786 | if (dump_file) | |
2787 | { | |
2788 | fprintf (dump_file, "\n\nAfter removal of unreachable regions:\n"); | |
2789 | dump_eh_tree (dump_file, cfun); | |
2790 | fprintf (dump_file, "\n\n"); | |
2791 | } | |
2792 | } | |
2793 | ||
2794 | /* Pattern match an empty EH receiver looking like: |
2795 | ||
2796 | save_filt.6352_662 = [filter_expr] <<<filter object>>>; | |
2797 | save_eptr.6351_663 = [exc_ptr_expr] <<<exception object>>>; | |
2798 | <<<exception object>>> = save_eptr.6351_663; | |
2799 | <<<filter object>>> = save_filt.6352_662; | |
2800 | resx 1 | |
4e6d1743 JH |
2801 | |
2802 | And various minor variants after DCE or copy propagation. | |
a8da523f JH |
2803 | */ |
2804 | ||
2805 | static int | |
2806 | tree_empty_eh_handler_p (basic_block bb) | |
2807 | { | |
2808 | gimple_stmt_iterator gsi; | |
2809 | int region; | |
496a4ef5 JH |
2810 | edge_iterator ei; |
2811 | edge e; | |
4e6d1743 JH |
2812 | use_operand_p imm_use; |
2813 | gimple use_stmt; | |
496a4ef5 | 2814 | bool found = false; |
a8da523f JH |
2815 | |
2816 | gsi = gsi_last_bb (bb); | |
2817 | ||
2818 | /* RESX */ | |
2819 | if (gsi_end_p (gsi)) | |
2820 | return 0; | |
2821 | if (gimple_code (gsi_stmt (gsi)) != GIMPLE_RESX) | |
2822 | return 0; | |
2823 | region = gimple_resx_region (gsi_stmt (gsi)); | |
2824 | ||
2825 | /* filter_object set. */ | |
b5b8b0ac | 2826 | gsi_prev_nondebug (&gsi); |
a8da523f JH |
2827 | if (gsi_end_p (gsi)) |
2828 | return 0; | |
4e6d1743 JH |
2829 | if (gimple_code (gsi_stmt (gsi)) == GIMPLE_ASSIGN) |
2830 | { | |
2831 | tree filter_tmp; | |
2832 | tree exc_ptr_tmp; | |
a8da523f | 2833 | |
4e6d1743 JH |
2834 | if (TREE_CODE (gimple_assign_lhs (gsi_stmt (gsi))) != FILTER_EXPR) |
2835 | return 0; | |
2836 | filter_tmp = gimple_assign_rhs1 (gsi_stmt (gsi)); | |
a8da523f | 2837 | |
4e6d1743 | 2838 | /* filter_object set. */ |
b5b8b0ac | 2839 | gsi_prev_nondebug (&gsi); |
4e6d1743 JH |
2840 | if (gsi_end_p (gsi)) |
2841 | return 0; | |
2842 | if (gimple_code (gsi_stmt (gsi)) != GIMPLE_ASSIGN) | |
2843 | return 0; | |
2844 | if (TREE_CODE (gimple_assign_lhs (gsi_stmt (gsi))) != EXC_PTR_EXPR) | |
2845 | return 0; | |
2846 | exc_ptr_tmp = gimple_assign_rhs1 (gsi_stmt (gsi)); | |
2847 | ||
2848 | /* exc_ptr get. */ | |
2849 | if (TREE_CODE (exc_ptr_tmp) != EXC_PTR_EXPR) | |
2850 | { | |
b5b8b0ac | 2851 | gsi_prev_nondebug (&gsi); |
4e6d1743 JH |
2852 | if (gsi_end_p (gsi)) |
2853 | return 0; | |
2854 | if (gimple_code (gsi_stmt (gsi)) != GIMPLE_ASSIGN) | |
2855 | return 0; | |
2856 | if (TREE_CODE (gimple_assign_rhs1 (gsi_stmt (gsi))) != EXC_PTR_EXPR) | |
2857 | return 0; | |
2858 | if (exc_ptr_tmp != gimple_assign_lhs (gsi_stmt (gsi))) | |
2859 | return 0; | |
2860 | if (!single_imm_use (exc_ptr_tmp, &imm_use, &use_stmt)) | |
2861 | return 0; | |
2862 | } | |
a8da523f | 2863 | |
4e6d1743 JH |
2864 | /* filter_object get. */ |
2865 | if (TREE_CODE (filter_tmp) != FILTER_EXPR) | |
2866 | { | |
b5b8b0ac | 2867 | gsi_prev_nondebug (&gsi); |
4e6d1743 JH |
2868 | if (gsi_end_p (gsi)) |
2869 | return 0; | |
2870 | if (gimple_code (gsi_stmt (gsi)) != GIMPLE_ASSIGN) | |
2871 | return 0; | |
2872 | if (TREE_CODE (gimple_assign_rhs1 (gsi_stmt (gsi))) != FILTER_EXPR) | |
2873 | return 0; | |
2874 | if (filter_tmp != gimple_assign_lhs (gsi_stmt (gsi))) | |
2875 | return 0; | |
2876 | if (!single_imm_use (filter_tmp, &imm_use, &use_stmt)) | |
2877 | return 0; | |
2878 | } | |
a8da523f | 2879 | |
4e6d1743 | 2880 | /* label. */ |
b5b8b0ac | 2881 | gsi_prev_nondebug (&gsi); |
4e6d1743 JH |
2882 | if (gsi_end_p (gsi)) |
2883 | return 0; | |
2884 | } | |
496a4ef5 JH |
2885 | if (gimple_code (gsi_stmt (gsi)) != GIMPLE_LABEL) |
2886 | return 0; | |
2887 | ||
2888 | /* Be sure that there is at least one EH region reaching the block directly. |
2889 | After EH edge redirection, it is possible that the block is reached by one |
2890 | handler but resumed by a different one. */ |
2891 | FOR_EACH_EDGE (e, ei, bb->preds) | |
2892 | if ((e->flags & EDGE_EH)) | |
2893 | found = true; | |
2894 | if (found) | |
2895 | return region; | |
4e6d1743 JH |
2896 | return 0; |
2897 | } | |
2898 | ||
2899 | /* Return true if it is possible to remove basic block BB and propagate | |
2900 | through PHIs. | |
2901 | ||
2902 | This means that every PHI in BB has all uses such that they are PHIs |
2903 | of basic blocks reachable through BB and they appear only in uses |
2904 | reachable by the edge from BB to the block containing the use. |
2905 | ||
2906 | This is the same as in the merge-phi code, but in a slightly more |
2907 | general setting because BB can have multiple successors. */ |
2908 | ||
2909 | static bool | |
2910 | all_phis_safe_to_merge (basic_block bb) | |
2911 | { | |
2912 | gimple_stmt_iterator si; | |
2913 | bool ok = true; | |
2914 | ||
2915 | for (si = gsi_start_phis (bb); !gsi_end_p (si); gsi_next (&si)) | |
2916 | { | |
2917 | gimple phi = gsi_stmt (si); | |
2918 | tree result = gimple_phi_result (phi); | |
2919 | gimple stmt; | |
2920 | use_operand_p imm_use; | |
2921 | imm_use_iterator imm_iter; | |
2922 | ||
2923 | /* If the PHI's result is never used, then we can just | |
2924 | ignore it. */ | |
2925 | if (has_zero_uses (result)) | |
2926 | continue; | |
2927 | /* We can always rebuild virtuals if needed. */ | |
2928 | if (!is_gimple_reg (result)) | |
2929 | continue; | |
2930 | FOR_EACH_IMM_USE_STMT (stmt, imm_iter, result) | |
2931 | { | |
2932 | if (gimple_code (stmt) != GIMPLE_PHI) | |
2933 | { | |
2934 | if (dump_file && (dump_flags & TDF_DETAILS)) | |
2935 | fprintf (dump_file, | |
2936 | "PHI result has use in non-PHI statement.\n"); | |
2937 | ok = false; | |
2938 | BREAK_FROM_IMM_USE_STMT (imm_iter); | |
2939 | } | |
2940 | else | |
2941 | FOR_EACH_IMM_USE_ON_STMT (imm_use, imm_iter) | |
2942 | { | |
2943 | edge e; | |
2944 | e = gimple_phi_arg_edge (stmt, PHI_ARG_INDEX_FROM_USE (imm_use)); | |
2945 | if (e->src != bb) | |
2946 | { | |
2947 | if (dump_file && (dump_flags & TDF_DETAILS)) | |
2948 | fprintf (dump_file, "PHI has use in PHI not reached from " | |
2949 | "empty cleanup itself.\n"); | |
2950 | ok = false; | |
2951 | break; | |
2952 | } | |
2953 | } | |
2954 | if (!ok) | |
2955 | BREAK_FROM_IMM_USE_STMT (imm_iter); | |
2956 | } | |
2957 | if (!ok) | |
2958 | return false; | |
2959 | } | |
2960 | return ok; | |
a8da523f JH |
2961 | } |
2962 | ||
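| /* Set whenever edge creation or removal below invalidates dominance | |
| info; cleanup_eh then frees it. */ | |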
2963 | static bool dominance_info_invalidated; | |
2964 | ||
4e6d1743 JH |
2965 | /* Information to pass into make_eh_edge_and_update_phi. */ |
2966 | ||
2967 | struct update_info | |
2968 | { | |
2969 | basic_block bb_to_remove, bb; | |
2970 | edge edge_to_remove; | |
2971 | }; | |
2972 | ||
2973 | /* DATA points to an update_info structure. | |
2974 | Like make_eh_edge, create an EH edge from DATA->bb to the basic block | |
2975 | containing the handler of REGION. In addition, update PHI operands by | |
2976 | copying operands from DATA->bb_to_remove. */ | |
2977 | ||
2978 | static void | |
7e5487a2 | 2979 | make_eh_edge_and_update_phi (struct eh_region_d *region, void *data) |
4e6d1743 JH |
2980 | { |
2981 | struct update_info *info = (struct update_info *) data; | |
2982 | edge e, e2; | |
2983 | tree lab; | |
2984 | basic_block src, dst; | |
2985 | gimple_stmt_iterator si; | |
2986 | ||
2987 | lab = get_eh_region_tree_label (region); | |
2988 | ||
2989 | src = info->bb; | |
2990 | dst = label_to_block (lab); | |
2991 | ||
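| /* If an EH edge to the handler already exists, mark it as kept by | |
| setting AUX; update_eh_edges later removes unmarked EH edges. */ | |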
2992 | e = find_edge (src, dst); | |
2993 | if (e) | |
2994 | { | |
2995 | gcc_assert (e->flags & EDGE_EH); | |
2996 | e->aux = e; | |
2997 | return; | |
2998 | } | |
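| /* Otherwise create the edge and fill in its PHI arguments by copying | |
| them from E2, the existing edge from BB_TO_REMOVE to the handler. */ | |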
2999 | dominance_info_invalidated = true; | |
3000 | e2 = find_edge (info->bb_to_remove, dst); | |
a3710436 | 3001 | e = make_edge (src, dst, EDGE_EH); |
4e6d1743 JH |
3002 | e->aux = e; |
3003 | gcc_assert (e2); | |
3004 | for (si = gsi_start_phis (dst); !gsi_end_p (si); gsi_next (&si)) | |
3005 | { | |
3006 | gimple phi = gsi_stmt (si); | |
3007 | tree use = USE_FROM_PTR (PHI_ARG_DEF_PTR_FROM_EDGE (phi, e2)); | |
3008 | gimple def = (TREE_CODE (use) == SSA_NAME | |
3009 | ? SSA_NAME_DEF_STMT (use) : NULL); | |
3010 | ||
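| /* If the value on E2 is defined by a PHI in BB_TO_REMOVE itself, look | |
| through that PHI to the value arriving via EDGE_TO_REMOVE. */ | |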
3011 | if (def && gimple_bb (def) == info->bb_to_remove) | |
3012 | { | |
3013 | use = USE_FROM_PTR (PHI_ARG_DEF_PTR_FROM_EDGE (def, | |
3014 | info->edge_to_remove)); | |
3015 | gcc_assert (info->bb_to_remove == info->edge_to_remove->dest); | |
3016 | def = TREE_CODE (use) == SSA_NAME ? SSA_NAME_DEF_STMT (use) : NULL; | |
3017 | gcc_assert (!def | |
3018 | || gimple_bb (def) != info->bb_to_remove | |
3019 | || !is_gimple_reg (use)); | |
3020 | } | |
3021 | SET_USE (PHI_ARG_DEF_PTR_FROM_EDGE (phi, e), use); | |
3022 | } | |
3023 | } | |
3024 | ||
3025 | /* Make EH edges corresponding to STMT while updating PHI nodes after the | |
3026 | removal of the empty cleanup BB_TO_REMOVE joined to the BB containing STMT | |
496a4ef5 | 3027 | by EDGE_TO_REMOVE. |
4e6d1743 | 3028 | |
496a4ef5 JH |
3029 | Return true if EDGE_TO_REMOVE was really removed. It might stay reachable | |
3030 | when not all EH regions are cleaned up. */ | |
3031 | ||
3032 | static bool | |
4e6d1743 JH |
3033 | update_eh_edges (gimple stmt, basic_block bb_to_remove, edge edge_to_remove) |
3034 | { | |
3035 | int region_nr; | |
3036 | bool is_resx; | |
3037 | bool inlinable = false; | |
3038 | struct update_info info; | |
3039 | edge_iterator ei; | |
3040 | edge e; | |
3041 | int probability_sum = 0; | |
496a4ef5 | 3042 | bool removed = false; |
4e6d1743 JH |
3043 | |
3044 | info.bb_to_remove = bb_to_remove; | |
3045 | info.bb = gimple_bb (stmt); | |
3046 | info.edge_to_remove = edge_to_remove; | |
3047 | ||
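| /* Find the EH region STMT throws to: a GIMPLE_RESX names it explicitly, | |
| otherwise look it up from the statement itself. */ | |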
3048 | if (gimple_code (stmt) == GIMPLE_RESX) | |
3049 | { | |
3050 | region_nr = gimple_resx_region (stmt); | |
3051 | is_resx = true; | |
3052 | } | |
3053 | else | |
3054 | { | |
3055 | region_nr = lookup_stmt_eh_region (stmt); | |
4e6d1743 JH |
3056 | is_resx = false; |
3057 | inlinable = inlinable_call_p (stmt); | |
3058 | } | |
3059 | ||
3060 | /* First add new edges as necessary. */ | |
3061 | foreach_reachable_handler (region_nr, is_resx, inlinable, | |
3062 | make_eh_edge_and_update_phi, &info); | |
3063 | ||
3064 | /* And remove the edges we didn't mark. */ | |
3065 | for (ei = ei_start (info.bb->succs); (e = ei_safe_edge (ei)); ) | |
3066 | { | |
496a4ef5 | 3067 | if ((e->flags & EDGE_EH) && !e->aux) |
4e6d1743 JH |
3068 | { |
3069 | dominance_info_invalidated = true; | |
496a4ef5 JH |
3070 | if (e == edge_to_remove) |
3071 | removed = true; | |
4e6d1743 JH |
3072 | remove_edge (e); |
3073 | } | |
3074 | else | |
3075 | { | |
3076 | e->aux = NULL; | |
3077 | probability_sum += e->probability; | |
3078 | ei_next (&ei); | |
3079 | } | |
3080 | } | |
3081 | ||
3082 | /* Make the CFG profile more consistent, assuming that the exception will | |
3083 | resume to the first available EH handler. In practice this makes little | |
3084 | difference, but we get fewer consistency errors in the dumps. */ | |
3085 | if (is_resx && EDGE_COUNT (info.bb->succs) && !probability_sum) | |
3086 | EDGE_SUCC (info.bb, 0)->probability = REG_BR_PROB_BASE; | |
496a4ef5 | 3087 | return removed; |
4e6d1743 JH |
3088 | } |
3089 | ||
a8da523f JH |
3090 | /* Look for basic blocks containing an empty exception handler and remove | |
3091 | them. This is similar to jump forwarding, just across EH edges. */ | |
3092 | ||
3093 | static bool | |
496a4ef5 | 3094 | cleanup_empty_eh (basic_block bb, VEC(int,heap) * label_to_region) |
a8da523f JH |
3095 | { |
3096 | int region; | |
4e6d1743 | 3097 | gimple_stmt_iterator si; |
496a4ef5 | 3098 | edge_iterator ei; |
a8da523f JH |
3099 | |
3100 | /* When the handler of an EH region winds up being empty, we can safely | |
3101 | remove it. This causes inner EH regions to be redirected to the | |
3102 | outer one, if present in the function. So we need to rebuild | |
3103 | EH edges in all sources. */ | |
4e6d1743 JH |
3104 | if ((region = tree_empty_eh_handler_p (bb)) |
3105 | && all_phis_safe_to_merge (bb)) | |
a8da523f | 3106 | { |
a8da523f | 3107 | edge e; |
496a4ef5 JH |
3108 | bool found = false, removed_some = false, has_non_eh_preds = false; |
3109 | gimple_stmt_iterator gsi; | |
a8da523f | 3110 | |
496a4ef5 JH |
3111 | /* Look for all EH regions sharing the label of this block. | |
3112 | If they are not the same as REGION, remove them and replace them | |
3113 | by the outer region of REGION. Also note if REGION itself is one | |
3114 | of them. */ | |
a8da523f | 3115 | |
496a4ef5 JH |
3116 | for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi)) |
3117 | if (gimple_code (gsi_stmt (gsi)) == GIMPLE_LABEL) | |
3118 | { | |
3119 | int uid = LABEL_DECL_UID (gimple_label_label (gsi_stmt (gsi))); | |
3120 | int r = VEC_index (int, label_to_region, uid); | |
3121 | int next; | |
3122 | ||
3123 | while (r) | |
3124 | { | |
3125 | next = get_next_region_sharing_label (r); | |
3126 | if (r == region) | |
3127 | found = true; | |
3128 | else | |
3129 | { | |
3130 | removed_some = true; | |
3131 | remove_eh_region_and_replace_by_outer_of (r, region); | |
3132 | if (dump_file && (dump_flags & TDF_DETAILS)) | |
3133 | fprintf (dump_file, "Empty EH handler %i removed and " | |
3134 | "replaced by %i\n", r, region); | |
3135 | } | |
3136 | r = next; | |
3137 | } | |
3138 | } | |
3139 | else | |
3140 | break; | |
3141 | ||
3142 | gcc_assert (found || removed_some); | |
3143 | FOR_EACH_EDGE (e, ei, bb->preds) | |
3144 | if (!(e->flags & EDGE_EH)) | |
3145 | has_non_eh_preds = true; | |
3146 | ||
3147 | /* When the block is an empty EH cleanup but is reachable via non-EH code | |
3148 | too, we cannot remove the region it is resumed via, because doing so | |
3149 | would redirect its RESX edges. | |
3150 | ||
3151 | This case will be handled later after edge forwarding if the EH cleanup | |
3152 | is really dead. */ | |
3153 | ||
3154 | if (found && !has_non_eh_preds) | |
a3710436 JH |
3155 | { |
3156 | if (dump_file && (dump_flags & TDF_DETAILS)) | |
3157 | fprintf (dump_file, "Empty EH handler %i removed.\n", region); | |
3158 | remove_eh_region (region); | |
3159 | } | |
496a4ef5 JH |
3160 | else if (!removed_some) |
3161 | return false; | |
3162 | ||
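| /* Rebuild the EH edges of every throwing predecessor; EH edges from | |
| sources that can no longer throw are simply removed. */ | |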
3163 | for (ei = ei_start (bb->preds); (e = ei_safe_edge (ei)); ) | |
a8da523f JH |
3164 | { |
3165 | basic_block src = e->src; | |
496a4ef5 JH |
3166 | if (!(e->flags & EDGE_EH)) |
3167 | { | |
3168 | ei_next (&ei); | |
3169 | continue; | |
3170 | } | |
4e6d1743 | 3171 | if (stmt_can_throw_internal (last_stmt (src))) |
496a4ef5 JH |
3172 | { |
3173 | if (!update_eh_edges (last_stmt (src), bb, e)) | |
3174 | ei_next (&ei); | |
3175 | } | |
3176 | else | |
3177 | remove_edge (e); | |
4e6d1743 | 3178 | } |
4e6d1743 JH |
3179 | |
3180 | /* Verify that we eliminated all uses of the PHIs we are going to remove. | |
3181 | If we didn't, rebuild SSA for the affected variable (this is allowed | |
3182 | only for virtuals). */ | |
3183 | for (si = gsi_start_phis (bb); !gsi_end_p (si); gsi_next (&si)) | |
3184 | { | |
3185 | gimple phi = gsi_stmt (si); | |
3186 | tree result = gimple_phi_result (phi); | |
3187 | if (!has_zero_uses (result)) | |
a8da523f | 3188 | { |
4e6d1743 JH |
3189 | use_operand_p use_p; |
3190 | imm_use_iterator iter; | |
3191 | gimple stmt; | |
3192 | ||
3193 | FOR_EACH_IMM_USE_STMT (stmt, iter, result) | |
a8da523f | 3194 | { |
4e6d1743 JH |
3195 | /* We have a use; see whether it will disappear | |
3196 | after removing BB. */ | |
3197 | if (gimple_bb (stmt) == bb) | |
3198 | continue; | |
3199 | if (gimple_code (stmt) == GIMPLE_PHI) | |
3200 | { | |
3201 | bool bad = false; | |
3202 | ||
3203 | FOR_EACH_IMM_USE_ON_STMT (use_p, iter) | |
3204 | if (gimple_phi_arg_edge (stmt, | |
3205 | PHI_ARG_INDEX_FROM_USE (use_p))->src != bb) | |
3206 | { | |
3207 | bad = true; | |
3208 | break; | |
3209 | } | |
3210 | ||
3211 | if (!bad) | |
3212 | continue; | |
3213 | } | |
3214 | ||
3215 | gcc_assert (!is_gimple_reg (result)); | |
3216 | mark_sym_for_renaming (SSA_NAME_VAR (result)); | |
3217 | /* As we are going to delete this block we will release all | |
3218 | defs which makes the immediate uses on use stmts invalid. | |
3219 | Avoid that by replacing all uses with the bare variable | |
3220 | and updating the stmts. */ | |
3221 | FOR_EACH_IMM_USE_ON_STMT (use_p, iter) | |
3222 | SET_USE (use_p, SSA_NAME_VAR (result)); | |
3223 | update_stmt (stmt); | |
a8da523f | 3224 | } |
a8da523f | 3225 | } |
a8da523f | 3226 | } |
496a4ef5 JH |
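| /* If no predecessor edges remain, the block is unreachable; delete it. */ | |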
3227 | if (!ei_safe_edge (ei_start (bb->preds))) |
3228 | delete_basic_block (bb); | |
a8da523f JH |
3229 | return true; |
3230 | } | |
3231 | return false; | |
3232 | } | |
3233 | ||
3234 | ||
3235 | /* Perform cleanups and lowering of exception handling: | |
3236 | 1) cleanup regions with handlers doing nothing are optimized out; | |
3237 | 2) MUST_NOT_THROW regions that became dead because of 1) are optimized out; | |
3238 | 3) info about regions containing instructions, and regions reachable | |
3239 | via local EH edges, is collected; | |
3240 | 4) the EH tree is pruned of regions that are no longer necessary. | |
3241 | */ | |
3242 | ||
3243 | static unsigned int | |
3244 | cleanup_eh (void) | |
3245 | { | |
3246 | bool changed = false; | |
3247 | basic_block bb; | |
496a4ef5 | 3248 | VEC(int,heap) * label_to_region; |
a8da523f JH |
3249 | int i; |
3250 | ||
3251 | if (!cfun->eh) | |
3252 | return 0; | |
3253 | if (dump_file) | |
3254 | { | |
3255 | fprintf (dump_file, "Before cleanups:\n"); | |
3256 | dump_eh_tree (dump_file, cfun); | |
3257 | } | |
3258 | ||
6d07ad98 | 3259 | if (optimize) |
a8da523f | 3260 | { |
496a4ef5 | 3261 | label_to_region = label_to_region_map (); |
6d07ad98 JH |
3262 | dominance_info_invalidated = false; |
3263 | /* We cannot use FOR_EACH_BB, since the basic blocks may get removed. */ | |
3264 | for (i = NUM_FIXED_BLOCKS; i < last_basic_block; i++) | |
3265 | { | |
3266 | bb = BASIC_BLOCK (i); | |
3267 | if (bb) | |
496a4ef5 | 3268 | changed |= cleanup_empty_eh (bb, label_to_region); |
6d07ad98 | 3269 | } |
496a4ef5 | 3270 | VEC_free (int, heap, label_to_region); |
6d07ad98 JH |
3271 | if (dominance_info_invalidated) |
3272 | { | |
3273 | free_dominance_info (CDI_DOMINATORS); | |
3274 | free_dominance_info (CDI_POST_DOMINATORS); | |
3275 | } | |
a8da523f | 3276 | |
6d07ad98 JH |
3277 | /* Removing a contained cleanup can render MUST_NOT_THROW regions empty. */ | |
3278 | if (changed) | |
3279 | delete_unreachable_blocks (); | |
3280 | } | |
a8da523f JH |
3281 | |
3282 | tree_remove_unreachable_handlers (); | |
3283 | if (dump_file) | |
3284 | { | |
3285 | fprintf (dump_file, "After cleanups:\n"); | |
3286 | dump_eh_tree (dump_file, cfun); | |
3287 | } | |
3288 | ||
3289 | return (changed ? TODO_cleanup_cfg | TODO_update_ssa : 0); | |
3290 | } | |
3291 | ||
3292 | struct gimple_opt_pass pass_cleanup_eh = { | |
3293 | { | |
3294 | GIMPLE_PASS, | |
3295 | "ehcleanup", /* name */ | |
3296 | NULL, /* gate */ | |
3297 | cleanup_eh, /* execute */ | |
3298 | NULL, /* sub */ | |
3299 | NULL, /* next */ | |
3300 | 0, /* static_pass_number */ | |
3301 | TV_TREE_EH, /* tv_id */ | |
3302 | PROP_gimple_lcf, /* properties_required */ | |
3303 | 0, /* properties_provided */ | |
3304 | 0, /* properties_destroyed */ | |
3305 | 0, /* todo_flags_start */ | |
3306 | TODO_dump_func /* todo_flags_finish */ | |
3307 | } | |
3308 | }; |