Commit | Line | Data |
---|---|---|
6de9cd9a | 1 | /* Exception handling semantics and decomposition for trees. |
afaaa67d | 2 | Copyright (C) 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010, 2011 |
66647d44 | 3 | Free Software Foundation, Inc. |
6de9cd9a DN |
4 | |
5 | This file is part of GCC. | |
6 | ||
7 | GCC is free software; you can redistribute it and/or modify | |
8 | it under the terms of the GNU General Public License as published by | |
9dcd6f09 | 9 | the Free Software Foundation; either version 3, or (at your option) |
6de9cd9a DN |
10 | any later version. |
11 | ||
12 | GCC is distributed in the hope that it will be useful, | |
13 | but WITHOUT ANY WARRANTY; without even the implied warranty of | |
14 | MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the | |
15 | GNU General Public License for more details. | |
16 | ||
17 | You should have received a copy of the GNU General Public License | |
9dcd6f09 NC |
18 | along with GCC; see the file COPYING3. If not see |
19 | <http://www.gnu.org/licenses/>. */ | |
6de9cd9a DN |
20 | |
21 | #include "config.h" | |
22 | #include "system.h" | |
23 | #include "coretypes.h" | |
24 | #include "tm.h" | |
25 | #include "tree.h" | |
6de9cd9a DN |
26 | #include "flags.h" |
27 | #include "function.h" | |
28 | #include "except.h" | |
9f698956 | 29 | #include "pointer-set.h" |
6de9cd9a | 30 | #include "tree-flow.h" |
6de9cd9a | 31 | #include "tree-inline.h" |
6de9cd9a | 32 | #include "tree-pass.h" |
6de9cd9a DN |
33 | #include "langhooks.h" |
34 | #include "ggc.h" | |
718f9c0f | 35 | #include "diagnostic-core.h" |
726a989a | 36 | #include "gimple.h" |
1d65f45c | 37 | #include "target.h" |
7d776ee2 | 38 | #include "cfgloop.h" |
726a989a RB |
39 | |
40 | /* In some instances a tree and a gimple need to be stored in the same table,
41 | i.e. in hash tables. This is a structure to do this. */
42 | typedef union {tree *tp; tree t; gimple g;} treemple; | |
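/* A minimal usage sketch (the variable names here are purely illustrative):
   the finally_tree hash table below is keyed either by a LABEL_DECL (a tree)
   or by a GIMPLE_TRY statement (a gimple):

     treemple key;
     key.t = some_label_decl;      -- key by label
     key.g = some_try_stmt;        -- or key by GIMPLE_TRY

   Only one member is meaningful for any given entry; the surrounding
   code knows from context which one it is.  */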
6de9cd9a | 43 | |
165b54c3 SB |
44 | /* Nonzero if we are using EH to handle cleanups. */ |
45 | static int using_eh_for_cleanups_p = 0; | |
46 | ||
47 | void | |
48 | using_eh_for_cleanups (void) | |
49 | { | |
50 | using_eh_for_cleanups_p = 1; | |
51 | } | |
726a989a | 52 | |
6de9cd9a DN |
53 | /* Misc functions used in this file. */ |
54 | ||
1d65f45c | 55 | /* Remember and lookup EH landing pad data for arbitrary statements. |
6de9cd9a DN |
56 | Really this means any statement that could_throw_p. We could |
57 | stuff this information into the stmt_ann data structure, but: | |
58 | ||
59 | (1) We absolutely rely on this information being kept until | |
60 | we get to rtl. Once we're done with lowering here, if we lose | |
61 | the information there's no way to recover it! | |
62 | ||
19114537 | 63 | (2) There are many more statements that *cannot* throw as |
6de9cd9a DN |
64 | compared to those that can. We should be saving some amount |
65 | of space by only allocating memory for those that can throw. */ | |
66 | ||
1d65f45c | 67 | /* Add statement T in function IFUN to landing pad NUM. */ |
726a989a | 68 | |
6de9cd9a | 69 | void |
1d65f45c | 70 | add_stmt_to_eh_lp_fn (struct function *ifun, gimple t, int num) |
6de9cd9a DN |
71 | { |
72 | struct throw_stmt_node *n; | |
73 | void **slot; | |
74 | ||
1d65f45c | 75 | gcc_assert (num != 0); |
6de9cd9a | 76 | |
a9429e29 | 77 | n = ggc_alloc_throw_stmt_node (); |
6de9cd9a | 78 | n->stmt = t; |
1d65f45c | 79 | n->lp_nr = num; |
6de9cd9a | 80 | |
98f464e0 SB |
81 | if (!get_eh_throw_stmt_table (ifun)) |
82 | set_eh_throw_stmt_table (ifun, htab_create_ggc (31, struct_ptr_hash, | |
83 | struct_ptr_eq, | |
84 | ggc_free)); | |
85 | ||
b4660e5a | 86 | slot = htab_find_slot (get_eh_throw_stmt_table (ifun), n, INSERT); |
1e128c5f | 87 | gcc_assert (!*slot); |
6de9cd9a DN |
88 | *slot = n; |
89 | } | |
1eaba2f2 | 90 | |
1d65f45c | 91 | /* Add statement T in the current function (cfun) to EH landing pad NUM. */ |
726a989a | 92 | |
b4660e5a | 93 | void |
1d65f45c | 94 | add_stmt_to_eh_lp (gimple t, int num) |
b4660e5a | 95 | { |
1d65f45c RH |
96 | add_stmt_to_eh_lp_fn (cfun, t, num); |
97 | } | |
98 | ||
99 | /* Add statement T to the single EH landing pad in REGION. */ | |
100 | ||
101 | static void | |
102 | record_stmt_eh_region (eh_region region, gimple t) | |
103 | { | |
104 | if (region == NULL) | |
105 | return; | |
106 | if (region->type == ERT_MUST_NOT_THROW) | |
107 | add_stmt_to_eh_lp_fn (cfun, t, -region->index); | |
108 | else | |
109 | { | |
110 | eh_landing_pad lp = region->landing_pads; | |
111 | if (lp == NULL) | |
112 | lp = gen_eh_landing_pad (region); | |
113 | else | |
114 | gcc_assert (lp->next_lp == NULL); | |
115 | add_stmt_to_eh_lp_fn (cfun, t, lp->index); | |
116 | } | |
b4660e5a JH |
117 | } |
118 | ||
726a989a | 119 | |
1d65f45c | 120 | /* Remove statement T in function IFUN from its EH landing pad. */ |
726a989a | 121 | |
1eaba2f2 | 122 | bool |
1d65f45c | 123 | remove_stmt_from_eh_lp_fn (struct function *ifun, gimple t) |
1eaba2f2 RH |
124 | { |
125 | struct throw_stmt_node dummy; | |
126 | void **slot; | |
127 | ||
b4660e5a | 128 | if (!get_eh_throw_stmt_table (ifun)) |
1eaba2f2 RH |
129 | return false; |
130 | ||
131 | dummy.stmt = t; | |
b4660e5a JH |
132 | slot = htab_find_slot (get_eh_throw_stmt_table (ifun), &dummy, |
133 | NO_INSERT); | |
1eaba2f2 RH |
134 | if (slot) |
135 | { | |
b4660e5a | 136 | htab_clear_slot (get_eh_throw_stmt_table (ifun), slot); |
1eaba2f2 RH |
137 | return true; |
138 | } | |
139 | else | |
140 | return false; | |
141 | } | |
142 | ||
726a989a | 143 | |
1d65f45c RH |
144 | /* Remove statement T in the current function (cfun) from its |
145 | EH landing pad. */ | |
726a989a | 146 | |
b4660e5a | 147 | bool |
1d65f45c | 148 | remove_stmt_from_eh_lp (gimple t) |
b4660e5a | 149 | { |
1d65f45c | 150 | return remove_stmt_from_eh_lp_fn (cfun, t); |
b4660e5a JH |
151 | } |
152 | ||
726a989a | 153 | /* Determine if statement T is inside an EH region in function IFUN. |
1d65f45c RH |
154 | Positive numbers indicate a landing pad index; negative numbers |
155 | indicate a MUST_NOT_THROW region index; zero indicates that the | |
156 | statement is not recorded in the region table. */ | |
726a989a | 157 | |
6de9cd9a | 158 | int |
1d65f45c | 159 | lookup_stmt_eh_lp_fn (struct function *ifun, gimple t) |
6de9cd9a DN |
160 | { |
161 | struct throw_stmt_node *p, n; | |
162 | ||
1d65f45c RH |
163 | if (ifun->eh->throw_stmt_table == NULL) |
164 | return 0; | |
6de9cd9a | 165 | |
726a989a | 166 | n.stmt = t; |
1d65f45c RH |
167 | p = (struct throw_stmt_node *) htab_find (ifun->eh->throw_stmt_table, &n); |
168 | return p ? p->lp_nr : 0; | |
6de9cd9a DN |
169 | } |
170 | ||
1d65f45c | 171 | /* Likewise, but always use the current function. */ |
726a989a | 172 | |
b4660e5a | 173 | int |
1d65f45c | 174 | lookup_stmt_eh_lp (gimple t) |
b4660e5a JH |
175 | { |
176 | /* We can get called from initialized data when -fnon-call-exceptions | |
177 | is on; prevent crash. */ | |
178 | if (!cfun) | |
1d65f45c RH |
179 | return 0; |
180 | return lookup_stmt_eh_lp_fn (cfun, t); | |
b4660e5a | 181 | } |
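/* As an illustrative sketch of how the return-value convention above is
   typically consumed (not code from this file):

     int lp_nr = lookup_stmt_eh_lp (stmt);
     if (lp_nr > 0)
       ...                -- stmt throws to landing pad LP_NR
     else if (lp_nr < 0)
       ...                -- stmt is inside MUST_NOT_THROW region -LP_NR
     else
       ...                -- stmt is not recorded as throwing
*/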
6de9cd9a | 182 | |
726a989a | 183 | /* First pass of EH node decomposition. Build up a tree of GIMPLE_TRY_FINALLY |
6de9cd9a DN |
184 | nodes and LABEL_DECL nodes. We will use this during the second phase to |
185 | determine if a goto leaves the body of a TRY_FINALLY_EXPR node. */ | |
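/* For instance, in the pseudo-syntax used for examples elsewhere in this
   file (purely illustrative input):

     try {
       if (cond)
         goto out;        -- leaves the TRY_FINALLY body
       goto inner;        -- stays inside it
     inner:
       ...
     } finally {
       cleanup ();
     }
   out:

   The goto to OUT has to be rerouted through the finally block, while
   the goto to INNER can be left alone.  */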
186 | ||
187 | struct finally_tree_node | |
188 | { | |
726a989a RB |
189 | /* When storing a GIMPLE_TRY, we have to record a gimple. However,
190 | when deciding whether a GOTO to a certain LABEL_DECL (which is a
191 | tree) leaves the TRY block, it's necessary to record a tree in
192 | this field. Thus a treemple is used. */ | |
1d65f45c | 193 | treemple child; |
726a989a | 194 | gimple parent; |
6de9cd9a DN |
195 | }; |
196 | ||
197 | /* Note that this table is *not* marked GTY. It is short-lived. */ | |
198 | static htab_t finally_tree; | |
199 | ||
200 | static void | |
726a989a | 201 | record_in_finally_tree (treemple child, gimple parent) |
6de9cd9a DN |
202 | { |
203 | struct finally_tree_node *n; | |
204 | void **slot; | |
205 | ||
858904db | 206 | n = XNEW (struct finally_tree_node); |
6de9cd9a DN |
207 | n->child = child; |
208 | n->parent = parent; | |
209 | ||
210 | slot = htab_find_slot (finally_tree, n, INSERT); | |
1e128c5f | 211 | gcc_assert (!*slot); |
6de9cd9a DN |
212 | *slot = n; |
213 | } | |
214 | ||
215 | static void | |
726a989a RB |
216 | collect_finally_tree (gimple stmt, gimple region); |
217 | ||
1d65f45c | 218 | /* Go through the gimple sequence. Works with collect_finally_tree to |
726a989a RB |
219 | record all GIMPLE_LABEL and GIMPLE_TRY statements. */ |
220 | ||
221 | static void | |
222 | collect_finally_tree_1 (gimple_seq seq, gimple region) | |
6de9cd9a | 223 | { |
726a989a | 224 | gimple_stmt_iterator gsi; |
6de9cd9a | 225 | |
726a989a RB |
226 | for (gsi = gsi_start (seq); !gsi_end_p (gsi); gsi_next (&gsi)) |
227 | collect_finally_tree (gsi_stmt (gsi), region); | |
228 | } | |
6de9cd9a | 229 | |
726a989a RB |
230 | static void |
231 | collect_finally_tree (gimple stmt, gimple region) | |
232 | { | |
233 | treemple temp; | |
234 | ||
235 | switch (gimple_code (stmt)) | |
236 | { | |
237 | case GIMPLE_LABEL: | |
238 | temp.t = gimple_label_label (stmt); | |
239 | record_in_finally_tree (temp, region); | |
240 | break; | |
6de9cd9a | 241 | |
726a989a RB |
242 | case GIMPLE_TRY: |
243 | if (gimple_try_kind (stmt) == GIMPLE_TRY_FINALLY) | |
244 | { | |
245 | temp.g = stmt; | |
246 | record_in_finally_tree (temp, region); | |
247 | collect_finally_tree_1 (gimple_try_eval (stmt), stmt); | |
248 | collect_finally_tree_1 (gimple_try_cleanup (stmt), region); | |
249 | } | |
250 | else if (gimple_try_kind (stmt) == GIMPLE_TRY_CATCH) | |
251 | { | |
252 | collect_finally_tree_1 (gimple_try_eval (stmt), region); | |
253 | collect_finally_tree_1 (gimple_try_cleanup (stmt), region); | |
254 | } | |
255 | break; | |
6de9cd9a | 256 | |
726a989a RB |
257 | case GIMPLE_CATCH: |
258 | collect_finally_tree_1 (gimple_catch_handler (stmt), region); | |
259 | break; | |
6de9cd9a | 260 | |
726a989a RB |
261 | case GIMPLE_EH_FILTER: |
262 | collect_finally_tree_1 (gimple_eh_filter_failure (stmt), region); | |
6de9cd9a DN |
263 | break; |
264 | ||
0a35513e AH |
265 | case GIMPLE_EH_ELSE: |
266 | collect_finally_tree_1 (gimple_eh_else_n_body (stmt), region); | |
267 | collect_finally_tree_1 (gimple_eh_else_e_body (stmt), region); | |
268 | break; | |
269 | ||
6de9cd9a DN |
270 | default: |
271 | /* A type, a decl, or some kind of statement that we're not | |
272 | interested in. Don't walk them. */ | |
273 | break; | |
274 | } | |
275 | } | |
276 | ||
726a989a | 277 | |
6de9cd9a DN |
278 | /* Use the finally tree to determine if a jump from START to TARGET |
279 | would leave the try_finally node that START lives in. */ | |
280 | ||
281 | static bool | |
726a989a | 282 | outside_finally_tree (treemple start, gimple target) |
6de9cd9a DN |
283 | { |
284 | struct finally_tree_node n, *p; | |
285 | ||
286 | do | |
287 | { | |
288 | n.child = start; | |
858904db | 289 | p = (struct finally_tree_node *) htab_find (finally_tree, &n); |
6de9cd9a DN |
290 | if (!p) |
291 | return true; | |
726a989a | 292 | start.g = p->parent; |
6de9cd9a | 293 | } |
726a989a | 294 | while (start.g != target); |
6de9cd9a DN |
295 | |
296 | return false; | |
297 | } | |
726a989a RB |
298 | |
299 | /* Second pass of EH node decomposition. Actually transform the GIMPLE_TRY | |
300 | nodes into a set of gotos, magic labels, and eh regions. | |
6de9cd9a DN |
301 | The eh region creation is straight-forward, but frobbing all the gotos |
302 | and such into shape isn't. */ | |
303 | ||
b8698a0f | 304 | /* The sequence into which we record all EH stuff. This will be |
1d65f45c RH |
305 | placed at the end of the function when we're all done. */ |
306 | static gimple_seq eh_seq; | |
307 | ||
308 | /* Record whether an EH region contains something that can throw, | |
309 | indexed by EH region number. */ | |
b7da9fd4 | 310 | static bitmap eh_region_may_contain_throw_map; |
1d65f45c | 311 | |
24b97832 ILT |
312 | /* The GOTO_QUEUE is an array of GIMPLE_GOTO and GIMPLE_RETURN
313 | statements that are seen to escape this GIMPLE_TRY_FINALLY node. | |
314 | The idea is to record a gimple statement for everything except for | |
315 | the conditionals, which get their labels recorded. Since labels are | |
316 | of type 'tree', we need this node to store both gimple and tree | |
317 | objects. REPL_STMT is the sequence used to replace the goto/return | |
318 | statement. CONT_STMT is used to store the statement that allows | |
319 | the return/goto to jump to the original destination. */ | |
320 | ||
321 | struct goto_queue_node | |
322 | { | |
323 | treemple stmt; | |
324 | gimple_seq repl_stmt; | |
325 | gimple cont_stmt; | |
326 | int index; | |
327 | /* This is used when index >= 0 to indicate that stmt is a label (as | |
328 | opposed to a goto stmt). */ | |
329 | int is_label; | |
330 | }; | |
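/* As a concrete sketch (labels and names purely illustrative): for

     try { return x; } finally { x++; }

   the queue entry made for the escaping return records REPL_STMT as
   something like "goto finally_label;" and CONT_STMT as the original
   "return x;", which is re-emitted after (a copy of) the finally block
   so that control still reaches the intended destination.  */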
331 | ||
6de9cd9a DN |
332 | /* State of the world while lowering. */ |
333 | ||
334 | struct leh_state | |
335 | { | |
19114537 | 336 | /* What's "current" while constructing the eh region tree. These |
6de9cd9a DN |
337 | correspond to variables of the same name in cfun->eh, which we |
338 | don't have easy access to. */ | |
1d65f45c RH |
339 | eh_region cur_region; |
340 | ||
341 | /* What's "current" for the purposes of __builtin_eh_pointer. For | |
342 | a CATCH, this is the associated TRY. For an EH_FILTER, this is | |
343 | the associated ALLOWED_EXCEPTIONS, etc. */ | |
344 | eh_region ehp_region; | |
6de9cd9a DN |
345 | |
346 | /* Processing of TRY_FINALLY requires a bit more state. This is | |
347 | split out into a separate structure so that we don't have to | |
348 | copy so much when processing other nodes. */ | |
349 | struct leh_tf_state *tf; | |
350 | }; | |
351 | ||
352 | struct leh_tf_state | |
353 | { | |
726a989a RB |
354 | /* Pointer to the GIMPLE_TRY_FINALLY node under discussion. The |
355 | try_finally_expr is the original GIMPLE_TRY_FINALLY. We need to retain | |
356 | this so that outside_finally_tree can reliably reference the tree used | |
357 | in the collect_finally_tree data structures. */ | |
358 | gimple try_finally_expr; | |
359 | gimple top_p; | |
1d65f45c | 360 | |
726a989a RB |
361 | /* While lowering a top_p usually it is expanded into multiple statements, |
362 | thus we need the following field to store them. */ | |
363 | gimple_seq top_p_seq; | |
6de9cd9a DN |
364 | |
365 | /* The state outside this try_finally node. */ | |
366 | struct leh_state *outer; | |
367 | ||
368 | /* The exception region created for it. */ | |
1d65f45c | 369 | eh_region region; |
6de9cd9a | 370 | |
24b97832 ILT |
371 | /* The goto queue. */ |
372 | struct goto_queue_node *goto_queue; | |
6de9cd9a DN |
373 | size_t goto_queue_size; |
374 | size_t goto_queue_active; | |
375 | ||
fa10beec | 376 | /* Pointer map to help in searching goto_queue when it is large. */ |
0f547d3d SE |
377 | struct pointer_map_t *goto_queue_map; |
378 | ||
6de9cd9a | 379 | /* The set of unique labels seen as entries in the goto queue. */ |
59ebc704 | 380 | VEC(tree,heap) *dest_array; |
6de9cd9a DN |
381 | |
382 | /* A label to be added at the end of the completed transformed | |
383 | sequence. It will be set if may_fallthru was true *at one time*, | |
384 | though subsequent transformations may have cleared that flag. */ | |
385 | tree fallthru_label; | |
386 | ||
6de9cd9a DN |
387 | /* True if it is possible to fall out the bottom of the try block. |
388 | Cleared if the fallthru is converted to a goto. */ | |
389 | bool may_fallthru; | |
390 | ||
726a989a | 391 | /* True if any entry in goto_queue is a GIMPLE_RETURN. */ |
6de9cd9a DN |
392 | bool may_return; |
393 | ||
394 | /* True if the finally block can receive an exception edge. | |
395 | Cleared if the exception case is handled by code duplication. */ | |
396 | bool may_throw; | |
397 | }; | |
398 | ||
1d65f45c | 399 | static gimple_seq lower_eh_must_not_throw (struct leh_state *, gimple); |
6de9cd9a | 400 | |
6de9cd9a DN |
401 | /* Search for STMT in the goto queue. Return the replacement, |
402 | or null if the statement isn't in the queue. */ | |
403 | ||
0f547d3d SE |
404 | #define LARGE_GOTO_QUEUE 20 |
405 | ||
355a7673 | 406 | static void lower_eh_constructs_1 (struct leh_state *state, gimple_seq *seq); |
726a989a RB |
407 | |
408 | static gimple_seq | |
409 | find_goto_replacement (struct leh_tf_state *tf, treemple stmt) | |
6de9cd9a | 410 | { |
0f547d3d SE |
411 | unsigned int i; |
412 | void **slot; | |
413 | ||
414 | if (tf->goto_queue_active < LARGE_GOTO_QUEUE) | |
415 | { | |
416 | for (i = 0; i < tf->goto_queue_active; i++) | |
726a989a | 417 | if ( tf->goto_queue[i].stmt.g == stmt.g) |
0f547d3d SE |
418 | return tf->goto_queue[i].repl_stmt; |
419 | return NULL; | |
420 | } | |
421 | ||
422 | /* If we have a large number of entries in the goto_queue, create a | |
423 | pointer map and use that for searching. */ | |
424 | ||
425 | if (!tf->goto_queue_map) | |
426 | { | |
427 | tf->goto_queue_map = pointer_map_create (); | |
428 | for (i = 0; i < tf->goto_queue_active; i++) | |
429 | { | |
726a989a RB |
430 | slot = pointer_map_insert (tf->goto_queue_map, |
431 | tf->goto_queue[i].stmt.g); | |
0f547d3d | 432 | gcc_assert (*slot == NULL); |
726a989a | 433 | *slot = &tf->goto_queue[i]; |
0f547d3d SE |
434 | } |
435 | } | |
436 | ||
726a989a | 437 | slot = pointer_map_contains (tf->goto_queue_map, stmt.g); |
0f547d3d SE |
438 | if (slot != NULL) |
439 | return (((struct goto_queue_node *) *slot)->repl_stmt); | |
440 | ||
441 | return NULL; | |
6de9cd9a DN |
442 | } |
443 | ||
444 | /* A subroutine of replace_goto_queue_1. Handles the sub-clauses of a | |
726a989a | 445 | lowered GIMPLE_COND. If, by chance, the replacement is a simple goto, |
6de9cd9a | 446 | then we can just splat it in, otherwise we add the new stmts immediately |
726a989a | 447 | after the GIMPLE_COND and redirect. */ |
6de9cd9a DN |
448 | |
449 | static void | |
450 | replace_goto_queue_cond_clause (tree *tp, struct leh_tf_state *tf, | |
726a989a | 451 | gimple_stmt_iterator *gsi) |
6de9cd9a | 452 | { |
726a989a | 453 | tree label; |
82d6e6fc | 454 | gimple_seq new_seq; |
726a989a | 455 | treemple temp; |
c2255bc4 | 456 | location_t loc = gimple_location (gsi_stmt (*gsi)); |
6de9cd9a | 457 | |
726a989a | 458 | temp.tp = tp; |
82d6e6fc KG |
459 | new_seq = find_goto_replacement (tf, temp); |
460 | if (!new_seq) | |
6de9cd9a DN |
461 | return; |
462 | ||
82d6e6fc KG |
463 | if (gimple_seq_singleton_p (new_seq) |
464 | && gimple_code (gimple_seq_first_stmt (new_seq)) == GIMPLE_GOTO) | |
6de9cd9a | 465 | { |
82d6e6fc | 466 | *tp = gimple_goto_dest (gimple_seq_first_stmt (new_seq)); |
6de9cd9a DN |
467 | return; |
468 | } | |
469 | ||
c2255bc4 | 470 | label = create_artificial_label (loc); |
726a989a RB |
471 | /* Set the new label for the GIMPLE_COND */ |
472 | *tp = label; | |
6de9cd9a | 473 | |
726a989a | 474 | gsi_insert_after (gsi, gimple_build_label (label), GSI_CONTINUE_LINKING); |
82d6e6fc | 475 | gsi_insert_seq_after (gsi, gimple_seq_copy (new_seq), GSI_CONTINUE_LINKING); |
6de9cd9a DN |
476 | } |
477 | ||
19114537 | 478 | /* The real work of replace_goto_queue. Returns with TSI updated to |
6de9cd9a DN |
479 | point to the next statement. */ |
480 | ||
355a7673 | 481 | static void replace_goto_queue_stmt_list (gimple_seq *, struct leh_tf_state *); |
6de9cd9a DN |
482 | |
483 | static void | |
726a989a RB |
484 | replace_goto_queue_1 (gimple stmt, struct leh_tf_state *tf, |
485 | gimple_stmt_iterator *gsi) | |
6de9cd9a | 486 | { |
726a989a RB |
487 | gimple_seq seq; |
488 | treemple temp; | |
489 | temp.g = NULL; | |
490 | ||
491 | switch (gimple_code (stmt)) | |
6de9cd9a | 492 | { |
726a989a RB |
493 | case GIMPLE_GOTO: |
494 | case GIMPLE_RETURN: | |
495 | temp.g = stmt; | |
496 | seq = find_goto_replacement (tf, temp); | |
497 | if (seq) | |
6de9cd9a | 498 | { |
726a989a RB |
499 | gsi_insert_seq_before (gsi, gimple_seq_copy (seq), GSI_SAME_STMT); |
500 | gsi_remove (gsi, false); | |
6de9cd9a DN |
501 | return; |
502 | } | |
503 | break; | |
504 | ||
726a989a RB |
505 | case GIMPLE_COND: |
506 | replace_goto_queue_cond_clause (gimple_op_ptr (stmt, 2), tf, gsi); | |
507 | replace_goto_queue_cond_clause (gimple_op_ptr (stmt, 3), tf, gsi); | |
6de9cd9a DN |
508 | break; |
509 | ||
726a989a | 510 | case GIMPLE_TRY: |
355a7673 MM |
511 | replace_goto_queue_stmt_list (gimple_try_eval_ptr (stmt), tf); |
512 | replace_goto_queue_stmt_list (gimple_try_cleanup_ptr (stmt), tf); | |
6de9cd9a | 513 | break; |
726a989a | 514 | case GIMPLE_CATCH: |
355a7673 | 515 | replace_goto_queue_stmt_list (gimple_catch_handler_ptr (stmt), tf); |
6de9cd9a | 516 | break; |
726a989a | 517 | case GIMPLE_EH_FILTER: |
355a7673 | 518 | replace_goto_queue_stmt_list (gimple_eh_filter_failure_ptr (stmt), tf); |
6de9cd9a | 519 | break; |
0a35513e | 520 | case GIMPLE_EH_ELSE: |
355a7673 MM |
521 | replace_goto_queue_stmt_list (gimple_eh_else_n_body_ptr (stmt), tf); |
522 | replace_goto_queue_stmt_list (gimple_eh_else_e_body_ptr (stmt), tf); | |
0a35513e | 523 | break; |
6de9cd9a | 524 | |
6de9cd9a DN |
525 | default: |
526 | /* These won't have gotos in them. */ | |
527 | break; | |
528 | } | |
529 | ||
726a989a | 530 | gsi_next (gsi); |
6de9cd9a DN |
531 | } |
532 | ||
726a989a | 533 | /* A subroutine of replace_goto_queue. Handles GIMPLE_SEQ. */ |
6de9cd9a DN |
534 | |
535 | static void | |
355a7673 | 536 | replace_goto_queue_stmt_list (gimple_seq *seq, struct leh_tf_state *tf) |
6de9cd9a | 537 | { |
355a7673 | 538 | gimple_stmt_iterator gsi = gsi_start (*seq); |
726a989a RB |
539 | |
540 | while (!gsi_end_p (gsi)) | |
541 | replace_goto_queue_1 (gsi_stmt (gsi), tf, &gsi); | |
6de9cd9a DN |
542 | } |
543 | ||
544 | /* Replace all goto queue members. */ | |
545 | ||
546 | static void | |
547 | replace_goto_queue (struct leh_tf_state *tf) | |
548 | { | |
8287d24a EB |
549 | if (tf->goto_queue_active == 0) |
550 | return; | |
355a7673 MM |
551 | replace_goto_queue_stmt_list (&tf->top_p_seq, tf); |
552 | replace_goto_queue_stmt_list (&eh_seq, tf); | |
6de9cd9a DN |
553 | } |
554 | ||
726a989a RB |
555 | /* Add a new record to the goto queue contained in TF. NEW_STMT is the |
556 | data to be added, IS_LABEL indicates whether NEW_STMT is a label or | |
557 | a gimple return. */ | |
6de9cd9a DN |
558 | |
559 | static void | |
726a989a RB |
560 | record_in_goto_queue (struct leh_tf_state *tf, |
561 | treemple new_stmt, | |
562 | int index, | |
563 | bool is_label) | |
6de9cd9a | 564 | { |
6de9cd9a | 565 | size_t active, size; |
726a989a | 566 | struct goto_queue_node *q; |
6de9cd9a | 567 | |
0f547d3d SE |
568 | gcc_assert (!tf->goto_queue_map); |
569 | ||
6de9cd9a DN |
570 | active = tf->goto_queue_active; |
571 | size = tf->goto_queue_size; | |
572 | if (active >= size) | |
573 | { | |
574 | size = (size ? size * 2 : 32); | |
575 | tf->goto_queue_size = size; | |
576 | tf->goto_queue | |
858904db | 577 | = XRESIZEVEC (struct goto_queue_node, tf->goto_queue, size); |
6de9cd9a DN |
578 | } |
579 | ||
580 | q = &tf->goto_queue[active]; | |
581 | tf->goto_queue_active = active + 1; | |
19114537 | 582 | |
6de9cd9a | 583 | memset (q, 0, sizeof (*q)); |
726a989a | 584 | q->stmt = new_stmt; |
6de9cd9a | 585 | q->index = index; |
726a989a RB |
586 | q->is_label = is_label; |
587 | } | |
588 | ||
589 | /* Record the LABEL label in the goto queue contained in TF. | |
590 | TF is not null. */ | |
591 | ||
592 | static void | |
593 | record_in_goto_queue_label (struct leh_tf_state *tf, treemple stmt, tree label) | |
594 | { | |
595 | int index; | |
596 | treemple temp, new_stmt; | |
597 | ||
598 | if (!label) | |
599 | return; | |
600 | ||
601 | /* Computed and non-local gotos do not get processed. Given | |
602 | their nature we can neither tell whether we've escaped the | |
603 | finally block nor redirect them if we knew. */ | |
604 | if (TREE_CODE (label) != LABEL_DECL) | |
605 | return; | |
606 | ||
607 | /* No need to record gotos that don't leave the try block. */ | |
608 | temp.t = label; | |
609 | if (!outside_finally_tree (temp, tf->try_finally_expr)) | |
610 | return; | |
611 | ||
612 | if (! tf->dest_array) | |
613 | { | |
614 | tf->dest_array = VEC_alloc (tree, heap, 10); | |
615 | VEC_quick_push (tree, tf->dest_array, label); | |
616 | index = 0; | |
617 | } | |
618 | else | |
619 | { | |
620 | int n = VEC_length (tree, tf->dest_array); | |
621 | for (index = 0; index < n; ++index) | |
622 | if (VEC_index (tree, tf->dest_array, index) == label) | |
623 | break; | |
624 | if (index == n) | |
625 | VEC_safe_push (tree, heap, tf->dest_array, label); | |
626 | } | |
627 | ||
628 | /* In the case of a GOTO we want to record the destination label, | |
629 | since with a GIMPLE_COND we have easy access to the then/else
630 | labels. */ | |
631 | new_stmt = stmt; | |
632 | record_in_goto_queue (tf, new_stmt, index, true); | |
726a989a RB |
633 | } |
634 | ||
635 | /* For any GIMPLE_GOTO or GIMPLE_RETURN, decide whether it leaves a try_finally | |
636 | node, and if so record that fact in the goto queue associated with that | |
637 | try_finally node. */ | |
638 | ||
639 | static void | |
640 | maybe_record_in_goto_queue (struct leh_state *state, gimple stmt) | |
641 | { | |
642 | struct leh_tf_state *tf = state->tf; | |
643 | treemple new_stmt; | |
644 | ||
645 | if (!tf) | |
646 | return; | |
647 | ||
648 | switch (gimple_code (stmt)) | |
649 | { | |
650 | case GIMPLE_COND: | |
651 | new_stmt.tp = gimple_op_ptr (stmt, 2); | |
652 | record_in_goto_queue_label (tf, new_stmt, gimple_cond_true_label (stmt)); | |
653 | new_stmt.tp = gimple_op_ptr (stmt, 3); | |
654 | record_in_goto_queue_label (tf, new_stmt, gimple_cond_false_label (stmt)); | |
655 | break; | |
656 | case GIMPLE_GOTO: | |
657 | new_stmt.g = stmt; | |
658 | record_in_goto_queue_label (tf, new_stmt, gimple_goto_dest (stmt)); | |
659 | break; | |
660 | ||
661 | case GIMPLE_RETURN: | |
662 | tf->may_return = true; | |
663 | new_stmt.g = stmt; | |
664 | record_in_goto_queue (tf, new_stmt, -1, false); | |
665 | break; | |
666 | ||
667 | default: | |
668 | gcc_unreachable (); | |
669 | } | |
6de9cd9a DN |
670 | } |
671 | ||
726a989a | 672 | |
6de9cd9a | 673 | #ifdef ENABLE_CHECKING |
726a989a | 674 | /* We do not process GIMPLE_SWITCHes for now. As long as the original source |
6de9cd9a | 675 | was in fact structured, and we've not yet done jump threading, then none |
726a989a | 676 | of the labels will leave outer GIMPLE_TRY_FINALLY nodes. Verify this. */ |
6de9cd9a DN |
677 | |
678 | static void | |
726a989a | 679 | verify_norecord_switch_expr (struct leh_state *state, gimple switch_expr) |
6de9cd9a DN |
680 | { |
681 | struct leh_tf_state *tf = state->tf; | |
682 | size_t i, n; | |
6de9cd9a DN |
683 | |
684 | if (!tf) | |
685 | return; | |
686 | ||
726a989a | 687 | n = gimple_switch_num_labels (switch_expr); |
6de9cd9a DN |
688 | |
689 | for (i = 0; i < n; ++i) | |
690 | { | |
726a989a RB |
691 | treemple temp; |
692 | tree lab = CASE_LABEL (gimple_switch_label (switch_expr, i)); | |
693 | temp.t = lab; | |
694 | gcc_assert (!outside_finally_tree (temp, tf->try_finally_expr)); | |
6de9cd9a DN |
695 | } |
696 | } | |
697 | #else | |
698 | #define verify_norecord_switch_expr(state, switch_expr) | |
699 | #endif | |
700 | ||
8d686507 ILT |
701 | /* Redirect a RETURN_EXPR pointed to by Q to FINLAB. If MOD is |
702 | non-null, insert it before the new branch. */ | |
6de9cd9a DN |
703 | |
704 | static void | |
8d686507 | 705 | do_return_redirection (struct goto_queue_node *q, tree finlab, gimple_seq mod) |
6de9cd9a | 706 | { |
726a989a RB |
707 | gimple x; |
708 | ||
8d686507 | 709 | /* In the case of a return, the queue node must be a gimple statement. */ |
726a989a RB |
710 | gcc_assert (!q->is_label); |
711 | ||
8d686507 | 712 | /* Note that the return value may have already been computed, e.g., |
6de9cd9a | 713 | |
8d686507 ILT |
714 | int x; |
715 | int foo (void) | |
6de9cd9a | 716 | { |
8d686507 ILT |
717 | x = 0; |
718 | try { | |
719 | return x; | |
720 | } finally { | |
721 | x++; | |
722 | } | |
6de9cd9a | 723 | } |
8d686507 ILT |
724 | |
725 | should return 0, not 1. We don't have to do anything to make | |
726 | this happen because the return value has been placed in the
727 | RESULT_DECL already. */ | |
728 | ||
729 | q->cont_stmt = q->stmt.g; | |
726a989a | 730 | |
6de9cd9a | 731 | if (mod) |
726a989a | 732 | gimple_seq_add_seq (&q->repl_stmt, mod); |
6de9cd9a | 733 | |
726a989a RB |
734 | x = gimple_build_goto (finlab); |
735 | gimple_seq_add_stmt (&q->repl_stmt, x); | |
6de9cd9a DN |
736 | } |
737 | ||
726a989a | 738 | /* Similar, but easier, for GIMPLE_GOTO. */ |
6de9cd9a DN |
739 | |
740 | static void | |
726a989a RB |
741 | do_goto_redirection (struct goto_queue_node *q, tree finlab, gimple_seq mod, |
742 | struct leh_tf_state *tf) | |
6de9cd9a | 743 | { |
726a989a RB |
744 | gimple x; |
745 | ||
746 | gcc_assert (q->is_label); | |
726a989a | 747 | |
1d65f45c | 748 | q->cont_stmt = gimple_build_goto (VEC_index (tree, tf->dest_array, q->index)); |
6de9cd9a | 749 | |
6de9cd9a | 750 | if (mod) |
726a989a | 751 | gimple_seq_add_seq (&q->repl_stmt, mod); |
6de9cd9a | 752 | |
726a989a RB |
753 | x = gimple_build_goto (finlab); |
754 | gimple_seq_add_stmt (&q->repl_stmt, x); | |
6de9cd9a DN |
755 | } |
756 | ||
1d65f45c RH |
757 | /* Emit a standard landing pad sequence into SEQ for REGION. */ |
758 | ||
759 | static void | |
760 | emit_post_landing_pad (gimple_seq *seq, eh_region region) | |
761 | { | |
762 | eh_landing_pad lp = region->landing_pads; | |
763 | gimple x; | |
764 | ||
765 | if (lp == NULL) | |
766 | lp = gen_eh_landing_pad (region); | |
767 | ||
768 | lp->post_landing_pad = create_artificial_label (UNKNOWN_LOCATION); | |
769 | EH_LANDING_PAD_NR (lp->post_landing_pad) = lp->index; | |
770 | ||
771 | x = gimple_build_label (lp->post_landing_pad); | |
772 | gimple_seq_add_stmt (seq, x); | |
773 | } | |
774 | ||
775 | /* Emit a RESX statement into SEQ for REGION. */ | |
776 | ||
777 | static void | |
778 | emit_resx (gimple_seq *seq, eh_region region) | |
779 | { | |
780 | gimple x = gimple_build_resx (region->index); | |
781 | gimple_seq_add_stmt (seq, x); | |
782 | if (region->outer) | |
783 | record_stmt_eh_region (region->outer, x); | |
784 | } | |
785 | ||
786 | /* Emit an EH_DISPATCH statement into SEQ for REGION. */ | |
787 | ||
788 | static void | |
789 | emit_eh_dispatch (gimple_seq *seq, eh_region region) | |
790 | { | |
791 | gimple x = gimple_build_eh_dispatch (region->index); | |
792 | gimple_seq_add_stmt (seq, x); | |
793 | } | |
794 | ||
795 | /* Note that the current EH region may contain a throw, or a | |
796 | call to a function which itself may contain a throw. */ | |
797 | ||
798 | static void | |
799 | note_eh_region_may_contain_throw (eh_region region) | |
800 | { | |
fcaa4ca4 | 801 | while (bitmap_set_bit (eh_region_may_contain_throw_map, region->index)) |
1d65f45c | 802 | { |
6788475a JJ |
803 | if (region->type == ERT_MUST_NOT_THROW) |
804 | break; | |
1d65f45c RH |
805 | region = region->outer; |
806 | if (region == NULL) | |
807 | break; | |
808 | } | |
809 | } | |
810 | ||
b7da9fd4 RH |
811 | /* Check if REGION has been marked as containing a throw. If REGION is |
812 | NULL, this predicate is false. */ | |
813 | ||
814 | static inline bool | |
815 | eh_region_may_contain_throw (eh_region r) | |
816 | { | |
817 | return r && bitmap_bit_p (eh_region_may_contain_throw_map, r->index); | |
818 | } | |
819 | ||
6de9cd9a DN |
820 | /* We want to transform |
821 | try { body; } catch { stuff; } | |
822 | to | |
1d65f45c RH |
823 | normal_sequence:
824 | body; | |
825 | over: | |
826 | eh_sequence:
827 | landing_pad: | |
828 | stuff; | |
829 | goto over; | |
830 | ||
831 | TP is a GIMPLE_TRY node. REGION is the region whose post_landing_pad | |
6de9cd9a DN |
832 | should be placed before the second operand, or NULL. OVER is |
833 | an existing label that should be put at the exit, or NULL. */ | |
834 | ||
726a989a | 835 | static gimple_seq |
1d65f45c | 836 | frob_into_branch_around (gimple tp, eh_region region, tree over) |
6de9cd9a | 837 | { |
726a989a RB |
838 | gimple x; |
839 | gimple_seq cleanup, result; | |
c2255bc4 | 840 | location_t loc = gimple_location (tp); |
6de9cd9a | 841 | |
726a989a RB |
842 | cleanup = gimple_try_cleanup (tp); |
843 | result = gimple_try_eval (tp); | |
6de9cd9a | 844 | |
1d65f45c RH |
845 | if (region) |
846 | emit_post_landing_pad (&eh_seq, region); | |
847 | ||
848 | if (gimple_seq_may_fallthru (cleanup)) | |
6de9cd9a DN |
849 | { |
850 | if (!over) | |
c2255bc4 | 851 | over = create_artificial_label (loc); |
726a989a | 852 | x = gimple_build_goto (over); |
1d65f45c | 853 | gimple_seq_add_stmt (&cleanup, x); |
6de9cd9a | 854 | } |
1d65f45c | 855 | gimple_seq_add_seq (&eh_seq, cleanup); |
6de9cd9a DN |
856 | |
857 | if (over) | |
858 | { | |
726a989a RB |
859 | x = gimple_build_label (over); |
860 | gimple_seq_add_stmt (&result, x); | |
6de9cd9a | 861 | } |
726a989a | 862 | return result; |
6de9cd9a DN |
863 | } |
864 | ||
865 | /* A subroutine of lower_try_finally. Duplicate the tree rooted at T. | |
866 | Make sure to record all new labels found. */ | |
867 | ||
726a989a RB |
868 | static gimple_seq |
869 | lower_try_finally_dup_block (gimple_seq seq, struct leh_state *outer_state) | |
6de9cd9a | 870 | { |
726a989a RB |
871 | gimple region = NULL; |
872 | gimple_seq new_seq; | |
6de9cd9a | 873 | |
726a989a | 874 | new_seq = copy_gimple_seq_and_replace_locals (seq); |
6de9cd9a DN |
875 | |
876 | if (outer_state->tf) | |
877 | region = outer_state->tf->try_finally_expr; | |
726a989a | 878 | collect_finally_tree_1 (new_seq, region); |
6de9cd9a | 879 | |
726a989a | 880 | return new_seq; |
6de9cd9a DN |
881 | } |
882 | ||
883 | /* A subroutine of lower_try_finally. Create a fallthru label for | |
884 | the given try_finally state. The only tricky bit here is that | |
885 | we have to make sure to record the label in our outer context. */ | |
886 | ||
887 | static tree | |
888 | lower_try_finally_fallthru_label (struct leh_tf_state *tf) | |
889 | { | |
890 | tree label = tf->fallthru_label; | |
726a989a RB |
891 | treemple temp; |
892 | ||
6de9cd9a DN |
893 | if (!label) |
894 | { | |
c2255bc4 | 895 | label = create_artificial_label (gimple_location (tf->try_finally_expr)); |
6de9cd9a DN |
896 | tf->fallthru_label = label; |
897 | if (tf->outer->tf) | |
726a989a RB |
898 | { |
899 | temp.t = label; | |
900 | record_in_finally_tree (temp, tf->outer->tf->try_finally_expr); | |
901 | } | |
6de9cd9a DN |
902 | } |
903 | return label; | |
904 | } | |
905 | ||
0a35513e AH |
906 | /* A subroutine of lower_try_finally. If FINALLY consists of a
907 | GIMPLE_EH_ELSE node, return it. */ | |
908 | ||
909 | static inline gimple | |
910 | get_eh_else (gimple_seq finally) | |
911 | { | |
912 | gimple x = gimple_seq_first_stmt (finally); | |
913 | if (gimple_code (x) == GIMPLE_EH_ELSE) | |
914 | { | |
915 | gcc_assert (gimple_seq_singleton_p (finally)); | |
916 | return x; | |
917 | } | |
918 | return NULL; | |
919 | } | |
920 | ||
3b06d379 SB |
921 | /* A subroutine of lower_try_finally. If the eh_protect_cleanup_actions |
922 | langhook returns non-null, then the language requires that the exception | |
923 | path out of a try_finally be treated specially. To wit: the code within | |
924 | the finally block may not itself throw an exception. We have two choices | |
925 | here. First we can duplicate the finally block and wrap it in a | |
926 | must_not_throw region. Second, we can generate code like | |
6de9cd9a DN |
927 | |
928 | try { | |
929 | finally_block; | |
930 | } catch { | |
931 | if (fintmp == eh_edge) | |
932 | protect_cleanup_actions; | |
933 | } | |
934 | ||
935 | where "fintmp" is the temporary used in the switch statement generation | |
936 | alternative considered below. For the nonce, we always choose the first | |
19114537 | 937 | option. |
6de9cd9a | 938 | |
3f117656 | 939 | THIS_STATE may be null if this is a try-cleanup, not a try-finally. */ |
6de9cd9a DN |
940 | |
941 | static void | |
942 | honor_protect_cleanup_actions (struct leh_state *outer_state, | |
943 | struct leh_state *this_state, | |
944 | struct leh_tf_state *tf) | |
945 | { | |
1d65f45c | 946 | tree protect_cleanup_actions; |
726a989a | 947 | gimple_stmt_iterator gsi; |
6de9cd9a | 948 | bool finally_may_fallthru; |
726a989a | 949 | gimple_seq finally; |
0a35513e | 950 | gimple x, eh_else; |
6de9cd9a DN |
951 | |
952 | /* First check for nothing to do. */ | |
3b06d379 | 953 | if (lang_hooks.eh_protect_cleanup_actions == NULL) |
1d65f45c | 954 | return; |
3b06d379 | 955 | protect_cleanup_actions = lang_hooks.eh_protect_cleanup_actions (); |
1d65f45c RH |
956 | if (protect_cleanup_actions == NULL) |
957 | return; | |
6de9cd9a | 958 | |
726a989a | 959 | finally = gimple_try_cleanup (tf->top_p); |
0a35513e | 960 | eh_else = get_eh_else (finally); |
6de9cd9a DN |
961 | |
962 | /* Duplicate the FINALLY block. Only need to do this for try-finally, | |
0a35513e AH |
963 | and not for cleanups. If we've got an EH_ELSE, extract it now. */ |
964 | if (eh_else) | |
965 | { | |
966 | finally = gimple_eh_else_e_body (eh_else); | |
967 | gimple_try_set_cleanup (tf->top_p, gimple_eh_else_n_body (eh_else)); | |
968 | } | |
969 | else if (this_state) | |
6de9cd9a | 970 | finally = lower_try_finally_dup_block (finally, outer_state); |
0a35513e | 971 | finally_may_fallthru = gimple_seq_may_fallthru (finally); |
6de9cd9a | 972 | |
33b45227 JM |
973 | /* If this cleanup consists of a TRY_CATCH_EXPR with TRY_CATCH_IS_CLEANUP |
974 | set, the handler of the TRY_CATCH_EXPR is another cleanup which ought | |
975 | to be in an enclosing scope, but needs to be implemented at this level | |
976 | to avoid a nesting violation (see wrap_temporary_cleanups in | |
977 | cp/decl.c). Since it's logically at an outer level, we should call | |
978 | terminate before we get to it, so strip it away before adding the | |
979 | MUST_NOT_THROW filter. */ | |
726a989a RB |
980 | gsi = gsi_start (finally); |
981 | x = gsi_stmt (gsi); | |
1d65f45c | 982 | if (gimple_code (x) == GIMPLE_TRY |
726a989a RB |
983 | && gimple_try_kind (x) == GIMPLE_TRY_CATCH |
984 | && gimple_try_catch_is_cleanup (x)) | |
33b45227 | 985 | { |
726a989a RB |
986 | gsi_insert_seq_before (&gsi, gimple_try_eval (x), GSI_SAME_STMT); |
987 | gsi_remove (&gsi, false); | |
33b45227 JM |
988 | } |
989 | ||
6de9cd9a | 990 | /* Wrap the block with protect_cleanup_actions as the action. */ |
1d65f45c RH |
991 | x = gimple_build_eh_must_not_throw (protect_cleanup_actions); |
992 | x = gimple_build_try (finally, gimple_seq_alloc_with_stmt (x), | |
993 | GIMPLE_TRY_CATCH); | |
994 | finally = lower_eh_must_not_throw (outer_state, x); | |
995 | ||
996 | /* Drop all of this into the exception sequence. */ | |
997 | emit_post_landing_pad (&eh_seq, tf->region); | |
998 | gimple_seq_add_seq (&eh_seq, finally); | |
999 | if (finally_may_fallthru) | |
1000 | emit_resx (&eh_seq, tf->region); | |
6de9cd9a DN |
1001 | |
1002 | /* Having now been handled, EH isn't to be considered with | |
1003 | the rest of the outgoing edges. */ | |
1004 | tf->may_throw = false; | |
1005 | } | |
1006 | ||
1007 | /* A subroutine of lower_try_finally. We have determined that there is | |
1008 | no fallthru edge out of the finally block. This means that there is | |
1009 | no outgoing edge corresponding to any incoming edge. Restructure the | |
1010 | try_finally node for this special case. */ | |
1011 | ||
1012 | static void | |
726a989a RB |
1013 | lower_try_finally_nofallthru (struct leh_state *state, |
1014 | struct leh_tf_state *tf) | |
6de9cd9a | 1015 | { |
8d686507 | 1016 | tree lab; |
0a35513e | 1017 | gimple x, eh_else; |
726a989a | 1018 | gimple_seq finally; |
6de9cd9a DN |
1019 | struct goto_queue_node *q, *qe; |
1020 | ||
1d65f45c | 1021 | lab = create_artificial_label (gimple_location (tf->try_finally_expr)); |
6de9cd9a | 1022 | |
726a989a RB |
1023 | /* We expect that tf->top_p is a GIMPLE_TRY. */ |
1024 | finally = gimple_try_cleanup (tf->top_p); | |
1025 | tf->top_p_seq = gimple_try_eval (tf->top_p); | |
6de9cd9a | 1026 | |
726a989a RB |
1027 | x = gimple_build_label (lab); |
1028 | gimple_seq_add_stmt (&tf->top_p_seq, x); | |
6de9cd9a | 1029 | |
6de9cd9a DN |
1030 | q = tf->goto_queue; |
1031 | qe = q + tf->goto_queue_active; | |
1032 | for (; q < qe; ++q) | |
1033 | if (q->index < 0) | |
8d686507 | 1034 | do_return_redirection (q, lab, NULL); |
6de9cd9a | 1035 | else |
726a989a | 1036 | do_goto_redirection (q, lab, NULL, tf); |
6de9cd9a DN |
1037 | |
1038 | replace_goto_queue (tf); | |
1039 | ||
0a35513e AH |
1040 | /* Emit the finally block into the stream. Lower EH_ELSE at this time. */ |
1041 | eh_else = get_eh_else (finally); | |
1042 | if (eh_else) | |
1043 | { | |
1044 | finally = gimple_eh_else_n_body (eh_else); | |
355a7673 | 1045 | lower_eh_constructs_1 (state, &finally); |
0a35513e | 1046 | gimple_seq_add_seq (&tf->top_p_seq, finally); |
1d65f45c | 1047 | |
0a35513e AH |
1048 | if (tf->may_throw) |
1049 | { | |
1050 | finally = gimple_eh_else_e_body (eh_else); | |
355a7673 | 1051 | lower_eh_constructs_1 (state, &finally); |
0a35513e AH |
1052 | |
1053 | emit_post_landing_pad (&eh_seq, tf->region); | |
1054 | gimple_seq_add_seq (&eh_seq, finally); | |
1055 | } | |
1056 | } | |
1057 | else | |
1d65f45c | 1058 | { |
355a7673 | 1059 | lower_eh_constructs_1 (state, &finally); |
0a35513e | 1060 | gimple_seq_add_seq (&tf->top_p_seq, finally); |
1d65f45c | 1061 | |
0a35513e AH |
1062 | if (tf->may_throw) |
1063 | { | |
1064 | emit_post_landing_pad (&eh_seq, tf->region); | |
1065 | ||
1066 | x = gimple_build_goto (lab); | |
1067 | gimple_seq_add_stmt (&eh_seq, x); | |
1068 | } | |
1d65f45c | 1069 | } |
6de9cd9a DN |
1070 | } |
1071 | ||
1072 | /* A subroutine of lower_try_finally. We have determined that there is | |
1073 | exactly one destination of the finally block. Restructure the | |
1074 | try_finally node for this special case. */ | |
1075 | ||
1076 | static void | |
1077 | lower_try_finally_onedest (struct leh_state *state, struct leh_tf_state *tf) | |
1078 | { | |
1079 | struct goto_queue_node *q, *qe; | |
726a989a RB |
1080 | gimple x; |
1081 | gimple_seq finally; | |
1082 | tree finally_label; | |
c2255bc4 | 1083 | location_t loc = gimple_location (tf->try_finally_expr); |
6de9cd9a | 1084 | |
726a989a RB |
1085 | finally = gimple_try_cleanup (tf->top_p); |
1086 | tf->top_p_seq = gimple_try_eval (tf->top_p); | |
6de9cd9a | 1087 | |
0a35513e AH |
1088 | /* Since there's only one destination, and the destination edge can only |
1089 | either be EH or non-EH, that implies that all of our incoming edges | |
1090 | are of the same type. Therefore we can lower EH_ELSE immediately. */ | |
1091 | x = get_eh_else (finally); | |
1092 | if (x) | |
1093 | { | |
1094 | if (tf->may_throw) | |
1095 | finally = gimple_eh_else_e_body (x); | |
1096 | else | |
1097 | finally = gimple_eh_else_n_body (x); | |
1098 | } | |
1099 | ||
355a7673 | 1100 | lower_eh_constructs_1 (state, &finally); |
6de9cd9a DN |
1101 | |
1102 | if (tf->may_throw) | |
1103 | { | |
1104 | /* Only reachable via the exception edge. Add the given label to | |
1105 | the head of the FINALLY block. Append a RESX at the end. */ | |
1d65f45c RH |
1106 | emit_post_landing_pad (&eh_seq, tf->region); |
1107 | gimple_seq_add_seq (&eh_seq, finally); | |
1108 | emit_resx (&eh_seq, tf->region); | |
6de9cd9a DN |
1109 | return; |
1110 | } | |
1111 | ||
1112 | if (tf->may_fallthru) | |
1113 | { | |
1114 | /* Only reachable via the fallthru edge. Do nothing but let | |
1115 | the two blocks run together; we'll fall out the bottom. */ | |
726a989a | 1116 | gimple_seq_add_seq (&tf->top_p_seq, finally); |
6de9cd9a DN |
1117 | return; |
1118 | } | |
1119 | ||
c2255bc4 | 1120 | finally_label = create_artificial_label (loc); |
726a989a RB |
1121 | x = gimple_build_label (finally_label); |
1122 | gimple_seq_add_stmt (&tf->top_p_seq, x); | |
6de9cd9a | 1123 | |
726a989a | 1124 | gimple_seq_add_seq (&tf->top_p_seq, finally); |
6de9cd9a DN |
1125 | |
1126 | q = tf->goto_queue; | |
1127 | qe = q + tf->goto_queue_active; | |
1128 | ||
1129 | if (tf->may_return) | |
1130 | { | |
1131 | /* Reachable by return expressions only. Redirect them. */ | |
6de9cd9a | 1132 | for (; q < qe; ++q) |
8d686507 | 1133 | do_return_redirection (q, finally_label, NULL); |
6de9cd9a DN |
1134 | replace_goto_queue (tf); |
1135 | } | |
1136 | else | |
1137 | { | |
1138 | /* Reachable by goto expressions only. Redirect them. */ | |
1139 | for (; q < qe; ++q) | |
726a989a | 1140 | do_goto_redirection (q, finally_label, NULL, tf); |
6de9cd9a | 1141 | replace_goto_queue (tf); |
19114537 | 1142 | |
59ebc704 | 1143 | if (VEC_index (tree, tf->dest_array, 0) == tf->fallthru_label) |
6de9cd9a DN |
1144 | { |
1145 | /* Reachable by goto to fallthru label only. Redirect it | |
1146 | to the new label (already created, sadly), and do not | |
1147 | emit the final branch out, or the fallthru label. */ | |
1148 | tf->fallthru_label = NULL; | |
1149 | return; | |
1150 | } | |
1151 | } | |
1152 | ||
726a989a RB |
1153 | /* Place the original return/goto to the original destination |
1154 | immediately after the finally block. */ | |
1155 | x = tf->goto_queue[0].cont_stmt; | |
1156 | gimple_seq_add_stmt (&tf->top_p_seq, x); | |
1157 | maybe_record_in_goto_queue (state, x); | |
6de9cd9a DN |
1158 | } |
1159 | ||
1160 | /* A subroutine of lower_try_finally. There are multiple edges incoming | |
1161 | and outgoing from the finally block. Implement this by duplicating the | |
1162 | finally block for every destination. */ | |
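/* Roughly, as a source-level sketch (not the exact GIMPLE produced):

     try { if (p) goto A; else return; } finally { F; }

   becomes

     if (p) goto LA; else goto LR;
     ...
     LA: F; goto A;
     LR: F; return;

   i.e. every escaping edge gets its own copy of F followed by the
   original jump or return.  */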
1163 | ||
1164 | static void | |
1165 | lower_try_finally_copy (struct leh_state *state, struct leh_tf_state *tf) | |
1166 | { | |
726a989a RB |
1167 | gimple_seq finally; |
1168 | gimple_seq new_stmt; | |
1169 | gimple_seq seq; | |
0a35513e | 1170 | gimple x, eh_else; |
726a989a | 1171 | tree tmp; |
c2255bc4 | 1172 | location_t tf_loc = gimple_location (tf->try_finally_expr); |
6de9cd9a | 1173 | |
726a989a | 1174 | finally = gimple_try_cleanup (tf->top_p); |
0a35513e AH |
1175 | |
1176 | /* Notice EH_ELSE, and simplify some of the remaining code | |
1177 | by considering FINALLY to be the normal return path only. */ | |
1178 | eh_else = get_eh_else (finally); | |
1179 | if (eh_else) | |
1180 | finally = gimple_eh_else_n_body (eh_else); | |
1181 | ||
726a989a RB |
1182 | tf->top_p_seq = gimple_try_eval (tf->top_p); |
1183 | new_stmt = NULL; | |
6de9cd9a DN |
1184 | |
1185 | if (tf->may_fallthru) | |
1186 | { | |
726a989a | 1187 | seq = lower_try_finally_dup_block (finally, state); |
355a7673 | 1188 | lower_eh_constructs_1 (state, &seq); |
726a989a | 1189 | gimple_seq_add_seq (&new_stmt, seq); |
6de9cd9a | 1190 | |
726a989a RB |
1191 | tmp = lower_try_finally_fallthru_label (tf); |
1192 | x = gimple_build_goto (tmp); | |
1193 | gimple_seq_add_stmt (&new_stmt, x); | |
6de9cd9a DN |
1194 | } |
1195 | ||
1196 | if (tf->may_throw) | |
1197 | { | |
0a35513e AH |
1198 | /* We don't need to copy the EH path of EH_ELSE, |
1199 | since it is only emitted once. */ | |
1200 | if (eh_else) | |
1201 | seq = gimple_eh_else_e_body (eh_else); | |
1202 | else | |
1203 | seq = lower_try_finally_dup_block (finally, state); | |
355a7673 | 1204 | lower_eh_constructs_1 (state, &seq); |
6de9cd9a | 1205 | |
288f5b2e RH |
1206 | emit_post_landing_pad (&eh_seq, tf->region); |
1207 | gimple_seq_add_seq (&eh_seq, seq); | |
1d65f45c | 1208 | emit_resx (&eh_seq, tf->region); |
6de9cd9a DN |
1209 | } |
1210 | ||
1211 | if (tf->goto_queue) | |
1212 | { | |
1213 | struct goto_queue_node *q, *qe; | |
dd58eb5a | 1214 | int return_index, index; |
858904db | 1215 | struct labels_s |
dd58eb5a AO |
1216 | { |
1217 | struct goto_queue_node *q; | |
1218 | tree label; | |
1219 | } *labels; | |
6de9cd9a | 1220 | |
59ebc704 | 1221 | return_index = VEC_length (tree, tf->dest_array); |
858904db | 1222 | labels = XCNEWVEC (struct labels_s, return_index + 1); |
6de9cd9a DN |
1223 | |
1224 | q = tf->goto_queue; | |
1225 | qe = q + tf->goto_queue_active; | |
1226 | for (; q < qe; q++) | |
1227 | { | |
dd58eb5a AO |
1228 | index = q->index < 0 ? return_index : q->index; |
1229 | ||
1230 | if (!labels[index].q) | |
1231 | labels[index].q = q; | |
1232 | } | |
1233 | ||
1234 | for (index = 0; index < return_index + 1; index++) | |
1235 | { | |
1236 | tree lab; | |
1237 | ||
1238 | q = labels[index].q; | |
1239 | if (! q) | |
1240 | continue; | |
1241 | ||
c2255bc4 AH |
1242 | lab = labels[index].label |
1243 | = create_artificial_label (tf_loc); | |
6de9cd9a DN |
1244 | |
1245 | if (index == return_index) | |
8d686507 | 1246 | do_return_redirection (q, lab, NULL); |
6de9cd9a | 1247 | else |
726a989a | 1248 | do_goto_redirection (q, lab, NULL, tf); |
6de9cd9a | 1249 | |
726a989a RB |
1250 | x = gimple_build_label (lab); |
1251 | gimple_seq_add_stmt (&new_stmt, x); | |
6de9cd9a | 1252 | |
726a989a | 1253 | seq = lower_try_finally_dup_block (finally, state); |
355a7673 | 1254 | lower_eh_constructs_1 (state, &seq); |
726a989a | 1255 | gimple_seq_add_seq (&new_stmt, seq); |
6de9cd9a | 1256 | |
726a989a | 1257 | gimple_seq_add_stmt (&new_stmt, q->cont_stmt); |
dd58eb5a | 1258 | maybe_record_in_goto_queue (state, q->cont_stmt); |
6de9cd9a | 1259 | } |
dd58eb5a AO |
1260 | |
1261 | for (q = tf->goto_queue; q < qe; q++) | |
1262 | { | |
1263 | tree lab; | |
1264 | ||
1265 | index = q->index < 0 ? return_index : q->index; | |
1266 | ||
1267 | if (labels[index].q == q) | |
1268 | continue; | |
1269 | ||
1270 | lab = labels[index].label; | |
1271 | ||
1272 | if (index == return_index) | |
8d686507 | 1273 | do_return_redirection (q, lab, NULL); |
dd58eb5a | 1274 | else |
726a989a | 1275 | do_goto_redirection (q, lab, NULL, tf); |
dd58eb5a | 1276 | } |
1d65f45c | 1277 | |
6de9cd9a DN |
1278 | replace_goto_queue (tf); |
1279 | free (labels); | |
1280 | } | |
1281 | ||
1282 | /* Need to link new stmts after running replace_goto_queue due | |
1283 | to not wanting to process the same goto stmts twice. */ | |
726a989a | 1284 | gimple_seq_add_seq (&tf->top_p_seq, new_stmt); |
6de9cd9a DN |
1285 | } |
1286 | ||
1287 | /* A subroutine of lower_try_finally. There are multiple edges incoming | |
1288 | and outgoing from the finally block. Implement this by instrumenting | |
1289 | each incoming edge and creating a switch statement at the end of the | |
1290 | finally block that branches to the appropriate destination. */ | |
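/* Roughly, as a source-level sketch (not the exact GIMPLE produced):

     try { if (p) goto A; else return; } finally { F; }

   becomes

     if (p) { finally_tmp = 0; goto L; }
     else   { finally_tmp = 1; goto L; }
     L: F;
     switch (finally_tmp)
       {
       case 0: goto A;
       case 1: return;
       }

   so that F is emitted only once and a small dispatch at its end
   routes control to the proper destination.  */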
1291 | ||
1292 | static void | |
1293 | lower_try_finally_switch (struct leh_state *state, struct leh_tf_state *tf) | |
1294 | { | |
1295 | struct goto_queue_node *q, *qe; | |
726a989a | 1296 | tree finally_tmp, finally_label; |
6de9cd9a DN |
1297 | int return_index, eh_index, fallthru_index; |
1298 | int nlabels, ndests, j, last_case_index; | |
726a989a RB |
1299 | tree last_case; |
1300 | VEC (tree,heap) *case_label_vec; | |
355a7673 | 1301 | gimple_seq switch_body = NULL; |
0a35513e | 1302 | gimple x, eh_else; |
726a989a RB |
1303 | tree tmp; |
1304 | gimple switch_stmt; | |
1305 | gimple_seq finally; | |
1306 | struct pointer_map_t *cont_map = NULL; | |
c2255bc4 | 1307 | /* The location of the TRY_FINALLY stmt. */ |
d40eb158 | 1308 | location_t tf_loc = gimple_location (tf->try_finally_expr); |
c2255bc4 AH |
1309 | /* The location of the finally block. */ |
1310 | location_t finally_loc; | |
726a989a | 1311 | |
0a35513e AH |
1312 | finally = gimple_try_cleanup (tf->top_p); |
1313 | eh_else = get_eh_else (finally); | |
6de9cd9a DN |
1314 | |
1315 | /* Mash the TRY block to the head of the chain. */ | |
726a989a | 1316 | tf->top_p_seq = gimple_try_eval (tf->top_p); |
6de9cd9a | 1317 | |
c2255bc4 AH |
1318 | /* The location of the finally is either the last stmt in the finally |
1319 | block or the location of the TRY_FINALLY itself. */ | |
0118b919 EB |
1320 | x = gimple_seq_last_stmt (finally); |
1321 | finally_loc = x ? gimple_location (x) : tf_loc; | |
c2255bc4 | 1322 | |
6de9cd9a | 1323 | /* Lower the finally block itself. */ |
355a7673 | 1324 | lower_eh_constructs_1 (state, &finally); |
6de9cd9a DN |
1325 | |
1326 | /* Prepare for switch statement generation. */ | |
59ebc704 | 1327 | nlabels = VEC_length (tree, tf->dest_array); |
6de9cd9a DN |
1328 | return_index = nlabels; |
1329 | eh_index = return_index + tf->may_return; | |
0a35513e | 1330 | fallthru_index = eh_index + (tf->may_throw && !eh_else); |
6de9cd9a DN |
1331 | ndests = fallthru_index + tf->may_fallthru; |
1332 | ||
1333 | finally_tmp = create_tmp_var (integer_type_node, "finally_tmp"); | |
c2255bc4 | 1334 | finally_label = create_artificial_label (finally_loc); |
6de9cd9a | 1335 | |
726a989a RB |
1336 | /* We use VEC_quick_push on case_label_vec throughout this function, |
1337 | since we know the size in advance and allocate precisely as muce | |
1338 | space as needed. */ | |
1339 | case_label_vec = VEC_alloc (tree, heap, ndests); | |
6de9cd9a DN |
1340 | last_case = NULL; |
1341 | last_case_index = 0; | |
1342 | ||
1343 | /* Begin inserting code for getting to the finally block. Things | |
1344 | are done in this order to correspond to the sequence in which the code is
073a8998 | 1345 | laid out. */ |
6de9cd9a DN |
1346 | |
1347 | if (tf->may_fallthru) | |
1348 | { | |
1d65f45c | 1349 | x = gimple_build_assign (finally_tmp, |
413581ba RG |
1350 | build_int_cst (integer_type_node, |
1351 | fallthru_index)); | |
726a989a | 1352 | gimple_seq_add_stmt (&tf->top_p_seq, x); |
6de9cd9a | 1353 | |
3d528853 NF |
1354 | tmp = build_int_cst (integer_type_node, fallthru_index); |
1355 | last_case = build_case_label (tmp, NULL, | |
1356 | create_artificial_label (tf_loc)); | |
726a989a | 1357 | VEC_quick_push (tree, case_label_vec, last_case); |
6de9cd9a DN |
1358 | last_case_index++; |
1359 | ||
726a989a RB |
1360 | x = gimple_build_label (CASE_LABEL (last_case)); |
1361 | gimple_seq_add_stmt (&switch_body, x); | |
6de9cd9a | 1362 | |
726a989a RB |
1363 | tmp = lower_try_finally_fallthru_label (tf); |
1364 | x = gimple_build_goto (tmp); | |
1365 | gimple_seq_add_stmt (&switch_body, x); | |
6de9cd9a DN |
1366 | } |
1367 | ||
0a35513e AH |
1368 | /* For EH_ELSE, emit the exception path (plus resx) now, then |
1369 | subsequently we only need consider the normal path. */ | |
1370 | if (eh_else) | |
1371 | { | |
1372 | if (tf->may_throw) | |
1373 | { | |
1374 | finally = gimple_eh_else_e_body (eh_else); | |
355a7673 | 1375 | lower_eh_constructs_1 (state, &finally); |
0a35513e AH |
1376 | |
1377 | emit_post_landing_pad (&eh_seq, tf->region); | |
1378 | gimple_seq_add_seq (&eh_seq, finally); | |
1379 | emit_resx (&eh_seq, tf->region); | |
1380 | } | |
1381 | ||
1382 | finally = gimple_eh_else_n_body (eh_else); | |
1383 | } | |
1384 | else if (tf->may_throw) | |
6de9cd9a | 1385 | { |
1d65f45c | 1386 | emit_post_landing_pad (&eh_seq, tf->region); |
6de9cd9a | 1387 | |
1d65f45c | 1388 | x = gimple_build_assign (finally_tmp, |
413581ba | 1389 | build_int_cst (integer_type_node, eh_index)); |
1d65f45c RH |
1390 | gimple_seq_add_stmt (&eh_seq, x); |
1391 | ||
1392 | x = gimple_build_goto (finally_label); | |
1393 | gimple_seq_add_stmt (&eh_seq, x); | |
6de9cd9a | 1394 | |
3d528853 NF |
1395 | tmp = build_int_cst (integer_type_node, eh_index); |
1396 | last_case = build_case_label (tmp, NULL, | |
1397 | create_artificial_label (tf_loc)); | |
726a989a | 1398 | VEC_quick_push (tree, case_label_vec, last_case); |
6de9cd9a DN |
1399 | last_case_index++; |
1400 | ||
726a989a | 1401 | x = gimple_build_label (CASE_LABEL (last_case)); |
1d65f45c RH |
1402 | gimple_seq_add_stmt (&eh_seq, x); |
1403 | emit_resx (&eh_seq, tf->region); | |
6de9cd9a DN |
1404 | } |
1405 | ||
726a989a RB |
1406 | x = gimple_build_label (finally_label); |
1407 | gimple_seq_add_stmt (&tf->top_p_seq, x); | |
6de9cd9a | 1408 | |
726a989a | 1409 | gimple_seq_add_seq (&tf->top_p_seq, finally); |
6de9cd9a DN |
1410 | |
1411 | /* Redirect each incoming goto edge. */ | |
1412 | q = tf->goto_queue; | |
1413 | qe = q + tf->goto_queue_active; | |
1414 | j = last_case_index + tf->may_return; | |
726a989a RB |
1415 | /* Prepare the assignments to finally_tmp that are executed upon the |
1416 | entrance through a particular edge. */ | |
6de9cd9a DN |
1417 | for (; q < qe; ++q) |
1418 | { | |
355a7673 | 1419 | gimple_seq mod = NULL; |
726a989a RB |
1420 | int switch_id; |
1421 | unsigned int case_index; | |
1422 | ||
6de9cd9a DN |
1423 | if (q->index < 0) |
1424 | { | |
726a989a | 1425 | x = gimple_build_assign (finally_tmp, |
413581ba RG |
1426 | build_int_cst (integer_type_node, |
1427 | return_index)); | |
726a989a | 1428 | gimple_seq_add_stmt (&mod, x); |
8d686507 | 1429 | do_return_redirection (q, finally_label, mod); |
6de9cd9a DN |
1430 | switch_id = return_index; |
1431 | } | |
1432 | else | |
1433 | { | |
726a989a | 1434 | x = gimple_build_assign (finally_tmp, |
413581ba | 1435 | build_int_cst (integer_type_node, q->index)); |
726a989a RB |
1436 | gimple_seq_add_stmt (&mod, x); |
1437 | do_goto_redirection (q, finally_label, mod, tf); | |
6de9cd9a DN |
1438 | switch_id = q->index; |
1439 | } | |
1440 | ||
1441 | case_index = j + q->index; | |
726a989a RB |
1442 | if (VEC_length (tree, case_label_vec) <= case_index |
1443 | || !VEC_index (tree, case_label_vec, case_index)) | |
1444 | { | |
1445 | tree case_lab; | |
1446 | void **slot; | |
3d528853 NF |
1447 | tmp = build_int_cst (integer_type_node, switch_id); |
1448 | case_lab = build_case_label (tmp, NULL, | |
1449 | create_artificial_label (tf_loc)); | |
726a989a | 1450 | /* We store the cont_stmt in the pointer map, so that we can recover |
ffa03772 | 1451 | it in the loop below. */ |
726a989a RB |
1452 | if (!cont_map) |
1453 | cont_map = pointer_map_create (); | |
1454 | slot = pointer_map_insert (cont_map, case_lab); | |
1455 | *slot = q->cont_stmt; | |
1456 | VEC_quick_push (tree, case_label_vec, case_lab); | |
1457 | } | |
dd58eb5a AO |
1458 | } |
1459 | for (j = last_case_index; j < last_case_index + nlabels; j++) | |
1460 | { | |
726a989a RB |
1461 | gimple cont_stmt; |
1462 | void **slot; | |
dd58eb5a | 1463 | |
726a989a | 1464 | last_case = VEC_index (tree, case_label_vec, j); |
dd58eb5a AO |
1465 | |
1466 | gcc_assert (last_case); | |
726a989a | 1467 | gcc_assert (cont_map); |
dd58eb5a | 1468 | |
726a989a | 1469 | slot = pointer_map_contains (cont_map, last_case); |
726a989a RB |
1470 | gcc_assert (slot); |
1471 | cont_stmt = *(gimple *) slot; | |
dd58eb5a | 1472 | |
ffa03772 | 1473 | x = gimple_build_label (CASE_LABEL (last_case)); |
726a989a RB |
1474 | gimple_seq_add_stmt (&switch_body, x); |
1475 | gimple_seq_add_stmt (&switch_body, cont_stmt); | |
dd58eb5a | 1476 | maybe_record_in_goto_queue (state, cont_stmt); |
6de9cd9a | 1477 | } |
726a989a RB |
1478 | if (cont_map) |
1479 | pointer_map_destroy (cont_map); | |
1480 | ||
6de9cd9a | 1481 | replace_goto_queue (tf); |
6de9cd9a | 1482 | |
0f1f6967 SB |
1483 | /* Make sure that the last case is the default label, as one is required. |
1484 | Then sort the labels, which is also required in GIMPLE. */ | |
6de9cd9a | 1485 | CASE_LOW (last_case) = NULL; |
0f1f6967 | 1486 | sort_case_labels (case_label_vec); |
6de9cd9a | 1487 | |
726a989a RB |
1488 | /* Build the switch statement, setting last_case to be the default |
1489 | label. */ | |
1490 | switch_stmt = gimple_build_switch_vec (finally_tmp, last_case, | |
1491 | case_label_vec); | |
c2255bc4 | 1492 | gimple_set_location (switch_stmt, finally_loc); |
726a989a RB |
1493 | |
1494 | /* Need to link SWITCH_STMT after running replace_goto_queue | |
1495 | due to not wanting to process the same goto stmts twice. */ | |
1496 | gimple_seq_add_stmt (&tf->top_p_seq, switch_stmt); | |
1497 | gimple_seq_add_seq (&tf->top_p_seq, switch_body); | |
6de9cd9a DN |
1498 | } |
1499 | ||
1500 | /* Decide whether or not we are going to duplicate the finally block. | |
1501 | There are several considerations. | |
1502 | ||
1503 | First, if this is Java, then the finally block contains code | |
1504 | written by the user. It has line numbers associated with it, | |
1505 | so duplicating the block means it's difficult to set a breakpoint. | |
1506 | Since controlling code generation via -g is verboten, we simply | |
1507 | never duplicate code without optimization. | |
1508 | ||
1509 | Second, we'd like to prevent egregious code growth. One way to | |
1510 | do this is to estimate the size of the finally block, multiply | |
1511 | that by the number of copies we'd need to make, and compare against | |
1512 | the estimate of the size of the switch machinery we'd have to add. */ | |
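/* A worked example with made-up numbers (not from any real testcase):
   if the finally block is estimated at 8 insns and ndests == 3, the
   copy strategy costs f_estimate = (8 + 1) * 3 = 27 while the switch
   strategy costs sw_estimate = 10 + 2 * 3 = 16, so when optimizing
   for size the switch machinery is preferred.  */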
1513 | ||
1514 | static bool | |
0a35513e | 1515 | decide_copy_try_finally (int ndests, bool may_throw, gimple_seq finally) |
6de9cd9a DN |
1516 | { |
1517 | int f_estimate, sw_estimate; | |
0a35513e AH |
1518 | gimple eh_else; |
1519 | ||
1520 | /* If there's an EH_ELSE involved, the exception path is separate | |
1521 | and really doesn't come into play for this computation. */ | |
1522 | eh_else = get_eh_else (finally); | |
1523 | if (eh_else) | |
1524 | { | |
1525 | ndests -= may_throw; | |
1526 | finally = gimple_eh_else_n_body (eh_else); | |
1527 | } | |
6de9cd9a DN |
1528 | |
1529 | if (!optimize) | |
bccc50d4 JJ |
1530 | { |
1531 | gimple_stmt_iterator gsi; | |
1532 | ||
1533 | if (ndests == 1) | |
1534 | return true; | |
1535 | ||
1536 | for (gsi = gsi_start (finally); !gsi_end_p (gsi); gsi_next (&gsi)) | |
1537 | { | |
1538 | gimple stmt = gsi_stmt (gsi); | |
1539 | if (!is_gimple_debug (stmt) && !gimple_clobber_p (stmt)) | |
1540 | return false; | |
1541 | } | |
1542 | return true; | |
1543 | } | |
6de9cd9a DN |
1544 | |
1545 | /* Finally estimate N times, plus N gotos. */ | |
726a989a | 1546 | f_estimate = count_insns_seq (finally, &eni_size_weights); |
6de9cd9a DN |
1547 | f_estimate = (f_estimate + 1) * ndests; |
1548 | ||
1549 | /* Switch statement (cost 10), N variable assignments, N gotos. */ | |
1550 | sw_estimate = 10 + 2 * ndests; | |
1551 | ||
1552 | /* Optimize for size clearly wants our best guess. */ | |
efd8f750 | 1553 | if (optimize_function_for_size_p (cfun)) |
6de9cd9a DN |
1554 | return f_estimate < sw_estimate; |
1555 | ||
1556 | /* ??? These numbers are completely made up so far. */ | |
1557 | if (optimize > 1) | |
7465ed07 | 1558 | return f_estimate < 100 || f_estimate < sw_estimate * 2; |
6de9cd9a | 1559 | else |
7465ed07 | 1560 | return f_estimate < 40 || f_estimate * 2 < sw_estimate * 3; |
6de9cd9a DN |
1561 | } |
1562 | ||
d3f28910 JM |
1563 | /* REG is the enclosing region for a possible cleanup region, or the region |
1564 | itself. Returns TRUE if such a region would be unreachable. | |
1565 | ||
1566 | Cleanup regions within a must-not-throw region aren't actually reachable | |
1567 | even if there are throwing stmts within them, because the personality | |
1568 | routine will call terminate before unwinding. */ | |
1569 | ||
1570 | static bool | |
1571 | cleanup_is_dead_in (eh_region reg) | |
1572 | { | |
1573 | while (reg && reg->type == ERT_CLEANUP) | |
1574 | reg = reg->outer; | |
1575 | return (reg && reg->type == ERT_MUST_NOT_THROW); | |
1576 | } | |
726a989a RB |
1577 | |
1578 | /* A subroutine of lower_eh_constructs_1. Lower a GIMPLE_TRY_FINALLY node |
6de9cd9a | 1579 | to a sequence of labels and blocks, plus the exception region trees |
19114537 | 1580 | that record all the magic. This is complicated by the need to |
6de9cd9a DN |
1581 | arrange for the FINALLY block to be executed on all exits. */ |
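/* A rough sketch (hypothetical GIMPLE, showing only the switch-based
   strategy chosen further below):

	try { body } finally { fin }

   becomes approximately

	body; finally_tmp = 0; goto finally_label;
	<post landing pad>: finally_tmp = 1; goto finally_label;
      finally_label:
	fin;
	switch (finally_tmp) { case 0: goto fallthru; case 1: resx; }  */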
1582 | ||
726a989a RB |
1583 | static gimple_seq |
1584 | lower_try_finally (struct leh_state *state, gimple tp) | |
6de9cd9a DN |
1585 | { |
1586 | struct leh_tf_state this_tf; | |
1587 | struct leh_state this_state; | |
1588 | int ndests; | |
e19d1f06 | 1589 | gimple_seq old_eh_seq; |
6de9cd9a DN |
1590 | |
1591 | /* Process the try block. */ | |
1592 | ||
1593 | memset (&this_tf, 0, sizeof (this_tf)); | |
726a989a | 1594 | this_tf.try_finally_expr = tp; |
6de9cd9a DN |
1595 | this_tf.top_p = tp; |
1596 | this_tf.outer = state; | |
d3f28910 JM |
1597 | if (using_eh_for_cleanups_p && !cleanup_is_dead_in (state->cur_region)) |
1598 | { | |
1599 | this_tf.region = gen_eh_region_cleanup (state->cur_region); | |
1600 | this_state.cur_region = this_tf.region; | |
1601 | } | |
6de9cd9a | 1602 | else |
d3f28910 JM |
1603 | { |
1604 | this_tf.region = NULL; | |
1605 | this_state.cur_region = state->cur_region; | |
1606 | } | |
6de9cd9a | 1607 | |
1d65f45c | 1608 | this_state.ehp_region = state->ehp_region; |
6de9cd9a DN |
1609 | this_state.tf = &this_tf; |
1610 | ||
e19d1f06 RH |
1611 | old_eh_seq = eh_seq; |
1612 | eh_seq = NULL; | |
1613 | ||
355a7673 | 1614 | lower_eh_constructs_1 (&this_state, gimple_try_eval_ptr (tp)); |
6de9cd9a DN |
1615 | |
1616 | /* Determine if the try block is escaped through the bottom. */ | |
726a989a | 1617 | this_tf.may_fallthru = gimple_seq_may_fallthru (gimple_try_eval (tp)); |
6de9cd9a DN |
1618 | |
1619 | /* Determine if any exceptions are possible within the try block. */ | |
d3f28910 | 1620 | if (this_tf.region) |
b7da9fd4 | 1621 | this_tf.may_throw = eh_region_may_contain_throw (this_tf.region); |
6de9cd9a | 1622 | if (this_tf.may_throw) |
1d65f45c | 1623 | honor_protect_cleanup_actions (state, &this_state, &this_tf); |
6de9cd9a | 1624 | |
6de9cd9a DN |
1625 | /* Determine how many edges (still) reach the finally block. Or rather, |
1626 | how many destinations are reached by the finally block. Use this to | |
1627 | determine how we process the finally block itself. */ | |
1628 | ||
59ebc704 | 1629 | ndests = VEC_length (tree, this_tf.dest_array); |
6de9cd9a DN |
1630 | ndests += this_tf.may_fallthru; |
1631 | ndests += this_tf.may_return; | |
1632 | ndests += this_tf.may_throw; | |
1633 | ||
1634 | /* If the FINALLY block is not reachable, dike it out. */ | |
1635 | if (ndests == 0) | |
726a989a RB |
1636 | { |
1637 | gimple_seq_add_seq (&this_tf.top_p_seq, gimple_try_eval (tp)); | |
1638 | gimple_try_set_cleanup (tp, NULL); | |
1639 | } | |
6de9cd9a DN |
1640 | /* If the finally block doesn't fall through, then any destination |
1641 | we might try to impose there isn't reached either. There may be | |
1642 | some minor amount of cleanup and redirection still needed. */ | |
726a989a | 1643 | else if (!gimple_seq_may_fallthru (gimple_try_cleanup (tp))) |
6de9cd9a DN |
1644 | lower_try_finally_nofallthru (state, &this_tf); |
1645 | ||
1646 | /* We can easily special-case redirection to a single destination. */ | |
1647 | else if (ndests == 1) | |
1648 | lower_try_finally_onedest (state, &this_tf); | |
0a35513e AH |
1649 | else if (decide_copy_try_finally (ndests, this_tf.may_throw, |
1650 | gimple_try_cleanup (tp))) | |
6de9cd9a DN |
1651 | lower_try_finally_copy (state, &this_tf); |
1652 | else | |
1653 | lower_try_finally_switch (state, &this_tf); | |
1654 | ||
1655 | /* If someone requested we add a label at the end of the transformed | |
1656 | block, do so. */ | |
1657 | if (this_tf.fallthru_label) | |
1658 | { | |
726a989a RB |
1659 | /* This must be reached only if ndests == 0. */ |
1660 | gimple x = gimple_build_label (this_tf.fallthru_label); | |
1661 | gimple_seq_add_stmt (&this_tf.top_p_seq, x); | |
6de9cd9a DN |
1662 | } |
1663 | ||
59ebc704 | 1664 | VEC_free (tree, heap, this_tf.dest_array); |
04695783 | 1665 | free (this_tf.goto_queue); |
0f547d3d SE |
1666 | if (this_tf.goto_queue_map) |
1667 | pointer_map_destroy (this_tf.goto_queue_map); | |
726a989a | 1668 | |
e19d1f06 RH |
1669 | /* If there was an old (aka outer) eh_seq, append the current eh_seq. |
1670 | If there was no old eh_seq, then the append is trivially already done. */ | |
1671 | if (old_eh_seq) | |
1672 | { | |
1673 | if (eh_seq == NULL) | |
1674 | eh_seq = old_eh_seq; | |
1675 | else | |
1676 | { | |
1677 | gimple_seq new_eh_seq = eh_seq; | |
1678 | eh_seq = old_eh_seq; | |
1679 | gimple_seq_add_seq(&eh_seq, new_eh_seq); | |
1680 | } | |
1681 | } | |
1682 | ||
726a989a | 1683 | return this_tf.top_p_seq; |
6de9cd9a DN |
1684 | } |
1685 | ||
726a989a RB |
1686 | /* A subroutine of lower_eh_constructs_1. Lower a GIMPLE_TRY_CATCH with a |
1687 | list of GIMPLE_CATCH to a sequence of labels and blocks, plus the | |
1688 | exception region trees that record all the magic. */ |
6de9cd9a | 1689 | |
726a989a RB |
1690 | static gimple_seq |
1691 | lower_catch (struct leh_state *state, gimple tp) | |
6de9cd9a | 1692 | { |
b7da9fd4 RH |
1693 | eh_region try_region = NULL; |
1694 | struct leh_state this_state = *state; | |
726a989a | 1695 | gimple_stmt_iterator gsi; |
6de9cd9a | 1696 | tree out_label; |
355a7673 | 1697 | gimple_seq new_seq, cleanup; |
1d65f45c | 1698 | gimple x; |
c2255bc4 | 1699 | location_t try_catch_loc = gimple_location (tp); |
6de9cd9a | 1700 | |
b7da9fd4 RH |
1701 | if (flag_exceptions) |
1702 | { | |
1703 | try_region = gen_eh_region_try (state->cur_region); | |
1704 | this_state.cur_region = try_region; | |
1705 | } | |
6de9cd9a | 1706 | |
355a7673 | 1707 | lower_eh_constructs_1 (&this_state, gimple_try_eval_ptr (tp)); |
6de9cd9a | 1708 | |
b7da9fd4 | 1709 | if (!eh_region_may_contain_throw (try_region)) |
1d65f45c RH |
1710 | return gimple_try_eval (tp); |
1711 | ||
1712 | new_seq = NULL; | |
1713 | emit_eh_dispatch (&new_seq, try_region); | |
1714 | emit_resx (&new_seq, try_region); | |
1715 | ||
1716 | this_state.cur_region = state->cur_region; | |
1717 | this_state.ehp_region = try_region; | |
6de9cd9a DN |
1718 | |
1719 | out_label = NULL; | |
355a7673 MM |
1720 | cleanup = gimple_try_cleanup (tp); |
1721 | for (gsi = gsi_start (cleanup); | |
1d65f45c RH |
1722 | !gsi_end_p (gsi); |
1723 | gsi_next (&gsi)) | |
6de9cd9a | 1724 | { |
1d65f45c RH |
1725 | eh_catch c; |
1726 | gimple gcatch; | |
1727 | gimple_seq handler; | |
6de9cd9a | 1728 | |
82d6e6fc | 1729 | gcatch = gsi_stmt (gsi); |
1d65f45c | 1730 | c = gen_eh_region_catch (try_region, gimple_catch_types (gcatch)); |
6de9cd9a | 1731 | |
1d65f45c | 1732 | handler = gimple_catch_handler (gcatch); |
355a7673 | 1733 | lower_eh_constructs_1 (&this_state, &handler); |
6de9cd9a | 1734 | |
1d65f45c RH |
1735 | c->label = create_artificial_label (UNKNOWN_LOCATION); |
1736 | x = gimple_build_label (c->label); | |
1737 | gimple_seq_add_stmt (&new_seq, x); | |
6de9cd9a | 1738 | |
1d65f45c | 1739 | gimple_seq_add_seq (&new_seq, handler); |
6de9cd9a | 1740 | |
1d65f45c | 1741 | if (gimple_seq_may_fallthru (new_seq)) |
6de9cd9a DN |
1742 | { |
1743 | if (!out_label) | |
c2255bc4 | 1744 | out_label = create_artificial_label (try_catch_loc); |
6de9cd9a | 1745 | |
726a989a | 1746 | x = gimple_build_goto (out_label); |
1d65f45c | 1747 | gimple_seq_add_stmt (&new_seq, x); |
6de9cd9a | 1748 | } |
d815d34e MM |
1749 | if (!c->type_list) |
1750 | break; | |
6de9cd9a DN |
1751 | } |
1752 | ||
1d65f45c RH |
1753 | gimple_try_set_cleanup (tp, new_seq); |
1754 | ||
1755 | return frob_into_branch_around (tp, try_region, out_label); | |
6de9cd9a DN |
1756 | } |
1757 | ||
726a989a RB |
1758 | /* A subroutine of lower_eh_constructs_1. Lower a GIMPLE_TRY with a |
1759 | GIMPLE_EH_FILTER to a sequence of labels and blocks, plus the exception | |
6de9cd9a DN |
1760 | region trees that record all the magic. */ |
1761 | ||
726a989a RB |
1762 | static gimple_seq |
1763 | lower_eh_filter (struct leh_state *state, gimple tp) | |
6de9cd9a | 1764 | { |
b7da9fd4 RH |
1765 | struct leh_state this_state = *state; |
1766 | eh_region this_region = NULL; | |
1d65f45c RH |
1767 | gimple inner, x; |
1768 | gimple_seq new_seq; | |
19114537 | 1769 | |
726a989a RB |
1770 | inner = gimple_seq_first_stmt (gimple_try_cleanup (tp)); |
1771 | ||
b7da9fd4 RH |
1772 | if (flag_exceptions) |
1773 | { | |
1774 | this_region = gen_eh_region_allowed (state->cur_region, | |
1775 | gimple_eh_filter_types (inner)); | |
1776 | this_state.cur_region = this_region; | |
1777 | } | |
19114537 | 1778 | |
355a7673 | 1779 | lower_eh_constructs_1 (&this_state, gimple_try_eval_ptr (tp)); |
6de9cd9a | 1780 | |
b7da9fd4 | 1781 | if (!eh_region_may_contain_throw (this_region)) |
1d65f45c RH |
1782 | return gimple_try_eval (tp); |
1783 | ||
1784 | new_seq = NULL; | |
1785 | this_state.cur_region = state->cur_region; | |
1786 | this_state.ehp_region = this_region; | |
1787 | ||
1788 | emit_eh_dispatch (&new_seq, this_region); | |
1789 | emit_resx (&new_seq, this_region); | |
1790 | ||
1791 | this_region->u.allowed.label = create_artificial_label (UNKNOWN_LOCATION); | |
1792 | x = gimple_build_label (this_region->u.allowed.label); | |
1793 | gimple_seq_add_stmt (&new_seq, x); | |
1794 | ||
355a7673 | 1795 | lower_eh_constructs_1 (&this_state, gimple_eh_filter_failure_ptr (inner)); |
1d65f45c RH |
1796 | gimple_seq_add_seq (&new_seq, gimple_eh_filter_failure (inner)); |
1797 | ||
1798 | gimple_try_set_cleanup (tp, new_seq); | |
6de9cd9a | 1799 | |
1d65f45c RH |
1800 | return frob_into_branch_around (tp, this_region, NULL); |
1801 | } | |
1802 | ||
1803 | /* A subroutine of lower_eh_constructs_1. Lower a GIMPLE_TRY with | |
1804 | a GIMPLE_EH_MUST_NOT_THROW to a sequence of labels and blocks, |
1805 | plus the exception region trees that record all the magic. */ | |
1806 | ||
1807 | static gimple_seq | |
1808 | lower_eh_must_not_throw (struct leh_state *state, gimple tp) | |
1809 | { | |
b7da9fd4 | 1810 | struct leh_state this_state = *state; |
1d65f45c | 1811 | |
b7da9fd4 RH |
1812 | if (flag_exceptions) |
1813 | { | |
1814 | gimple inner = gimple_seq_first_stmt (gimple_try_cleanup (tp)); | |
1815 | eh_region this_region; | |
1d65f45c | 1816 | |
b7da9fd4 RH |
1817 | this_region = gen_eh_region_must_not_throw (state->cur_region); |
1818 | this_region->u.must_not_throw.failure_decl | |
1819 | = gimple_eh_must_not_throw_fndecl (inner); | |
1820 | this_region->u.must_not_throw.failure_loc = gimple_location (tp); | |
1d65f45c | 1821 | |
b7da9fd4 RH |
1822 | /* In order to get mangling applied to this decl, we must mark it |
1823 | used now. Otherwise, pass_ipa_free_lang_data won't think it | |
1824 | needs to happen. */ | |
1825 | TREE_USED (this_region->u.must_not_throw.failure_decl) = 1; | |
1d65f45c | 1826 | |
b7da9fd4 RH |
1827 | this_state.cur_region = this_region; |
1828 | } | |
6de9cd9a | 1829 | |
355a7673 | 1830 | lower_eh_constructs_1 (&this_state, gimple_try_eval_ptr (tp)); |
6de9cd9a | 1831 | |
1d65f45c | 1832 | return gimple_try_eval (tp); |
6de9cd9a DN |
1833 | } |
1834 | ||
1835 | /* Implement a cleanup expression. This is similar to try-finally, | |
1836 | except that we only execute the cleanup block for exception edges. */ | |
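/* For instance (a sketch, not taken from the original sources): a C++
   destructor that must run only when the protected body throws is
   gimplified as a GIMPLE_TRY_CATCH whose cleanup is a plain statement
   sequence rather than a GIMPLE_CATCH or GIMPLE_EH_FILTER; such nodes
   are dispatched to this function from lower_eh_constructs_2.  */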
1837 | ||
726a989a RB |
1838 | static gimple_seq |
1839 | lower_cleanup (struct leh_state *state, gimple tp) | |
6de9cd9a | 1840 | { |
b7da9fd4 RH |
1841 | struct leh_state this_state = *state; |
1842 | eh_region this_region = NULL; | |
6de9cd9a | 1843 | struct leh_tf_state fake_tf; |
726a989a | 1844 | gimple_seq result; |
d3f28910 | 1845 | bool cleanup_dead = cleanup_is_dead_in (state->cur_region); |
6de9cd9a | 1846 | |
d3f28910 | 1847 | if (flag_exceptions && !cleanup_dead) |
6de9cd9a | 1848 | { |
b7da9fd4 RH |
1849 | this_region = gen_eh_region_cleanup (state->cur_region); |
1850 | this_state.cur_region = this_region; | |
6de9cd9a DN |
1851 | } |
1852 | ||
355a7673 | 1853 | lower_eh_constructs_1 (&this_state, gimple_try_eval_ptr (tp)); |
6de9cd9a | 1854 | |
d3f28910 | 1855 | if (cleanup_dead || !eh_region_may_contain_throw (this_region)) |
1d65f45c | 1856 | return gimple_try_eval (tp); |
6de9cd9a DN |
1857 | |
1858 | /* Build enough of a try-finally state so that we can reuse | |
1859 | honor_protect_cleanup_actions. */ | |
1860 | memset (&fake_tf, 0, sizeof (fake_tf)); | |
c2255bc4 | 1861 | fake_tf.top_p = fake_tf.try_finally_expr = tp; |
6de9cd9a DN |
1862 | fake_tf.outer = state; |
1863 | fake_tf.region = this_region; | |
726a989a | 1864 | fake_tf.may_fallthru = gimple_seq_may_fallthru (gimple_try_eval (tp)); |
6de9cd9a DN |
1865 | fake_tf.may_throw = true; |
1866 | ||
6de9cd9a DN |
1867 | honor_protect_cleanup_actions (state, NULL, &fake_tf); |
1868 | ||
1869 | if (fake_tf.may_throw) | |
1870 | { | |
1871 | /* In this case honor_protect_cleanup_actions had nothing to do, | |
1872 | and we should process this normally. */ | |
355a7673 | 1873 | lower_eh_constructs_1 (state, gimple_try_cleanup_ptr (tp)); |
1d65f45c RH |
1874 | result = frob_into_branch_around (tp, this_region, |
1875 | fake_tf.fallthru_label); | |
6de9cd9a DN |
1876 | } |
1877 | else | |
1878 | { | |
1879 | /* In this case honor_protect_cleanup_actions did nearly all of | |
1880 | the work. All we have left is to append the fallthru_label. */ | |
1881 | ||
726a989a | 1882 | result = gimple_try_eval (tp); |
6de9cd9a DN |
1883 | if (fake_tf.fallthru_label) |
1884 | { | |
726a989a RB |
1885 | gimple x = gimple_build_label (fake_tf.fallthru_label); |
1886 | gimple_seq_add_stmt (&result, x); | |
6de9cd9a DN |
1887 | } |
1888 | } | |
726a989a | 1889 | return result; |
6de9cd9a DN |
1890 | } |
1891 | ||
1d65f45c | 1892 | /* Main loop for lowering eh constructs. Also moves gsi to the next |
726a989a | 1893 | statement. */ |
6de9cd9a DN |
1894 | |
1895 | static void | |
726a989a | 1896 | lower_eh_constructs_2 (struct leh_state *state, gimple_stmt_iterator *gsi) |
6de9cd9a | 1897 | { |
726a989a RB |
1898 | gimple_seq replace; |
1899 | gimple x; | |
1900 | gimple stmt = gsi_stmt (*gsi); | |
6de9cd9a | 1901 | |
726a989a | 1902 | switch (gimple_code (stmt)) |
6de9cd9a | 1903 | { |
726a989a | 1904 | case GIMPLE_CALL: |
1d65f45c RH |
1905 | { |
1906 | tree fndecl = gimple_call_fndecl (stmt); | |
1907 | tree rhs, lhs; | |
1908 | ||
1909 | if (fndecl && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL) | |
1910 | switch (DECL_FUNCTION_CODE (fndecl)) | |
1911 | { | |
1912 | case BUILT_IN_EH_POINTER: | |
1913 | /* The front end may have generated a call to | |
1914 | __builtin_eh_pointer (0) within a catch region. Replace | |
1915 | this zero argument with the current catch region number. */ | |
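	      /* E.g. (illustrative only): __builtin_eh_pointer (0) appearing
		 in the handler of EH region 3 is rewritten to
		 __builtin_eh_pointer (3).  */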
1916 | if (state->ehp_region) | |
1917 | { | |
413581ba RG |
1918 | tree nr = build_int_cst (integer_type_node, |
1919 | state->ehp_region->index); | |
1d65f45c RH |
1920 | gimple_call_set_arg (stmt, 0, nr); |
1921 | } | |
1922 | else | |
1923 | { | |
1924 | /* The user has done something silly. Remove it. */ |
9a9d280e | 1925 | rhs = null_pointer_node; |
1d65f45c RH |
1926 | goto do_replace; |
1927 | } | |
1928 | break; | |
1929 | ||
1930 | case BUILT_IN_EH_FILTER: | |
1931 | /* ??? This should never appear, but since it's a builtin it | |
1932 | is accessible to abuse by users. Just remove it and | |
1933 | replace the use with the arbitrary value zero. */ | |
1934 | rhs = build_int_cst (TREE_TYPE (TREE_TYPE (fndecl)), 0); | |
1935 | do_replace: | |
1936 | lhs = gimple_call_lhs (stmt); | |
1937 | x = gimple_build_assign (lhs, rhs); | |
1938 | gsi_insert_before (gsi, x, GSI_SAME_STMT); | |
1939 | /* FALLTHRU */ | |
1940 | ||
1941 | case BUILT_IN_EH_COPY_VALUES: | |
1942 | /* Likewise this should not appear. Remove it. */ | |
1943 | gsi_remove (gsi, true); | |
1944 | return; | |
1945 | ||
1946 | default: | |
1947 | break; | |
1948 | } | |
1949 | } | |
1950 | /* FALLTHRU */ | |
1951 | ||
726a989a | 1952 | case GIMPLE_ASSIGN: |
ba4d8f9d RG |
1953 | /* If the stmt can throw use a new temporary for the assignment |
1954 | to a LHS. This makes sure the old value of the LHS is | |
87cd4259 | 1955 | available on the EH edge. Only do so for statements that |
073a8998 | 1956 | potentially fall through (e.g., no noreturn calls), otherwise |
87cd4259 | 1957 | this new assignment might create fake fallthru regions. */ |
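      /* A hypothetical fragment (assuming -fnon-call-exceptions makes the
	 division throw):  x = y / z;  is rewritten as  tmp = y / z;
	 x = tmp;  so that the throwing statement writes only the temporary
	 and the old value of x remains available on the EH edge.  */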
ba4d8f9d RG |
1958 | if (stmt_could_throw_p (stmt) |
1959 | && gimple_has_lhs (stmt) | |
87cd4259 | 1960 | && gimple_stmt_may_fallthru (stmt) |
ba4d8f9d RG |
1961 | && !tree_could_throw_p (gimple_get_lhs (stmt)) |
1962 | && is_gimple_reg_type (TREE_TYPE (gimple_get_lhs (stmt)))) | |
1963 | { | |
1964 | tree lhs = gimple_get_lhs (stmt); | |
1965 | tree tmp = create_tmp_var (TREE_TYPE (lhs), NULL); | |
1966 | gimple s = gimple_build_assign (lhs, tmp); | |
1967 | gimple_set_location (s, gimple_location (stmt)); | |
1968 | gimple_set_block (s, gimple_block (stmt)); | |
1969 | gimple_set_lhs (stmt, tmp); | |
1970 | if (TREE_CODE (TREE_TYPE (tmp)) == COMPLEX_TYPE | |
1971 | || TREE_CODE (TREE_TYPE (tmp)) == VECTOR_TYPE) | |
1972 | DECL_GIMPLE_REG_P (tmp) = 1; | |
1973 | gsi_insert_after (gsi, s, GSI_SAME_STMT); | |
1974 | } | |
6de9cd9a | 1975 | /* Look for things that can throw exceptions, and record them. */ |
726a989a | 1976 | if (state->cur_region && stmt_could_throw_p (stmt)) |
6de9cd9a | 1977 | { |
726a989a | 1978 | record_stmt_eh_region (state->cur_region, stmt); |
6de9cd9a | 1979 | note_eh_region_may_contain_throw (state->cur_region); |
6de9cd9a DN |
1980 | } |
1981 | break; | |
1982 | ||
726a989a RB |
1983 | case GIMPLE_COND: |
1984 | case GIMPLE_GOTO: | |
1985 | case GIMPLE_RETURN: | |
1986 | maybe_record_in_goto_queue (state, stmt); | |
6de9cd9a DN |
1987 | break; |
1988 | ||
726a989a RB |
1989 | case GIMPLE_SWITCH: |
1990 | verify_norecord_switch_expr (state, stmt); | |
6de9cd9a DN |
1991 | break; |
1992 | ||
726a989a RB |
1993 | case GIMPLE_TRY: |
1994 | if (gimple_try_kind (stmt) == GIMPLE_TRY_FINALLY) | |
1995 | replace = lower_try_finally (state, stmt); | |
1996 | else | |
6de9cd9a | 1997 | { |
726a989a | 1998 | x = gimple_seq_first_stmt (gimple_try_cleanup (stmt)); |
6728ee79 | 1999 | if (!x) |
6de9cd9a | 2000 | { |
6728ee79 | 2001 | replace = gimple_try_eval (stmt); |
355a7673 | 2002 | lower_eh_constructs_1 (state, &replace); |
6de9cd9a | 2003 | } |
6728ee79 MM |
2004 | else |
2005 | switch (gimple_code (x)) | |
2006 | { | |
2007 | case GIMPLE_CATCH: | |
2008 | replace = lower_catch (state, stmt); | |
2009 | break; | |
2010 | case GIMPLE_EH_FILTER: | |
2011 | replace = lower_eh_filter (state, stmt); | |
2012 | break; | |
2013 | case GIMPLE_EH_MUST_NOT_THROW: | |
2014 | replace = lower_eh_must_not_throw (state, stmt); | |
2015 | break; | |
0a35513e AH |
2016 | case GIMPLE_EH_ELSE: |
2017 | /* This code is only valid with GIMPLE_TRY_FINALLY. */ | |
2018 | gcc_unreachable (); | |
6728ee79 MM |
2019 | default: |
2020 | replace = lower_cleanup (state, stmt); | |
2021 | break; | |
2022 | } | |
6de9cd9a | 2023 | } |
726a989a RB |
2024 | |
2025 | /* Remove the old stmt and insert the transformed sequence | |
2026 | instead. */ | |
2027 | gsi_insert_seq_before (gsi, replace, GSI_SAME_STMT); | |
2028 | gsi_remove (gsi, true); | |
2029 | ||
2030 | /* Return since we don't want gsi_next (). */ |
2031 | return; | |
6de9cd9a | 2032 | |
0a35513e AH |
2033 | case GIMPLE_EH_ELSE: |
2034 | /* We should be eliminating this in lower_try_finally et al. */ | |
2035 | gcc_unreachable (); | |
2036 | ||
6de9cd9a DN |
2037 | default: |
2038 | /* A type, a decl, or some kind of statement that we're not | |
2039 | interested in. Don't walk them. */ | |
2040 | break; | |
2041 | } | |
726a989a RB |
2042 | |
2043 | gsi_next (gsi); | |
2044 | } | |
2045 | ||
2046 | /* A helper to unwrap a gimple_seq and feed stmts to lower_eh_constructs_2. */ | |
2047 | ||
2048 | static void | |
355a7673 | 2049 | lower_eh_constructs_1 (struct leh_state *state, gimple_seq *pseq) |
726a989a RB |
2050 | { |
2051 | gimple_stmt_iterator gsi; | |
355a7673 | 2052 | for (gsi = gsi_start (*pseq); !gsi_end_p (gsi);) |
726a989a | 2053 | lower_eh_constructs_2 (state, &gsi); |
6de9cd9a DN |
2054 | } |
2055 | ||
c2924966 | 2056 | static unsigned int |
6de9cd9a DN |
2057 | lower_eh_constructs (void) |
2058 | { | |
2059 | struct leh_state null_state; | |
1d65f45c | 2060 | gimple_seq bodyp; |
726a989a | 2061 | |
1d65f45c RH |
2062 | bodyp = gimple_body (current_function_decl); |
2063 | if (bodyp == NULL) | |
2064 | return 0; | |
6de9cd9a DN |
2065 | |
2066 | finally_tree = htab_create (31, struct_ptr_hash, struct_ptr_eq, free); | |
b7da9fd4 | 2067 | eh_region_may_contain_throw_map = BITMAP_ALLOC (NULL); |
1d65f45c | 2068 | memset (&null_state, 0, sizeof (null_state)); |
6de9cd9a | 2069 | |
726a989a | 2070 | collect_finally_tree_1 (bodyp, NULL); |
355a7673 MM |
2071 | lower_eh_constructs_1 (&null_state, &bodyp); |
2072 | gimple_set_body (current_function_decl, bodyp); | |
6de9cd9a | 2073 | |
1d65f45c RH |
2074 | /* We assume there's a return statement, or something, at the end of |
2075 | the function, and thus plopping the EH sequence afterward won't |
2076 | change anything. */ | |
2077 | gcc_assert (!gimple_seq_may_fallthru (bodyp)); | |
2078 | gimple_seq_add_seq (&bodyp, eh_seq); | |
2079 | ||
2080 | /* We assume that since BODYP already existed, adding EH_SEQ to it | |
2081 | didn't change its value, and we don't have to re-set the function. */ | |
2082 | gcc_assert (bodyp == gimple_body (current_function_decl)); | |
6de9cd9a | 2083 | |
1d65f45c | 2084 | htab_delete (finally_tree); |
b7da9fd4 | 2085 | BITMAP_FREE (eh_region_may_contain_throw_map); |
1d65f45c | 2086 | eh_seq = NULL; |
f9417da1 RG |
2087 | |
2088 | /* If this function needs a language specific EH personality routine | |
2089 | and the frontend didn't already set one do so now. */ | |
2090 | if (function_needs_eh_personality (cfun) == eh_personality_lang | |
2091 | && !DECL_FUNCTION_PERSONALITY (current_function_decl)) | |
2092 | DECL_FUNCTION_PERSONALITY (current_function_decl) | |
2093 | = lang_hooks.eh_personality (); | |
2094 | ||
c2924966 | 2095 | return 0; |
6de9cd9a DN |
2096 | } |
2097 | ||
8ddbbcae | 2098 | struct gimple_opt_pass pass_lower_eh = |
6de9cd9a | 2099 | { |
8ddbbcae JH |
2100 | { |
2101 | GIMPLE_PASS, | |
6de9cd9a DN |
2102 | "eh", /* name */ |
2103 | NULL, /* gate */ | |
2104 | lower_eh_constructs, /* execute */ | |
2105 | NULL, /* sub */ | |
2106 | NULL, /* next */ | |
2107 | 0, /* static_pass_number */ | |
2108 | TV_TREE_EH, /* tv_id */ | |
2109 | PROP_gimple_lcf, /* properties_required */ | |
2110 | PROP_gimple_leh, /* properties_provided */ | |
bbbe4e7b | 2111 | 0, /* properties_destroyed */ |
6de9cd9a | 2112 | 0, /* todo_flags_start */ |
22c5fa5f | 2113 | 0 /* todo_flags_finish */ |
8ddbbcae | 2114 | } |
6de9cd9a | 2115 | }; |
6de9cd9a | 2116 | \f |
1d65f45c RH |
2117 | /* Create the multiple edges from an EH_DISPATCH statement to all of |
2118 | the possible handlers for its EH region. Return true if there's | |
2119 | no fallthru edge; false if there is. */ | |
6de9cd9a | 2120 | |
1d65f45c RH |
2121 | bool |
2122 | make_eh_dispatch_edges (gimple stmt) | |
6de9cd9a | 2123 | { |
1d65f45c RH |
2124 | eh_region r; |
2125 | eh_catch c; | |
6de9cd9a DN |
2126 | basic_block src, dst; |
2127 | ||
1d65f45c | 2128 | r = get_eh_region_from_number (gimple_eh_dispatch_region (stmt)); |
726a989a | 2129 | src = gimple_bb (stmt); |
6de9cd9a | 2130 | |
1d65f45c RH |
2131 | switch (r->type) |
2132 | { | |
2133 | case ERT_TRY: | |
2134 | for (c = r->u.eh_try.first_catch; c ; c = c->next_catch) | |
2135 | { | |
2136 | dst = label_to_block (c->label); | |
2137 | make_edge (src, dst, 0); | |
19114537 | 2138 | |
1d65f45c RH |
2139 | /* A catch-all handler doesn't have a fallthru. */ |
2140 | if (c->type_list == NULL) | |
2141 | return false; | |
2142 | } | |
2143 | break; | |
a8ee227c | 2144 | |
1d65f45c RH |
2145 | case ERT_ALLOWED_EXCEPTIONS: |
2146 | dst = label_to_block (r->u.allowed.label); | |
2147 | make_edge (src, dst, 0); | |
2148 | break; | |
2149 | ||
2150 | default: | |
2151 | gcc_unreachable (); | |
2152 | } | |
2153 | ||
2154 | return true; | |
a8ee227c JH |
2155 | } |
2156 | ||
1d65f45c RH |
2157 | /* Create the single EH edge from STMT to its nearest landing pad, |
2158 | if there is such a landing pad within the current function. */ | |
2159 | ||
6de9cd9a | 2160 | void |
726a989a | 2161 | make_eh_edges (gimple stmt) |
6de9cd9a | 2162 | { |
1d65f45c RH |
2163 | basic_block src, dst; |
2164 | eh_landing_pad lp; | |
2165 | int lp_nr; | |
6de9cd9a | 2166 | |
1d65f45c RH |
2167 | lp_nr = lookup_stmt_eh_lp (stmt); |
2168 | if (lp_nr <= 0) | |
2169 | return; | |
6de9cd9a | 2170 | |
1d65f45c RH |
2171 | lp = get_eh_landing_pad_from_number (lp_nr); |
2172 | gcc_assert (lp != NULL); | |
a203a221 | 2173 | |
1d65f45c RH |
2174 | src = gimple_bb (stmt); |
2175 | dst = label_to_block (lp->post_landing_pad); | |
2176 | make_edge (src, dst, EDGE_EH); | |
6de9cd9a DN |
2177 | } |
2178 | ||
1d65f45c RH |
2179 | /* Do the work in redirecting EDGE_IN to NEW_BB within the EH region tree; |
2180 | do not actually perform the final edge redirection. | |
a3710436 | 2181 | |
1d65f45c RH |
2182 | CHANGE_REGION is true when we're being called from cleanup_empty_eh and |
2183 | we intend to change the destination EH region as well; this means | |
2184 | EH_LANDING_PAD_NR must already be set on the destination block label. | |
2185 | If false, we're being called from generic cfg manipulation code and we | |
2186 | should preserve our place within the region tree. */ | |
2187 | ||
2188 | static void | |
2189 | redirect_eh_edge_1 (edge edge_in, basic_block new_bb, bool change_region) | |
a3710436 | 2190 | { |
1d65f45c RH |
2191 | eh_landing_pad old_lp, new_lp; |
2192 | basic_block old_bb; | |
2193 | gimple throw_stmt; | |
2194 | int old_lp_nr, new_lp_nr; | |
2195 | tree old_label, new_label; | |
2196 | edge_iterator ei; | |
2197 | edge e; | |
2198 | ||
2199 | old_bb = edge_in->dest; | |
2200 | old_label = gimple_block_label (old_bb); | |
2201 | old_lp_nr = EH_LANDING_PAD_NR (old_label); | |
2202 | gcc_assert (old_lp_nr > 0); | |
2203 | old_lp = get_eh_landing_pad_from_number (old_lp_nr); | |
2204 | ||
2205 | throw_stmt = last_stmt (edge_in->src); | |
2206 | gcc_assert (lookup_stmt_eh_lp (throw_stmt) == old_lp_nr); | |
2207 | ||
2208 | new_label = gimple_block_label (new_bb); | |
a3710436 | 2209 | |
1d65f45c RH |
2210 | /* Look for an existing region that might be using NEW_BB already. */ |
2211 | new_lp_nr = EH_LANDING_PAD_NR (new_label); | |
2212 | if (new_lp_nr) | |
a3710436 | 2213 | { |
1d65f45c RH |
2214 | new_lp = get_eh_landing_pad_from_number (new_lp_nr); |
2215 | gcc_assert (new_lp); | |
b8698a0f | 2216 | |
1d65f45c RH |
2217 | /* Unless CHANGE_REGION is true, the new and old landing pad |
2218 | had better be associated with the same EH region. */ | |
2219 | gcc_assert (change_region || new_lp->region == old_lp->region); | |
a3710436 JH |
2220 | } |
2221 | else | |
2222 | { | |
1d65f45c RH |
2223 | new_lp = NULL; |
2224 | gcc_assert (!change_region); | |
a3710436 JH |
2225 | } |
2226 | ||
1d65f45c RH |
2227 | /* Notice when we redirect the last EH edge away from OLD_BB. */ |
2228 | FOR_EACH_EDGE (e, ei, old_bb->preds) | |
2229 | if (e != edge_in && (e->flags & EDGE_EH)) | |
2230 | break; | |
cc7220fd | 2231 | |
1d65f45c | 2232 | if (new_lp) |
cc7220fd | 2233 | { |
1d65f45c RH |
2234 | /* NEW_LP already exists. If there are still edges into OLD_LP, |
2235 | there's nothing to do with the EH tree. If there are no more | |
2236 | edges into OLD_LP, then we want to remove OLD_LP as it is unused. | |
2237 | If CHANGE_REGION is true, then our caller is expecting to remove | |
2238 | the landing pad. */ | |
2239 | if (e == NULL && !change_region) | |
2240 | remove_eh_landing_pad (old_lp); | |
cc7220fd | 2241 | } |
1d65f45c | 2242 | else |
cc7220fd | 2243 | { |
1d65f45c RH |
2244 | /* No correct landing pad exists. If there are no more edges |
2245 | into OLD_LP, then we can simply re-use the existing landing pad. | |
2246 | Otherwise, we have to create a new landing pad. */ | |
2247 | if (e == NULL) | |
2248 | { | |
2249 | EH_LANDING_PAD_NR (old_lp->post_landing_pad) = 0; | |
2250 | new_lp = old_lp; | |
2251 | } | |
2252 | else | |
2253 | new_lp = gen_eh_landing_pad (old_lp->region); | |
2254 | new_lp->post_landing_pad = new_label; | |
2255 | EH_LANDING_PAD_NR (new_label) = new_lp->index; | |
cc7220fd | 2256 | } |
1d65f45c RH |
2257 | |
2258 | /* Maybe move the throwing statement to the new region. */ | |
2259 | if (old_lp != new_lp) | |
cc7220fd | 2260 | { |
1d65f45c RH |
2261 | remove_stmt_from_eh_lp (throw_stmt); |
2262 | add_stmt_to_eh_lp (throw_stmt, new_lp->index); | |
cc7220fd | 2263 | } |
cc7220fd JH |
2264 | } |
2265 | ||
1d65f45c | 2266 | /* Redirect EH edge E to NEW_BB. */ |
726a989a | 2267 | |
1d65f45c RH |
2268 | edge |
2269 | redirect_eh_edge (edge edge_in, basic_block new_bb) | |
cc7220fd | 2270 | { |
1d65f45c RH |
2271 | redirect_eh_edge_1 (edge_in, new_bb, false); |
2272 | return ssa_redirect_edge (edge_in, new_bb); | |
2273 | } | |
cc7220fd | 2274 | |
1d65f45c RH |
2275 | /* This is a subroutine of gimple_redirect_edge_and_branch. Update the |
2276 | labels for redirecting a non-fallthru EH_DISPATCH edge E to NEW_BB. | |
2277 | The actual edge update will happen in the caller. */ | |
cc7220fd | 2278 | |
1d65f45c RH |
2279 | void |
2280 | redirect_eh_dispatch_edge (gimple stmt, edge e, basic_block new_bb) | |
2281 | { | |
2282 | tree new_lab = gimple_block_label (new_bb); | |
2283 | bool any_changed = false; | |
2284 | basic_block old_bb; | |
2285 | eh_region r; | |
2286 | eh_catch c; | |
2287 | ||
2288 | r = get_eh_region_from_number (gimple_eh_dispatch_region (stmt)); | |
2289 | switch (r->type) | |
cc7220fd | 2290 | { |
1d65f45c RH |
2291 | case ERT_TRY: |
2292 | for (c = r->u.eh_try.first_catch; c ; c = c->next_catch) | |
cc7220fd | 2293 | { |
1d65f45c RH |
2294 | old_bb = label_to_block (c->label); |
2295 | if (old_bb == e->dest) | |
2296 | { | |
2297 | c->label = new_lab; | |
2298 | any_changed = true; | |
2299 | } | |
cc7220fd | 2300 | } |
1d65f45c RH |
2301 | break; |
2302 | ||
2303 | case ERT_ALLOWED_EXCEPTIONS: | |
2304 | old_bb = label_to_block (r->u.allowed.label); | |
2305 | gcc_assert (old_bb == e->dest); | |
2306 | r->u.allowed.label = new_lab; | |
2307 | any_changed = true; | |
2308 | break; | |
2309 | ||
2310 | default: | |
2311 | gcc_unreachable (); | |
cc7220fd | 2312 | } |
726a989a | 2313 | |
1d65f45c | 2314 | gcc_assert (any_changed); |
cc7220fd | 2315 | } |
6de9cd9a | 2316 | \f |
726a989a RB |
2317 | /* Helper function for operation_could_trap_p and stmt_could_throw_p. */ |
2318 | ||
890065bf | 2319 | bool |
726a989a RB |
2320 | operation_could_trap_helper_p (enum tree_code op, |
2321 | bool fp_operation, | |
2322 | bool honor_trapv, | |
2323 | bool honor_nans, | |
2324 | bool honor_snans, | |
2325 | tree divisor, | |
2326 | bool *handled) | |
2327 | { | |
2328 | *handled = true; | |
2329 | switch (op) | |
2330 | { | |
2331 | case TRUNC_DIV_EXPR: | |
2332 | case CEIL_DIV_EXPR: | |
2333 | case FLOOR_DIV_EXPR: | |
2334 | case ROUND_DIV_EXPR: | |
2335 | case EXACT_DIV_EXPR: | |
2336 | case CEIL_MOD_EXPR: | |
2337 | case FLOOR_MOD_EXPR: | |
2338 | case ROUND_MOD_EXPR: | |
2339 | case TRUNC_MOD_EXPR: | |
2340 | case RDIV_EXPR: | |
2341 | if (honor_snans || honor_trapv) | |
2342 | return true; | |
2343 | if (fp_operation) | |
2344 | return flag_trapping_math; | |
2345 | if (!TREE_CONSTANT (divisor) || integer_zerop (divisor)) | |
2346 | return true; | |
2347 | return false; | |
2348 | ||
2349 | case LT_EXPR: | |
2350 | case LE_EXPR: | |
2351 | case GT_EXPR: | |
2352 | case GE_EXPR: | |
2353 | case LTGT_EXPR: | |
2354 | /* Some floating point comparisons may trap. */ | |
2355 | return honor_nans; | |
2356 | ||
2357 | case EQ_EXPR: | |
2358 | case NE_EXPR: | |
2359 | case UNORDERED_EXPR: | |
2360 | case ORDERED_EXPR: | |
2361 | case UNLT_EXPR: | |
2362 | case UNLE_EXPR: | |
2363 | case UNGT_EXPR: | |
2364 | case UNGE_EXPR: | |
2365 | case UNEQ_EXPR: | |
2366 | return honor_snans; | |
2367 | ||
2368 | case CONVERT_EXPR: | |
2369 | case FIX_TRUNC_EXPR: | |
2370 | /* Conversion of floating point might trap. */ | |
2371 | return honor_nans; | |
2372 | ||
2373 | case NEGATE_EXPR: | |
2374 | case ABS_EXPR: | |
2375 | case CONJ_EXPR: | |
2376 | /* These operations don't trap with floating point. */ | |
2377 | if (honor_trapv) | |
2378 | return true; | |
2379 | return false; | |
2380 | ||
2381 | case PLUS_EXPR: | |
2382 | case MINUS_EXPR: | |
2383 | case MULT_EXPR: | |
2384 | /* Any floating arithmetic may trap. */ | |
2385 | if (fp_operation && flag_trapping_math) | |
2386 | return true; | |
2387 | if (honor_trapv) | |
2388 | return true; | |
2389 | return false; | |
2390 | ||
f5e5b46c RG |
2391 | case COMPLEX_EXPR: |
2392 | case CONSTRUCTOR: | |
2393 | /* Constructing an object cannot trap. */ | |
2394 | return false; | |
2395 | ||
726a989a RB |
2396 | default: |
2397 | /* Any floating arithmetic may trap. */ | |
2398 | if (fp_operation && flag_trapping_math) | |
2399 | return true; | |
2400 | ||
2401 | *handled = false; | |
2402 | return false; | |
2403 | } | |
2404 | } | |
2405 | ||
2406 | /* Return true if operation OP may trap. FP_OPERATION is true if OP is applied | |
2407 | on floating-point values. HONOR_TRAPV is true if OP is applied on integer | |
2408 | type operands that may trap. If OP is a division operator, DIVISOR contains | |
2409 | the value of the divisor. */ | |
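/* For example (illustrative only): operation_could_trap_p (TRUNC_DIV_EXPR,
   false, false, d) returns true unless D is a nonzero integer constant,
   since the division may trap on a zero divisor; with HONOR_TRAPV set,
   a PLUS_EXPR on trapping integer types is likewise reported as
   potentially trapping.  */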
2410 | ||
2411 | bool | |
2412 | operation_could_trap_p (enum tree_code op, bool fp_operation, bool honor_trapv, | |
2413 | tree divisor) | |
2414 | { | |
2415 | bool honor_nans = (fp_operation && flag_trapping_math | |
2416 | && !flag_finite_math_only); | |
2417 | bool honor_snans = fp_operation && flag_signaling_nans != 0; | |
2418 | bool handled; | |
2419 | ||
2420 | if (TREE_CODE_CLASS (op) != tcc_comparison | |
2421 | && TREE_CODE_CLASS (op) != tcc_unary | |
2422 | && TREE_CODE_CLASS (op) != tcc_binary) | |
2423 | return false; | |
2424 | ||
2425 | return operation_could_trap_helper_p (op, fp_operation, honor_trapv, | |
2426 | honor_nans, honor_snans, divisor, | |
2427 | &handled); | |
2428 | } | |
2429 | ||
2430 | /* Return true if EXPR can trap, as in dereferencing an invalid pointer | |
1eaba2f2 RH |
2431 | location or floating point arithmetic. C.f. the rtl version, may_trap_p. |
2432 | This routine expects only GIMPLE lhs or rhs input. */ | |
6de9cd9a DN |
2433 | |
2434 | bool | |
2435 | tree_could_trap_p (tree expr) | |
2436 | { | |
726a989a | 2437 | enum tree_code code; |
1eaba2f2 | 2438 | bool fp_operation = false; |
9675412f | 2439 | bool honor_trapv = false; |
726a989a | 2440 | tree t, base, div = NULL_TREE; |
6de9cd9a | 2441 | |
726a989a RB |
2442 | if (!expr) |
2443 | return false; | |
1d65f45c | 2444 | |
726a989a RB |
2445 | code = TREE_CODE (expr); |
2446 | t = TREE_TYPE (expr); | |
2447 | ||
2448 | if (t) | |
1eaba2f2 | 2449 | { |
04b03edb RAE |
2450 | if (COMPARISON_CLASS_P (expr)) |
2451 | fp_operation = FLOAT_TYPE_P (TREE_TYPE (TREE_OPERAND (expr, 0))); | |
2452 | else | |
2453 | fp_operation = FLOAT_TYPE_P (t); | |
726a989a | 2454 | honor_trapv = INTEGRAL_TYPE_P (t) && TYPE_OVERFLOW_TRAPS (t); |
1eaba2f2 RH |
2455 | } |
2456 | ||
726a989a RB |
2457 | if (TREE_CODE_CLASS (code) == tcc_binary) |
2458 | div = TREE_OPERAND (expr, 1); | |
2459 | if (operation_could_trap_p (code, fp_operation, honor_trapv, div)) | |
2460 | return true; | |
2461 | ||
d25cee4d | 2462 | restart: |
6de9cd9a DN |
2463 | switch (code) |
2464 | { | |
ac182688 | 2465 | case TARGET_MEM_REF: |
4d948885 RG |
2466 | if (TREE_CODE (TMR_BASE (expr)) == ADDR_EXPR |
2467 | && !TMR_INDEX (expr) && !TMR_INDEX2 (expr)) | |
4b228e61 RG |
2468 | return false; |
2469 | return !TREE_THIS_NOTRAP (expr); | |
ac182688 | 2470 | |
6de9cd9a DN |
2471 | case COMPONENT_REF: |
2472 | case REALPART_EXPR: | |
2473 | case IMAGPART_EXPR: | |
2474 | case BIT_FIELD_REF: | |
483edb92 | 2475 | case VIEW_CONVERT_EXPR: |
d25cee4d RH |
2476 | case WITH_SIZE_EXPR: |
2477 | expr = TREE_OPERAND (expr, 0); | |
2478 | code = TREE_CODE (expr); | |
2479 | goto restart; | |
a7e5372d ZD |
2480 | |
2481 | case ARRAY_RANGE_REF: | |
11fc4275 EB |
2482 | base = TREE_OPERAND (expr, 0); |
2483 | if (tree_could_trap_p (base)) | |
a7e5372d | 2484 | return true; |
11fc4275 EB |
2485 | if (TREE_THIS_NOTRAP (expr)) |
2486 | return false; | |
11fc4275 | 2487 | return !range_in_array_bounds_p (expr); |
a7e5372d ZD |
2488 | |
2489 | case ARRAY_REF: | |
2490 | base = TREE_OPERAND (expr, 0); | |
a7e5372d ZD |
2491 | if (tree_could_trap_p (base)) |
2492 | return true; | |
a7e5372d ZD |
2493 | if (TREE_THIS_NOTRAP (expr)) |
2494 | return false; | |
a7e5372d | 2495 | return !in_array_bounds_p (expr); |
6de9cd9a | 2496 | |
70f34814 RG |
2497 | case MEM_REF: |
2498 | if (TREE_CODE (TREE_OPERAND (expr, 0)) == ADDR_EXPR) | |
2499 | return false; | |
2500 | /* Fallthru. */ | |
6de9cd9a | 2501 | case INDIRECT_REF: |
1eaba2f2 RH |
2502 | return !TREE_THIS_NOTRAP (expr); |
2503 | ||
2504 | case ASM_EXPR: | |
2505 | return TREE_THIS_VOLATILE (expr); | |
5cb2183e | 2506 | |
726a989a RB |
2507 | case CALL_EXPR: |
2508 | t = get_callee_fndecl (expr); | |
2509 | /* Assume that calls to weak functions may trap. */ | |
f2c3a8ce | 2510 | if (!t || !DECL_P (t)) |
1eaba2f2 | 2511 | return true; |
f2c3a8ce JJ |
2512 | if (DECL_WEAK (t)) |
2513 | return tree_could_trap_p (t); | |
2514 | return false; | |
2515 | ||
2516 | case FUNCTION_DECL: | |
2517 | /* Assume that accesses to weak functions may trap, unless we know | |
2518 | they are certainly defined in current TU or in some other | |
2519 | LTO partition. */ | |
2520 | if (DECL_WEAK (expr)) | |
2521 | { | |
2522 | struct cgraph_node *node; | |
2523 | if (!DECL_EXTERNAL (expr)) | |
2524 | return false; | |
2525 | node = cgraph_function_node (cgraph_get_node (expr), NULL); | |
960bfb69 | 2526 | if (node && node->symbol.in_other_partition) |
f2c3a8ce JJ |
2527 | return false; |
2528 | return true; | |
2529 | } | |
2530 | return false; | |
2531 | ||
2532 | case VAR_DECL: | |
2533 | /* Assume that accesses to weak vars may trap, unless we know | |
2534 | they are certainly defined in current TU or in some other | |
2535 | LTO partition. */ | |
2536 | if (DECL_WEAK (expr)) | |
2537 | { | |
2538 | struct varpool_node *node; | |
2539 | if (!DECL_EXTERNAL (expr)) | |
2540 | return false; | |
2541 | node = varpool_variable_node (varpool_get_node (expr), NULL); | |
960bfb69 | 2542 | if (node && node->symbol.in_other_partition) |
f2c3a8ce JJ |
2543 | return false; |
2544 | return true; | |
2545 | } | |
1eaba2f2 RH |
2546 | return false; |
2547 | ||
726a989a RB |
2548 | default: |
2549 | return false; | |
2550 | } | |
2551 | } | |
1eaba2f2 | 2552 | |
1eaba2f2 | 2553 | |
726a989a RB |
2554 | /* Helper for stmt_could_throw_p. Return true if STMT (assumed to be |
2555 | an assignment or a conditional) may throw. */ | |
1eaba2f2 | 2556 | |
726a989a RB |
2557 | static bool |
2558 | stmt_could_throw_1_p (gimple stmt) | |
2559 | { | |
2560 | enum tree_code code = gimple_expr_code (stmt); | |
2561 | bool honor_nans = false; | |
2562 | bool honor_snans = false; | |
2563 | bool fp_operation = false; | |
2564 | bool honor_trapv = false; | |
2565 | tree t; | |
2566 | size_t i; | |
2567 | bool handled, ret; | |
9675412f | 2568 | |
726a989a RB |
2569 | if (TREE_CODE_CLASS (code) == tcc_comparison |
2570 | || TREE_CODE_CLASS (code) == tcc_unary | |
2571 | || TREE_CODE_CLASS (code) == tcc_binary) | |
2572 | { | |
dd46054a RG |
2573 | if (is_gimple_assign (stmt) |
2574 | && TREE_CODE_CLASS (code) == tcc_comparison) | |
2575 | t = TREE_TYPE (gimple_assign_rhs1 (stmt)); | |
2576 | else if (gimple_code (stmt) == GIMPLE_COND) | |
2577 | t = TREE_TYPE (gimple_cond_lhs (stmt)); | |
2578 | else | |
2579 | t = gimple_expr_type (stmt); | |
726a989a RB |
2580 | fp_operation = FLOAT_TYPE_P (t); |
2581 | if (fp_operation) | |
2582 | { | |
2583 | honor_nans = flag_trapping_math && !flag_finite_math_only; | |
2584 | honor_snans = flag_signaling_nans != 0; | |
2585 | } | |
2586 | else if (INTEGRAL_TYPE_P (t) && TYPE_OVERFLOW_TRAPS (t)) | |
2587 | honor_trapv = true; | |
2588 | } | |
2589 | ||
2590 | /* Check if the main expression may trap. */ | |
2591 | t = is_gimple_assign (stmt) ? gimple_assign_rhs2 (stmt) : NULL; | |
2592 | ret = operation_could_trap_helper_p (code, fp_operation, honor_trapv, | |
2593 | honor_nans, honor_snans, t, | |
2594 | &handled); | |
2595 | if (handled) | |
2596 | return ret; | |
2597 | ||
2598 | /* If the expression does not trap, see if any of the individual operands may | |
2599 | trap. */ | |
2600 | for (i = 0; i < gimple_num_ops (stmt); i++) | |
2601 | if (tree_could_trap_p (gimple_op (stmt, i))) | |
2602 | return true; | |
2603 | ||
2604 | return false; | |
2605 | } | |
2606 | ||
2607 | ||
2608 | /* Return true if statement STMT could throw an exception. */ | |
2609 | ||
2610 | bool | |
2611 | stmt_could_throw_p (gimple stmt) | |
2612 | { | |
726a989a RB |
2613 | if (!flag_exceptions) |
2614 | return false; | |
2615 | ||
2616 | /* The only statements that can throw an exception are assignments, | |
1d65f45c RH |
2617 | conditionals, calls, resx, and asms. */ |
2618 | switch (gimple_code (stmt)) | |
2619 | { | |
2620 | case GIMPLE_RESX: | |
2621 | return true; | |
726a989a | 2622 | |
1d65f45c RH |
2623 | case GIMPLE_CALL: |
2624 | return !gimple_call_nothrow_p (stmt); | |
726a989a | 2625 | |
1d65f45c RH |
2626 | case GIMPLE_ASSIGN: |
2627 | case GIMPLE_COND: | |
8f4f502f | 2628 | if (!cfun->can_throw_non_call_exceptions) |
1d65f45c RH |
2629 | return false; |
2630 | return stmt_could_throw_1_p (stmt); | |
726a989a | 2631 | |
1d65f45c | 2632 | case GIMPLE_ASM: |
8f4f502f | 2633 | if (!cfun->can_throw_non_call_exceptions) |
1d65f45c RH |
2634 | return false; |
2635 | return gimple_asm_volatile_p (stmt); | |
2636 | ||
2637 | default: | |
2638 | return false; | |
2639 | } | |
6de9cd9a DN |
2640 | } |
2641 | ||
726a989a RB |
2642 | |
2643 | /* Return true if expression T could throw an exception. */ | |
2644 | ||
6de9cd9a DN |
2645 | bool |
2646 | tree_could_throw_p (tree t) | |
2647 | { | |
2648 | if (!flag_exceptions) | |
2649 | return false; | |
726a989a | 2650 | if (TREE_CODE (t) == MODIFY_EXPR) |
6de9cd9a | 2651 | { |
8f4f502f | 2652 | if (cfun->can_throw_non_call_exceptions |
1d65f45c RH |
2653 | && tree_could_trap_p (TREE_OPERAND (t, 0))) |
2654 | return true; | |
726a989a | 2655 | t = TREE_OPERAND (t, 1); |
6de9cd9a DN |
2656 | } |
2657 | ||
d25cee4d RH |
2658 | if (TREE_CODE (t) == WITH_SIZE_EXPR) |
2659 | t = TREE_OPERAND (t, 0); | |
6de9cd9a DN |
2660 | if (TREE_CODE (t) == CALL_EXPR) |
2661 | return (call_expr_flags (t) & ECF_NOTHROW) == 0; | |
8f4f502f | 2662 | if (cfun->can_throw_non_call_exceptions) |
67c605a5 | 2663 | return tree_could_trap_p (t); |
6de9cd9a DN |
2664 | return false; |
2665 | } | |
2666 | ||
33977f81 JH |
2667 | /* Return true if STMT can throw an exception that is not caught within |
2668 | the current function (CFUN). */ | |
2669 | ||
2670 | bool | |
2671 | stmt_can_throw_external (gimple stmt) | |
2672 | { | |
1d65f45c | 2673 | int lp_nr; |
33977f81 JH |
2674 | |
2675 | if (!stmt_could_throw_p (stmt)) | |
2676 | return false; | |
2677 | ||
1d65f45c RH |
2678 | lp_nr = lookup_stmt_eh_lp (stmt); |
2679 | return lp_nr == 0; | |
33977f81 | 2680 | } |
726a989a RB |
2681 | |
2682 | /* Return true if STMT can throw an exception that is caught within | |
2683 | the current function (CFUN). */ | |
2684 | ||
6de9cd9a | 2685 | bool |
726a989a | 2686 | stmt_can_throw_internal (gimple stmt) |
6de9cd9a | 2687 | { |
1d65f45c | 2688 | int lp_nr; |
726a989a | 2689 | |
1d65f45c | 2690 | if (!stmt_could_throw_p (stmt)) |
6de9cd9a | 2691 | return false; |
726a989a | 2692 | |
1d65f45c RH |
2693 | lp_nr = lookup_stmt_eh_lp (stmt); |
2694 | return lp_nr > 0; | |
2695 | } | |
2696 | ||
2697 | /* Given a statement STMT in IFUN, if STMT can no longer throw, then | |
2698 | remove any entry it might have from the EH table. Return true if | |
2699 | any change was made. */ | |
2700 | ||
2701 | bool | |
2702 | maybe_clean_eh_stmt_fn (struct function *ifun, gimple stmt) | |
2703 | { | |
2704 | if (stmt_could_throw_p (stmt)) | |
2705 | return false; | |
2706 | return remove_stmt_from_eh_lp_fn (ifun, stmt); | |
6de9cd9a DN |
2707 | } |
2708 | ||
1d65f45c RH |
2709 | /* Likewise, but always use the current function. */ |
2710 | ||
2711 | bool | |
2712 | maybe_clean_eh_stmt (gimple stmt) | |
2713 | { | |
2714 | return maybe_clean_eh_stmt_fn (cfun, stmt); | |
2715 | } | |
6de9cd9a | 2716 | |
af47810a RH |
2717 | /* Given a statement OLD_STMT and a new statement NEW_STMT that has replaced |
2718 | OLD_STMT in the function, remove OLD_STMT from the EH table and put NEW_STMT | |
2719 | in the table if it should be in there. Return TRUE if a replacement was | |
2720 | done that may require an EH edge purge. */ |
2721 | ||
1d65f45c RH |
2722 | bool |
2723 | maybe_clean_or_replace_eh_stmt (gimple old_stmt, gimple new_stmt) | |
1eaba2f2 | 2724 | { |
1d65f45c | 2725 | int lp_nr = lookup_stmt_eh_lp (old_stmt); |
af47810a | 2726 | |
1d65f45c | 2727 | if (lp_nr != 0) |
af47810a | 2728 | { |
726a989a | 2729 | bool new_stmt_could_throw = stmt_could_throw_p (new_stmt); |
af47810a RH |
2730 | |
2731 | if (new_stmt == old_stmt && new_stmt_could_throw) | |
2732 | return false; | |
2733 | ||
1d65f45c | 2734 | remove_stmt_from_eh_lp (old_stmt); |
af47810a RH |
2735 | if (new_stmt_could_throw) |
2736 | { | |
1d65f45c | 2737 | add_stmt_to_eh_lp (new_stmt, lp_nr); |
af47810a RH |
2738 | return false; |
2739 | } | |
2740 | else | |
2741 | return true; | |
2742 | } | |
2743 | ||
1eaba2f2 RH |
2744 | return false; |
2745 | } | |
1d65f45c | 2746 | |
073a8998 | 2747 | /* Given a statement OLD_STMT in OLD_FUN and a duplicate statement NEW_STMT |
1d65f45c RH |
2748 | in NEW_FUN, copy the EH table data from OLD_STMT to NEW_STMT. The MAP |
2749 | operand is the return value of duplicate_eh_regions. */ | |
2750 | ||
2751 | bool | |
2752 | maybe_duplicate_eh_stmt_fn (struct function *new_fun, gimple new_stmt, | |
2753 | struct function *old_fun, gimple old_stmt, | |
2754 | struct pointer_map_t *map, int default_lp_nr) | |
2755 | { | |
2756 | int old_lp_nr, new_lp_nr; | |
2757 | void **slot; | |
2758 | ||
2759 | if (!stmt_could_throw_p (new_stmt)) | |
2760 | return false; | |
2761 | ||
2762 | old_lp_nr = lookup_stmt_eh_lp_fn (old_fun, old_stmt); | |
2763 | if (old_lp_nr == 0) | |
2764 | { | |
2765 | if (default_lp_nr == 0) | |
2766 | return false; | |
2767 | new_lp_nr = default_lp_nr; | |
2768 | } | |
2769 | else if (old_lp_nr > 0) | |
2770 | { | |
2771 | eh_landing_pad old_lp, new_lp; | |
2772 | ||
2773 | old_lp = VEC_index (eh_landing_pad, old_fun->eh->lp_array, old_lp_nr); | |
2774 | slot = pointer_map_contains (map, old_lp); | |
2775 | new_lp = (eh_landing_pad) *slot; | |
2776 | new_lp_nr = new_lp->index; | |
2777 | } | |
2778 | else | |
2779 | { | |
2780 | eh_region old_r, new_r; | |
2781 | ||
2782 | old_r = VEC_index (eh_region, old_fun->eh->region_array, -old_lp_nr); | |
2783 | slot = pointer_map_contains (map, old_r); | |
2784 | new_r = (eh_region) *slot; | |
2785 | new_lp_nr = -new_r->index; | |
2786 | } | |
2787 | ||
2788 | add_stmt_to_eh_lp_fn (new_fun, new_stmt, new_lp_nr); | |
2789 | return true; | |
2790 | } | |
2791 | ||
2792 | /* Similar, but both OLD_STMT and NEW_STMT are within the current function, | |
2793 | and thus no remapping is required. */ | |
2794 | ||
2795 | bool | |
2796 | maybe_duplicate_eh_stmt (gimple new_stmt, gimple old_stmt) | |
2797 | { | |
2798 | int lp_nr; | |
2799 | ||
2800 | if (!stmt_could_throw_p (new_stmt)) | |
2801 | return false; | |
2802 | ||
2803 | lp_nr = lookup_stmt_eh_lp (old_stmt); | |
2804 | if (lp_nr == 0) | |
2805 | return false; | |
2806 | ||
2807 | add_stmt_to_eh_lp (new_stmt, lp_nr); | |
2808 | return true; | |
2809 | } | |
a24549d4 | 2810 | \f |
726a989a RB |
2811 | /* Returns TRUE if oneh and twoh are exception handlers (gimple_try_cleanup of |
2812 | GIMPLE_TRY) that are similar enough to be considered the same. Currently | |
2813 | this only handles handlers consisting of a single call, as that's the | |
2814 | important case for C++: a destructor call for a particular object showing | |
2815 | up in multiple handlers. */ | |
a24549d4 JM |
2816 | |
2817 | static bool | |
726a989a | 2818 | same_handler_p (gimple_seq oneh, gimple_seq twoh) |
a24549d4 | 2819 | { |
726a989a RB |
2820 | gimple_stmt_iterator gsi; |
2821 | gimple ones, twos; | |
2822 | unsigned int ai; | |
a24549d4 | 2823 | |
726a989a RB |
2824 | gsi = gsi_start (oneh); |
2825 | if (!gsi_one_before_end_p (gsi)) | |
a24549d4 | 2826 | return false; |
726a989a | 2827 | ones = gsi_stmt (gsi); |
a24549d4 | 2828 | |
726a989a RB |
2829 | gsi = gsi_start (twoh); |
2830 | if (!gsi_one_before_end_p (gsi)) | |
a24549d4 | 2831 | return false; |
726a989a RB |
2832 | twos = gsi_stmt (gsi); |
2833 | ||
2834 | if (!is_gimple_call (ones) | |
2835 | || !is_gimple_call (twos) | |
2836 | || gimple_call_lhs (ones) | |
2837 | || gimple_call_lhs (twos) | |
2838 | || gimple_call_chain (ones) | |
2839 | || gimple_call_chain (twos) | |
25583c4f | 2840 | || !gimple_call_same_target_p (ones, twos) |
726a989a | 2841 | || gimple_call_num_args (ones) != gimple_call_num_args (twos)) |
a24549d4 JM |
2842 | return false; |
2843 | ||
726a989a RB |
2844 | for (ai = 0; ai < gimple_call_num_args (ones); ++ai) |
2845 | if (!operand_equal_p (gimple_call_arg (ones, ai), | |
1d65f45c | 2846 | gimple_call_arg (twos, ai), 0)) |
a24549d4 JM |
2847 | return false; |
2848 | ||
2849 | return true; | |
2850 | } | |
2851 | ||
2852 | /* Optimize | |
2853 | try { A() } finally { try { ~B() } catch { ~A() } } | |
2854 | try { ... } finally { ~A() } | |
2855 | into | |
2856 | try { A() } catch { ~B() } | |
2857 | try { ~B() ... } finally { ~A() } | |
2858 | ||
2859 | This occurs frequently in C++, where A is a local variable and B is a | |
2860 | temporary used in the initializer for A. */ | |
2861 | ||
2862 | static void | |
726a989a | 2863 | optimize_double_finally (gimple one, gimple two) |
a24549d4 | 2864 | { |
726a989a RB |
2865 | gimple oneh; |
2866 | gimple_stmt_iterator gsi; | |
355a7673 | 2867 | gimple_seq cleanup; |
a24549d4 | 2868 | |
355a7673 MM |
2869 | cleanup = gimple_try_cleanup (one); |
2870 | gsi = gsi_start (cleanup); | |
726a989a | 2871 | if (!gsi_one_before_end_p (gsi)) |
a24549d4 JM |
2872 | return; |
2873 | ||
726a989a RB |
2874 | oneh = gsi_stmt (gsi); |
2875 | if (gimple_code (oneh) != GIMPLE_TRY | |
2876 | || gimple_try_kind (oneh) != GIMPLE_TRY_CATCH) | |
a24549d4 JM |
2877 | return; |
2878 | ||
726a989a | 2879 | if (same_handler_p (gimple_try_cleanup (oneh), gimple_try_cleanup (two))) |
a24549d4 | 2880 | { |
726a989a | 2881 | gimple_seq seq = gimple_try_eval (oneh); |
a24549d4 | 2882 | |
726a989a RB |
2883 | gimple_try_set_cleanup (one, seq); |
2884 | gimple_try_set_kind (one, GIMPLE_TRY_CATCH); | |
2885 | seq = copy_gimple_seq_and_replace_locals (seq); | |
2886 | gimple_seq_add_seq (&seq, gimple_try_eval (two)); | |
2887 | gimple_try_set_eval (two, seq); | |
a24549d4 JM |
2888 | } |
2889 | } | |
2890 | ||
2891 | /* Perform EH refactoring optimizations that are simpler to do when code | |
84fbffb2 | 2892 | flow has been lowered but EH structures haven't. */ |
a24549d4 JM |
2893 | |
2894 | static void | |
726a989a | 2895 | refactor_eh_r (gimple_seq seq) |
a24549d4 | 2896 | { |
726a989a RB |
2897 | gimple_stmt_iterator gsi; |
2898 | gimple one, two; | |
a24549d4 | 2899 | |
726a989a RB |
2900 | one = NULL; |
2901 | two = NULL; | |
2902 | gsi = gsi_start (seq); | |
2903 | while (1) | |
2904 | { | |
2905 | one = two; | |
2906 | if (gsi_end_p (gsi)) | |
2907 | two = NULL; | |
2908 | else | |
2909 | two = gsi_stmt (gsi); | |
2910 | if (one | |
2911 | && two | |
2912 | && gimple_code (one) == GIMPLE_TRY | |
2913 | && gimple_code (two) == GIMPLE_TRY | |
2914 | && gimple_try_kind (one) == GIMPLE_TRY_FINALLY | |
2915 | && gimple_try_kind (two) == GIMPLE_TRY_FINALLY) | |
2916 | optimize_double_finally (one, two); | |
2917 | if (one) | |
2918 | switch (gimple_code (one)) | |
a24549d4 | 2919 | { |
726a989a RB |
2920 | case GIMPLE_TRY: |
2921 | refactor_eh_r (gimple_try_eval (one)); | |
2922 | refactor_eh_r (gimple_try_cleanup (one)); | |
2923 | break; | |
2924 | case GIMPLE_CATCH: | |
2925 | refactor_eh_r (gimple_catch_handler (one)); | |
2926 | break; | |
2927 | case GIMPLE_EH_FILTER: | |
2928 | refactor_eh_r (gimple_eh_filter_failure (one)); | |
2929 | break; | |
0a35513e AH |
2930 | case GIMPLE_EH_ELSE: |
2931 | refactor_eh_r (gimple_eh_else_n_body (one)); | |
2932 | refactor_eh_r (gimple_eh_else_e_body (one)); | |
2933 | break; | |
726a989a RB |
2934 | default: |
2935 | break; | |
a24549d4 | 2936 | } |
726a989a RB |
2937 | if (two) |
2938 | gsi_next (&gsi); | |
2939 | else | |
2940 | break; | |
a24549d4 JM |
2941 | } |
2942 | } | |
2943 | ||
2944 | static unsigned | |
2945 | refactor_eh (void) | |
2946 | { | |
726a989a | 2947 | refactor_eh_r (gimple_body (current_function_decl)); |
a24549d4 JM |
2948 | return 0; |
2949 | } | |
2950 | ||
1d65f45c RH |
2951 | static bool |
2952 | gate_refactor_eh (void) | |
2953 | { | |
2954 | return flag_exceptions != 0; | |
2955 | } | |
2956 | ||
8ddbbcae | 2957 | struct gimple_opt_pass pass_refactor_eh = |
a24549d4 | 2958 | { |
8ddbbcae JH |
2959 | { |
2960 | GIMPLE_PASS, | |
a24549d4 | 2961 | "ehopt", /* name */ |
1d65f45c | 2962 | gate_refactor_eh, /* gate */ |
a24549d4 JM |
2963 | refactor_eh, /* execute */ |
2964 | NULL, /* sub */ | |
2965 | NULL, /* next */ | |
2966 | 0, /* static_pass_number */ | |
2967 | TV_TREE_EH, /* tv_id */ | |
2968 | PROP_gimple_lcf, /* properties_required */ | |
2969 | 0, /* properties_provided */ | |
2970 | 0, /* properties_destroyed */ | |
2971 | 0, /* todo_flags_start */ | |
22c5fa5f | 2972 | 0 /* todo_flags_finish */ |
8ddbbcae | 2973 | } |
a24549d4 | 2974 | }; |
1d65f45c RH |
2975 | \f |
2976 | /* At the end of gimple optimization, we can lower RESX. */ | |
a8da523f | 2977 | |
1d65f45c RH |
2978 | static bool |
2979 | lower_resx (basic_block bb, gimple stmt, struct pointer_map_t *mnt_map) | |
a8da523f | 2980 | { |
1d65f45c RH |
2981 | int lp_nr; |
2982 | eh_region src_r, dst_r; | |
2983 | gimple_stmt_iterator gsi; | |
2984 | gimple x; | |
2985 | tree fn, src_nr; | |
2986 | bool ret = false; | |
a8da523f | 2987 | |
1d65f45c RH |
2988 | lp_nr = lookup_stmt_eh_lp (stmt); |
2989 | if (lp_nr != 0) | |
2990 | dst_r = get_eh_region_from_lp_number (lp_nr); | |
2991 | else | |
2992 | dst_r = NULL; | |
a8da523f | 2993 | |
1d65f45c | 2994 | src_r = get_eh_region_from_number (gimple_resx_region (stmt)); |
1d65f45c | 2995 | gsi = gsi_last_bb (bb); |
a8da523f | 2996 | |
072c87d1 RH |
2997 | if (src_r == NULL) |
2998 | { | |
2999 | /* We can wind up with no source region when pass_cleanup_eh shows | |
3000 | that there are no entries into an eh region and deletes it, but | |
3001 | then the block that contains the resx isn't removed. This can | |
3002 | happen without optimization when the switch statement created by | |
3003 | lower_try_finally_switch isn't simplified to remove the eh case. | |
3004 | ||
3005 | Resolve this by expanding the resx node to an abort. */ | |
3006 | ||
e79983f4 | 3007 | fn = builtin_decl_implicit (BUILT_IN_TRAP); |
072c87d1 RH |
3008 | x = gimple_build_call (fn, 0); |
3009 | gsi_insert_before (&gsi, x, GSI_SAME_STMT); | |
3010 | ||
3011 | while (EDGE_COUNT (bb->succs) > 0) | |
3012 | remove_edge (EDGE_SUCC (bb, 0)); | |
3013 | } | |
3014 | else if (dst_r) | |
1d65f45c RH |
3015 | { |
3016 | /* When we have a destination region, we resolve this by copying | |
3017 | the excptr and filter values into place, and changing the edge | |
3018 | to immediately after the landing pad. */ | |
3019 | edge e; | |
a8da523f | 3020 | |
1d65f45c RH |
3021 | if (lp_nr < 0) |
3022 | { | |
3023 | basic_block new_bb; | |
3024 | void **slot; | |
3025 | tree lab; | |
496a4ef5 | 3026 | |
1d65f45c RH |
3027 | /* We are resuming into a MUST_NOT_THROW region. Expand a call to | |
3028 | the failure decl into a new block, if needed. */ | |
3029 | gcc_assert (dst_r->type == ERT_MUST_NOT_THROW); | |
a8da523f | 3030 | |
1d65f45c RH |
3031 | slot = pointer_map_contains (mnt_map, dst_r); |
3032 | if (slot == NULL) | |
3033 | { | |
3034 | gimple_stmt_iterator gsi2; | |
a8da523f | 3035 | |
1d65f45c | 3036 | new_bb = create_empty_bb (bb); |
7d776ee2 RG |
3037 | if (current_loops) |
3038 | add_bb_to_loop (new_bb, bb->loop_father); | |
1d65f45c RH |
3039 | lab = gimple_block_label (new_bb); |
3040 | gsi2 = gsi_start_bb (new_bb); | |
a8da523f | 3041 | |
1d65f45c RH |
3042 | fn = dst_r->u.must_not_throw.failure_decl; |
3043 | x = gimple_build_call (fn, 0); | |
3044 | gimple_set_location (x, dst_r->u.must_not_throw.failure_loc); | |
3045 | gsi_insert_after (&gsi2, x, GSI_CONTINUE_LINKING); | |
4e6d1743 | 3046 | |
1d65f45c RH |
3047 | slot = pointer_map_insert (mnt_map, dst_r); |
3048 | *slot = lab; | |
3049 | } | |
3050 | else | |
3051 | { | |
3052 | lab = (tree) *slot; | |
3053 | new_bb = label_to_block (lab); | |
3054 | } | |
a8da523f | 3055 | |
1d65f45c RH |
3056 | gcc_assert (EDGE_COUNT (bb->succs) == 0); |
3057 | e = make_edge (bb, new_bb, EDGE_FALLTHRU); | |
3058 | e->count = bb->count; | |
3059 | e->probability = REG_BR_PROB_BASE; | |
3060 | } | |
3061 | else | |
3062 | { | |
3063 | edge_iterator ei; | |
413581ba | 3064 | tree dst_nr = build_int_cst (integer_type_node, dst_r->index); |
a8da523f | 3065 | |
e79983f4 | 3066 | fn = builtin_decl_implicit (BUILT_IN_EH_COPY_VALUES); |
413581ba | 3067 | src_nr = build_int_cst (integer_type_node, src_r->index); |
1d65f45c RH |
3068 | x = gimple_build_call (fn, 2, dst_nr, src_nr); |
3069 | gsi_insert_before (&gsi, x, GSI_SAME_STMT); | |
a8da523f | 3070 | |
1d65f45c RH |
3071 | /* Update the flags for the outgoing edge. */ |
3072 | e = single_succ_edge (bb); | |
3073 | gcc_assert (e->flags & EDGE_EH); | |
3074 | e->flags = (e->flags & ~EDGE_EH) | EDGE_FALLTHRU; | |
a8da523f | 3075 | |
1d65f45c RH |
3076 | /* If there are no more EH users of the landing pad, delete it. */ |
3077 | FOR_EACH_EDGE (e, ei, e->dest->preds) | |
3078 | if (e->flags & EDGE_EH) | |
3079 | break; | |
3080 | if (e == NULL) | |
3081 | { | |
3082 | eh_landing_pad lp = get_eh_landing_pad_from_number (lp_nr); | |
3083 | remove_eh_landing_pad (lp); | |
3084 | } | |
3085 | } | |
a8da523f | 3086 | |
1d65f45c RH |
3087 | ret = true; |
3088 | } | |
3089 | else | |
3090 | { | |
3091 | tree var; | |
a8da523f | 3092 | |
1d65f45c RH |
3093 | /* When we don't have a destination region, this exception escapes |
3094 | up the call chain. We resolve this by generating a call to the | |
3095 | _Unwind_Resume library function. */ | |
a8da523f | 3096 | |
384c400a | 3097 | /* The ARM EABI redefines _Unwind_Resume as __cxa_end_cleanup |
1d65f45c | 3098 | with no arguments for C++ and Java. Check for that. */ |
384c400a RH |
3099 | if (src_r->use_cxa_end_cleanup) |
3100 | { | |
e79983f4 | 3101 | fn = builtin_decl_implicit (BUILT_IN_CXA_END_CLEANUP); |
384c400a RH |
3102 | x = gimple_build_call (fn, 0); |
3103 | gsi_insert_before (&gsi, x, GSI_SAME_STMT); | |
3104 | } | |
3105 | else | |
4e6d1743 | 3106 | { |
e79983f4 | 3107 | fn = builtin_decl_implicit (BUILT_IN_EH_POINTER); |
413581ba | 3108 | src_nr = build_int_cst (integer_type_node, src_r->index); |
1d65f45c RH |
3109 | x = gimple_build_call (fn, 1, src_nr); |
3110 | var = create_tmp_var (ptr_type_node, NULL); | |
3111 | var = make_ssa_name (var, x); | |
3112 | gimple_call_set_lhs (x, var); | |
3113 | gsi_insert_before (&gsi, x, GSI_SAME_STMT); | |
3114 | ||
e79983f4 | 3115 | fn = builtin_decl_implicit (BUILT_IN_UNWIND_RESUME); |
1d65f45c RH |
3116 | x = gimple_build_call (fn, 1, var); |
3117 | gsi_insert_before (&gsi, x, GSI_SAME_STMT); | |
4e6d1743 | 3118 | } |
a8da523f | 3119 | |
1d65f45c | 3120 | gcc_assert (EDGE_COUNT (bb->succs) == 0); |
4e6d1743 | 3121 | } |
496a4ef5 | 3122 | |
1d65f45c RH |
3123 | gsi_remove (&gsi, true); |
3124 | ||
3125 | return ret; | |
4e6d1743 JH |
3126 | } |
3127 | ||
1d65f45c RH |
3128 | static unsigned |
3129 | execute_lower_resx (void) | |
3130 | { | |
3131 | basic_block bb; | |
3132 | struct pointer_map_t *mnt_map; | |
3133 | bool dominance_invalidated = false; | |
3134 | bool any_rewritten = false; | |
4e6d1743 | 3135 | |
1d65f45c | 3136 | mnt_map = pointer_map_create (); |
4e6d1743 | 3137 | |
1d65f45c RH |
3138 | FOR_EACH_BB (bb) |
3139 | { | |
3140 | gimple last = last_stmt (bb); | |
3141 | if (last && is_gimple_resx (last)) | |
3142 | { | |
3143 | dominance_invalidated |= lower_resx (bb, last, mnt_map); | |
3144 | any_rewritten = true; | |
3145 | } | |
3146 | } | |
3147 | ||
3148 | pointer_map_destroy (mnt_map); | |
3149 | ||
3150 | if (dominance_invalidated) | |
3151 | { | |
3152 | free_dominance_info (CDI_DOMINATORS); | |
3153 | free_dominance_info (CDI_POST_DOMINATORS); | |
4e6d1743 | 3154 | } |
a8da523f | 3155 | |
1d65f45c RH |
3156 | return any_rewritten ? TODO_update_ssa_only_virtuals : 0; |
3157 | } | |
a8da523f | 3158 | |
1d65f45c | 3159 | static bool |
072c87d1 | 3160 | gate_lower_resx (void) |
1d65f45c | 3161 | { |
072c87d1 | 3162 | return flag_exceptions != 0; |
1d65f45c | 3163 | } |
4e6d1743 | 3164 | |
1d65f45c | 3165 | struct gimple_opt_pass pass_lower_resx = |
4e6d1743 | 3166 | { |
1d65f45c RH |
3167 | { |
3168 | GIMPLE_PASS, | |
3169 | "resx", /* name */ | |
072c87d1 | 3170 | gate_lower_resx, /* gate */ |
1d65f45c RH |
3171 | execute_lower_resx, /* execute */ |
3172 | NULL, /* sub */ | |
3173 | NULL, /* next */ | |
3174 | 0, /* static_pass_number */ | |
3175 | TV_TREE_EH, /* tv_id */ | |
3176 | PROP_gimple_lcf, /* properties_required */ | |
3177 | 0, /* properties_provided */ | |
3178 | 0, /* properties_destroyed */ | |
3179 | 0, /* todo_flags_start */ | |
22c5fa5f | 3180 | TODO_verify_flow /* todo_flags_finish */ |
1d65f45c | 3181 | } |
4e6d1743 JH |
3182 | }; |
3183 | ||
960f0c9d JJ |
3184 | /* Try to optimize var = {v} {CLOBBER} stmts followed only by an | |
3185 | external throw. */ | |
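/* For instance (an illustrative sketch, not taken from a real GIMPLE
   dump), a block of the form

       x = {v} {CLOBBER};
       y = {v} {CLOBBER};
       resx 2;    <-- the exception leaves the current function

   only marks storage as dead right before the exception escapes, so
   the clobber statements can simply be deleted.  */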
3186 | ||
3187 | static void | |
3188 | optimize_clobbers (basic_block bb) | |
3189 | { | |
3190 | gimple_stmt_iterator gsi = gsi_last_bb (bb); | |
6d1c2bd3 | 3191 | for (gsi_prev (&gsi); !gsi_end_p (gsi); gsi_prev (&gsi)) |
960f0c9d JJ |
3192 | { |
3193 | gimple stmt = gsi_stmt (gsi); | |
3194 | if (is_gimple_debug (stmt)) | |
6d1c2bd3 JJ |
3195 | continue; |
3196 | if (!gimple_clobber_p (stmt) | |
3197 | || TREE_CODE (gimple_assign_lhs (stmt)) == SSA_NAME) | |
960f0c9d JJ |
3198 | return; |
3199 | unlink_stmt_vdef (stmt); | |
3200 | gsi_remove (&gsi, true); | |
3201 | release_defs (stmt); | |
3202 | } | |
3203 | } | |
1d65f45c | 3204 | |
ea85edfe JJ |
3205 | /* Try to sink var = {v} {CLOBBER} stmts that are followed only by an | |
3206 | internal throw, into the successor BB. */ | |
3207 | ||
3208 | static int | |
3209 | sink_clobbers (basic_block bb) | |
3210 | { | |
3211 | edge e; | |
3212 | edge_iterator ei; | |
3213 | gimple_stmt_iterator gsi, dgsi; | |
3214 | basic_block succbb; | |
3215 | bool any_clobbers = false; | |
3216 | ||
3217 | /* Only optimize if BB has a single EH successor and | |
3218 | all predecessor edges are EH too. */ | |
3219 | if (!single_succ_p (bb) | |
3220 | || (single_succ_edge (bb)->flags & EDGE_EH) == 0) | |
3221 | return 0; | |
3222 | ||
3223 | FOR_EACH_EDGE (e, ei, bb->preds) | |
3224 | { | |
3225 | if ((e->flags & EDGE_EH) == 0) | |
3226 | return 0; | |
3227 | } | |
3228 | ||
3229 | /* And BB contains only CLOBBER stmts before the final | |
3230 | RESX. */ | |
3231 | gsi = gsi_last_bb (bb); | |
3232 | for (gsi_prev (&gsi); !gsi_end_p (gsi); gsi_prev (&gsi)) | |
3233 | { | |
3234 | gimple stmt = gsi_stmt (gsi); | |
3235 | if (is_gimple_debug (stmt)) | |
3236 | continue; | |
3237 | if (gimple_code (stmt) == GIMPLE_LABEL) | |
3238 | break; | |
3239 | if (!gimple_clobber_p (stmt) | |
3240 | || TREE_CODE (gimple_assign_lhs (stmt)) == SSA_NAME) | |
3241 | return 0; | |
3242 | any_clobbers = true; | |
3243 | } | |
3244 | if (!any_clobbers) | |
3245 | return 0; | |
3246 | ||
3247 | succbb = single_succ (bb); | |
3248 | dgsi = gsi_after_labels (succbb); | |
3249 | gsi = gsi_last_bb (bb); | |
3250 | for (gsi_prev (&gsi); !gsi_end_p (gsi); gsi_prev (&gsi)) | |
3251 | { | |
3252 | gimple stmt = gsi_stmt (gsi); | |
ea85edfe JJ |
3253 | if (is_gimple_debug (stmt)) |
3254 | continue; | |
3255 | if (gimple_code (stmt) == GIMPLE_LABEL) | |
3256 | break; | |
3257 | unlink_stmt_vdef (stmt); | |
3258 | gsi_remove (&gsi, false); | |
525174a2 RG |
3259 | /* Trigger the operand scanner to cause renaming for virtual |
3260 | operands for this statement. | |
3261 | ??? Given the simple structure of this code manually | |
3262 | figuring out the reaching definition should not be too hard. */ | |
3263 | if (gimple_vuse (stmt)) | |
3264 | gimple_set_vuse (stmt, NULL_TREE); | |
ea85edfe JJ |
3265 | gsi_insert_before (&dgsi, stmt, GSI_SAME_STMT); |
3266 | } | |
3267 | ||
3268 | return TODO_update_ssa_only_virtuals; | |
3269 | } | |
3270 | ||
9f698956 AB |
3271 | /* At the end of inlining, we can lower EH_DISPATCH. Return true when |
3272 | we have found some duplicate labels and removed some edges. */ | |
4e6d1743 | 3273 | |
9f698956 | 3274 | static bool |
1d65f45c | 3275 | lower_eh_dispatch (basic_block src, gimple stmt) |
4e6d1743 | 3276 | { |
1d65f45c RH |
3277 | gimple_stmt_iterator gsi; |
3278 | int region_nr; | |
3279 | eh_region r; | |
3280 | tree filter, fn; | |
3281 | gimple x; | |
9f698956 | 3282 | bool redirected = false; |
4e6d1743 | 3283 | |
1d65f45c RH |
3284 | region_nr = gimple_eh_dispatch_region (stmt); |
3285 | r = get_eh_region_from_number (region_nr); | |
4e6d1743 | 3286 | |
1d65f45c | 3287 | gsi = gsi_last_bb (src); |
4e6d1743 | 3288 | |
1d65f45c | 3289 | switch (r->type) |
4e6d1743 | 3290 | { |
1d65f45c RH |
3291 | case ERT_TRY: |
3292 | { | |
3293 | VEC (tree, heap) *labels = NULL; | |
3294 | tree default_label = NULL; | |
3295 | eh_catch c; | |
3296 | edge_iterator ei; | |
3297 | edge e; | |
9f698956 | 3298 | struct pointer_set_t *seen_values = pointer_set_create (); |
1d65f45c RH |
3299 | |
3300 | /* Collect the labels for a switch. Zero the post_landing_pad | |
3301 | field because we'll no longer have anything keeping these labels | |
073a8998 | 3302 | in existence and the optimizer will be free to merge these |
1d65f45c RH |
3303 | blocks at will. */ |
3304 | for (c = r->u.eh_try.first_catch; c ; c = c->next_catch) | |
3305 | { | |
3306 | tree tp_node, flt_node, lab = c->label; | |
9f698956 | 3307 | bool have_label = false; |
4e6d1743 | 3308 | |
1d65f45c RH |
3309 | c->label = NULL; |
3310 | tp_node = c->type_list; | |
3311 | flt_node = c->filter_list; | |
3312 | ||
3313 | if (tp_node == NULL) | |
3314 | { | |
3315 | default_label = lab; | |
3316 | break; | |
3317 | } | |
3318 | do | |
3319 | { | |
9f698956 AB |
3320 | /* Filter out duplicate labels that arise when this handler |
3321 | is shadowed by an earlier one. When no labels are | |
3322 | attached to the handler anymore, we remove | |
3323 | the corresponding edge and then we delete unreachable | |
3324 | blocks at the end of this pass. */ | |
3325 | if (! pointer_set_contains (seen_values, TREE_VALUE (flt_node))) | |
3326 | { | |
3d528853 NF |
3327 | tree t = build_case_label (TREE_VALUE (flt_node), |
3328 | NULL, lab); | |
9f698956 AB |
3329 | VEC_safe_push (tree, heap, labels, t); |
3330 | pointer_set_insert (seen_values, TREE_VALUE (flt_node)); | |
3331 | have_label = true; | |
3332 | } | |
1d65f45c RH |
3333 | |
3334 | tp_node = TREE_CHAIN (tp_node); | |
3335 | flt_node = TREE_CHAIN (flt_node); | |
3336 | } | |
3337 | while (tp_node); | |
9f698956 AB |
3338 | if (! have_label) |
3339 | { | |
3340 | remove_edge (find_edge (src, label_to_block (lab))); | |
3341 | redirected = true; | |
3342 | } | |
1d65f45c RH |
3343 | } |
3344 | ||
3345 | /* Clean up the edge flags. */ | |
3346 | FOR_EACH_EDGE (e, ei, src->succs) | |
3347 | { | |
3348 | if (e->flags & EDGE_FALLTHRU) | |
3349 | { | |
3350 | /* If there was no catch-all, use the fallthru edge. */ | |
3351 | if (default_label == NULL) | |
3352 | default_label = gimple_block_label (e->dest); | |
3353 | e->flags &= ~EDGE_FALLTHRU; | |
3354 | } | |
3355 | } | |
3356 | gcc_assert (default_label != NULL); | |
3357 | ||
3358 | /* Don't generate a switch if there's only a default case. | |
3359 | This is common in the form of try { A; } catch (...) { B; }. */ | |
3360 | if (labels == NULL) | |
3361 | { | |
3362 | e = single_succ_edge (src); | |
3363 | e->flags |= EDGE_FALLTHRU; | |
3364 | } | |
3365 | else | |
3366 | { | |
e79983f4 | 3367 | fn = builtin_decl_implicit (BUILT_IN_EH_FILTER); |
413581ba RG |
3368 | x = gimple_build_call (fn, 1, build_int_cst (integer_type_node, |
3369 | region_nr)); | |
1d65f45c RH |
3370 | filter = create_tmp_var (TREE_TYPE (TREE_TYPE (fn)), NULL); |
3371 | filter = make_ssa_name (filter, x); | |
3372 | gimple_call_set_lhs (x, filter); | |
3373 | gsi_insert_before (&gsi, x, GSI_SAME_STMT); | |
3374 | ||
3375 | /* Turn the default label into a default case. */ | |
3d528853 | 3376 | default_label = build_case_label (NULL, NULL, default_label); |
1d65f45c RH |
3377 | sort_case_labels (labels); |
3378 | ||
3379 | x = gimple_build_switch_vec (filter, default_label, labels); | |
3380 | gsi_insert_before (&gsi, x, GSI_SAME_STMT); | |
3381 | ||
3382 | VEC_free (tree, heap, labels); | |
3383 | } | |
9f698956 | 3384 | pointer_set_destroy (seen_values); |
1d65f45c RH |
3385 | } |
3386 | break; | |
3387 | ||
3388 | case ERT_ALLOWED_EXCEPTIONS: | |
3389 | { | |
3390 | edge b_e = BRANCH_EDGE (src); | |
3391 | edge f_e = FALLTHRU_EDGE (src); | |
3392 | ||
e79983f4 | 3393 | fn = builtin_decl_implicit (BUILT_IN_EH_FILTER); |
413581ba RG |
3394 | x = gimple_build_call (fn, 1, build_int_cst (integer_type_node, |
3395 | region_nr)); | |
1d65f45c RH |
3396 | filter = create_tmp_var (TREE_TYPE (TREE_TYPE (fn)), NULL); |
3397 | filter = make_ssa_name (filter, x); | |
3398 | gimple_call_set_lhs (x, filter); | |
3399 | gsi_insert_before (&gsi, x, GSI_SAME_STMT); | |
3400 | ||
3401 | r->u.allowed.label = NULL; | |
3402 | x = gimple_build_cond (EQ_EXPR, filter, | |
3403 | build_int_cst (TREE_TYPE (filter), | |
3404 | r->u.allowed.filter), | |
3405 | NULL_TREE, NULL_TREE); | |
3406 | gsi_insert_before (&gsi, x, GSI_SAME_STMT); | |
3407 | ||
3408 | b_e->flags = b_e->flags | EDGE_TRUE_VALUE; | |
3409 | f_e->flags = (f_e->flags & ~EDGE_FALLTHRU) | EDGE_FALSE_VALUE; | |
3410 | } | |
3411 | break; | |
3412 | ||
3413 | default: | |
3414 | gcc_unreachable (); | |
4e6d1743 | 3415 | } |
1d65f45c RH |
3416 | |
3417 | /* Replace the EH_DISPATCH with the SWITCH or COND generated above. */ | |
3418 | gsi_remove (&gsi, true); | |
9f698956 | 3419 | return redirected; |
4e6d1743 JH |
3420 | } |
3421 | ||
1d65f45c RH |
3422 | static unsigned |
3423 | execute_lower_eh_dispatch (void) | |
3424 | { | |
3425 | basic_block bb; | |
ea85edfe | 3426 | int flags = 0; |
9f698956 | 3427 | bool redirected = false; |
4e6d1743 | 3428 | |
1d65f45c | 3429 | assign_filter_values (); |
496a4ef5 | 3430 | |
1d65f45c RH |
3431 | FOR_EACH_BB (bb) |
3432 | { | |
3433 | gimple last = last_stmt (bb); | |
960f0c9d JJ |
3434 | if (last == NULL) |
3435 | continue; | |
3436 | if (gimple_code (last) == GIMPLE_EH_DISPATCH) | |
1d65f45c | 3437 | { |
9f698956 | 3438 | redirected |= lower_eh_dispatch (bb, last); |
ea85edfe JJ |
3439 | flags |= TODO_update_ssa_only_virtuals; |
3440 | } | |
3441 | else if (gimple_code (last) == GIMPLE_RESX) | |
3442 | { | |
3443 | if (stmt_can_throw_external (last)) | |
3444 | optimize_clobbers (bb); | |
3445 | else | |
3446 | flags |= sink_clobbers (bb); | |
1d65f45c RH |
3447 | } |
3448 | } | |
3449 | ||
9f698956 AB |
3450 | if (redirected) |
3451 | delete_unreachable_blocks (); | |
ea85edfe | 3452 | return flags; |
1d65f45c RH |
3453 | } |
3454 | ||
072c87d1 RH |
3455 | static bool |
3456 | gate_lower_eh_dispatch (void) | |
3457 | { | |
1f9081d1 | 3458 | return cfun->eh->region_tree != NULL; |
072c87d1 RH |
3459 | } |
3460 | ||
1d65f45c | 3461 | struct gimple_opt_pass pass_lower_eh_dispatch = |
4e6d1743 | 3462 | { |
1d65f45c RH |
3463 | { |
3464 | GIMPLE_PASS, | |
3465 | "ehdisp", /* name */ | |
072c87d1 | 3466 | gate_lower_eh_dispatch, /* gate */ |
1d65f45c RH |
3467 | execute_lower_eh_dispatch, /* execute */ |
3468 | NULL, /* sub */ | |
3469 | NULL, /* next */ | |
3470 | 0, /* static_pass_number */ | |
3471 | TV_TREE_EH, /* tv_id */ | |
3472 | PROP_gimple_lcf, /* properties_required */ | |
3473 | 0, /* properties_provided */ | |
3474 | 0, /* properties_destroyed */ | |
3475 | 0, /* todo_flags_start */ | |
22c5fa5f | 3476 | TODO_verify_flow /* todo_flags_finish */ |
1d65f45c RH |
3477 | } |
3478 | }; | |
3479 | \f | |
3480 | /* Walk statements, see what regions are really referenced and remove | |
3481 | those that are unused. */ | |
3482 | ||
3483 | static void | |
3484 | remove_unreachable_handlers (void) | |
3485 | { | |
3486 | sbitmap r_reachable, lp_reachable; | |
3487 | eh_region region; | |
3488 | eh_landing_pad lp; | |
3489 | basic_block bb; | |
3490 | int lp_nr, r_nr; | |
4e6d1743 | 3491 | |
1d65f45c RH |
3492 | r_reachable = sbitmap_alloc (VEC_length (eh_region, cfun->eh->region_array)); |
3493 | lp_reachable | |
3494 | = sbitmap_alloc (VEC_length (eh_landing_pad, cfun->eh->lp_array)); | |
3495 | sbitmap_zero (r_reachable); | |
3496 | sbitmap_zero (lp_reachable); | |
4e6d1743 | 3497 | |
1d65f45c | 3498 | FOR_EACH_BB (bb) |
4e6d1743 | 3499 | { |
57f93411 | 3500 | gimple_stmt_iterator gsi; |
1d65f45c RH |
3501 | |
3502 | for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi)) | |
3503 | { | |
3504 | gimple stmt = gsi_stmt (gsi); | |
3505 | lp_nr = lookup_stmt_eh_lp (stmt); | |
3506 | ||
3507 | /* Negative LP numbers are MUST_NOT_THROW regions which | |
3508 | are not considered BB enders. */ | |
3509 | if (lp_nr < 0) | |
3510 | SET_BIT (r_reachable, -lp_nr); | |
3511 | ||
3512 | /* Positive LP numbers are real landing pads, and are BB enders. */ | |
3513 | else if (lp_nr > 0) | |
3514 | { | |
3515 | gcc_assert (gsi_one_before_end_p (gsi)); | |
3516 | region = get_eh_region_from_lp_number (lp_nr); | |
3517 | SET_BIT (r_reachable, region->index); | |
3518 | SET_BIT (lp_reachable, lp_nr); | |
3519 | } | |
6ae70ea2 JJ |
3520 | |
3521 | /* Avoid removing regions referenced from RESX/EH_DISPATCH. */ | |
3522 | switch (gimple_code (stmt)) | |
3523 | { | |
3524 | case GIMPLE_RESX: | |
3525 | SET_BIT (r_reachable, gimple_resx_region (stmt)); | |
3526 | break; | |
3527 | case GIMPLE_EH_DISPATCH: | |
3528 | SET_BIT (r_reachable, gimple_eh_dispatch_region (stmt)); | |
3529 | break; | |
3530 | default: | |
3531 | break; | |
3532 | } | |
1d65f45c | 3533 | } |
4e6d1743 | 3534 | } |
1d65f45c RH |
3535 | |
3536 | if (dump_file) | |
4e6d1743 | 3537 | { |
1d65f45c RH |
3538 | fprintf (dump_file, "Before removal of unreachable regions:\n"); |
3539 | dump_eh_tree (dump_file, cfun); | |
3540 | fprintf (dump_file, "Reachable regions: "); | |
3541 | dump_sbitmap_file (dump_file, r_reachable); | |
3542 | fprintf (dump_file, "Reachable landing pads: "); | |
3543 | dump_sbitmap_file (dump_file, lp_reachable); | |
4e6d1743 JH |
3544 | } |
3545 | ||
1d65f45c RH |
3546 | for (r_nr = 1; |
3547 | VEC_iterate (eh_region, cfun->eh->region_array, r_nr, region); ++r_nr) | |
3548 | if (region && !TEST_BIT (r_reachable, r_nr)) | |
3549 | { | |
3550 | if (dump_file) | |
3551 | fprintf (dump_file, "Removing unreachable region %d\n", r_nr); | |
3552 | remove_eh_handler (region); | |
3553 | } | |
4e6d1743 | 3554 | |
1d65f45c RH |
3555 | for (lp_nr = 1; |
3556 | VEC_iterate (eh_landing_pad, cfun->eh->lp_array, lp_nr, lp); ++lp_nr) | |
3557 | if (lp && !TEST_BIT (lp_reachable, lp_nr)) | |
3558 | { | |
3559 | if (dump_file) | |
3560 | fprintf (dump_file, "Removing unreachable landing pad %d\n", lp_nr); | |
3561 | remove_eh_landing_pad (lp); | |
3562 | } | |
b8698a0f | 3563 | |
1d65f45c | 3564 | if (dump_file) |
4e6d1743 | 3565 | { |
1d65f45c RH |
3566 | fprintf (dump_file, "\n\nAfter removal of unreachable regions:\n"); |
3567 | dump_eh_tree (dump_file, cfun); | |
3568 | fprintf (dump_file, "\n\n"); | |
4e6d1743 JH |
3569 | } |
3570 | ||
1d65f45c RH |
3571 | sbitmap_free (r_reachable); |
3572 | sbitmap_free (lp_reachable); | |
3573 | ||
3574 | #ifdef ENABLE_CHECKING | |
3575 | verify_eh_tree (cfun); | |
3576 | #endif | |
3577 | } | |
3578 | ||
99d8763e JJ |
3579 | /* Remove unreachable handlers if any landing pads have been removed after |
3580 | the last ehcleanup pass (due to gimple_purge_dead_eh_edges). */ | |
3581 | ||
3582 | void | |
3583 | maybe_remove_unreachable_handlers (void) | |
3584 | { | |
3585 | eh_landing_pad lp; | |
3586 | int i; | |
3587 | ||
3588 | if (cfun->eh == NULL) | |
3589 | return; | |
3590 | ||
3591 | for (i = 1; VEC_iterate (eh_landing_pad, cfun->eh->lp_array, i, lp); ++i) | |
3592 | if (lp && lp->post_landing_pad) | |
3593 | { | |
3594 | if (label_to_block (lp->post_landing_pad) == NULL) | |
3595 | { | |
3596 | remove_unreachable_handlers (); | |
3597 | return; | |
3598 | } | |
3599 | } | |
3600 | } | |
3601 | ||
1d65f45c RH |
3602 | /* Remove regions that do not have landing pads. This assumes |
3603 | that remove_unreachable_handlers has already been run, and | |
3604 | that we've just manipulated the landing pads since then. */ | |
3605 | ||
3606 | static void | |
3607 | remove_unreachable_handlers_no_lp (void) | |
3608 | { | |
3609 | eh_region r; | |
3610 | int i; | |
1a47f99c MM |
3611 | sbitmap r_reachable; |
3612 | basic_block bb; | |
3613 | ||
3614 | r_reachable = sbitmap_alloc (VEC_length (eh_region, cfun->eh->region_array)); | |
3615 | sbitmap_zero (r_reachable); | |
3616 | ||
3617 | FOR_EACH_BB (bb) | |
3618 | { | |
3619 | gimple stmt = last_stmt (bb); | |
3620 | if (stmt) | |
3621 | /* Avoid removing regions referenced from RESX/EH_DISPATCH. */ | |
3622 | switch (gimple_code (stmt)) | |
3623 | { | |
3624 | case GIMPLE_RESX: | |
3625 | SET_BIT (r_reachable, gimple_resx_region (stmt)); | |
3626 | break; | |
3627 | case GIMPLE_EH_DISPATCH: | |
3628 | SET_BIT (r_reachable, gimple_eh_dispatch_region (stmt)); | |
3629 | break; | |
3630 | default: | |
3631 | break; | |
3632 | } | |
3633 | } | |
1d65f45c RH |
3634 | |
3635 | for (i = 1; VEC_iterate (eh_region, cfun->eh->region_array, i, r); ++i) | |
1a47f99c MM |
3636 | if (r && r->landing_pads == NULL && r->type != ERT_MUST_NOT_THROW |
3637 | && !TEST_BIT (r_reachable, i)) | |
1d65f45c RH |
3638 | { |
3639 | if (dump_file) | |
3640 | fprintf (dump_file, "Removing unreachable region %d\n", i); | |
3641 | remove_eh_handler (r); | |
3642 | } | |
1a47f99c MM |
3643 | |
3644 | sbitmap_free (r_reachable); | |
4e6d1743 JH |
3645 | } |
3646 | ||
1d65f45c RH |
3647 | /* Undo critical edge splitting on an EH landing pad. Earlier, we |
3648 | optimistically split all sorts of edges, including EH edges. The | |
3649 | optimization passes in between may not have needed them; if not, | |
3650 | we should undo the split. | |
3651 | ||
3652 | Recognize this case by having one EH edge incoming to the BB and | |
3653 | one normal edge outgoing; BB should be empty apart from the | |
3654 | post_landing_pad label. | |
3655 | ||
3656 | Note that this is slightly different from the empty handler case | |
3657 | handled by cleanup_empty_eh, in that the actual handler may yet | |
3658 | have actual code but the landing pad has been separated from the | |
3659 | handler. As such, cleanup_empty_eh relies on this transformation | |
3660 | having been done first. */ | |
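/* An illustrative sketch of the shape being undone (not taken from a
   real dump):

       <bb 5>:           <-- reached only by a single EH edge
       <L3>:             <-- the post_landing_pad label, nothing else
         goto <bb 6>;    <-- single normal successor

   After unsplitting, the incoming EH edge targets <bb 6> directly and
   <bb 5> is left without edges, to be deleted as unreachable.  */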
a8da523f JH |
3661 | |
3662 | static bool | |
1d65f45c | 3663 | unsplit_eh (eh_landing_pad lp) |
a8da523f | 3664 | { |
1d65f45c RH |
3665 | basic_block bb = label_to_block (lp->post_landing_pad); |
3666 | gimple_stmt_iterator gsi; | |
3667 | edge e_in, e_out; | |
3668 | ||
3669 | /* Quickly check the edge counts on BB for singularity. */ | |
3670 | if (EDGE_COUNT (bb->preds) != 1 || EDGE_COUNT (bb->succs) != 1) | |
3671 | return false; | |
3672 | e_in = EDGE_PRED (bb, 0); | |
3673 | e_out = EDGE_SUCC (bb, 0); | |
a8da523f | 3674 | |
1d65f45c RH |
3675 | /* Input edge must be EH and output edge must be normal. */ |
3676 | if ((e_in->flags & EDGE_EH) == 0 || (e_out->flags & EDGE_EH) != 0) | |
3677 | return false; | |
3678 | ||
3333cd50 RG |
3679 | /* The block must be empty except for the labels and debug insns. */ |
3680 | gsi = gsi_after_labels (bb); | |
3681 | if (!gsi_end_p (gsi) && is_gimple_debug (gsi_stmt (gsi))) | |
3682 | gsi_next_nondebug (&gsi); | |
3683 | if (!gsi_end_p (gsi)) | |
1d65f45c RH |
3684 | return false; |
3685 | ||
3686 | /* The destination block must not already have a landing pad | |
3687 | for a different region. */ | |
3688 | for (gsi = gsi_start_bb (e_out->dest); !gsi_end_p (gsi); gsi_next (&gsi)) | |
a8da523f | 3689 | { |
1d65f45c RH |
3690 | gimple stmt = gsi_stmt (gsi); |
3691 | tree lab; | |
3692 | int lp_nr; | |
a8da523f | 3693 | |
1d65f45c RH |
3694 | if (gimple_code (stmt) != GIMPLE_LABEL) |
3695 | break; | |
3696 | lab = gimple_label_label (stmt); | |
3697 | lp_nr = EH_LANDING_PAD_NR (lab); | |
3698 | if (lp_nr && get_eh_region_from_lp_number (lp_nr) != lp->region) | |
3699 | return false; | |
3700 | } | |
a8da523f | 3701 | |
f8fd49b5 RH |
3702 | /* The new destination block must not already be a destination of |
3703 | the source block, lest we merge fallthru and eh edges and get | |
3704 | all sorts of confused. */ | |
3705 | if (find_edge (e_in->src, e_out->dest)) | |
3706 | return false; | |
3707 | ||
d6063d7f RH |
3708 | /* ??? We can get degenerate phis due to cfg cleanups. I would have |
3709 | thought this should have been cleaned up by a phicprop pass, but | |
3710 | that doesn't appear to handle virtuals. Propagate by hand. */ | |
3711 | if (!gimple_seq_empty_p (phi_nodes (bb))) | |
3712 | { | |
3713 | for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); ) | |
3714 | { | |
3715 | gimple use_stmt, phi = gsi_stmt (gsi); | |
3716 | tree lhs = gimple_phi_result (phi); | |
3717 | tree rhs = gimple_phi_arg_def (phi, 0); | |
3718 | use_operand_p use_p; | |
3719 | imm_use_iterator iter; | |
3720 | ||
3721 | FOR_EACH_IMM_USE_STMT (use_stmt, iter, lhs) | |
3722 | { | |
3723 | FOR_EACH_IMM_USE_ON_STMT (use_p, iter) | |
3724 | SET_USE (use_p, rhs); | |
3725 | } | |
3726 | ||
3727 | if (SSA_NAME_OCCURS_IN_ABNORMAL_PHI (lhs)) | |
3728 | SSA_NAME_OCCURS_IN_ABNORMAL_PHI (rhs) = 1; | |
3729 | ||
3730 | remove_phi_node (&gsi, true); | |
3731 | } | |
3732 | } | |
496a4ef5 | 3733 | |
1d65f45c RH |
3734 | if (dump_file && (dump_flags & TDF_DETAILS)) |
3735 | fprintf (dump_file, "Unsplit EH landing pad %d to block %i.\n", | |
3736 | lp->index, e_out->dest->index); | |
3737 | ||
3738 | /* Redirect the edge. Since redirect_eh_edge_1 expects to be moving | |
3739 | a successor edge, humor it. But do the real CFG change with the | |
3740 | predecessor of E_OUT in order to preserve the ordering of arguments | |
3741 | to the PHI nodes in E_OUT->DEST. */ | |
3742 | redirect_eh_edge_1 (e_in, e_out->dest, false); | |
3743 | redirect_edge_pred (e_out, e_in->src); | |
3744 | e_out->flags = e_in->flags; | |
3745 | e_out->probability = e_in->probability; | |
3746 | e_out->count = e_in->count; | |
3747 | remove_edge (e_in); | |
496a4ef5 | 3748 | |
1d65f45c RH |
3749 | return true; |
3750 | } | |
496a4ef5 | 3751 | |
1d65f45c | 3752 | /* Examine each landing pad block and see if it matches unsplit_eh. */ |
496a4ef5 | 3753 | |
1d65f45c RH |
3754 | static bool |
3755 | unsplit_all_eh (void) | |
3756 | { | |
3757 | bool changed = false; | |
3758 | eh_landing_pad lp; | |
3759 | int i; | |
496a4ef5 | 3760 | |
1d65f45c RH |
3761 | for (i = 1; VEC_iterate (eh_landing_pad, cfun->eh->lp_array, i, lp); ++i) |
3762 | if (lp) | |
3763 | changed |= unsplit_eh (lp); | |
3764 | ||
3765 | return changed; | |
3766 | } | |
3767 | ||
3768 | /* A subroutine of cleanup_empty_eh. Redirect all EH edges incoming | |
3769 | to OLD_BB to NEW_BB; return true on success, false on failure. | |
3770 | ||
3771 | OLD_BB_OUT is the edge into NEW_BB from OLD_BB, so if we miss any | |
3772 | PHI variables from OLD_BB we can pick them up from OLD_BB_OUT. | |
3773 | Virtual PHIs may be deleted and marked for renaming. */ | |
3774 | ||
3775 | static bool | |
3776 | cleanup_empty_eh_merge_phis (basic_block new_bb, basic_block old_bb, | |
d6063d7f | 3777 | edge old_bb_out, bool change_region) |
1d65f45c RH |
3778 | { |
3779 | gimple_stmt_iterator ngsi, ogsi; | |
3780 | edge_iterator ei; | |
3781 | edge e; | |
3782 | bitmap rename_virts; | |
3783 | bitmap ophi_handled; | |
3784 | ||
336ead04 JJ |
3785 | /* The destination block must not be a regular successor for any |
3786 | of the preds of the landing pad. Thus, avoid turning | |
3787 | <..> | |
3788 | | \ EH | |
3789 | | <..> | |
3790 | | / | |
3791 | <..> | |
3792 | into | |
3793 | <..> | |
3794 | | | EH | |
3795 | <..> | |
3796 | which CFG verification would choke on. See PR45172 and PR51089. */ | |
3797 | FOR_EACH_EDGE (e, ei, old_bb->preds) | |
3798 | if (find_edge (e->src, new_bb)) | |
3799 | return false; | |
3800 | ||
1d65f45c RH |
3801 | FOR_EACH_EDGE (e, ei, old_bb->preds) |
3802 | redirect_edge_var_map_clear (e); | |
3803 | ||
3804 | ophi_handled = BITMAP_ALLOC (NULL); | |
3805 | rename_virts = BITMAP_ALLOC (NULL); | |
3806 | ||
3807 | /* First, iterate through the PHIs on NEW_BB and set up the edge_var_map | |
3808 | for the edges we're going to move. */ | |
3809 | for (ngsi = gsi_start_phis (new_bb); !gsi_end_p (ngsi); gsi_next (&ngsi)) | |
3810 | { | |
3811 | gimple ophi, nphi = gsi_stmt (ngsi); | |
3812 | tree nresult, nop; | |
3813 | ||
3814 | nresult = gimple_phi_result (nphi); | |
3815 | nop = gimple_phi_arg_def (nphi, old_bb_out->dest_idx); | |
3816 | ||
3817 | /* Find the corresponding PHI in OLD_BB so we can forward-propagate | |
3818 | the source ssa_name. */ | |
3819 | ophi = NULL; | |
3820 | for (ogsi = gsi_start_phis (old_bb); !gsi_end_p (ogsi); gsi_next (&ogsi)) | |
3821 | { | |
3822 | ophi = gsi_stmt (ogsi); | |
3823 | if (gimple_phi_result (ophi) == nop) | |
3824 | break; | |
3825 | ophi = NULL; | |
a3710436 | 3826 | } |
496a4ef5 | 3827 | |
1d65f45c RH |
3828 | /* If we did find the corresponding PHI, copy those inputs. */ |
3829 | if (ophi) | |
a8da523f | 3830 | { |
3ffe07e1 JJ |
3831 | /* If NOP is used somewhere else beyond phis in new_bb, give up. */ |
3832 | if (!has_single_use (nop)) | |
3833 | { | |
3834 | imm_use_iterator imm_iter; | |
3835 | use_operand_p use_p; | |
3836 | ||
3837 | FOR_EACH_IMM_USE_FAST (use_p, imm_iter, nop) | |
3838 | { | |
3839 | if (!gimple_debug_bind_p (USE_STMT (use_p)) | |
3840 | && (gimple_code (USE_STMT (use_p)) != GIMPLE_PHI | |
3841 | || gimple_bb (USE_STMT (use_p)) != new_bb)) | |
3842 | goto fail; | |
3843 | } | |
3844 | } | |
1d65f45c RH |
3845 | bitmap_set_bit (ophi_handled, SSA_NAME_VERSION (nop)); |
3846 | FOR_EACH_EDGE (e, ei, old_bb->preds) | |
496a4ef5 | 3847 | { |
1d65f45c RH |
3848 | location_t oloc; |
3849 | tree oop; | |
3850 | ||
3851 | if ((e->flags & EDGE_EH) == 0) | |
3852 | continue; | |
3853 | oop = gimple_phi_arg_def (ophi, e->dest_idx); | |
3854 | oloc = gimple_phi_arg_location (ophi, e->dest_idx); | |
9e227d60 | 3855 | redirect_edge_var_map_add (e, nresult, oop, oloc); |
496a4ef5 | 3856 | } |
1d65f45c RH |
3857 | } |
3858 | /* If we didn't find the PHI, but it's a VOP, remember to rename | |
3859 | it later, assuming all other tests succeed. */ | |
ea057359 | 3860 | else if (virtual_operand_p (nresult)) |
1d65f45c RH |
3861 | bitmap_set_bit (rename_virts, SSA_NAME_VERSION (nresult)); |
3862 | /* If we didn't find the PHI, and it's a real variable, we know | |
3863 | from the fact that OLD_BB is tree_empty_eh_handler_p that the | |
3864 | variable is unchanged from input to the block and we can simply | |
3865 | re-use the input to NEW_BB from the OLD_BB_OUT edge. */ | |
3866 | else | |
3867 | { | |
3868 | location_t nloc | |
3869 | = gimple_phi_arg_location (nphi, old_bb_out->dest_idx); | |
3870 | FOR_EACH_EDGE (e, ei, old_bb->preds) | |
9e227d60 | 3871 | redirect_edge_var_map_add (e, nresult, nop, nloc); |
1d65f45c RH |
3872 | } |
3873 | } | |
3874 | ||
3875 | /* Second, verify that all PHIs from OLD_BB have been handled. If not, | |
3876 | we don't know what values from the other edges into NEW_BB to use. */ | |
3877 | for (ogsi = gsi_start_phis (old_bb); !gsi_end_p (ogsi); gsi_next (&ogsi)) | |
3878 | { | |
3879 | gimple ophi = gsi_stmt (ogsi); | |
3880 | tree oresult = gimple_phi_result (ophi); | |
3881 | if (!bitmap_bit_p (ophi_handled, SSA_NAME_VERSION (oresult))) | |
3882 | goto fail; | |
3883 | } | |
3884 | ||
3885 | /* At this point we know that the merge will succeed. Remove the PHI | |
3886 | nodes for the virtuals that we want to rename. */ | |
3887 | if (!bitmap_empty_p (rename_virts)) | |
3888 | { | |
3889 | for (ngsi = gsi_start_phis (new_bb); !gsi_end_p (ngsi); ) | |
3890 | { | |
3891 | gimple nphi = gsi_stmt (ngsi); | |
3892 | tree nresult = gimple_phi_result (nphi); | |
3893 | if (bitmap_bit_p (rename_virts, SSA_NAME_VERSION (nresult))) | |
496a4ef5 | 3894 | { |
1d65f45c RH |
3895 | mark_virtual_phi_result_for_renaming (nphi); |
3896 | remove_phi_node (&ngsi, true); | |
496a4ef5 JH |
3897 | } |
3898 | else | |
1d65f45c | 3899 | gsi_next (&ngsi); |
4e6d1743 | 3900 | } |
1d65f45c | 3901 | } |
4e6d1743 | 3902 | |
1d65f45c RH |
3903 | /* Finally, move the edges and update the PHIs. */ |
3904 | for (ei = ei_start (old_bb->preds); (e = ei_safe_edge (ei)); ) | |
3905 | if (e->flags & EDGE_EH) | |
3906 | { | |
efa26eaa RG |
3907 | /* ??? CFG manipulation routines do not try to update loop | |
3908 | form on edge redirection. Do so manually here for now. */ | |
3909 | /* Redirecting a loop entry or latch edge will either create | |
3910 | a multiple entry loop or rotate the loop. If the loops merge | |
3911 | we may have created a loop with multiple latches. | |
3912 | All of this isn't easily fixed, so cancel the affected loop | |
3913 | and mark the other loop as possibly having multiple latches. */ | |
3914 | if (current_loops | |
3915 | && e->dest == e->dest->loop_father->header) | |
3916 | { | |
3917 | e->dest->loop_father->header = NULL; | |
3918 | e->dest->loop_father->latch = NULL; | |
3919 | new_bb->loop_father->latch = NULL; | |
3920 | loops_state_set (LOOPS_NEED_FIXUP|LOOPS_MAY_HAVE_MULTIPLE_LATCHES); | |
3921 | } | |
d6063d7f | 3922 | redirect_eh_edge_1 (e, new_bb, change_region); |
1d65f45c RH |
3923 | redirect_edge_succ (e, new_bb); |
3924 | flush_pending_stmts (e); | |
3925 | } | |
3926 | else | |
3927 | ei_next (&ei); | |
4e6d1743 | 3928 | |
1d65f45c RH |
3929 | BITMAP_FREE (ophi_handled); |
3930 | BITMAP_FREE (rename_virts); | |
3931 | return true; | |
3932 | ||
3933 | fail: | |
3934 | FOR_EACH_EDGE (e, ei, old_bb->preds) | |
3935 | redirect_edge_var_map_clear (e); | |
3936 | BITMAP_FREE (ophi_handled); | |
3937 | BITMAP_FREE (rename_virts); | |
3938 | return false; | |
3939 | } | |
3940 | ||
3941 | /* A subroutine of cleanup_empty_eh. Move a landing pad LP from its | |
3942 | old region to NEW_REGION at BB. */ | |
3943 | ||
3944 | static void | |
3945 | cleanup_empty_eh_move_lp (basic_block bb, edge e_out, | |
3946 | eh_landing_pad lp, eh_region new_region) | |
3947 | { | |
3948 | gimple_stmt_iterator gsi; | |
3949 | eh_landing_pad *pp; | |
3950 | ||
3951 | for (pp = &lp->region->landing_pads; *pp != lp; pp = &(*pp)->next_lp) | |
3952 | continue; | |
3953 | *pp = lp->next_lp; | |
3954 | ||
3955 | lp->region = new_region; | |
3956 | lp->next_lp = new_region->landing_pads; | |
3957 | new_region->landing_pads = lp; | |
3958 | ||
3959 | /* Delete the RESX that was matched within the empty handler block. */ | |
3960 | gsi = gsi_last_bb (bb); | |
3d3f2249 | 3961 | unlink_stmt_vdef (gsi_stmt (gsi)); |
1d65f45c RH |
3962 | gsi_remove (&gsi, true); |
3963 | ||
3964 | /* Clean up E_OUT for the fallthru. */ | |
3965 | e_out->flags = (e_out->flags & ~EDGE_EH) | EDGE_FALLTHRU; | |
3966 | e_out->probability = REG_BR_PROB_BASE; | |
3967 | } | |
3968 | ||
3969 | /* A subroutine of cleanup_empty_eh. Handle more complex cases of | |
b8698a0f | 3970 | unsplitting than unsplit_eh was prepared to handle, e.g. when |
1d65f45c RH |
3971 | multiple incoming edges and phis are involved. */ |
3972 | ||
3973 | static bool | |
d6063d7f | 3974 | cleanup_empty_eh_unsplit (basic_block bb, edge e_out, eh_landing_pad lp) |
1d65f45c RH |
3975 | { |
3976 | gimple_stmt_iterator gsi; | |
1d65f45c RH |
3977 | tree lab; |
3978 | ||
3979 | /* We really ought not have totally lost everything following | |
3980 | a landing pad label. Given that BB is empty, there had better | |
3981 | be a successor. */ | |
3982 | gcc_assert (e_out != NULL); | |
3983 | ||
d6063d7f RH |
3984 | /* The destination block must not already have a landing pad |
3985 | for a different region. */ | |
1d65f45c RH |
3986 | lab = NULL; |
3987 | for (gsi = gsi_start_bb (e_out->dest); !gsi_end_p (gsi); gsi_next (&gsi)) | |
3988 | { | |
3989 | gimple stmt = gsi_stmt (gsi); | |
d6063d7f RH |
3990 | int lp_nr; |
3991 | ||
1d65f45c RH |
3992 | if (gimple_code (stmt) != GIMPLE_LABEL) |
3993 | break; | |
3994 | lab = gimple_label_label (stmt); | |
d6063d7f RH |
3995 | lp_nr = EH_LANDING_PAD_NR (lab); |
3996 | if (lp_nr && get_eh_region_from_lp_number (lp_nr) != lp->region) | |
3997 | return false; | |
1d65f45c | 3998 | } |
1d65f45c RH |
3999 | |
4000 | /* Attempt to move the PHIs into the successor block. */ | |
d6063d7f | 4001 | if (cleanup_empty_eh_merge_phis (e_out->dest, bb, e_out, false)) |
1d65f45c RH |
4002 | { |
4003 | if (dump_file && (dump_flags & TDF_DETAILS)) | |
4004 | fprintf (dump_file, | |
d6063d7f RH |
4005 | "Unsplit EH landing pad %d to block %i " |
4006 | "(via cleanup_empty_eh).\n", | |
4007 | lp->index, e_out->dest->index); | |
1d65f45c RH |
4008 | return true; |
4009 | } | |
4010 | ||
4011 | return false; | |
4012 | } | |
4013 | ||
afaaa67d JJ |
4014 | /* Return true if edge E_FIRST is part of an empty infinite loop |
4015 | or leads to such a loop through a series of single successor | |
4016 | empty bbs. */ | |
4017 | ||
4018 | static bool | |
4019 | infinite_empty_loop_p (edge e_first) | |
4020 | { | |
4021 | bool inf_loop = false; | |
4022 | edge e; | |
4023 | ||
4024 | if (e_first->dest == e_first->src) | |
4025 | return true; | |
4026 | ||
4027 | e_first->src->aux = (void *) 1; | |
4028 | for (e = e_first; single_succ_p (e->dest); e = single_succ_edge (e->dest)) | |
4029 | { | |
4030 | gimple_stmt_iterator gsi; | |
4031 | if (e->dest->aux) | |
4032 | { | |
4033 | inf_loop = true; | |
4034 | break; | |
4035 | } | |
4036 | e->dest->aux = (void *) 1; | |
4037 | gsi = gsi_after_labels (e->dest); | |
4038 | if (!gsi_end_p (gsi) && is_gimple_debug (gsi_stmt (gsi))) | |
4039 | gsi_next_nondebug (&gsi); | |
4040 | if (!gsi_end_p (gsi)) | |
4041 | break; | |
4042 | } | |
4043 | e_first->src->aux = NULL; | |
4044 | for (e = e_first; e->dest->aux; e = single_succ_edge (e->dest)) | |
4045 | e->dest->aux = NULL; | |
4046 | ||
4047 | return inf_loop; | |
4048 | } | |
4049 | ||
1d65f45c RH |
4050 | /* Examine the block associated with LP to determine if it's an empty |
4051 | handler for its EH region. If so, attempt to redirect EH edges to | |
4052 | an outer region. Return true if the CFG was updated in any way. This | |
4053 | is similar to jump forwarding, just across EH edges. */ | |
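/* An illustrative shape of such an empty handler (a sketch, not taken
   from a real dump):

       <bb 9>:           <-- landing pad of a cleanup region
       <L7>:
         resx 3;         <-- vectored to the landing pad of the
                             enclosing region

   Statements that currently throw to <bb 9> can be made to throw
   directly to that enclosing landing pad instead.  */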
4054 | ||
4055 | static bool | |
4056 | cleanup_empty_eh (eh_landing_pad lp) | |
4057 | { | |
4058 | basic_block bb = label_to_block (lp->post_landing_pad); | |
4059 | gimple_stmt_iterator gsi; | |
4060 | gimple resx; | |
4061 | eh_region new_region; | |
4062 | edge_iterator ei; | |
4063 | edge e, e_out; | |
4064 | bool has_non_eh_pred; | |
81bfd197 | 4065 | bool ret = false; |
1d65f45c RH |
4066 | int new_lp_nr; |
4067 | ||
4068 | /* There can be zero or one edges out of BB. This is the quickest test. */ | |
4069 | switch (EDGE_COUNT (bb->succs)) | |
4070 | { | |
4071 | case 0: | |
4072 | e_out = NULL; | |
4073 | break; | |
4074 | case 1: | |
4075 | e_out = EDGE_SUCC (bb, 0); | |
4076 | break; | |
4077 | default: | |
4078 | return false; | |
4079 | } | |
81bfd197 MM |
4080 | |
4081 | resx = last_stmt (bb); | |
4082 | if (resx && is_gimple_resx (resx)) | |
4083 | { | |
4084 | if (stmt_can_throw_external (resx)) | |
4085 | optimize_clobbers (bb); | |
4086 | else if (sink_clobbers (bb)) | |
4087 | ret = true; | |
4088 | } | |
4089 | ||
1d65f45c RH |
4090 | gsi = gsi_after_labels (bb); |
4091 | ||
4092 | /* Make sure to skip debug statements. */ | |
4093 | if (!gsi_end_p (gsi) && is_gimple_debug (gsi_stmt (gsi))) | |
4094 | gsi_next_nondebug (&gsi); | |
4095 | ||
4096 | /* If the block is totally empty, look for more unsplitting cases. */ | |
4097 | if (gsi_end_p (gsi)) | |
0d228a52 RG |
4098 | { |
4099 | /* For the degenerate case of an infinite loop, bail out. */ | |
afaaa67d | 4100 | if (infinite_empty_loop_p (e_out)) |
81bfd197 | 4101 | return ret; |
0d228a52 | 4102 | |
81bfd197 | 4103 | return ret | cleanup_empty_eh_unsplit (bb, e_out, lp); |
0d228a52 | 4104 | } |
1d65f45c | 4105 | |
1ee0d660 EB |
4106 | /* The block should consist only of a single RESX statement, modulo a |
4107 | preceding call to __builtin_stack_restore if there is no outgoing | |
4108 | edge, since the call can be eliminated in this case. */ | |
1d65f45c | 4109 | resx = gsi_stmt (gsi); |
1ee0d660 EB |
4110 | if (!e_out && gimple_call_builtin_p (resx, BUILT_IN_STACK_RESTORE)) |
4111 | { | |
4112 | gsi_next (&gsi); | |
4113 | resx = gsi_stmt (gsi); | |
4114 | } | |
1d65f45c | 4115 | if (!is_gimple_resx (resx)) |
81bfd197 | 4116 | return ret; |
1d65f45c RH |
4117 | gcc_assert (gsi_one_before_end_p (gsi)); |
4118 | ||
4119 | /* Determine if there are non-EH edges, or resx edges into the handler. */ | |
4120 | has_non_eh_pred = false; | |
4121 | FOR_EACH_EDGE (e, ei, bb->preds) | |
4122 | if (!(e->flags & EDGE_EH)) | |
4123 | has_non_eh_pred = true; | |
4124 | ||
4125 | /* Find the handler that's outer of the empty handler by looking at | |
4126 | where the RESX instruction was vectored. */ | |
4127 | new_lp_nr = lookup_stmt_eh_lp (resx); | |
4128 | new_region = get_eh_region_from_lp_number (new_lp_nr); | |
4129 | ||
4130 | /* If there's no destination region within the current function, | |
4131 | redirection is trivial via removing the throwing statements from | |
4132 | the EH region, removing the EH edges, and allowing the block | |
4133 | to go unreachable. */ | |
4134 | if (new_region == NULL) | |
4135 | { | |
4136 | gcc_assert (e_out == NULL); | |
4137 | for (ei = ei_start (bb->preds); (e = ei_safe_edge (ei)); ) | |
4138 | if (e->flags & EDGE_EH) | |
4139 | { | |
4140 | gimple stmt = last_stmt (e->src); | |
4141 | remove_stmt_from_eh_lp (stmt); | |
4142 | remove_edge (e); | |
4143 | } | |
4144 | else | |
4145 | ei_next (&ei); | |
4146 | goto succeed; | |
4147 | } | |
4148 | ||
4149 | /* If the destination region is a MUST_NOT_THROW, allow the runtime | |
4150 | to handle the abort and allow the blocks to go unreachable. */ | |
4151 | if (new_region->type == ERT_MUST_NOT_THROW) | |
4152 | { | |
4153 | for (ei = ei_start (bb->preds); (e = ei_safe_edge (ei)); ) | |
4154 | if (e->flags & EDGE_EH) | |
4155 | { | |
4156 | gimple stmt = last_stmt (e->src); | |
4157 | remove_stmt_from_eh_lp (stmt); | |
4158 | add_stmt_to_eh_lp (stmt, new_lp_nr); | |
4159 | remove_edge (e); | |
4160 | } | |
4161 | else | |
4162 | ei_next (&ei); | |
4163 | goto succeed; | |
4164 | } | |
4165 | ||
4166 | /* Try to redirect the EH edges and merge the PHIs into the destination | |
4167 | landing pad block. If the merge succeeds, we'll already have redirected | |
4168 | all the EH edges. The handler itself will go unreachable if there were | |
4169 | no normal edges. */ | |
d6063d7f | 4170 | if (cleanup_empty_eh_merge_phis (e_out->dest, bb, e_out, true)) |
1d65f45c RH |
4171 | goto succeed; |
4172 | ||
4173 | /* Finally, if all input edges are EH edges, then we can (potentially) | |
4174 | reduce the number of transfers from the runtime by moving the landing | |
4175 | pad from the original region to the new region. This is a win when | |
4176 | we remove the last CLEANUP region along a particular exception | |
4177 | propagation path. Since nothing changes except for the region with | |
4178 | which the landing pad is associated, the PHI nodes do not need to be | |
4179 | adjusted at all. */ | |
4180 | if (!has_non_eh_pred) | |
4181 | { | |
4182 | cleanup_empty_eh_move_lp (bb, e_out, lp, new_region); | |
4183 | if (dump_file && (dump_flags & TDF_DETAILS)) | |
4184 | fprintf (dump_file, "Empty EH handler %i moved to EH region %i.\n", | |
4185 | lp->index, new_region->index); | |
4186 | ||
4187 | /* ??? The CFG didn't change, but we may have rendered the | |
4188 | old EH region unreachable. Trigger a cleanup there. */ | |
a8da523f JH |
4189 | return true; |
4190 | } | |
1d65f45c | 4191 | |
81bfd197 | 4192 | return ret; |
1d65f45c RH |
4193 | |
4194 | succeed: | |
4195 | if (dump_file && (dump_flags & TDF_DETAILS)) | |
4196 | fprintf (dump_file, "Empty EH handler %i removed.\n", lp->index); | |
4197 | remove_eh_landing_pad (lp); | |
4198 | return true; | |
a8da523f JH |
4199 | } |
4200 | ||
1d65f45c RH |
4201 | /* Do a post-order traversal of the EH region tree. Examine each |
4202 | post_landing_pad block and see if we can eliminate it as empty. */ | |
4203 | ||
4204 | static bool | |
4205 | cleanup_all_empty_eh (void) | |
4206 | { | |
4207 | bool changed = false; | |
4208 | eh_landing_pad lp; | |
4209 | int i; | |
4210 | ||
4211 | for (i = 1; VEC_iterate (eh_landing_pad, cfun->eh->lp_array, i, lp); ++i) | |
4212 | if (lp) | |
4213 | changed |= cleanup_empty_eh (lp); | |
4214 | ||
4215 | return changed; | |
4216 | } | |
a8da523f JH |
4217 | |
4218 | /* Perform cleanups and lowering of exception handling | |
4219 | 1) cleanup regions with handlers doing nothing are optimized out | |
4220 | 2) MUST_NOT_THROW regions that became dead because of 1) are optimized out | |
4221 | 3) Info about regions that contain instructions, and regions | |
4222 | reachable via local EH edges is collected | |
4223 | 4) EH tree is pruned for regions no longer necessary. | |
1d65f45c RH |
4224 | |
4225 | TODO: Push MUST_NOT_THROW regions to the root of the EH tree. | |
4226 | Unify those that have the same failure decl and locus. | |
4227 | */ | |
a8da523f JH |
4228 | |
4229 | static unsigned int | |
66a3e339 | 4230 | execute_cleanup_eh_1 (void) |
a8da523f | 4231 | { |
1d65f45c RH |
4232 | /* Do this first: unsplit_all_eh and cleanup_all_empty_eh can die |
4233 | looking up unreachable landing pads. */ | |
4234 | remove_unreachable_handlers (); | |
a8da523f | 4235 | |
1d65f45c RH |
4236 | /* Watch out for the region tree vanishing due to all unreachable. */ |
4237 | if (cfun->eh->region_tree && optimize) | |
a8da523f | 4238 | { |
1d65f45c | 4239 | bool changed = false; |
a8da523f | 4240 | |
1d65f45c RH |
4241 | changed |= unsplit_all_eh (); |
4242 | changed |= cleanup_all_empty_eh (); | |
4243 | ||
4244 | if (changed) | |
6d07ad98 JH |
4245 | { |
4246 | free_dominance_info (CDI_DOMINATORS); | |
4247 | free_dominance_info (CDI_POST_DOMINATORS); | |
a8da523f | 4248 | |
1d65f45c RH |
4249 | /* We delayed all basic block deletion, as we may have performed |
4250 | cleanups on EH edges while non-EH edges were still present. */ | |
4251 | delete_unreachable_blocks (); | |
a8da523f | 4252 | |
1d65f45c RH |
4253 | /* We manipulated the landing pads. Remove any region that no |
4254 | longer has a landing pad. */ | |
4255 | remove_unreachable_handlers_no_lp (); | |
4256 | ||
4257 | return TODO_cleanup_cfg | TODO_update_ssa_only_virtuals; | |
4258 | } | |
a8da523f JH |
4259 | } |
4260 | ||
1d65f45c RH |
4261 | return 0; |
4262 | } | |
4263 | ||
66a3e339 RG |
4264 | static unsigned int |
4265 | execute_cleanup_eh (void) | |
4266 | { | |
1f9081d1 | 4267 | int ret = execute_cleanup_eh_1 (); |
66a3e339 RG |
4268 | |
4269 | /* If the function no longer needs an EH personality routine | |
4270 | clear it. This exposes cross-language inlining opportunities | |
4271 | and avoids references to a never defined personality routine. */ | |
4272 | if (DECL_FUNCTION_PERSONALITY (current_function_decl) | |
4273 | && function_needs_eh_personality (cfun) != eh_personality_lang) | |
4274 | DECL_FUNCTION_PERSONALITY (current_function_decl) = NULL_TREE; | |
4275 | ||
4276 | return ret; | |
4277 | } | |
4278 | ||
1d65f45c RH |
4279 | static bool |
4280 | gate_cleanup_eh (void) | |
4281 | { | |
1f9081d1 | 4282 | return cfun->eh != NULL && cfun->eh->region_tree != NULL; |
a8da523f JH |
4283 | } |
4284 | ||
4285 | struct gimple_opt_pass pass_cleanup_eh = { | |
4286 | { | |
4287 | GIMPLE_PASS, | |
4288 | "ehcleanup", /* name */ | |
1d65f45c RH |
4289 | gate_cleanup_eh, /* gate */ |
4290 | execute_cleanup_eh, /* execute */ | |
a8da523f JH |
4291 | NULL, /* sub */ |
4292 | NULL, /* next */ | |
4293 | 0, /* static_pass_number */ | |
4294 | TV_TREE_EH, /* tv_id */ | |
4295 | PROP_gimple_lcf, /* properties_required */ | |
4296 | 0, /* properties_provided */ | |
4297 | 0, /* properties_destroyed */ | |
4298 | 0, /* todo_flags_start */ | |
22c5fa5f | 4299 | 0 /* todo_flags_finish */ |
a8da523f JH |
4300 | } |
4301 | }; | |
1d65f45c RH |
4302 | \f |
4303 | /* Verify that BB containing STMT as the last statement, has precisely the | |
4304 | edge that make_eh_edges would create. */ | |
4305 | ||
24e47c76 | 4306 | DEBUG_FUNCTION bool |
1d65f45c RH |
4307 | verify_eh_edges (gimple stmt) |
4308 | { | |
4309 | basic_block bb = gimple_bb (stmt); | |
4310 | eh_landing_pad lp = NULL; | |
4311 | int lp_nr; | |
4312 | edge_iterator ei; | |
4313 | edge e, eh_edge; | |
4314 | ||
4315 | lp_nr = lookup_stmt_eh_lp (stmt); | |
4316 | if (lp_nr > 0) | |
4317 | lp = get_eh_landing_pad_from_number (lp_nr); | |
4318 | ||
4319 | eh_edge = NULL; | |
4320 | FOR_EACH_EDGE (e, ei, bb->succs) | |
4321 | { | |
4322 | if (e->flags & EDGE_EH) | |
4323 | { | |
4324 | if (eh_edge) | |
4325 | { | |
4326 | error ("BB %i has multiple EH edges", bb->index); | |
4327 | return true; | |
4328 | } | |
4329 | else | |
4330 | eh_edge = e; | |
4331 | } | |
4332 | } | |
4333 | ||
4334 | if (lp == NULL) | |
4335 | { | |
4336 | if (eh_edge) | |
4337 | { | |
4338 | error ("BB %i can not throw but has an EH edge", bb->index); | |
4339 | return true; | |
4340 | } | |
4341 | return false; | |
4342 | } | |
4343 | ||
4344 | if (!stmt_could_throw_p (stmt)) | |
4345 | { | |
4346 | error ("BB %i last statement has incorrectly set lp", bb->index); | |
4347 | return true; | |
4348 | } | |
4349 | ||
4350 | if (eh_edge == NULL) | |
4351 | { | |
4352 | error ("BB %i is missing an EH edge", bb->index); | |
4353 | return true; | |
4354 | } | |
4355 | ||
4356 | if (eh_edge->dest != label_to_block (lp->post_landing_pad)) | |
4357 | { | |
4358 | error ("Incorrect EH edge %i->%i", bb->index, eh_edge->dest->index); | |
4359 | return true; | |
4360 | } | |
4361 | ||
4362 | return false; | |
4363 | } | |
4364 | ||
4365 | /* Similarly, but handle GIMPLE_EH_DISPATCH specifically. */ | |
4366 | ||
24e47c76 | 4367 | DEBUG_FUNCTION bool |
1d65f45c RH |
4368 | verify_eh_dispatch_edge (gimple stmt) |
4369 | { | |
4370 | eh_region r; | |
4371 | eh_catch c; | |
4372 | basic_block src, dst; | |
4373 | bool want_fallthru = true; | |
4374 | edge_iterator ei; | |
4375 | edge e, fall_edge; | |
4376 | ||
4377 | r = get_eh_region_from_number (gimple_eh_dispatch_region (stmt)); | |
4378 | src = gimple_bb (stmt); | |
4379 | ||
4380 | FOR_EACH_EDGE (e, ei, src->succs) | |
4381 | gcc_assert (e->aux == NULL); | |
4382 | ||
4383 | switch (r->type) | |
4384 | { | |
4385 | case ERT_TRY: | |
4386 | for (c = r->u.eh_try.first_catch; c ; c = c->next_catch) | |
4387 | { | |
4388 | dst = label_to_block (c->label); | |
4389 | e = find_edge (src, dst); | |
4390 | if (e == NULL) | |
4391 | { | |
4392 | error ("BB %i is missing an edge", src->index); | |
4393 | return true; | |
4394 | } | |
4395 | e->aux = (void *)e; | |
4396 | ||
4397 | /* A catch-all handler doesn't have a fallthru. */ | |
4398 | if (c->type_list == NULL) | |
4399 | { | |
4400 | want_fallthru = false; | |
4401 | break; | |
4402 | } | |
4403 | } | |
4404 | break; | |
4405 | ||
4406 | case ERT_ALLOWED_EXCEPTIONS: | |
4407 | dst = label_to_block (r->u.allowed.label); | |
4408 | e = find_edge (src, dst); | |
4409 | if (e == NULL) | |
4410 | { | |
4411 | error ("BB %i is missing an edge", src->index); | |
4412 | return true; | |
4413 | } | |
4414 | e->aux = (void *)e; | |
4415 | break; | |
4416 | ||
4417 | default: | |
4418 | gcc_unreachable (); | |
4419 | } | |
4420 | ||
4421 | fall_edge = NULL; | |
4422 | FOR_EACH_EDGE (e, ei, src->succs) | |
4423 | { | |
4424 | if (e->flags & EDGE_FALLTHRU) | |
4425 | { | |
4426 | if (fall_edge != NULL) | |
4427 | { | |
4428 | error ("BB %i too many fallthru edges", src->index); | |
4429 | return true; | |
4430 | } | |
4431 | fall_edge = e; | |
4432 | } | |
4433 | else if (e->aux) | |
4434 | e->aux = NULL; | |
4435 | else | |
4436 | { | |
4437 | error ("BB %i has incorrect edge", src->index); | |
4438 | return true; | |
4439 | } | |
4440 | } | |
4441 | if ((fall_edge != NULL) ^ want_fallthru) | |
4442 | { | |
4443 | error ("BB %i has incorrect fallthru edge", src->index); | |
4444 | return true; | |
4445 | } | |
4446 | ||
4447 | return false; | |
4448 | } |