]>
Commit | Line | Data |
---|---|---|
6de9cd9a | 1 | /* Exception handling semantics and decomposition for trees. |
5624e564 | 2 | Copyright (C) 2003-2015 Free Software Foundation, Inc. |
6de9cd9a DN |
3 | |
4 | This file is part of GCC. | |
5 | ||
6 | GCC is free software; you can redistribute it and/or modify | |
7 | it under the terms of the GNU General Public License as published by | |
9dcd6f09 | 8 | the Free Software Foundation; either version 3, or (at your option) |
6de9cd9a DN |
9 | any later version. |
10 | ||
11 | GCC is distributed in the hope that it will be useful, | |
12 | but WITHOUT ANY WARRANTY; without even the implied warranty of | |
13 | MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the | |
14 | GNU General Public License for more details. | |
15 | ||
16 | You should have received a copy of the GNU General Public License | |
9dcd6f09 NC |
17 | along with GCC; see the file COPYING3. If not see |
18 | <http://www.gnu.org/licenses/>. */ | |
6de9cd9a DN |
19 | |
20 | #include "config.h" | |
21 | #include "system.h" | |
22 | #include "coretypes.h" | |
c7131fb2 | 23 | #include "backend.h" |
957060b5 AM |
24 | #include "target.h" |
25 | #include "rtl.h" | |
6de9cd9a | 26 | #include "tree.h" |
c7131fb2 | 27 | #include "gimple.h" |
957060b5 AM |
28 | #include "cfghooks.h" |
29 | #include "tree-pass.h" | |
c7131fb2 | 30 | #include "ssa.h" |
957060b5 AM |
31 | #include "expmed.h" |
32 | #include "insn-config.h" | |
33 | #include "emit-rtl.h" | |
34 | #include "cgraph.h" | |
35 | #include "diagnostic-core.h" | |
c7131fb2 AM |
36 | #include "alias.h" |
37 | #include "fold-const.h" | |
36566b39 | 38 | #include "flags.h" |
36566b39 PK |
39 | #include "dojump.h" |
40 | #include "explow.h" | |
41 | #include "calls.h" | |
36566b39 PK |
42 | #include "varasm.h" |
43 | #include "stmt.h" | |
44 | #include "expr.h" | |
6de9cd9a | 45 | #include "except.h" |
60393bbc AM |
46 | #include "cfganal.h" |
47 | #include "cfgcleanup.h" | |
2fb9a547 AM |
48 | #include "internal-fn.h" |
49 | #include "tree-eh.h" | |
5be5c238 | 50 | #include "gimple-iterator.h" |
442b4905 | 51 | #include "tree-cfg.h" |
442b4905 | 52 | #include "tree-into-ssa.h" |
7a300452 | 53 | #include "tree-ssa.h" |
6de9cd9a | 54 | #include "tree-inline.h" |
6de9cd9a | 55 | #include "langhooks.h" |
7d776ee2 | 56 | #include "cfgloop.h" |
4484a35a | 57 | #include "gimple-low.h" |
726a989a RB |
58 | |
/* In some instances a tree and a gimple need to be stored in a same table,
   i.e. in hash tables.  This is a structure to do this.  Exactly one of
   the members is meaningful at any time, determined by context.  */
typedef union {tree *tp; tree t; gimple *g;} treemple;
6de9cd9a | 62 | |
6de9cd9a DN |
63 | /* Misc functions used in this file. */ |
64 | ||
1d65f45c | 65 | /* Remember and lookup EH landing pad data for arbitrary statements. |
6de9cd9a DN |
66 | Really this means any statement that could_throw_p. We could |
67 | stuff this information into the stmt_ann data structure, but: | |
68 | ||
69 | (1) We absolutely rely on this information being kept until | |
70 | we get to rtl. Once we're done with lowering here, if we lose | |
71 | the information there's no way to recover it! | |
72 | ||
19114537 | 73 | (2) There are many more statements that *cannot* throw as |
6de9cd9a DN |
74 | compared to those that can. We should be saving some amount |
75 | of space by only allocating memory for those that can throw. */ | |
76 | ||
/* Add statement T in function IFUN to landing pad NUM.  */

static void
add_stmt_to_eh_lp_fn (struct function *ifun, gimple *t, int num)
{
  /* NUM is a landing pad index if positive, a MUST_NOT_THROW region
     index if negative; zero means "not recorded" and is invalid here.  */
  gcc_assert (num != 0);

  /* Create the table lazily; most functions have few throwing stmts.  */
  if (!get_eh_throw_stmt_table (ifun))
    set_eh_throw_stmt_table (ifun, hash_map<gimple *, int>::create_ggc (31));

  /* Each statement may be recorded at most once.  */
  gcc_assert (!get_eh_throw_stmt_table (ifun)->put (t, num));
}
1eaba2f2 | 89 | |
/* Add statement T in the current function (cfun) to EH landing pad NUM.  */

void
add_stmt_to_eh_lp (gimple *t, int num)
{
  add_stmt_to_eh_lp_fn (cfun, t, num);
}
97 | ||
98 | /* Add statement T to the single EH landing pad in REGION. */ | |
99 | ||
100 | static void | |
355fe088 | 101 | record_stmt_eh_region (eh_region region, gimple *t) |
1d65f45c RH |
102 | { |
103 | if (region == NULL) | |
104 | return; | |
105 | if (region->type == ERT_MUST_NOT_THROW) | |
106 | add_stmt_to_eh_lp_fn (cfun, t, -region->index); | |
107 | else | |
108 | { | |
109 | eh_landing_pad lp = region->landing_pads; | |
110 | if (lp == NULL) | |
111 | lp = gen_eh_landing_pad (region); | |
112 | else | |
113 | gcc_assert (lp->next_lp == NULL); | |
114 | add_stmt_to_eh_lp_fn (cfun, t, lp->index); | |
115 | } | |
b4660e5a JH |
116 | } |
117 | ||
726a989a | 118 | |
1d65f45c | 119 | /* Remove statement T in function IFUN from its EH landing pad. */ |
726a989a | 120 | |
1eaba2f2 | 121 | bool |
355fe088 | 122 | remove_stmt_from_eh_lp_fn (struct function *ifun, gimple *t) |
1eaba2f2 | 123 | { |
b4660e5a | 124 | if (!get_eh_throw_stmt_table (ifun)) |
1eaba2f2 RH |
125 | return false; |
126 | ||
b086d530 | 127 | if (!get_eh_throw_stmt_table (ifun)->get (t)) |
1eaba2f2 | 128 | return false; |
b086d530 TS |
129 | |
130 | get_eh_throw_stmt_table (ifun)->remove (t); | |
131 | return true; | |
1eaba2f2 RH |
132 | } |
133 | ||
726a989a | 134 | |
1d65f45c RH |
/* Remove statement T in the current function (cfun) from its
   EH landing pad.  */

bool
remove_stmt_from_eh_lp (gimple *t)
{
  return remove_stmt_from_eh_lp_fn (cfun, t);
}
143 | ||
/* Determine if statement T is inside an EH region in function IFUN.
   Positive numbers indicate a landing pad index; negative numbers
   indicate a MUST_NOT_THROW region index; zero indicates that the
   statement is not recorded in the region table.  */

int
lookup_stmt_eh_lp_fn (struct function *ifun, gimple *t)
{
  if (ifun->eh->throw_stmt_table == NULL)
    return 0;

  int *lp_nr = ifun->eh->throw_stmt_table->get (t);
  return lp_nr ? *lp_nr : 0;
}
158 | ||
/* Likewise, but always use the current function.  */

int
lookup_stmt_eh_lp (gimple *t)
{
  /* We can get called from initialized data when -fnon-call-exceptions
     is on; prevent crash.  */
  if (!cfun)
    return 0;
  return lookup_stmt_eh_lp_fn (cfun, t);
}
6de9cd9a | 170 | |
/* First pass of EH node decomposition.  Build up a tree of GIMPLE_TRY_FINALLY
   nodes and LABEL_DECL nodes.  We will use this during the second phase to
   determine if a goto leaves the body of a TRY_FINALLY_EXPR node.  */

struct finally_tree_node
{
  /* When storing a GIMPLE_TRY, we have to record a gimple.  However
     when deciding whether a GOTO to a certain LABEL_DECL (which is a
     tree) leaves the TRY block, its necessary to record a tree in
     this field.  Thus a treemple is used. */
  treemple child;
  /* The innermost GIMPLE_TRY_FINALLY statement enclosing CHILD.  */
  gtry *parent;
};
184 | ||
4a8fb1a1 LC |
/* Hashtable helpers.  */

struct finally_tree_hasher : free_ptr_hash <finally_tree_node>
{
  static inline hashval_t hash (const finally_tree_node *);
  static inline bool equal (const finally_tree_node *,
			    const finally_tree_node *);
};

/* Hash on the child pointer value; the low bits are discarded since
   they carry no information for aligned pointers.  */

inline hashval_t
finally_tree_hasher::hash (const finally_tree_node *v)
{
  return (intptr_t)v->child.t >> 4;
}

/* Entries are equal iff they record the same child pointer.  */

inline bool
finally_tree_hasher::equal (const finally_tree_node *v,
			    const finally_tree_node *c)
{
  return v->child.t == c->child.t;
}
206 | ||
/* Note that this table is *not* marked GTY.  It is short-lived.  */
static hash_table<finally_tree_hasher> *finally_tree;

/* Record that CHILD (a GIMPLE_TRY or a label) is immediately enclosed
   by the GIMPLE_TRY_FINALLY node PARENT.  */

static void
record_in_finally_tree (treemple child, gtry *parent)
{
  struct finally_tree_node *n;
  finally_tree_node **slot;

  n = XNEW (struct finally_tree_node);
  n->child = child;
  n->parent = parent;

  slot = finally_tree->find_slot (n, INSERT);
  /* Each child may be recorded only once.  */
  gcc_assert (!*slot);
  *slot = n;
}
224 | ||
225 | static void | |
355fe088 | 226 | collect_finally_tree (gimple *stmt, gtry *region); |
726a989a | 227 | |
1d65f45c | 228 | /* Go through the gimple sequence. Works with collect_finally_tree to |
726a989a RB |
229 | record all GIMPLE_LABEL and GIMPLE_TRY statements. */ |
230 | ||
231 | static void | |
538dd0b7 | 232 | collect_finally_tree_1 (gimple_seq seq, gtry *region) |
6de9cd9a | 233 | { |
726a989a | 234 | gimple_stmt_iterator gsi; |
6de9cd9a | 235 | |
726a989a RB |
236 | for (gsi = gsi_start (seq); !gsi_end_p (gsi); gsi_next (&gsi)) |
237 | collect_finally_tree (gsi_stmt (gsi), region); | |
238 | } | |
6de9cd9a | 239 | |
static void
collect_finally_tree (gimple *stmt, gtry *region)
{
  treemple temp;

  switch (gimple_code (stmt))
    {
    case GIMPLE_LABEL:
      /* Record which try-finally (REGION) this label lives inside.  */
      temp.t = gimple_label_label (as_a <glabel *> (stmt));
      record_in_finally_tree (temp, region);
      break;

    case GIMPLE_TRY:
      if (gimple_try_kind (stmt) == GIMPLE_TRY_FINALLY)
	{
	  temp.g = stmt;
	  record_in_finally_tree (temp, region);
	  /* The eval part is enclosed by this try-finally itself; the
	     cleanup part still belongs to the outer region.  */
	  collect_finally_tree_1 (gimple_try_eval (stmt),
				  as_a <gtry *> (stmt));
	  collect_finally_tree_1 (gimple_try_cleanup (stmt), region);
	}
      else if (gimple_try_kind (stmt) == GIMPLE_TRY_CATCH)
	{
	  /* A try-catch does not affect goto containment; both parts
	     stay in the current region.  */
	  collect_finally_tree_1 (gimple_try_eval (stmt), region);
	  collect_finally_tree_1 (gimple_try_cleanup (stmt), region);
	}
      break;

    case GIMPLE_CATCH:
      collect_finally_tree_1 (gimple_catch_handler (
				 as_a <gcatch *> (stmt)),
			      region);
      break;

    case GIMPLE_EH_FILTER:
      collect_finally_tree_1 (gimple_eh_filter_failure (stmt), region);
      break;

    case GIMPLE_EH_ELSE:
      {
	geh_else *eh_else_stmt = as_a <geh_else *> (stmt);
	collect_finally_tree_1 (gimple_eh_else_n_body (eh_else_stmt), region);
	collect_finally_tree_1 (gimple_eh_else_e_body (eh_else_stmt), region);
      }
      break;

    default:
      /* A type, a decl, or some kind of statement that we're not
	 interested in.  Don't walk them.  */
      break;
    }
}
292 | ||
726a989a | 293 | |
6de9cd9a DN |
294 | /* Use the finally tree to determine if a jump from START to TARGET |
295 | would leave the try_finally node that START lives in. */ | |
296 | ||
297 | static bool | |
355fe088 | 298 | outside_finally_tree (treemple start, gimple *target) |
6de9cd9a DN |
299 | { |
300 | struct finally_tree_node n, *p; | |
301 | ||
302 | do | |
303 | { | |
304 | n.child = start; | |
c203e8a7 | 305 | p = finally_tree->find (&n); |
6de9cd9a DN |
306 | if (!p) |
307 | return true; | |
726a989a | 308 | start.g = p->parent; |
6de9cd9a | 309 | } |
726a989a | 310 | while (start.g != target); |
6de9cd9a DN |
311 | |
312 | return false; | |
313 | } | |
726a989a RB |
314 | |
315 | /* Second pass of EH node decomposition. Actually transform the GIMPLE_TRY | |
316 | nodes into a set of gotos, magic labels, and eh regions. | |
6de9cd9a DN |
317 | The eh region creation is straight-forward, but frobbing all the gotos |
318 | and such into shape isn't. */ | |
319 | ||
/* The sequence into which we record all EH stuff.  This will be
   placed at the end of the function when we're all done.  */
static gimple_seq eh_seq;

/* Record whether an EH region contains something that can throw,
   indexed by EH region number.  */
static bitmap eh_region_may_contain_throw_map;
1d65f45c | 327 | |
/* The GOTO_QUEUE is an array of GIMPLE_GOTO and GIMPLE_RETURN
   statements that are seen to escape this GIMPLE_TRY_FINALLY node.
   The idea is to record a gimple statement for everything except for
   the conditionals, which get their labels recorded.  Since labels are
   of type 'tree', we need this node to store both gimple and tree
   objects.  REPL_STMT is the sequence used to replace the goto/return
   statement.  CONT_STMT is used to store the statement that allows
   the return/goto to jump to the original destination.  */

struct goto_queue_node
{
  /* The escaping statement, or the address of a label operand for
     conditionals.  */
  treemple stmt;
  /* Source location to put on the replacement branch.  */
  location_t location;
  gimple_seq repl_stmt;
  gimple *cont_stmt;
  /* Index into leh_tf_state::dest_array for gotos; -1 for returns.  */
  int index;
  /* This is used when index >= 0 to indicate that stmt is a label (as
     opposed to a goto stmt).  */
  int is_label;
};
348 | ||
6de9cd9a DN |
/* State of the world while lowering.  */

struct leh_state
{
  /* What's "current" while constructing the eh region tree.  These
     correspond to variables of the same name in cfun->eh, which we
     don't have easy access to.  */
  eh_region cur_region;

  /* What's "current" for the purposes of __builtin_eh_pointer.  For
     a CATCH, this is the associated TRY.  For an EH_FILTER, this is
     the associated ALLOWED_EXCEPTIONS, etc.  */
  eh_region ehp_region;

  /* Processing of TRY_FINALLY requires a bit more state.  This is
     split out into a separate structure so that we don't have to
     copy so much when processing other nodes.  */
  struct leh_tf_state *tf;
};
368 | ||
struct leh_tf_state
{
  /* Pointer to the GIMPLE_TRY_FINALLY node under discussion.  The
     try_finally_expr is the original GIMPLE_TRY_FINALLY.  We need to retain
     this so that outside_finally_tree can reliably reference the tree used
     in the collect_finally_tree data structures.  */
  gtry *try_finally_expr;
  gtry *top_p;

  /* While lowering a top_p usually it is expanded into multiple statements,
     thus we need the following field to store them.  */
  gimple_seq top_p_seq;

  /* The state outside this try_finally node.  */
  struct leh_state *outer;

  /* The exception region created for it.  */
  eh_region region;

  /* The goto queue.  */
  struct goto_queue_node *goto_queue;
  /* Allocated capacity and number of live entries in GOTO_QUEUE.  */
  size_t goto_queue_size;
  size_t goto_queue_active;

  /* Pointer map to help in searching goto_queue when it is large.  */
  hash_map<gimple *, goto_queue_node *> *goto_queue_map;

  /* The set of unique labels seen as entries in the goto queue.  */
  vec<tree> dest_array;

  /* A label to be added at the end of the completed transformed
     sequence.  It will be set if may_fallthru was true *at one time*,
     though subsequent transformations may have cleared that flag.  */
  tree fallthru_label;

  /* True if it is possible to fall out the bottom of the try block.
     Cleared if the fallthru is converted to a goto.  */
  bool may_fallthru;

  /* True if any entry in goto_queue is a GIMPLE_RETURN.  */
  bool may_return;

  /* True if the finally block can receive an exception edge.
     Cleared if the exception case is handled by code duplication.  */
  bool may_throw;
};
415 | ||
538dd0b7 | 416 | static gimple_seq lower_eh_must_not_throw (struct leh_state *, gtry *); |
6de9cd9a | 417 | |
6de9cd9a DN |
418 | /* Search for STMT in the goto queue. Return the replacement, |
419 | or null if the statement isn't in the queue. */ | |
420 | ||
0f547d3d SE |
421 | #define LARGE_GOTO_QUEUE 20 |
422 | ||
355a7673 | 423 | static void lower_eh_constructs_1 (struct leh_state *state, gimple_seq *seq); |
726a989a RB |
424 | |
425 | static gimple_seq | |
426 | find_goto_replacement (struct leh_tf_state *tf, treemple stmt) | |
6de9cd9a | 427 | { |
0f547d3d | 428 | unsigned int i; |
0f547d3d SE |
429 | |
430 | if (tf->goto_queue_active < LARGE_GOTO_QUEUE) | |
431 | { | |
432 | for (i = 0; i < tf->goto_queue_active; i++) | |
726a989a | 433 | if ( tf->goto_queue[i].stmt.g == stmt.g) |
0f547d3d SE |
434 | return tf->goto_queue[i].repl_stmt; |
435 | return NULL; | |
436 | } | |
437 | ||
438 | /* If we have a large number of entries in the goto_queue, create a | |
439 | pointer map and use that for searching. */ | |
440 | ||
441 | if (!tf->goto_queue_map) | |
442 | { | |
355fe088 | 443 | tf->goto_queue_map = new hash_map<gimple *, goto_queue_node *>; |
0f547d3d SE |
444 | for (i = 0; i < tf->goto_queue_active; i++) |
445 | { | |
b787e7a2 TS |
446 | bool existed = tf->goto_queue_map->put (tf->goto_queue[i].stmt.g, |
447 | &tf->goto_queue[i]); | |
448 | gcc_assert (!existed); | |
0f547d3d SE |
449 | } |
450 | } | |
451 | ||
b787e7a2 | 452 | goto_queue_node **slot = tf->goto_queue_map->get (stmt.g); |
0f547d3d | 453 | if (slot != NULL) |
b787e7a2 | 454 | return ((*slot)->repl_stmt); |
0f547d3d SE |
455 | |
456 | return NULL; | |
6de9cd9a DN |
457 | } |
458 | ||
/* A subroutine of replace_goto_queue_1.  Handles the sub-clauses of a
   lowered GIMPLE_COND.  If, by chance, the replacement is a simple goto,
   then we can just splat it in, otherwise we add the new stmts immediately
   after the GIMPLE_COND and redirect.  */

static void
replace_goto_queue_cond_clause (tree *tp, struct leh_tf_state *tf,
				gimple_stmt_iterator *gsi)
{
  tree label;
  gimple_seq new_seq;
  treemple temp;
  location_t loc = gimple_location (gsi_stmt (*gsi));

  /* Look the label operand address up in the goto queue.  */
  temp.tp = tp;
  new_seq = find_goto_replacement (tf, temp);
  if (!new_seq)
    return;

  /* A replacement that is a single goto can be installed by simply
     retargeting the condition's label.  */
  if (gimple_seq_singleton_p (new_seq)
      && gimple_code (gimple_seq_first_stmt (new_seq)) == GIMPLE_GOTO)
    {
      *tp = gimple_goto_dest (gimple_seq_first_stmt (new_seq));
      return;
    }

  label = create_artificial_label (loc);
  /* Set the new label for the GIMPLE_COND */
  *tp = label;

  /* Insert a copy of the replacement sequence after the condition,
     reached through the new label.  A copy is needed since the same
     replacement may be spliced in for other escaping jumps too.  */
  gsi_insert_after (gsi, gimple_build_label (label), GSI_CONTINUE_LINKING);
  gsi_insert_seq_after (gsi, gimple_seq_copy (new_seq), GSI_CONTINUE_LINKING);
}
492 | ||
/* The real work of replace_goto_queue.  Returns with TSI updated to
   point to the next statement.  */

static void replace_goto_queue_stmt_list (gimple_seq *, struct leh_tf_state *);

static void
replace_goto_queue_1 (gimple *stmt, struct leh_tf_state *tf,
		      gimple_stmt_iterator *gsi)
{
  gimple_seq seq;
  treemple temp;
  temp.g = NULL;

  switch (gimple_code (stmt))
    {
    case GIMPLE_GOTO:
    case GIMPLE_RETURN:
      temp.g = stmt;
      seq = find_goto_replacement (tf, temp);
      if (seq)
	{
	  /* Splice in a copy of the replacement and drop the original;
	     after gsi_remove the iterator already points at the next
	     statement, so return without gsi_next.  */
	  gsi_insert_seq_before (gsi, gimple_seq_copy (seq), GSI_SAME_STMT);
	  gsi_remove (gsi, false);
	  return;
	}
      break;

    case GIMPLE_COND:
      /* Operands 2 and 3 are the true and false label operands.  */
      replace_goto_queue_cond_clause (gimple_op_ptr (stmt, 2), tf, gsi);
      replace_goto_queue_cond_clause (gimple_op_ptr (stmt, 3), tf, gsi);
      break;

    case GIMPLE_TRY:
      replace_goto_queue_stmt_list (gimple_try_eval_ptr (stmt), tf);
      replace_goto_queue_stmt_list (gimple_try_cleanup_ptr (stmt), tf);
      break;
    case GIMPLE_CATCH:
      replace_goto_queue_stmt_list (gimple_catch_handler_ptr (
				      as_a <gcatch *> (stmt)),
				    tf);
      break;
    case GIMPLE_EH_FILTER:
      replace_goto_queue_stmt_list (gimple_eh_filter_failure_ptr (stmt), tf);
      break;
    case GIMPLE_EH_ELSE:
      {
	geh_else *eh_else_stmt = as_a <geh_else *> (stmt);
	replace_goto_queue_stmt_list (gimple_eh_else_n_body_ptr (eh_else_stmt),
				      tf);
	replace_goto_queue_stmt_list (gimple_eh_else_e_body_ptr (eh_else_stmt),
				      tf);
      }
      break;

    default:
      /* These won't have gotos in them.  */
      break;
    }

  gsi_next (gsi);
}
554 | ||
726a989a | 555 | /* A subroutine of replace_goto_queue. Handles GIMPLE_SEQ. */ |
6de9cd9a DN |
556 | |
557 | static void | |
355a7673 | 558 | replace_goto_queue_stmt_list (gimple_seq *seq, struct leh_tf_state *tf) |
6de9cd9a | 559 | { |
355a7673 | 560 | gimple_stmt_iterator gsi = gsi_start (*seq); |
726a989a RB |
561 | |
562 | while (!gsi_end_p (gsi)) | |
563 | replace_goto_queue_1 (gsi_stmt (gsi), tf, &gsi); | |
6de9cd9a DN |
564 | } |
565 | ||
/* Replace all goto queue members.  */

static void
replace_goto_queue (struct leh_tf_state *tf)
{
  /* Nothing escaped this try-finally; no rewriting needed.  */
  if (tf->goto_queue_active == 0)
    return;
  replace_goto_queue_stmt_list (&tf->top_p_seq, tf);
  replace_goto_queue_stmt_list (&eh_seq, tf);
}
576 | ||
726a989a RB |
577 | /* Add a new record to the goto queue contained in TF. NEW_STMT is the |
578 | data to be added, IS_LABEL indicates whether NEW_STMT is a label or | |
579 | a gimple return. */ | |
6de9cd9a DN |
580 | |
581 | static void | |
726a989a RB |
582 | record_in_goto_queue (struct leh_tf_state *tf, |
583 | treemple new_stmt, | |
584 | int index, | |
820055a0 DC |
585 | bool is_label, |
586 | location_t location) | |
6de9cd9a | 587 | { |
6de9cd9a | 588 | size_t active, size; |
726a989a | 589 | struct goto_queue_node *q; |
6de9cd9a | 590 | |
0f547d3d SE |
591 | gcc_assert (!tf->goto_queue_map); |
592 | ||
6de9cd9a DN |
593 | active = tf->goto_queue_active; |
594 | size = tf->goto_queue_size; | |
595 | if (active >= size) | |
596 | { | |
597 | size = (size ? size * 2 : 32); | |
598 | tf->goto_queue_size = size; | |
599 | tf->goto_queue | |
858904db | 600 | = XRESIZEVEC (struct goto_queue_node, tf->goto_queue, size); |
6de9cd9a DN |
601 | } |
602 | ||
603 | q = &tf->goto_queue[active]; | |
604 | tf->goto_queue_active = active + 1; | |
19114537 | 605 | |
6de9cd9a | 606 | memset (q, 0, sizeof (*q)); |
726a989a | 607 | q->stmt = new_stmt; |
6de9cd9a | 608 | q->index = index; |
820055a0 | 609 | q->location = location; |
726a989a RB |
610 | q->is_label = is_label; |
611 | } | |
612 | ||
/* Record the LABEL label in the goto queue contained in TF.
   TF is not null.  */

static void
record_in_goto_queue_label (struct leh_tf_state *tf, treemple stmt, tree label,
			    location_t location)
{
  int index;
  treemple temp, new_stmt;

  if (!label)
    return;

  /* Computed and non-local gotos do not get processed.  Given
     their nature we can neither tell whether we've escaped the
     finally block nor redirect them if we knew.  */
  if (TREE_CODE (label) != LABEL_DECL)
    return;

  /* No need to record gotos that don't leave the try block.  */
  temp.t = label;
  if (!outside_finally_tree (temp, tf->try_finally_expr))
    return;

  /* Find or assign INDEX, the slot in DEST_ARRAY identifying this
     destination label.  */
  if (! tf->dest_array.exists ())
    {
      tf->dest_array.create (10);
      tf->dest_array.quick_push (label);
      index = 0;
    }
  else
    {
      int n = tf->dest_array.length ();
      for (index = 0; index < n; ++index)
	if (tf->dest_array[index] == label)
	  break;
      if (index == n)
	tf->dest_array.safe_push (label);
    }

  /* In the case of a GOTO we want to record the destination label,
     since with a GIMPLE_COND we have an easy access to the then/else
     labels.  */
  new_stmt = stmt;
  record_in_goto_queue (tf, new_stmt, index, true, location);
}
659 | ||
/* For any GIMPLE_GOTO or GIMPLE_RETURN, decide whether it leaves a try_finally
   node, and if so record that fact in the goto queue associated with that
   try_finally node.  */

static void
maybe_record_in_goto_queue (struct leh_state *state, gimple *stmt)
{
  struct leh_tf_state *tf = state->tf;
  treemple new_stmt;

  /* Not inside any try-finally: nothing to escape from.  */
  if (!tf)
    return;

  switch (gimple_code (stmt))
    {
    case GIMPLE_COND:
      {
	gcond *cond_stmt = as_a <gcond *> (stmt);
	/* For conditionals record the address of each label operand,
	   so it can later be rewritten in place.  */
	new_stmt.tp = gimple_op_ptr (cond_stmt, 2);
	record_in_goto_queue_label (tf, new_stmt,
				    gimple_cond_true_label (cond_stmt),
				    EXPR_LOCATION (*new_stmt.tp));
	new_stmt.tp = gimple_op_ptr (cond_stmt, 3);
	record_in_goto_queue_label (tf, new_stmt,
				    gimple_cond_false_label (cond_stmt),
				    EXPR_LOCATION (*new_stmt.tp));
      }
      break;
    case GIMPLE_GOTO:
      new_stmt.g = stmt;
      record_in_goto_queue_label (tf, new_stmt, gimple_goto_dest (stmt),
				  gimple_location (stmt));
      break;

    case GIMPLE_RETURN:
      tf->may_return = true;
      new_stmt.g = stmt;
      /* Index -1 marks a return rather than a goto destination.  */
      record_in_goto_queue (tf, new_stmt, -1, false, gimple_location (stmt));
      break;

    default:
      gcc_unreachable ();
    }
}
704 | ||
726a989a | 705 | |
#if CHECKING_P
/* We do not process GIMPLE_SWITCHes for now.  As long as the original source
   was in fact structured, and we've not yet done jump threading, then none
   of the labels will leave outer GIMPLE_TRY_FINALLY nodes.  Verify this.  */

static void
verify_norecord_switch_expr (struct leh_state *state,
			     gswitch *switch_expr)
{
  struct leh_tf_state *tf = state->tf;
  size_t i, n;

  if (!tf)
    return;

  n = gimple_switch_num_labels (switch_expr);

  for (i = 0; i < n; ++i)
    {
      treemple temp;
      tree lab = CASE_LABEL (gimple_switch_label (switch_expr, i));
      temp.t = lab;
      /* No case label may jump out of the enclosing try-finally.  */
      gcc_assert (!outside_finally_tree (temp, tf->try_finally_expr));
    }
}
#else
#define verify_norecord_switch_expr(state, switch_expr)
#endif
734 | ||
8d686507 ILT |
/* Redirect a RETURN_EXPR pointed to by Q to FINLAB.  If MOD is
   non-null, insert it before the new branch.  */

static void
do_return_redirection (struct goto_queue_node *q, tree finlab, gimple_seq mod)
{
  gimple *x;

  /* In the case of a return, the queue node must be a gimple statement.  */
  gcc_assert (!q->is_label);

  /* Note that the return value may have already been computed, e.g.,

	int x;
	int foo (void)
	{
	  x = 0;
	  try {
	    return x;
	  } finally {
	    x++;
	  }
	}

     should return 0, not 1.  We don't have to do anything to make
     this happens because the return value has been placed in the
     RESULT_DECL already.  */

  /* The continuation statement re-issues the original return.  */
  q->cont_stmt = q->stmt.g;

  if (mod)
    gimple_seq_add_seq (&q->repl_stmt, mod);

  /* Branch to the finally block instead of returning directly.  */
  x = gimple_build_goto (finlab);
  gimple_set_location (x, q->location);
  gimple_seq_add_stmt (&q->repl_stmt, x);
}
772 | ||
/* Similar, but easier, for GIMPLE_GOTO.  */

static void
do_goto_redirection (struct goto_queue_node *q, tree finlab, gimple_seq mod,
		     struct leh_tf_state *tf)
{
  ggoto *x;

  /* Only labels come through here; returns use do_return_redirection.  */
  gcc_assert (q->is_label);

  /* The continuation statement jumps to the original destination.  */
  q->cont_stmt = gimple_build_goto (tf->dest_array[q->index]);

  if (mod)
    gimple_seq_add_seq (&q->repl_stmt, mod);

  /* Branch to the finally block instead of the original target.  */
  x = gimple_build_goto (finlab);
  gimple_set_location (x, q->location);
  gimple_seq_add_stmt (&q->repl_stmt, x);
}
792 | ||
1d65f45c RH |
793 | /* Emit a standard landing pad sequence into SEQ for REGION. */ |
794 | ||
795 | static void | |
796 | emit_post_landing_pad (gimple_seq *seq, eh_region region) | |
797 | { | |
798 | eh_landing_pad lp = region->landing_pads; | |
538dd0b7 | 799 | glabel *x; |
1d65f45c RH |
800 | |
801 | if (lp == NULL) | |
802 | lp = gen_eh_landing_pad (region); | |
803 | ||
804 | lp->post_landing_pad = create_artificial_label (UNKNOWN_LOCATION); | |
805 | EH_LANDING_PAD_NR (lp->post_landing_pad) = lp->index; | |
806 | ||
807 | x = gimple_build_label (lp->post_landing_pad); | |
808 | gimple_seq_add_stmt (seq, x); | |
809 | } | |
810 | ||
811 | /* Emit a RESX statement into SEQ for REGION. */ | |
812 | ||
813 | static void | |
814 | emit_resx (gimple_seq *seq, eh_region region) | |
815 | { | |
538dd0b7 | 816 | gresx *x = gimple_build_resx (region->index); |
1d65f45c RH |
817 | gimple_seq_add_stmt (seq, x); |
818 | if (region->outer) | |
819 | record_stmt_eh_region (region->outer, x); | |
820 | } | |
821 | ||
822 | /* Emit an EH_DISPATCH statement into SEQ for REGION. */ | |
823 | ||
824 | static void | |
825 | emit_eh_dispatch (gimple_seq *seq, eh_region region) | |
826 | { | |
538dd0b7 | 827 | geh_dispatch *x = gimple_build_eh_dispatch (region->index); |
1d65f45c RH |
828 | gimple_seq_add_stmt (seq, x); |
829 | } | |
830 | ||
/* Note that the current EH region may contain a throw, or a
   call to a function which itself may contain a throw.

   Marks REGION and each enclosing region in
   eh_region_may_contain_throw_map.  */

static void
note_eh_region_may_contain_throw (eh_region region)
{
  /* bitmap_set_bit returns true only when the bit was newly set, so the
     walk stops as soon as we reach a region that is already marked --
     everything above it must be marked too.  */
  while (bitmap_set_bit (eh_region_may_contain_throw_map, region->index))
    {
      /* A MUST_NOT_THROW region does not propagate exceptions to its
	 enclosing regions, so stop the outward walk there.  */
      if (region->type == ERT_MUST_NOT_THROW)
	break;
      region = region->outer;
      if (region == NULL)
	break;
    }
}
846 | ||
b7da9fd4 RH |
847 | /* Check if REGION has been marked as containing a throw. If REGION is |
848 | NULL, this predicate is false. */ | |
849 | ||
850 | static inline bool | |
851 | eh_region_may_contain_throw (eh_region r) | |
852 | { | |
853 | return r && bitmap_bit_p (eh_region_may_contain_throw_map, r->index); | |
854 | } | |
855 | ||
6de9cd9a DN |
/* We want to transform
	try { body; } catch { stuff; }
   to
	normal_sequence:
	  body;
	  over:
	eh_sequence:
	  landing_pad:
	  stuff;
	  goto over;

   TP is a GIMPLE_TRY node.  REGION is the region whose post_landing_pad
   should be placed before the second operand, or NULL.  OVER is
   an existing label that should be put at the exit, or NULL.

   Returns the rewritten normal sequence; the handler code is appended
   to the file-scope EH sequence eh_seq.  */

static gimple_seq
frob_into_branch_around (gtry *tp, eh_region region, tree over)
{
  gimple *x;
  gimple_seq cleanup, result;
  location_t loc = gimple_location (tp);

  cleanup = gimple_try_cleanup (tp);
  result = gimple_try_eval (tp);

  /* The landing-pad label must precede the handler code in eh_seq.  */
  if (region)
    emit_post_landing_pad (&eh_seq, region);

  /* If the handler can fall off its end, branch back to the exit label
     so control rejoins the normal sequence.  */
  if (gimple_seq_may_fallthru (cleanup))
    {
      if (!over)
	over = create_artificial_label (loc);
      x = gimple_build_goto (over);
      gimple_set_location (x, loc);
      gimple_seq_add_stmt (&cleanup, x);
    }
  gimple_seq_add_seq (&eh_seq, cleanup);

  /* Emit the exit label at the end of the normal sequence.  */
  if (over)
    {
      x = gimple_build_label (over);
      gimple_seq_add_stmt (&result, x);
    }
  return result;
}
901 | ||
/* A subroutine of lower_try_finally.  Duplicate the sequence SEQ,
   re-mapping locals and assigning LOC to statements that lack a
   location.  Make sure to record all new labels found, using
   OUTER_STATE's enclosing try_finally (if any) as their context.  */

static gimple_seq
lower_try_finally_dup_block (gimple_seq seq, struct leh_state *outer_state,
			     location_t loc)
{
  gtry *region = NULL;
  gimple_seq new_seq;
  gimple_stmt_iterator gsi;

  new_seq = copy_gimple_seq_and_replace_locals (seq);

  for (gsi = gsi_start (new_seq); !gsi_end_p (gsi); gsi_next (&gsi))
    {
      gimple *stmt = gsi_stmt (gsi);
      /* We duplicate __builtin_stack_restore at -O0 in the hope of eliminating
	 it on the EH paths.  When it is not eliminated, make it transparent in
	 the debug info.  */
      if (gimple_call_builtin_p (stmt, BUILT_IN_STACK_RESTORE))
	gimple_set_location (stmt, UNKNOWN_LOCATION);
      else if (LOCATION_LOCUS (gimple_location (stmt)) == UNKNOWN_LOCATION)
	{
	  /* Give location-less statements the caller-supplied location,
	     preserving their original lexical block.  */
	  tree block = gimple_block (stmt);
	  gimple_set_location (stmt, loc);
	  gimple_set_block (stmt, block);
	}
    }

  /* Register any labels in the copy with the enclosing try_finally so
     later goto redirection can find them.  */
  if (outer_state->tf)
    region = outer_state->tf->try_finally_expr;
  collect_finally_tree_1 (new_seq, region);

  return new_seq;
}
937 | ||
938 | /* A subroutine of lower_try_finally. Create a fallthru label for | |
939 | the given try_finally state. The only tricky bit here is that | |
940 | we have to make sure to record the label in our outer context. */ | |
941 | ||
942 | static tree | |
943 | lower_try_finally_fallthru_label (struct leh_tf_state *tf) | |
944 | { | |
945 | tree label = tf->fallthru_label; | |
726a989a RB |
946 | treemple temp; |
947 | ||
6de9cd9a DN |
948 | if (!label) |
949 | { | |
c2255bc4 | 950 | label = create_artificial_label (gimple_location (tf->try_finally_expr)); |
6de9cd9a DN |
951 | tf->fallthru_label = label; |
952 | if (tf->outer->tf) | |
726a989a RB |
953 | { |
954 | temp.t = label; | |
955 | record_in_finally_tree (temp, tf->outer->tf->try_finally_expr); | |
956 | } | |
6de9cd9a DN |
957 | } |
958 | return label; | |
959 | } | |
960 | ||
0a35513e AH |
961 | /* A subroutine of lower_try_finally. If FINALLY consits of a |
962 | GIMPLE_EH_ELSE node, return it. */ | |
963 | ||
538dd0b7 | 964 | static inline geh_else * |
0a35513e AH |
965 | get_eh_else (gimple_seq finally) |
966 | { | |
355fe088 | 967 | gimple *x = gimple_seq_first_stmt (finally); |
0a35513e AH |
968 | if (gimple_code (x) == GIMPLE_EH_ELSE) |
969 | { | |
970 | gcc_assert (gimple_seq_singleton_p (finally)); | |
538dd0b7 | 971 | return as_a <geh_else *> (x); |
0a35513e AH |
972 | } |
973 | return NULL; | |
974 | } | |
975 | ||
3b06d379 SB |
/* A subroutine of lower_try_finally.  If the eh_protect_cleanup_actions
   langhook returns non-null, then the language requires that the exception
   path out of a try_finally be treated specially.  To wit: the code within
   the finally block may not itself throw an exception.  We have two choices
   here.  First we can duplicate the finally block and wrap it in a
   must_not_throw region.  Second, we can generate code like

	try {
	  finally_block;
	} catch {
	  if (fintmp == eh_edge)
	    protect_cleanup_actions;
	}

   where "fintmp" is the temporary used in the switch statement generation
   alternative considered below.  For the nonce, we always choose the first
   option.

   THIS_STATE may be null if this is a try-cleanup, not a try-finally.  */

static void
honor_protect_cleanup_actions (struct leh_state *outer_state,
			       struct leh_state *this_state,
			       struct leh_tf_state *tf)
{
  tree protect_cleanup_actions;
  gimple_stmt_iterator gsi;
  bool finally_may_fallthru;
  gimple_seq finally;
  gimple *x;
  geh_mnt *eh_mnt;
  gtry *try_stmt;
  geh_else *eh_else;

  /* First check for nothing to do.  */
  if (lang_hooks.eh_protect_cleanup_actions == NULL)
    return;
  protect_cleanup_actions = lang_hooks.eh_protect_cleanup_actions ();
  if (protect_cleanup_actions == NULL)
    return;

  finally = gimple_try_cleanup (tf->top_p);
  eh_else = get_eh_else (finally);

  /* Duplicate the FINALLY block.  Only need to do this for try-finally,
     and not for cleanups.  If we've got an EH_ELSE, extract it now.  */
  if (eh_else)
    {
      /* The EH path of the EH_ELSE is what gets protected; the normal
	 path remains in place as the cleanup.  */
      finally = gimple_eh_else_e_body (eh_else);
      gimple_try_set_cleanup (tf->top_p, gimple_eh_else_n_body (eh_else));
    }
  else if (this_state)
    finally = lower_try_finally_dup_block (finally, outer_state,
	gimple_location (tf->try_finally_expr));
  finally_may_fallthru = gimple_seq_may_fallthru (finally);

  /* If this cleanup consists of a TRY_CATCH_EXPR with TRY_CATCH_IS_CLEANUP
     set, the handler of the TRY_CATCH_EXPR is another cleanup which ought
     to be in an enclosing scope, but needs to be implemented at this level
     to avoid a nesting violation (see wrap_temporary_cleanups in
     cp/decl.c).  Since it's logically at an outer level, we should call
     terminate before we get to it, so strip it away before adding the
     MUST_NOT_THROW filter.  */
  gsi = gsi_start (finally);
  x = gsi_stmt (gsi);
  if (gimple_code (x) == GIMPLE_TRY
      && gimple_try_kind (x) == GIMPLE_TRY_CATCH
      && gimple_try_catch_is_cleanup (x))
    {
      gsi_insert_seq_before (&gsi, gimple_try_eval (x), GSI_SAME_STMT);
      gsi_remove (&gsi, false);
    }

  /* Wrap the block with protect_cleanup_actions as the action.  */
  eh_mnt = gimple_build_eh_must_not_throw (protect_cleanup_actions);
  try_stmt = gimple_build_try (finally, gimple_seq_alloc_with_stmt (eh_mnt),
			       GIMPLE_TRY_CATCH);
  finally = lower_eh_must_not_throw (outer_state, try_stmt);

  /* Drop all of this into the exception sequence.  */
  emit_post_landing_pad (&eh_seq, tf->region);
  gimple_seq_add_seq (&eh_seq, finally);
  if (finally_may_fallthru)
    emit_resx (&eh_seq, tf->region);

  /* Having now been handled, EH isn't to be considered with
     the rest of the outgoing edges.  */
  tf->may_throw = false;
}
1065 | ||
/* A subroutine of lower_try_finally.  We have determined that there is
   no fallthru edge out of the finally block.  This means that there is
   no outgoing edge corresponding to any incoming edge.  Restructure the
   try_finally node for this special case: redirect all queued returns
   and gotos to a single label at the head of the (single) copy of the
   finally block.  */

static void
lower_try_finally_nofallthru (struct leh_state *state,
			      struct leh_tf_state *tf)
{
  tree lab;
  gimple *x;
  geh_else *eh_else;
  gimple_seq finally;
  struct goto_queue_node *q, *qe;

  lab = create_artificial_label (gimple_location (tf->try_finally_expr));

  /* We expect that tf->top_p is a GIMPLE_TRY. */
  finally = gimple_try_cleanup (tf->top_p);
  tf->top_p_seq = gimple_try_eval (tf->top_p);

  /* The shared entry label for all redirected edges.  */
  x = gimple_build_label (lab);
  gimple_seq_add_stmt (&tf->top_p_seq, x);

  /* Redirect every queued return/goto to the finally label; since the
     finally never falls through, no continuation is needed.  */
  q = tf->goto_queue;
  qe = q + tf->goto_queue_active;
  for (; q < qe; ++q)
    if (q->index < 0)
      do_return_redirection (q, lab, NULL);
    else
      do_goto_redirection (q, lab, NULL, tf);

  replace_goto_queue (tf);

  /* Emit the finally block into the stream.  Lower EH_ELSE at this time.  */
  eh_else = get_eh_else (finally);
  if (eh_else)
    {
      /* Normal path goes inline; the EH path goes into eh_seq behind a
	 landing pad (only if an exception can actually reach here).  */
      finally = gimple_eh_else_n_body (eh_else);
      lower_eh_constructs_1 (state, &finally);
      gimple_seq_add_seq (&tf->top_p_seq, finally);

      if (tf->may_throw)
	{
	  finally = gimple_eh_else_e_body (eh_else);
	  lower_eh_constructs_1 (state, &finally);

	  emit_post_landing_pad (&eh_seq, tf->region);
	  gimple_seq_add_seq (&eh_seq, finally);
	}
    }
  else
    {
      lower_eh_constructs_1 (state, &finally);
      gimple_seq_add_seq (&tf->top_p_seq, finally);

      if (tf->may_throw)
	{
	  /* The EH path simply branches to the shared finally label.  */
	  emit_post_landing_pad (&eh_seq, tf->region);

	  x = gimple_build_goto (lab);
	  gimple_set_location (x, gimple_location (tf->try_finally_expr));
	  gimple_seq_add_stmt (&eh_seq, x);
	}
    }
}
1132 | ||
/* A subroutine of lower_try_finally.  We have determined that there is
   exactly one destination of the finally block.  Restructure the
   try_finally node for this special case: emit a single copy of the
   finally block and route the one incoming edge kind (EH, fallthru,
   return, or goto) through it.  */

static void
lower_try_finally_onedest (struct leh_state *state, struct leh_tf_state *tf)
{
  struct goto_queue_node *q, *qe;
  geh_else *eh_else;
  glabel *label_stmt;
  gimple *x;
  gimple_seq finally;
  gimple_stmt_iterator gsi;
  tree finally_label;
  location_t loc = gimple_location (tf->try_finally_expr);

  finally = gimple_try_cleanup (tf->top_p);
  tf->top_p_seq = gimple_try_eval (tf->top_p);

  /* Since there's only one destination, and the destination edge can only
     either be EH or non-EH, that implies that all of our incoming edges
     are of the same type.  Therefore we can lower EH_ELSE immediately.  */
  eh_else = get_eh_else (finally);
  if (eh_else)
    {
      if (tf->may_throw)
	finally = gimple_eh_else_e_body (eh_else);
      else
	finally = gimple_eh_else_n_body (eh_else);
    }

  lower_eh_constructs_1 (state, &finally);

  /* Give location-less statements the try_finally's location, keeping
     their original lexical block.  */
  for (gsi = gsi_start (finally); !gsi_end_p (gsi); gsi_next (&gsi))
    {
      gimple *stmt = gsi_stmt (gsi);
      if (LOCATION_LOCUS (gimple_location (stmt)) == UNKNOWN_LOCATION)
	{
	  tree block = gimple_block (stmt);
	  gimple_set_location (stmt, gimple_location (tf->try_finally_expr));
	  gimple_set_block (stmt, block);
	}
    }

  if (tf->may_throw)
    {
      /* Only reachable via the exception edge.  Add the given label to
	 the head of the FINALLY block.  Append a RESX at the end.  */
      emit_post_landing_pad (&eh_seq, tf->region);
      gimple_seq_add_seq (&eh_seq, finally);
      emit_resx (&eh_seq, tf->region);
      return;
    }

  if (tf->may_fallthru)
    {
      /* Only reachable via the fallthru edge.  Do nothing but let
	 the two blocks run together; we'll fall out the bottom.  */
      gimple_seq_add_seq (&tf->top_p_seq, finally);
      return;
    }

  /* Reachable only via return or goto: label the finally block and
     redirect the queued edges to it.  */
  finally_label = create_artificial_label (loc);
  label_stmt = gimple_build_label (finally_label);
  gimple_seq_add_stmt (&tf->top_p_seq, label_stmt);

  gimple_seq_add_seq (&tf->top_p_seq, finally);

  q = tf->goto_queue;
  qe = q + tf->goto_queue_active;

  if (tf->may_return)
    {
      /* Reachable by return expressions only.  Redirect them.  */
      for (; q < qe; ++q)
	do_return_redirection (q, finally_label, NULL);
      replace_goto_queue (tf);
    }
  else
    {
      /* Reachable by goto expressions only.  Redirect them.  */
      for (; q < qe; ++q)
	do_goto_redirection (q, finally_label, NULL, tf);
      replace_goto_queue (tf);

      if (tf->dest_array[0] == tf->fallthru_label)
	{
	  /* Reachable by goto to fallthru label only.  Redirect it
	     to the new label (already created, sadly), and do not
	     emit the final branch out, or the fallthru label.  */
	  tf->fallthru_label = NULL;
	  return;
	}
    }

  /* Place the original return/goto to the original destination
     immediately after the finally block.  */
  x = tf->goto_queue[0].cont_stmt;
  gimple_seq_add_stmt (&tf->top_p_seq, x);
  maybe_record_in_goto_queue (state, x);
}
1234 | ||
/* A subroutine of lower_try_finally.  There are multiple edges incoming
   and outgoing from the finally block.  Implement this by duplicating the
   finally block for every destination.  */

static void
lower_try_finally_copy (struct leh_state *state, struct leh_tf_state *tf)
{
  gimple_seq finally;
  gimple_seq new_stmt;
  gimple_seq seq;
  gimple *x;
  geh_else *eh_else;
  tree tmp;
  location_t tf_loc = gimple_location (tf->try_finally_expr);

  finally = gimple_try_cleanup (tf->top_p);

  /* Notice EH_ELSE, and simplify some of the remaining code
     by considering FINALLY to be the normal return path only.  */
  eh_else = get_eh_else (finally);
  if (eh_else)
    finally = gimple_eh_else_n_body (eh_else);

  tf->top_p_seq = gimple_try_eval (tf->top_p);
  new_stmt = NULL;

  if (tf->may_fallthru)
    {
      /* One copy of the finally for the fallthru path, ending in a
	 branch to the shared fallthru label.  */
      seq = lower_try_finally_dup_block (finally, state, tf_loc);
      lower_eh_constructs_1 (state, &seq);
      gimple_seq_add_seq (&new_stmt, seq);

      tmp = lower_try_finally_fallthru_label (tf);
      x = gimple_build_goto (tmp);
      gimple_set_location (x, tf_loc);
      gimple_seq_add_stmt (&new_stmt, x);
    }

  if (tf->may_throw)
    {
      /* We don't need to copy the EH path of EH_ELSE,
	 since it is only emitted once.  */
      if (eh_else)
	seq = gimple_eh_else_e_body (eh_else);
      else
	seq = lower_try_finally_dup_block (finally, state, tf_loc);
      lower_eh_constructs_1 (state, &seq);

      emit_post_landing_pad (&eh_seq, tf->region);
      gimple_seq_add_seq (&eh_seq, seq);
      emit_resx (&eh_seq, tf->region);
    }

  if (tf->goto_queue)
    {
      struct goto_queue_node *q, *qe;
      int return_index, index;
      struct labels_s
      {
	struct goto_queue_node *q;
	tree label;
      } *labels;

      /* Slot return_index holds the return destination; slots 0..n-1
	 hold goto destinations indexed as in tf->dest_array.  */
      return_index = tf->dest_array.length ();
      labels = XCNEWVEC (struct labels_s, return_index + 1);

      /* First pass: pick one representative queue node per destination.  */
      q = tf->goto_queue;
      qe = q + tf->goto_queue_active;
      for (; q < qe; q++)
	{
	  index = q->index < 0 ? return_index : q->index;

	  if (!labels[index].q)
	    labels[index].q = q;
	}

      /* For each used destination: create a label, redirect the
	 representative edge, and emit a fresh copy of the finally block
	 followed by the original continuation.  */
      for (index = 0; index < return_index + 1; index++)
	{
	  tree lab;

	  q = labels[index].q;
	  if (! q)
	    continue;

	  lab = labels[index].label
	    = create_artificial_label (tf_loc);

	  if (index == return_index)
	    do_return_redirection (q, lab, NULL);
	  else
	    do_goto_redirection (q, lab, NULL, tf);

	  x = gimple_build_label (lab);
	  gimple_seq_add_stmt (&new_stmt, x);

	  seq = lower_try_finally_dup_block (finally, state, q->location);
	  lower_eh_constructs_1 (state, &seq);
	  gimple_seq_add_seq (&new_stmt, seq);

	  gimple_seq_add_stmt (&new_stmt, q->cont_stmt);
	  maybe_record_in_goto_queue (state, q->cont_stmt);
	}

      /* Second pass: redirect the remaining (non-representative) edges
	 to the labels created above.  */
      for (q = tf->goto_queue; q < qe; q++)
	{
	  tree lab;

	  index = q->index < 0 ? return_index : q->index;

	  if (labels[index].q == q)
	    continue;

	  lab = labels[index].label;

	  if (index == return_index)
	    do_return_redirection (q, lab, NULL);
	  else
	    do_goto_redirection (q, lab, NULL, tf);
	}

      replace_goto_queue (tf);
      free (labels);
    }

  /* Need to link new stmts after running replace_goto_queue due
     to not wanting to process the same goto stmts twice.  */
  gimple_seq_add_seq (&tf->top_p_seq, new_stmt);
}
1363 | ||
/* A subroutine of lower_try_finally.  There are multiple edges incoming
   and outgoing from the finally block.  Implement this by instrumenting
   each incoming edge and creating a switch statement at the end of the
   finally block that branches to the appropriate destination.  */

static void
lower_try_finally_switch (struct leh_state *state, struct leh_tf_state *tf)
{
  struct goto_queue_node *q, *qe;
  tree finally_tmp, finally_label;
  int return_index, eh_index, fallthru_index;
  int nlabels, ndests, j, last_case_index;
  tree last_case;
  vec<tree> case_label_vec;
  gimple_seq switch_body = NULL;
  gimple *x;
  geh_else *eh_else;
  tree tmp;
  gimple *switch_stmt;
  gimple_seq finally;
  hash_map<tree, gimple *> *cont_map = NULL;
  /* The location of the TRY_FINALLY stmt.  */
  location_t tf_loc = gimple_location (tf->try_finally_expr);
  /* The location of the finally block.  */
  location_t finally_loc;

  finally = gimple_try_cleanup (tf->top_p);
  eh_else = get_eh_else (finally);

  /* Mash the TRY block to the head of the chain.  */
  tf->top_p_seq = gimple_try_eval (tf->top_p);

  /* The location of the finally is either the last stmt in the finally
     block or the location of the TRY_FINALLY itself.  */
  x = gimple_seq_last_stmt (finally);
  finally_loc = x ? gimple_location (x) : tf_loc;

  /* Prepare for switch statement generation.  Destination indices are
     laid out as: gotos (0..nlabels-1), then return, then EH, then
     fallthru -- each later index present only if that edge kind is.  */
  nlabels = tf->dest_array.length ();
  return_index = nlabels;
  eh_index = return_index + tf->may_return;
  fallthru_index = eh_index + (tf->may_throw && !eh_else);
  ndests = fallthru_index + tf->may_fallthru;

  finally_tmp = create_tmp_var (integer_type_node, "finally_tmp");
  finally_label = create_artificial_label (finally_loc);

  /* We use vec::quick_push on case_label_vec throughout this function,
     since we know the size in advance and allocate precisely as much
     space as needed.  */
  case_label_vec.create (ndests);
  last_case = NULL;
  last_case_index = 0;

  /* Begin inserting code for getting to the finally block.  Things
     are done in this order to correspond to the sequence the code is
     laid out.  */

  if (tf->may_fallthru)
    {
      /* Fallthru entry: set the selector, then the switch dispatches
	 to a goto that leaves through the fallthru label.  */
      x = gimple_build_assign (finally_tmp,
			       build_int_cst (integer_type_node,
					      fallthru_index));
      gimple_seq_add_stmt (&tf->top_p_seq, x);

      tmp = build_int_cst (integer_type_node, fallthru_index);
      last_case = build_case_label (tmp, NULL,
				    create_artificial_label (tf_loc));
      case_label_vec.quick_push (last_case);
      last_case_index++;

      x = gimple_build_label (CASE_LABEL (last_case));
      gimple_seq_add_stmt (&switch_body, x);

      tmp = lower_try_finally_fallthru_label (tf);
      x = gimple_build_goto (tmp);
      gimple_set_location (x, tf_loc);
      gimple_seq_add_stmt (&switch_body, x);
    }

  /* For EH_ELSE, emit the exception path (plus resx) now, then
     subsequently we only need consider the normal path.  */
  if (eh_else)
    {
      if (tf->may_throw)
	{
	  finally = gimple_eh_else_e_body (eh_else);
	  lower_eh_constructs_1 (state, &finally);

	  emit_post_landing_pad (&eh_seq, tf->region);
	  gimple_seq_add_seq (&eh_seq, finally);
	  emit_resx (&eh_seq, tf->region);
	}

      finally = gimple_eh_else_n_body (eh_else);
    }
  else if (tf->may_throw)
    {
      /* EH entry: landing pad sets the selector and branches into the
	 shared finally; its switch case resumes with a RESX.  */
      emit_post_landing_pad (&eh_seq, tf->region);

      x = gimple_build_assign (finally_tmp,
			       build_int_cst (integer_type_node, eh_index));
      gimple_seq_add_stmt (&eh_seq, x);

      x = gimple_build_goto (finally_label);
      gimple_set_location (x, tf_loc);
      gimple_seq_add_stmt (&eh_seq, x);

      tmp = build_int_cst (integer_type_node, eh_index);
      last_case = build_case_label (tmp, NULL,
				    create_artificial_label (tf_loc));
      case_label_vec.quick_push (last_case);
      last_case_index++;

      x = gimple_build_label (CASE_LABEL (last_case));
      gimple_seq_add_stmt (&eh_seq, x);
      emit_resx (&eh_seq, tf->region);
    }

  x = gimple_build_label (finally_label);
  gimple_seq_add_stmt (&tf->top_p_seq, x);

  lower_eh_constructs_1 (state, &finally);
  gimple_seq_add_seq (&tf->top_p_seq, finally);

  /* Redirect each incoming goto edge.  */
  q = tf->goto_queue;
  qe = q + tf->goto_queue_active;
  j = last_case_index + tf->may_return;
  /* Prepare the assignments to finally_tmp that are executed upon the
     entrance through a particular edge. */
  for (; q < qe; ++q)
    {
      gimple_seq mod = NULL;
      int switch_id;
      unsigned int case_index;

      if (q->index < 0)
	{
	  x = gimple_build_assign (finally_tmp,
				   build_int_cst (integer_type_node,
						  return_index));
	  gimple_seq_add_stmt (&mod, x);
	  do_return_redirection (q, finally_label, mod);
	  switch_id = return_index;
	}
      else
	{
	  x = gimple_build_assign (finally_tmp,
				   build_int_cst (integer_type_node, q->index));
	  gimple_seq_add_stmt (&mod, x);
	  do_goto_redirection (q, finally_label, mod, tf);
	  switch_id = q->index;
	}

      case_index = j + q->index;
      if (case_label_vec.length () <= case_index || !case_label_vec[case_index])
	{
	  tree case_lab;
	  tmp = build_int_cst (integer_type_node, switch_id);
	  case_lab = build_case_label (tmp, NULL,
				       create_artificial_label (tf_loc));
	  /* We store the cont_stmt in the pointer map, so that we can recover
	     it in the loop below.  */
	  if (!cont_map)
	    cont_map = new hash_map<tree, gimple *>;
	  cont_map->put (case_lab, q->cont_stmt);
	  case_label_vec.quick_push (case_lab);
	}
    }
  /* Emit the switch case bodies for the goto/return destinations: the
     case label followed by the saved continuation statement.  */
  for (j = last_case_index; j < last_case_index + nlabels; j++)
    {
      gimple *cont_stmt;

      last_case = case_label_vec[j];

      gcc_assert (last_case);
      gcc_assert (cont_map);

      cont_stmt = *cont_map->get (last_case);

      x = gimple_build_label (CASE_LABEL (last_case));
      gimple_seq_add_stmt (&switch_body, x);
      gimple_seq_add_stmt (&switch_body, cont_stmt);
      maybe_record_in_goto_queue (state, cont_stmt);
    }
  if (cont_map)
    delete cont_map;

  replace_goto_queue (tf);

  /* Make sure that the last case is the default label, as one is required.
     Then sort the labels, which is also required in GIMPLE.  */
  CASE_LOW (last_case) = NULL;
  tree tem = case_label_vec.pop ();
  gcc_assert (tem == last_case);
  sort_case_labels (case_label_vec);

  /* Build the switch statement, setting last_case to be the default
     label.  */
  switch_stmt = gimple_build_switch (finally_tmp, last_case,
				     case_label_vec);
  gimple_set_location (switch_stmt, finally_loc);

  /* Need to link SWITCH_STMT after running replace_goto_queue
     due to not wanting to process the same goto stmts twice.  */
  gimple_seq_add_stmt (&tf->top_p_seq, switch_stmt);
  gimple_seq_add_seq (&tf->top_p_seq, switch_body);
}
1573 | ||
1574 | /* Decide whether or not we are going to duplicate the finally block. | |
1575 | There are several considerations. | |
1576 | ||
1577 | First, if this is Java, then the finally block contains code | |
1578 | written by the user. It has line numbers associated with it, | |
1579 | so duplicating the block means it's difficult to set a breakpoint. | |
1580 | Since controlling code generation via -g is verboten, we simply | |
1581 | never duplicate code without optimization. | |
1582 | ||
1583 | Second, we'd like to prevent egregious code growth. One way to | |
1584 | do this is to estimate the size of the finally block, multiply | |
1585 | that by the number of copies we'd need to make, and compare against | |
1586 | the estimate of the size of the switch machinery we'd have to add. */ | |
1587 | ||
1588 | static bool | |
0a35513e | 1589 | decide_copy_try_finally (int ndests, bool may_throw, gimple_seq finally) |
6de9cd9a DN |
1590 | { |
1591 | int f_estimate, sw_estimate; | |
538dd0b7 | 1592 | geh_else *eh_else; |
0a35513e AH |
1593 | |
1594 | /* If there's an EH_ELSE involved, the exception path is separate | |
1595 | and really doesn't come into play for this computation. */ | |
1596 | eh_else = get_eh_else (finally); | |
1597 | if (eh_else) | |
1598 | { | |
1599 | ndests -= may_throw; | |
1600 | finally = gimple_eh_else_n_body (eh_else); | |
1601 | } | |
6de9cd9a DN |
1602 | |
1603 | if (!optimize) | |
bccc50d4 JJ |
1604 | { |
1605 | gimple_stmt_iterator gsi; | |
1606 | ||
1607 | if (ndests == 1) | |
1608 | return true; | |
1609 | ||
1610 | for (gsi = gsi_start (finally); !gsi_end_p (gsi); gsi_next (&gsi)) | |
1611 | { | |
cc6fbd80 EB |
1612 | /* Duplicate __builtin_stack_restore in the hope of eliminating it |
1613 | on the EH paths and, consequently, useless cleanups. */ | |
355fe088 | 1614 | gimple *stmt = gsi_stmt (gsi); |
cc6fbd80 EB |
1615 | if (!is_gimple_debug (stmt) |
1616 | && !gimple_clobber_p (stmt) | |
1617 | && !gimple_call_builtin_p (stmt, BUILT_IN_STACK_RESTORE)) | |
bccc50d4 JJ |
1618 | return false; |
1619 | } | |
1620 | return true; | |
1621 | } | |
6de9cd9a DN |
1622 | |
1623 | /* Finally estimate N times, plus N gotos. */ | |
fcb901cd | 1624 | f_estimate = estimate_num_insns_seq (finally, &eni_size_weights); |
6de9cd9a DN |
1625 | f_estimate = (f_estimate + 1) * ndests; |
1626 | ||
1627 | /* Switch statement (cost 10), N variable assignments, N gotos. */ | |
1628 | sw_estimate = 10 + 2 * ndests; | |
1629 | ||
1630 | /* Optimize for size clearly wants our best guess. */ | |
efd8f750 | 1631 | if (optimize_function_for_size_p (cfun)) |
6de9cd9a DN |
1632 | return f_estimate < sw_estimate; |
1633 | ||
1634 | /* ??? These numbers are completely made up so far. */ | |
1635 | if (optimize > 1) | |
7465ed07 | 1636 | return f_estimate < 100 || f_estimate < sw_estimate * 2; |
6de9cd9a | 1637 | else |
7465ed07 | 1638 | return f_estimate < 40 || f_estimate * 2 < sw_estimate * 3; |
6de9cd9a DN |
1639 | } |
1640 | ||
d3f28910 JM |
1641 | /* REG is the enclosing region for a possible cleanup region, or the region |
1642 | itself. Returns TRUE if such a region would be unreachable. | |
1643 | ||
1644 | Cleanup regions within a must-not-throw region aren't actually reachable | |
1645 | even if there are throwing stmts within them, because the personality | |
1646 | routine will call terminate before unwinding. */ | |
1647 | ||
1648 | static bool | |
1649 | cleanup_is_dead_in (eh_region reg) | |
1650 | { | |
1651 | while (reg && reg->type == ERT_CLEANUP) | |
1652 | reg = reg->outer; | |
1653 | return (reg && reg->type == ERT_MUST_NOT_THROW); | |
1654 | } | |
726a989a RB |
1655 | |
1656 | /* A subroutine of lower_eh_constructs_1. Lower a GIMPLE_TRY_FINALLY nodes | |
6de9cd9a | 1657 | to a sequence of labels and blocks, plus the exception region trees |
19114537 | 1658 | that record all the magic. This is complicated by the need to |
6de9cd9a DN |
1659 | arrange for the FINALLY block to be executed on all exits. */ |
1660 | ||
726a989a | 1661 | static gimple_seq |
538dd0b7 | 1662 | lower_try_finally (struct leh_state *state, gtry *tp) |
6de9cd9a DN |
1663 | { |
1664 | struct leh_tf_state this_tf; | |
1665 | struct leh_state this_state; | |
1666 | int ndests; | |
e19d1f06 | 1667 | gimple_seq old_eh_seq; |
6de9cd9a DN |
1668 | |
1669 | /* Process the try block. */ | |
1670 | ||
1671 | memset (&this_tf, 0, sizeof (this_tf)); | |
726a989a | 1672 | this_tf.try_finally_expr = tp; |
6de9cd9a DN |
1673 | this_tf.top_p = tp; |
1674 | this_tf.outer = state; | |
481d1b81 | 1675 | if (using_eh_for_cleanups_p () && !cleanup_is_dead_in (state->cur_region)) |
d3f28910 JM |
1676 | { |
1677 | this_tf.region = gen_eh_region_cleanup (state->cur_region); | |
1678 | this_state.cur_region = this_tf.region; | |
1679 | } | |
6de9cd9a | 1680 | else |
d3f28910 JM |
1681 | { |
1682 | this_tf.region = NULL; | |
1683 | this_state.cur_region = state->cur_region; | |
1684 | } | |
6de9cd9a | 1685 | |
1d65f45c | 1686 | this_state.ehp_region = state->ehp_region; |
6de9cd9a DN |
1687 | this_state.tf = &this_tf; |
1688 | ||
e19d1f06 RH |
1689 | old_eh_seq = eh_seq; |
1690 | eh_seq = NULL; | |
1691 | ||
355a7673 | 1692 | lower_eh_constructs_1 (&this_state, gimple_try_eval_ptr (tp)); |
6de9cd9a DN |
1693 | |
1694 | /* Determine if the try block is escaped through the bottom. */ | |
726a989a | 1695 | this_tf.may_fallthru = gimple_seq_may_fallthru (gimple_try_eval (tp)); |
6de9cd9a DN |
1696 | |
1697 | /* Determine if any exceptions are possible within the try block. */ | |
d3f28910 | 1698 | if (this_tf.region) |
b7da9fd4 | 1699 | this_tf.may_throw = eh_region_may_contain_throw (this_tf.region); |
6de9cd9a | 1700 | if (this_tf.may_throw) |
1d65f45c | 1701 | honor_protect_cleanup_actions (state, &this_state, &this_tf); |
6de9cd9a | 1702 | |
6de9cd9a DN |
1703 | /* Determine how many edges (still) reach the finally block. Or rather, |
1704 | how many destinations are reached by the finally block. Use this to | |
1705 | determine how we process the finally block itself. */ | |
1706 | ||
9771b263 | 1707 | ndests = this_tf.dest_array.length (); |
6de9cd9a DN |
1708 | ndests += this_tf.may_fallthru; |
1709 | ndests += this_tf.may_return; | |
1710 | ndests += this_tf.may_throw; | |
1711 | ||
1712 | /* If the FINALLY block is not reachable, dike it out. */ | |
1713 | if (ndests == 0) | |
726a989a RB |
1714 | { |
1715 | gimple_seq_add_seq (&this_tf.top_p_seq, gimple_try_eval (tp)); | |
1716 | gimple_try_set_cleanup (tp, NULL); | |
1717 | } | |
6de9cd9a DN |
1718 | /* If the finally block doesn't fall through, then any destination |
1719 | we might try to impose there isn't reached either. There may be | |
1720 | some minor amount of cleanup and redirection still needed. */ | |
726a989a | 1721 | else if (!gimple_seq_may_fallthru (gimple_try_cleanup (tp))) |
6de9cd9a DN |
1722 | lower_try_finally_nofallthru (state, &this_tf); |
1723 | ||
1724 | /* We can easily special-case redirection to a single destination. */ | |
1725 | else if (ndests == 1) | |
1726 | lower_try_finally_onedest (state, &this_tf); | |
0a35513e AH |
1727 | else if (decide_copy_try_finally (ndests, this_tf.may_throw, |
1728 | gimple_try_cleanup (tp))) | |
6de9cd9a DN |
1729 | lower_try_finally_copy (state, &this_tf); |
1730 | else | |
1731 | lower_try_finally_switch (state, &this_tf); | |
1732 | ||
1733 | /* If someone requested we add a label at the end of the transformed | |
1734 | block, do so. */ | |
1735 | if (this_tf.fallthru_label) | |
1736 | { | |
726a989a | 1737 | /* This must be reached only if ndests == 0. */ |
355fe088 | 1738 | gimple *x = gimple_build_label (this_tf.fallthru_label); |
726a989a | 1739 | gimple_seq_add_stmt (&this_tf.top_p_seq, x); |
6de9cd9a DN |
1740 | } |
1741 | ||
9771b263 | 1742 | this_tf.dest_array.release (); |
04695783 | 1743 | free (this_tf.goto_queue); |
0f547d3d | 1744 | if (this_tf.goto_queue_map) |
b787e7a2 | 1745 | delete this_tf.goto_queue_map; |
726a989a | 1746 | |
e19d1f06 RH |
1747 | /* If there was an old (aka outer) eh_seq, append the current eh_seq. |
1748 | If there was no old eh_seq, then the append is trivially already done. */ | |
1749 | if (old_eh_seq) | |
1750 | { | |
1751 | if (eh_seq == NULL) | |
1752 | eh_seq = old_eh_seq; | |
1753 | else | |
1754 | { | |
1755 | gimple_seq new_eh_seq = eh_seq; | |
1756 | eh_seq = old_eh_seq; | |
c3284718 | 1757 | gimple_seq_add_seq (&eh_seq, new_eh_seq); |
e19d1f06 RH |
1758 | } |
1759 | } | |
1760 | ||
726a989a | 1761 | return this_tf.top_p_seq; |
6de9cd9a DN |
1762 | } |
1763 | ||
726a989a RB |
1764 | /* A subroutine of lower_eh_constructs_1. Lower a GIMPLE_TRY_CATCH with a |
1765 | list of GIMPLE_CATCH to a sequence of labels and blocks, plus the | |
1766 | exception region trees that records all the magic. */ | |
6de9cd9a | 1767 | |
726a989a | 1768 | static gimple_seq |
538dd0b7 | 1769 | lower_catch (struct leh_state *state, gtry *tp) |
6de9cd9a | 1770 | { |
b7da9fd4 RH |
1771 | eh_region try_region = NULL; |
1772 | struct leh_state this_state = *state; | |
726a989a | 1773 | gimple_stmt_iterator gsi; |
6de9cd9a | 1774 | tree out_label; |
355a7673 | 1775 | gimple_seq new_seq, cleanup; |
355fe088 | 1776 | gimple *x; |
c2255bc4 | 1777 | location_t try_catch_loc = gimple_location (tp); |
6de9cd9a | 1778 | |
b7da9fd4 RH |
1779 | if (flag_exceptions) |
1780 | { | |
1781 | try_region = gen_eh_region_try (state->cur_region); | |
1782 | this_state.cur_region = try_region; | |
1783 | } | |
6de9cd9a | 1784 | |
355a7673 | 1785 | lower_eh_constructs_1 (&this_state, gimple_try_eval_ptr (tp)); |
6de9cd9a | 1786 | |
b7da9fd4 | 1787 | if (!eh_region_may_contain_throw (try_region)) |
1d65f45c RH |
1788 | return gimple_try_eval (tp); |
1789 | ||
1790 | new_seq = NULL; | |
1791 | emit_eh_dispatch (&new_seq, try_region); | |
1792 | emit_resx (&new_seq, try_region); | |
1793 | ||
1794 | this_state.cur_region = state->cur_region; | |
1795 | this_state.ehp_region = try_region; | |
6de9cd9a | 1796 | |
3b445b24 JJ |
1797 | /* Add eh_seq from lowering EH in the cleanup sequence after the cleanup |
1798 | itself, so that e.g. for coverage purposes the nested cleanups don't | |
1799 | appear before the cleanup body. See PR64634 for details. */ | |
1800 | gimple_seq old_eh_seq = eh_seq; | |
1801 | eh_seq = NULL; | |
1802 | ||
6de9cd9a | 1803 | out_label = NULL; |
355a7673 MM |
1804 | cleanup = gimple_try_cleanup (tp); |
1805 | for (gsi = gsi_start (cleanup); | |
1d65f45c RH |
1806 | !gsi_end_p (gsi); |
1807 | gsi_next (&gsi)) | |
6de9cd9a | 1808 | { |
1d65f45c | 1809 | eh_catch c; |
538dd0b7 | 1810 | gcatch *catch_stmt; |
1d65f45c | 1811 | gimple_seq handler; |
6de9cd9a | 1812 | |
538dd0b7 DM |
1813 | catch_stmt = as_a <gcatch *> (gsi_stmt (gsi)); |
1814 | c = gen_eh_region_catch (try_region, gimple_catch_types (catch_stmt)); | |
6de9cd9a | 1815 | |
538dd0b7 | 1816 | handler = gimple_catch_handler (catch_stmt); |
355a7673 | 1817 | lower_eh_constructs_1 (&this_state, &handler); |
6de9cd9a | 1818 | |
1d65f45c RH |
1819 | c->label = create_artificial_label (UNKNOWN_LOCATION); |
1820 | x = gimple_build_label (c->label); | |
1821 | gimple_seq_add_stmt (&new_seq, x); | |
6de9cd9a | 1822 | |
1d65f45c | 1823 | gimple_seq_add_seq (&new_seq, handler); |
6de9cd9a | 1824 | |
1d65f45c | 1825 | if (gimple_seq_may_fallthru (new_seq)) |
6de9cd9a DN |
1826 | { |
1827 | if (!out_label) | |
c2255bc4 | 1828 | out_label = create_artificial_label (try_catch_loc); |
6de9cd9a | 1829 | |
726a989a | 1830 | x = gimple_build_goto (out_label); |
1d65f45c | 1831 | gimple_seq_add_stmt (&new_seq, x); |
6de9cd9a | 1832 | } |
d815d34e MM |
1833 | if (!c->type_list) |
1834 | break; | |
6de9cd9a DN |
1835 | } |
1836 | ||
1d65f45c RH |
1837 | gimple_try_set_cleanup (tp, new_seq); |
1838 | ||
3b445b24 JJ |
1839 | gimple_seq new_eh_seq = eh_seq; |
1840 | eh_seq = old_eh_seq; | |
1841 | gimple_seq ret_seq = frob_into_branch_around (tp, try_region, out_label); | |
1842 | gimple_seq_add_seq (&eh_seq, new_eh_seq); | |
1843 | return ret_seq; | |
6de9cd9a DN |
1844 | } |
1845 | ||
726a989a RB |
1846 | /* A subroutine of lower_eh_constructs_1. Lower a GIMPLE_TRY with a |
1847 | GIMPLE_EH_FILTER to a sequence of labels and blocks, plus the exception | |
6de9cd9a DN |
1848 | region trees that record all the magic. */ |
1849 | ||
726a989a | 1850 | static gimple_seq |
538dd0b7 | 1851 | lower_eh_filter (struct leh_state *state, gtry *tp) |
6de9cd9a | 1852 | { |
b7da9fd4 RH |
1853 | struct leh_state this_state = *state; |
1854 | eh_region this_region = NULL; | |
355fe088 | 1855 | gimple *inner, *x; |
1d65f45c | 1856 | gimple_seq new_seq; |
19114537 | 1857 | |
726a989a RB |
1858 | inner = gimple_seq_first_stmt (gimple_try_cleanup (tp)); |
1859 | ||
b7da9fd4 RH |
1860 | if (flag_exceptions) |
1861 | { | |
1862 | this_region = gen_eh_region_allowed (state->cur_region, | |
1863 | gimple_eh_filter_types (inner)); | |
1864 | this_state.cur_region = this_region; | |
1865 | } | |
19114537 | 1866 | |
355a7673 | 1867 | lower_eh_constructs_1 (&this_state, gimple_try_eval_ptr (tp)); |
6de9cd9a | 1868 | |
b7da9fd4 | 1869 | if (!eh_region_may_contain_throw (this_region)) |
1d65f45c RH |
1870 | return gimple_try_eval (tp); |
1871 | ||
1872 | new_seq = NULL; | |
1873 | this_state.cur_region = state->cur_region; | |
1874 | this_state.ehp_region = this_region; | |
1875 | ||
1876 | emit_eh_dispatch (&new_seq, this_region); | |
1877 | emit_resx (&new_seq, this_region); | |
1878 | ||
1879 | this_region->u.allowed.label = create_artificial_label (UNKNOWN_LOCATION); | |
1880 | x = gimple_build_label (this_region->u.allowed.label); | |
1881 | gimple_seq_add_stmt (&new_seq, x); | |
1882 | ||
355a7673 | 1883 | lower_eh_constructs_1 (&this_state, gimple_eh_filter_failure_ptr (inner)); |
1d65f45c RH |
1884 | gimple_seq_add_seq (&new_seq, gimple_eh_filter_failure (inner)); |
1885 | ||
1886 | gimple_try_set_cleanup (tp, new_seq); | |
6de9cd9a | 1887 | |
1d65f45c RH |
1888 | return frob_into_branch_around (tp, this_region, NULL); |
1889 | } | |
1890 | ||
1891 | /* A subroutine of lower_eh_constructs_1. Lower a GIMPLE_TRY with | |
1892 | an GIMPLE_EH_MUST_NOT_THROW to a sequence of labels and blocks, | |
1893 | plus the exception region trees that record all the magic. */ | |
1894 | ||
1895 | static gimple_seq | |
538dd0b7 | 1896 | lower_eh_must_not_throw (struct leh_state *state, gtry *tp) |
1d65f45c | 1897 | { |
b7da9fd4 | 1898 | struct leh_state this_state = *state; |
1d65f45c | 1899 | |
b7da9fd4 RH |
1900 | if (flag_exceptions) |
1901 | { | |
355fe088 | 1902 | gimple *inner = gimple_seq_first_stmt (gimple_try_cleanup (tp)); |
b7da9fd4 | 1903 | eh_region this_region; |
1d65f45c | 1904 | |
b7da9fd4 RH |
1905 | this_region = gen_eh_region_must_not_throw (state->cur_region); |
1906 | this_region->u.must_not_throw.failure_decl | |
538dd0b7 DM |
1907 | = gimple_eh_must_not_throw_fndecl ( |
1908 | as_a <geh_mnt *> (inner)); | |
c16fd676 RB |
1909 | this_region->u.must_not_throw.failure_loc |
1910 | = LOCATION_LOCUS (gimple_location (tp)); | |
1d65f45c | 1911 | |
b7da9fd4 RH |
1912 | /* In order to get mangling applied to this decl, we must mark it |
1913 | used now. Otherwise, pass_ipa_free_lang_data won't think it | |
1914 | needs to happen. */ | |
1915 | TREE_USED (this_region->u.must_not_throw.failure_decl) = 1; | |
1d65f45c | 1916 | |
b7da9fd4 RH |
1917 | this_state.cur_region = this_region; |
1918 | } | |
6de9cd9a | 1919 | |
355a7673 | 1920 | lower_eh_constructs_1 (&this_state, gimple_try_eval_ptr (tp)); |
6de9cd9a | 1921 | |
1d65f45c | 1922 | return gimple_try_eval (tp); |
6de9cd9a DN |
1923 | } |
1924 | ||
1925 | /* Implement a cleanup expression. This is similar to try-finally, | |
1926 | except that we only execute the cleanup block for exception edges. */ | |
1927 | ||
726a989a | 1928 | static gimple_seq |
538dd0b7 | 1929 | lower_cleanup (struct leh_state *state, gtry *tp) |
6de9cd9a | 1930 | { |
b7da9fd4 RH |
1931 | struct leh_state this_state = *state; |
1932 | eh_region this_region = NULL; | |
6de9cd9a | 1933 | struct leh_tf_state fake_tf; |
726a989a | 1934 | gimple_seq result; |
d3f28910 | 1935 | bool cleanup_dead = cleanup_is_dead_in (state->cur_region); |
6de9cd9a | 1936 | |
d3f28910 | 1937 | if (flag_exceptions && !cleanup_dead) |
6de9cd9a | 1938 | { |
b7da9fd4 RH |
1939 | this_region = gen_eh_region_cleanup (state->cur_region); |
1940 | this_state.cur_region = this_region; | |
6de9cd9a DN |
1941 | } |
1942 | ||
355a7673 | 1943 | lower_eh_constructs_1 (&this_state, gimple_try_eval_ptr (tp)); |
6de9cd9a | 1944 | |
d3f28910 | 1945 | if (cleanup_dead || !eh_region_may_contain_throw (this_region)) |
1d65f45c | 1946 | return gimple_try_eval (tp); |
6de9cd9a DN |
1947 | |
1948 | /* Build enough of a try-finally state so that we can reuse | |
1949 | honor_protect_cleanup_actions. */ | |
1950 | memset (&fake_tf, 0, sizeof (fake_tf)); | |
c2255bc4 | 1951 | fake_tf.top_p = fake_tf.try_finally_expr = tp; |
6de9cd9a DN |
1952 | fake_tf.outer = state; |
1953 | fake_tf.region = this_region; | |
726a989a | 1954 | fake_tf.may_fallthru = gimple_seq_may_fallthru (gimple_try_eval (tp)); |
6de9cd9a DN |
1955 | fake_tf.may_throw = true; |
1956 | ||
6de9cd9a DN |
1957 | honor_protect_cleanup_actions (state, NULL, &fake_tf); |
1958 | ||
1959 | if (fake_tf.may_throw) | |
1960 | { | |
1961 | /* In this case honor_protect_cleanup_actions had nothing to do, | |
1962 | and we should process this normally. */ | |
355a7673 | 1963 | lower_eh_constructs_1 (state, gimple_try_cleanup_ptr (tp)); |
1d65f45c RH |
1964 | result = frob_into_branch_around (tp, this_region, |
1965 | fake_tf.fallthru_label); | |
6de9cd9a DN |
1966 | } |
1967 | else | |
1968 | { | |
1969 | /* In this case honor_protect_cleanup_actions did nearly all of | |
1970 | the work. All we have left is to append the fallthru_label. */ | |
1971 | ||
726a989a | 1972 | result = gimple_try_eval (tp); |
6de9cd9a DN |
1973 | if (fake_tf.fallthru_label) |
1974 | { | |
355fe088 | 1975 | gimple *x = gimple_build_label (fake_tf.fallthru_label); |
726a989a | 1976 | gimple_seq_add_stmt (&result, x); |
6de9cd9a DN |
1977 | } |
1978 | } | |
726a989a | 1979 | return result; |
6de9cd9a DN |
1980 | } |
1981 | ||
1d65f45c | 1982 | /* Main loop for lowering eh constructs. Also moves gsi to the next |
726a989a | 1983 | statement. */ |
6de9cd9a DN |
1984 | |
1985 | static void | |
726a989a | 1986 | lower_eh_constructs_2 (struct leh_state *state, gimple_stmt_iterator *gsi) |
6de9cd9a | 1987 | { |
726a989a | 1988 | gimple_seq replace; |
355fe088 TS |
1989 | gimple *x; |
1990 | gimple *stmt = gsi_stmt (*gsi); | |
6de9cd9a | 1991 | |
726a989a | 1992 | switch (gimple_code (stmt)) |
6de9cd9a | 1993 | { |
726a989a | 1994 | case GIMPLE_CALL: |
1d65f45c RH |
1995 | { |
1996 | tree fndecl = gimple_call_fndecl (stmt); | |
1997 | tree rhs, lhs; | |
1998 | ||
1999 | if (fndecl && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL) | |
2000 | switch (DECL_FUNCTION_CODE (fndecl)) | |
2001 | { | |
2002 | case BUILT_IN_EH_POINTER: | |
2003 | /* The front end may have generated a call to | |
2004 | __builtin_eh_pointer (0) within a catch region. Replace | |
2005 | this zero argument with the current catch region number. */ | |
2006 | if (state->ehp_region) | |
2007 | { | |
413581ba RG |
2008 | tree nr = build_int_cst (integer_type_node, |
2009 | state->ehp_region->index); | |
1d65f45c RH |
2010 | gimple_call_set_arg (stmt, 0, nr); |
2011 | } | |
2012 | else | |
2013 | { | |
2014 | /* The user has dome something silly. Remove it. */ | |
9a9d280e | 2015 | rhs = null_pointer_node; |
1d65f45c RH |
2016 | goto do_replace; |
2017 | } | |
2018 | break; | |
2019 | ||
2020 | case BUILT_IN_EH_FILTER: | |
2021 | /* ??? This should never appear, but since it's a builtin it | |
2022 | is accessible to abuse by users. Just remove it and | |
2023 | replace the use with the arbitrary value zero. */ | |
2024 | rhs = build_int_cst (TREE_TYPE (TREE_TYPE (fndecl)), 0); | |
2025 | do_replace: | |
2026 | lhs = gimple_call_lhs (stmt); | |
2027 | x = gimple_build_assign (lhs, rhs); | |
2028 | gsi_insert_before (gsi, x, GSI_SAME_STMT); | |
2029 | /* FALLTHRU */ | |
2030 | ||
2031 | case BUILT_IN_EH_COPY_VALUES: | |
2032 | /* Likewise this should not appear. Remove it. */ | |
2033 | gsi_remove (gsi, true); | |
2034 | return; | |
2035 | ||
2036 | default: | |
2037 | break; | |
2038 | } | |
2039 | } | |
2040 | /* FALLTHRU */ | |
2041 | ||
726a989a | 2042 | case GIMPLE_ASSIGN: |
ba4d8f9d RG |
2043 | /* If the stmt can throw use a new temporary for the assignment |
2044 | to a LHS. This makes sure the old value of the LHS is | |
87cd4259 | 2045 | available on the EH edge. Only do so for statements that |
073a8998 | 2046 | potentially fall through (no noreturn calls e.g.), otherwise |
87cd4259 | 2047 | this new assignment might create fake fallthru regions. */ |
ba4d8f9d RG |
2048 | if (stmt_could_throw_p (stmt) |
2049 | && gimple_has_lhs (stmt) | |
87cd4259 | 2050 | && gimple_stmt_may_fallthru (stmt) |
ba4d8f9d RG |
2051 | && !tree_could_throw_p (gimple_get_lhs (stmt)) |
2052 | && is_gimple_reg_type (TREE_TYPE (gimple_get_lhs (stmt)))) | |
2053 | { | |
2054 | tree lhs = gimple_get_lhs (stmt); | |
b731b390 | 2055 | tree tmp = create_tmp_var (TREE_TYPE (lhs)); |
355fe088 | 2056 | gimple *s = gimple_build_assign (lhs, tmp); |
ba4d8f9d RG |
2057 | gimple_set_location (s, gimple_location (stmt)); |
2058 | gimple_set_block (s, gimple_block (stmt)); | |
2059 | gimple_set_lhs (stmt, tmp); | |
2060 | if (TREE_CODE (TREE_TYPE (tmp)) == COMPLEX_TYPE | |
2061 | || TREE_CODE (TREE_TYPE (tmp)) == VECTOR_TYPE) | |
2062 | DECL_GIMPLE_REG_P (tmp) = 1; | |
2063 | gsi_insert_after (gsi, s, GSI_SAME_STMT); | |
2064 | } | |
6de9cd9a | 2065 | /* Look for things that can throw exceptions, and record them. */ |
726a989a | 2066 | if (state->cur_region && stmt_could_throw_p (stmt)) |
6de9cd9a | 2067 | { |
726a989a | 2068 | record_stmt_eh_region (state->cur_region, stmt); |
6de9cd9a | 2069 | note_eh_region_may_contain_throw (state->cur_region); |
6de9cd9a DN |
2070 | } |
2071 | break; | |
2072 | ||
726a989a RB |
2073 | case GIMPLE_COND: |
2074 | case GIMPLE_GOTO: | |
2075 | case GIMPLE_RETURN: | |
2076 | maybe_record_in_goto_queue (state, stmt); | |
6de9cd9a DN |
2077 | break; |
2078 | ||
726a989a | 2079 | case GIMPLE_SWITCH: |
538dd0b7 | 2080 | verify_norecord_switch_expr (state, as_a <gswitch *> (stmt)); |
6de9cd9a DN |
2081 | break; |
2082 | ||
726a989a | 2083 | case GIMPLE_TRY: |
538dd0b7 DM |
2084 | { |
2085 | gtry *try_stmt = as_a <gtry *> (stmt); | |
2086 | if (gimple_try_kind (try_stmt) == GIMPLE_TRY_FINALLY) | |
2087 | replace = lower_try_finally (state, try_stmt); | |
2088 | else | |
2089 | { | |
2090 | x = gimple_seq_first_stmt (gimple_try_cleanup (try_stmt)); | |
2091 | if (!x) | |
6728ee79 | 2092 | { |
538dd0b7 DM |
2093 | replace = gimple_try_eval (try_stmt); |
2094 | lower_eh_constructs_1 (state, &replace); | |
2095 | } | |
2096 | else | |
2097 | switch (gimple_code (x)) | |
2098 | { | |
6728ee79 | 2099 | case GIMPLE_CATCH: |
538dd0b7 DM |
2100 | replace = lower_catch (state, try_stmt); |
2101 | break; | |
6728ee79 | 2102 | case GIMPLE_EH_FILTER: |
538dd0b7 DM |
2103 | replace = lower_eh_filter (state, try_stmt); |
2104 | break; | |
6728ee79 | 2105 | case GIMPLE_EH_MUST_NOT_THROW: |
538dd0b7 DM |
2106 | replace = lower_eh_must_not_throw (state, try_stmt); |
2107 | break; | |
0a35513e | 2108 | case GIMPLE_EH_ELSE: |
538dd0b7 DM |
2109 | /* This code is only valid with GIMPLE_TRY_FINALLY. */ |
2110 | gcc_unreachable (); | |
6728ee79 | 2111 | default: |
538dd0b7 DM |
2112 | replace = lower_cleanup (state, try_stmt); |
2113 | break; | |
2114 | } | |
2115 | } | |
2116 | } | |
726a989a RB |
2117 | |
2118 | /* Remove the old stmt and insert the transformed sequence | |
2119 | instead. */ | |
2120 | gsi_insert_seq_before (gsi, replace, GSI_SAME_STMT); | |
2121 | gsi_remove (gsi, true); | |
2122 | ||
2123 | /* Return since we don't want gsi_next () */ | |
2124 | return; | |
6de9cd9a | 2125 | |
0a35513e AH |
2126 | case GIMPLE_EH_ELSE: |
2127 | /* We should be eliminating this in lower_try_finally et al. */ | |
2128 | gcc_unreachable (); | |
2129 | ||
6de9cd9a DN |
2130 | default: |
2131 | /* A type, a decl, or some kind of statement that we're not | |
2132 | interested in. Don't walk them. */ | |
2133 | break; | |
2134 | } | |
726a989a RB |
2135 | |
2136 | gsi_next (gsi); | |
2137 | } | |
2138 | ||
2139 | /* A helper to unwrap a gimple_seq and feed stmts to lower_eh_constructs_2. */ | |
2140 | ||
2141 | static void | |
355a7673 | 2142 | lower_eh_constructs_1 (struct leh_state *state, gimple_seq *pseq) |
726a989a RB |
2143 | { |
2144 | gimple_stmt_iterator gsi; | |
355a7673 | 2145 | for (gsi = gsi_start (*pseq); !gsi_end_p (gsi);) |
726a989a | 2146 | lower_eh_constructs_2 (state, &gsi); |
6de9cd9a DN |
2147 | } |
2148 | ||
17795822 TS |
2149 | namespace { |
2150 | ||
2151 | const pass_data pass_data_lower_eh = | |
be55bfe6 TS |
2152 | { |
2153 | GIMPLE_PASS, /* type */ | |
2154 | "eh", /* name */ | |
2155 | OPTGROUP_NONE, /* optinfo_flags */ | |
be55bfe6 TS |
2156 | TV_TREE_EH, /* tv_id */ |
2157 | PROP_gimple_lcf, /* properties_required */ | |
2158 | PROP_gimple_leh, /* properties_provided */ | |
2159 | 0, /* properties_destroyed */ | |
2160 | 0, /* todo_flags_start */ | |
2161 | 0, /* todo_flags_finish */ | |
2162 | }; | |
2163 | ||
17795822 | 2164 | class pass_lower_eh : public gimple_opt_pass |
be55bfe6 TS |
2165 | { |
2166 | public: | |
2167 | pass_lower_eh (gcc::context *ctxt) | |
2168 | : gimple_opt_pass (pass_data_lower_eh, ctxt) | |
2169 | {} | |
2170 | ||
2171 | /* opt_pass methods: */ | |
2172 | virtual unsigned int execute (function *); | |
2173 | ||
2174 | }; // class pass_lower_eh | |
2175 | ||
2176 | unsigned int | |
2177 | pass_lower_eh::execute (function *fun) | |
6de9cd9a DN |
2178 | { |
2179 | struct leh_state null_state; | |
1d65f45c | 2180 | gimple_seq bodyp; |
726a989a | 2181 | |
1d65f45c RH |
2182 | bodyp = gimple_body (current_function_decl); |
2183 | if (bodyp == NULL) | |
2184 | return 0; | |
6de9cd9a | 2185 | |
c203e8a7 | 2186 | finally_tree = new hash_table<finally_tree_hasher> (31); |
b7da9fd4 | 2187 | eh_region_may_contain_throw_map = BITMAP_ALLOC (NULL); |
1d65f45c | 2188 | memset (&null_state, 0, sizeof (null_state)); |
6de9cd9a | 2189 | |
726a989a | 2190 | collect_finally_tree_1 (bodyp, NULL); |
355a7673 MM |
2191 | lower_eh_constructs_1 (&null_state, &bodyp); |
2192 | gimple_set_body (current_function_decl, bodyp); | |
6de9cd9a | 2193 | |
1d65f45c RH |
2194 | /* We assume there's a return statement, or something, at the end of |
2195 | the function, and thus ploping the EH sequence afterward won't | |
2196 | change anything. */ | |
2197 | gcc_assert (!gimple_seq_may_fallthru (bodyp)); | |
2198 | gimple_seq_add_seq (&bodyp, eh_seq); | |
2199 | ||
2200 | /* We assume that since BODYP already existed, adding EH_SEQ to it | |
2201 | didn't change its value, and we don't have to re-set the function. */ | |
2202 | gcc_assert (bodyp == gimple_body (current_function_decl)); | |
6de9cd9a | 2203 | |
c203e8a7 TS |
2204 | delete finally_tree; |
2205 | finally_tree = NULL; | |
b7da9fd4 | 2206 | BITMAP_FREE (eh_region_may_contain_throw_map); |
1d65f45c | 2207 | eh_seq = NULL; |
f9417da1 RG |
2208 | |
2209 | /* If this function needs a language specific EH personality routine | |
2210 | and the frontend didn't already set one do so now. */ | |
be55bfe6 | 2211 | if (function_needs_eh_personality (fun) == eh_personality_lang |
f9417da1 RG |
2212 | && !DECL_FUNCTION_PERSONALITY (current_function_decl)) |
2213 | DECL_FUNCTION_PERSONALITY (current_function_decl) | |
2214 | = lang_hooks.eh_personality (); | |
2215 | ||
c2924966 | 2216 | return 0; |
6de9cd9a DN |
2217 | } |
2218 | ||
17795822 TS |
2219 | } // anon namespace |
2220 | ||
27a4cd48 DM |
2221 | gimple_opt_pass * |
2222 | make_pass_lower_eh (gcc::context *ctxt) | |
2223 | { | |
2224 | return new pass_lower_eh (ctxt); | |
2225 | } | |
6de9cd9a | 2226 | \f |
1d65f45c RH |
2227 | /* Create the multiple edges from an EH_DISPATCH statement to all of |
2228 | the possible handlers for its EH region. Return true if there's | |
2229 | no fallthru edge; false if there is. */ | |
6de9cd9a | 2230 | |
1d65f45c | 2231 | bool |
538dd0b7 | 2232 | make_eh_dispatch_edges (geh_dispatch *stmt) |
6de9cd9a | 2233 | { |
1d65f45c RH |
2234 | eh_region r; |
2235 | eh_catch c; | |
6de9cd9a DN |
2236 | basic_block src, dst; |
2237 | ||
1d65f45c | 2238 | r = get_eh_region_from_number (gimple_eh_dispatch_region (stmt)); |
726a989a | 2239 | src = gimple_bb (stmt); |
6de9cd9a | 2240 | |
1d65f45c RH |
2241 | switch (r->type) |
2242 | { | |
2243 | case ERT_TRY: | |
2244 | for (c = r->u.eh_try.first_catch; c ; c = c->next_catch) | |
2245 | { | |
2246 | dst = label_to_block (c->label); | |
2247 | make_edge (src, dst, 0); | |
19114537 | 2248 | |
1d65f45c RH |
2249 | /* A catch-all handler doesn't have a fallthru. */ |
2250 | if (c->type_list == NULL) | |
2251 | return false; | |
2252 | } | |
2253 | break; | |
a8ee227c | 2254 | |
1d65f45c RH |
2255 | case ERT_ALLOWED_EXCEPTIONS: |
2256 | dst = label_to_block (r->u.allowed.label); | |
2257 | make_edge (src, dst, 0); | |
2258 | break; | |
2259 | ||
2260 | default: | |
2261 | gcc_unreachable (); | |
2262 | } | |
2263 | ||
2264 | return true; | |
a8ee227c JH |
2265 | } |
2266 | ||
1d65f45c RH |
2267 | /* Create the single EH edge from STMT to its nearest landing pad, |
2268 | if there is such a landing pad within the current function. */ | |
2269 | ||
6de9cd9a | 2270 | void |
355fe088 | 2271 | make_eh_edges (gimple *stmt) |
6de9cd9a | 2272 | { |
1d65f45c RH |
2273 | basic_block src, dst; |
2274 | eh_landing_pad lp; | |
2275 | int lp_nr; | |
6de9cd9a | 2276 | |
1d65f45c RH |
2277 | lp_nr = lookup_stmt_eh_lp (stmt); |
2278 | if (lp_nr <= 0) | |
2279 | return; | |
6de9cd9a | 2280 | |
1d65f45c RH |
2281 | lp = get_eh_landing_pad_from_number (lp_nr); |
2282 | gcc_assert (lp != NULL); | |
a203a221 | 2283 | |
1d65f45c RH |
2284 | src = gimple_bb (stmt); |
2285 | dst = label_to_block (lp->post_landing_pad); | |
2286 | make_edge (src, dst, EDGE_EH); | |
6de9cd9a DN |
2287 | } |
2288 | ||
1d65f45c RH |
/* Do the work in redirecting EDGE_IN to NEW_BB within the EH region tree;
   do not actually perform the final edge redirection.

   CHANGE_REGION is true when we're being called from cleanup_empty_eh and
   we intend to change the destination EH region as well; this means
   EH_LANDING_PAD_NR must already be set on the destination block label.
   If false, we're being called from generic cfg manipulation code and we
   should preserve our place within the region tree.  */

static void
redirect_eh_edge_1 (edge edge_in, basic_block new_bb, bool change_region)
{
  eh_landing_pad old_lp, new_lp;
  basic_block old_bb;
  gimple *throw_stmt;
  int old_lp_nr, new_lp_nr;
  tree old_label, new_label;
  edge_iterator ei;
  edge e;

  /* The current destination must itself be a landing pad: its block
     label carries a positive EH_LANDING_PAD_NR.  */
  old_bb = edge_in->dest;
  old_label = gimple_block_label (old_bb);
  old_lp_nr = EH_LANDING_PAD_NR (old_label);
  gcc_assert (old_lp_nr > 0);
  old_lp = get_eh_landing_pad_from_number (old_lp_nr);

  /* The throwing statement is the last statement of the source block,
     and must currently be associated with OLD_LP.  */
  throw_stmt = last_stmt (edge_in->src);
  gcc_assert (lookup_stmt_eh_lp (throw_stmt) == old_lp_nr);

  new_label = gimple_block_label (new_bb);

  /* Look for an existing region that might be using NEW_BB already.  */
  new_lp_nr = EH_LANDING_PAD_NR (new_label);
  if (new_lp_nr)
    {
      new_lp = get_eh_landing_pad_from_number (new_lp_nr);
      gcc_assert (new_lp);

      /* Unless CHANGE_REGION is true, the new and old landing pad
	 had better be associated with the same EH region.  */
      gcc_assert (change_region || new_lp->region == old_lp->region);
    }
  else
    {
      new_lp = NULL;
      gcc_assert (!change_region);
    }

  /* Notice when we redirect the last EH edge away from OLD_BB.
     After the loop, E is non-NULL iff some *other* EH edge still
     enters OLD_BB.  */
  FOR_EACH_EDGE (e, ei, old_bb->preds)
    if (e != edge_in && (e->flags & EDGE_EH))
      break;

  if (new_lp)
    {
      /* NEW_LP already exists.  If there are still edges into OLD_LP,
	 there's nothing to do with the EH tree.  If there are no more
	 edges into OLD_LP, then we want to remove OLD_LP as it is unused.
	 If CHANGE_REGION is true, then our caller is expecting to remove
	 the landing pad.  */
      if (e == NULL && !change_region)
	remove_eh_landing_pad (old_lp);
    }
  else
    {
      /* No correct landing pad exists.  If there are no more edges
	 into OLD_LP, then we can simply re-use the existing landing pad.
	 Otherwise, we have to create a new landing pad.  */
      if (e == NULL)
	{
	  /* Re-use OLD_LP: detach it from its old label first.  */
	  EH_LANDING_PAD_NR (old_lp->post_landing_pad) = 0;
	  new_lp = old_lp;
	}
      else
	new_lp = gen_eh_landing_pad (old_lp->region);
      new_lp->post_landing_pad = new_label;
      EH_LANDING_PAD_NR (new_label) = new_lp->index;
    }

  /* Maybe move the throwing statement to the new region.  */
  if (old_lp != new_lp)
    {
      remove_stmt_from_eh_lp (throw_stmt);
      add_stmt_to_eh_lp (throw_stmt, new_lp->index);
    }
}
2375 | ||
/* Redirect EH edge EDGE_IN to NEW_BB: first update the EH region tree
   (without changing the destination region, see redirect_eh_edge_1),
   then perform the actual CFG/SSA edge redirection.  Returns the
   redirected edge.  */

edge
redirect_eh_edge (edge edge_in, basic_block new_bb)
{
  redirect_eh_edge_1 (edge_in, new_bb, false);
  return ssa_redirect_edge (edge_in, new_bb);
}
cc7220fd | 2384 | |
1d65f45c RH |
/* This is a subroutine of gimple_redirect_edge_and_branch.  Update the
   labels for redirecting a non-fallthru EH_DISPATCH edge E to NEW_BB.
   The actual edge update will happen in the caller.  */

void
redirect_eh_dispatch_edge (geh_dispatch *stmt, edge e, basic_block new_bb)
{
  tree new_lab = gimple_block_label (new_bb);
  bool any_changed = false;
  basic_block old_bb;
  eh_region r;
  eh_catch c;

  r = get_eh_region_from_number (gimple_eh_dispatch_region (stmt));
  switch (r->type)
    {
    case ERT_TRY:
      /* Several catch handlers may share a destination block; retarget
	 every one whose label currently points at E's destination.  */
      for (c = r->u.eh_try.first_catch; c ; c = c->next_catch)
	{
	  old_bb = label_to_block (c->label);
	  if (old_bb == e->dest)
	    {
	      c->label = new_lab;
	      any_changed = true;
	    }
	}
      break;

    case ERT_ALLOWED_EXCEPTIONS:
      /* An allowed-exceptions region has exactly one label.  */
      old_bb = label_to_block (r->u.allowed.label);
      gcc_assert (old_bb == e->dest);
      r->u.allowed.label = new_lab;
      any_changed = true;
      break;

    default:
      gcc_unreachable ();
    }

  /* The edge being redirected must have matched at least one label.  */
  gcc_assert (any_changed);
}
6de9cd9a | 2426 | \f |
726a989a RB |
/* Helper function for operation_could_trap_p and stmt_could_throw_p.

   OP is the tree code of the operation.  FP_OPERATION is true when OP is
   applied to floating-point operands, HONOR_TRAPV when it is applied to
   integers with -ftrapv semantics.  HONOR_NANS/HONOR_SNANS indicate
   whether (signaling) NaNs must be honored.  DIVISOR is the second
   operand for division/modulus codes.

   Sets *HANDLED to true when OP is a code this function knows about;
   when *HANDLED is false, the return value is meaningless and the
   caller must decide by other means.  */

bool
operation_could_trap_helper_p (enum tree_code op,
			       bool fp_operation,
			       bool honor_trapv,
			       bool honor_nans,
			       bool honor_snans,
			       tree divisor,
			       bool *handled)
{
  *handled = true;
  switch (op)
    {
    case TRUNC_DIV_EXPR:
    case CEIL_DIV_EXPR:
    case FLOOR_DIV_EXPR:
    case ROUND_DIV_EXPR:
    case EXACT_DIV_EXPR:
    case CEIL_MOD_EXPR:
    case FLOOR_MOD_EXPR:
    case ROUND_MOD_EXPR:
    case TRUNC_MOD_EXPR:
    case RDIV_EXPR:
      if (honor_snans || honor_trapv)
	return true;
      if (fp_operation)
	return flag_trapping_math;
      /* A non-constant or literal-zero divisor may trap.  */
      if (!TREE_CONSTANT (divisor) || integer_zerop (divisor))
	return true;
      return false;

    case LT_EXPR:
    case LE_EXPR:
    case GT_EXPR:
    case GE_EXPR:
    case LTGT_EXPR:
      /* Some floating point comparisons may trap.  */
      return honor_nans;

    case EQ_EXPR:
    case NE_EXPR:
    case UNORDERED_EXPR:
    case ORDERED_EXPR:
    case UNLT_EXPR:
    case UNLE_EXPR:
    case UNGT_EXPR:
    case UNGE_EXPR:
    case UNEQ_EXPR:
      /* Equality and unordered comparisons trap only on signaling NaNs.  */
      return honor_snans;

    case NEGATE_EXPR:
    case ABS_EXPR:
    case CONJ_EXPR:
      /* These operations don't trap with floating point.  */
      if (honor_trapv)
	return true;
      return false;

    case PLUS_EXPR:
    case MINUS_EXPR:
    case MULT_EXPR:
      /* Any floating arithmetic may trap.  */
      if (fp_operation && flag_trapping_math)
	return true;
      if (honor_trapv)
	return true;
      return false;

    case COMPLEX_EXPR:
    case CONSTRUCTOR:
      /* Constructing an object cannot trap.  */
      return false;

    default:
      /* Any floating arithmetic may trap.  */
      if (fp_operation && flag_trapping_math)
	return true;

      *handled = false;
      return false;
    }
}
2510 | ||
2511 | /* Return true if operation OP may trap. FP_OPERATION is true if OP is applied | |
2512 | on floating-point values. HONOR_TRAPV is true if OP is applied on integer | |
2513 | type operands that may trap. If OP is a division operator, DIVISOR contains | |
2514 | the value of the divisor. */ | |
2515 | ||
2516 | bool | |
2517 | operation_could_trap_p (enum tree_code op, bool fp_operation, bool honor_trapv, | |
2518 | tree divisor) | |
2519 | { | |
2520 | bool honor_nans = (fp_operation && flag_trapping_math | |
2521 | && !flag_finite_math_only); | |
2522 | bool honor_snans = fp_operation && flag_signaling_nans != 0; | |
2523 | bool handled; | |
2524 | ||
2525 | if (TREE_CODE_CLASS (op) != tcc_comparison | |
2526 | && TREE_CODE_CLASS (op) != tcc_unary | |
2527 | && TREE_CODE_CLASS (op) != tcc_binary) | |
2528 | return false; | |
2529 | ||
2530 | return operation_could_trap_helper_p (op, fp_operation, honor_trapv, | |
2531 | honor_nans, honor_snans, divisor, | |
2532 | &handled); | |
2533 | } | |
2534 | ||
862d0b35 DN |
2535 | |
2536 | /* Returns true if it is possible to prove that the index of | |
2537 | an array access REF (an ARRAY_REF expression) falls into the | |
2538 | array bounds. */ | |
2539 | ||
2540 | static bool | |
2541 | in_array_bounds_p (tree ref) | |
2542 | { | |
2543 | tree idx = TREE_OPERAND (ref, 1); | |
2544 | tree min, max; | |
2545 | ||
2546 | if (TREE_CODE (idx) != INTEGER_CST) | |
2547 | return false; | |
2548 | ||
2549 | min = array_ref_low_bound (ref); | |
2550 | max = array_ref_up_bound (ref); | |
2551 | if (!min | |
2552 | || !max | |
2553 | || TREE_CODE (min) != INTEGER_CST | |
2554 | || TREE_CODE (max) != INTEGER_CST) | |
2555 | return false; | |
2556 | ||
2557 | if (tree_int_cst_lt (idx, min) | |
2558 | || tree_int_cst_lt (max, idx)) | |
2559 | return false; | |
2560 | ||
2561 | return true; | |
2562 | } | |
2563 | ||
2564 | /* Returns true if it is possible to prove that the range of | |
2565 | an array access REF (an ARRAY_RANGE_REF expression) falls | |
2566 | into the array bounds. */ | |
2567 | ||
2568 | static bool | |
2569 | range_in_array_bounds_p (tree ref) | |
2570 | { | |
2571 | tree domain_type = TYPE_DOMAIN (TREE_TYPE (ref)); | |
2572 | tree range_min, range_max, min, max; | |
2573 | ||
2574 | range_min = TYPE_MIN_VALUE (domain_type); | |
2575 | range_max = TYPE_MAX_VALUE (domain_type); | |
2576 | if (!range_min | |
2577 | || !range_max | |
2578 | || TREE_CODE (range_min) != INTEGER_CST | |
2579 | || TREE_CODE (range_max) != INTEGER_CST) | |
2580 | return false; | |
2581 | ||
2582 | min = array_ref_low_bound (ref); | |
2583 | max = array_ref_up_bound (ref); | |
2584 | if (!min | |
2585 | || !max | |
2586 | || TREE_CODE (min) != INTEGER_CST | |
2587 | || TREE_CODE (max) != INTEGER_CST) | |
2588 | return false; | |
2589 | ||
2590 | if (tree_int_cst_lt (range_min, min) | |
2591 | || tree_int_cst_lt (max, range_max)) | |
2592 | return false; | |
2593 | ||
2594 | return true; | |
2595 | } | |
2596 | ||
/* Return true if EXPR can trap, as in dereferencing an invalid pointer
   location or floating point arithmetic.  C.f. the rtl version, may_trap_p.
   This routine expects only GIMPLE lhs or rhs input.  */

bool
tree_could_trap_p (tree expr)
{
  enum tree_code code;
  bool fp_operation = false;
  bool honor_trapv = false;
  tree t, base, div = NULL_TREE;

  if (!expr)
    return false;

  code = TREE_CODE (expr);
  t = TREE_TYPE (expr);

  if (t)
    {
      /* For comparisons the operand type, not the (boolean) result type,
	 decides whether this is a floating-point operation.  */
      if (COMPARISON_CLASS_P (expr))
	fp_operation = FLOAT_TYPE_P (TREE_TYPE (TREE_OPERAND (expr, 0)));
      else
	fp_operation = FLOAT_TYPE_P (t);
      honor_trapv = INTEGRAL_TYPE_P (t) && TYPE_OVERFLOW_TRAPS (t);
    }

  /* First check whether the operation itself (division, fp arith, ...)
     may trap; only then look at memory-reference trapping below.  */
  if (TREE_CODE_CLASS (code) == tcc_binary)
    div = TREE_OPERAND (expr, 1);
  if (operation_could_trap_p (code, fp_operation, honor_trapv, div))
    return true;

 restart:
  switch (code)
    {
    case COMPONENT_REF:
    case REALPART_EXPR:
    case IMAGPART_EXPR:
    case BIT_FIELD_REF:
    case VIEW_CONVERT_EXPR:
    case WITH_SIZE_EXPR:
      /* These wrappers cannot trap by themselves; look through to the
	 underlying reference.  */
      expr = TREE_OPERAND (expr, 0);
      code = TREE_CODE (expr);
      goto restart;

    case ARRAY_RANGE_REF:
      base = TREE_OPERAND (expr, 0);
      if (tree_could_trap_p (base))
	return true;
      if (TREE_THIS_NOTRAP (expr))
	return false;
      return !range_in_array_bounds_p (expr);

    case ARRAY_REF:
      base = TREE_OPERAND (expr, 0);
      if (tree_could_trap_p (base))
	return true;
      if (TREE_THIS_NOTRAP (expr))
	return false;
      return !in_array_bounds_p (expr);

    case TARGET_MEM_REF:
    case MEM_REF:
      if (TREE_CODE (TREE_OPERAND (expr, 0)) == ADDR_EXPR
	  && tree_could_trap_p (TREE_OPERAND (TREE_OPERAND (expr, 0), 0)))
	return true;
      if (TREE_THIS_NOTRAP (expr))
	return false;
      /* We cannot prove that the access is in-bounds when we have
	 variable-index TARGET_MEM_REFs.  */
      if (code == TARGET_MEM_REF
	  && (TMR_INDEX (expr) || TMR_INDEX2 (expr)))
	return true;
      if (TREE_CODE (TREE_OPERAND (expr, 0)) == ADDR_EXPR)
	{
	  tree base = TREE_OPERAND (TREE_OPERAND (expr, 0), 0);
	  offset_int off = mem_ref_offset (expr);
	  /* A negative offset is before the object and so may trap.  */
	  if (wi::neg_p (off, SIGNED))
	    return true;
	  if (TREE_CODE (base) == STRING_CST)
	    return wi::leu_p (TREE_STRING_LENGTH (base), off);
	  else if (DECL_SIZE_UNIT (base) == NULL_TREE
		   || TREE_CODE (DECL_SIZE_UNIT (base)) != INTEGER_CST
		   || wi::leu_p (wi::to_offset (DECL_SIZE_UNIT (base)), off))
	    return true;
	  /* Now we are sure the first byte of the access is inside
	     the object.  */
	  return false;
	}
      return true;

    case INDIRECT_REF:
      return !TREE_THIS_NOTRAP (expr);

    case ASM_EXPR:
      return TREE_THIS_VOLATILE (expr);

    case CALL_EXPR:
      t = get_callee_fndecl (expr);
      /* Assume that calls to weak functions may trap.  */
      if (!t || !DECL_P (t))
	return true;
      if (DECL_WEAK (t))
	return tree_could_trap_p (t);
      return false;

    case FUNCTION_DECL:
      /* Assume that accesses to weak functions may trap, unless we know
	 they are certainly defined in current TU or in some other
	 LTO partition.  */
      if (DECL_WEAK (expr) && !DECL_COMDAT (expr) && DECL_EXTERNAL (expr))
	{
	  cgraph_node *node = cgraph_node::get (expr);
	  if (node)
	    node = node->function_symbol ();
	  return !(node && node->in_other_partition);
	}
      return false;

    case VAR_DECL:
      /* Assume that accesses to weak vars may trap, unless we know
	 they are certainly defined in current TU or in some other
	 LTO partition.  */
      if (DECL_WEAK (expr) && !DECL_COMDAT (expr) && DECL_EXTERNAL (expr))
	{
	  varpool_node *node = varpool_node::get (expr);
	  if (node)
	    node = node->ultimate_alias_target ();
	  return !(node && node->in_other_partition);
	}
      return false;

    default:
      return false;
    }
}
1eaba2f2 | 2733 | |
1eaba2f2 | 2734 | |
726a989a RB |
/* Helper for stmt_could_throw_p.  Return true if STMT (assumed to be a
   an assignment or a conditional) may throw.  */

static bool
stmt_could_throw_1_p (gimple *stmt)
{
  enum tree_code code = gimple_expr_code (stmt);
  bool honor_nans = false;
  bool honor_snans = false;
  bool fp_operation = false;
  bool honor_trapv = false;
  tree t;
  size_t i;
  bool handled, ret;

  if (TREE_CODE_CLASS (code) == tcc_comparison
      || TREE_CODE_CLASS (code) == tcc_unary
      || TREE_CODE_CLASS (code) == tcc_binary)
    {
      /* For comparisons the relevant type is that of the operands,
	 not the boolean result; fetch it from the lhs operand.  */
      if (is_gimple_assign (stmt)
	  && TREE_CODE_CLASS (code) == tcc_comparison)
	t = TREE_TYPE (gimple_assign_rhs1 (stmt));
      else if (gimple_code (stmt) == GIMPLE_COND)
	t = TREE_TYPE (gimple_cond_lhs (stmt));
      else
	t = gimple_expr_type (stmt);
      fp_operation = FLOAT_TYPE_P (t);
      if (fp_operation)
	{
	  honor_nans = flag_trapping_math && !flag_finite_math_only;
	  honor_snans = flag_signaling_nans != 0;
	}
      else if (INTEGRAL_TYPE_P (t) && TYPE_OVERFLOW_TRAPS (t))
	honor_trapv = true;
    }

  /* Check if the main expression may trap.  For division codes the
     helper inspects the divisor, i.e. the second rhs operand.  */
  t = is_gimple_assign (stmt) ? gimple_assign_rhs2 (stmt) : NULL;
  ret = operation_could_trap_helper_p (code, fp_operation, honor_trapv,
				       honor_nans, honor_snans, t,
				       &handled);
  if (handled)
    return ret;

  /* If the expression does not trap, see if any of the individual operands may
     trap.  */
  for (i = 0; i < gimple_num_ops (stmt); i++)
    if (tree_could_trap_p (gimple_op (stmt, i)))
      return true;

  return false;
}
2787 | ||
2788 | ||
/* Return true if statement STMT could throw an exception.  */

bool
stmt_could_throw_p (gimple *stmt)
{
  if (!flag_exceptions)
    return false;

  /* The only statements that can throw an exception are assignments,
     conditionals, calls, resx, and asms.  */
  switch (gimple_code (stmt))
    {
    case GIMPLE_RESX:
      return true;

    case GIMPLE_CALL:
      return !gimple_call_nothrow_p (as_a <gcall *> (stmt));

    case GIMPLE_ASSIGN:
    case GIMPLE_COND:
      /* Non-call statements can only throw with
	 -fnon-call-exceptions.  */
      if (!cfun->can_throw_non_call_exceptions)
	return false;
      return stmt_could_throw_1_p (stmt);

    case GIMPLE_ASM:
      if (!cfun->can_throw_non_call_exceptions)
	return false;
      /* Only volatile asms are assumed to be able to throw.  */
      return gimple_asm_volatile_p (as_a <gasm *> (stmt));

    default:
      return false;
    }
}
2822 | ||
726a989a RB |
2823 | |
2824 | /* Return true if expression T could throw an exception. */ | |
2825 | ||
6de9cd9a DN |
2826 | bool |
2827 | tree_could_throw_p (tree t) | |
2828 | { | |
2829 | if (!flag_exceptions) | |
2830 | return false; | |
726a989a | 2831 | if (TREE_CODE (t) == MODIFY_EXPR) |
6de9cd9a | 2832 | { |
8f4f502f | 2833 | if (cfun->can_throw_non_call_exceptions |
1d65f45c RH |
2834 | && tree_could_trap_p (TREE_OPERAND (t, 0))) |
2835 | return true; | |
726a989a | 2836 | t = TREE_OPERAND (t, 1); |
6de9cd9a DN |
2837 | } |
2838 | ||
d25cee4d RH |
2839 | if (TREE_CODE (t) == WITH_SIZE_EXPR) |
2840 | t = TREE_OPERAND (t, 0); | |
6de9cd9a DN |
2841 | if (TREE_CODE (t) == CALL_EXPR) |
2842 | return (call_expr_flags (t) & ECF_NOTHROW) == 0; | |
8f4f502f | 2843 | if (cfun->can_throw_non_call_exceptions) |
67c605a5 | 2844 | return tree_could_trap_p (t); |
6de9cd9a DN |
2845 | return false; |
2846 | } | |
2847 | ||
33977f81 JH |
2848 | /* Return true if STMT can throw an exception that is not caught within |
2849 | the current function (CFUN). */ | |
2850 | ||
2851 | bool | |
355fe088 | 2852 | stmt_can_throw_external (gimple *stmt) |
33977f81 | 2853 | { |
1d65f45c | 2854 | int lp_nr; |
33977f81 JH |
2855 | |
2856 | if (!stmt_could_throw_p (stmt)) | |
2857 | return false; | |
2858 | ||
1d65f45c RH |
2859 | lp_nr = lookup_stmt_eh_lp (stmt); |
2860 | return lp_nr == 0; | |
33977f81 | 2861 | } |
726a989a RB |
2862 | |
2863 | /* Return true if STMT can throw an exception that is caught within | |
2864 | the current function (CFUN). */ | |
2865 | ||
6de9cd9a | 2866 | bool |
355fe088 | 2867 | stmt_can_throw_internal (gimple *stmt) |
6de9cd9a | 2868 | { |
1d65f45c | 2869 | int lp_nr; |
726a989a | 2870 | |
1d65f45c | 2871 | if (!stmt_could_throw_p (stmt)) |
6de9cd9a | 2872 | return false; |
726a989a | 2873 | |
1d65f45c RH |
2874 | lp_nr = lookup_stmt_eh_lp (stmt); |
2875 | return lp_nr > 0; | |
2876 | } | |
2877 | ||
2878 | /* Given a statement STMT in IFUN, if STMT can no longer throw, then | |
2879 | remove any entry it might have from the EH table. Return true if | |
2880 | any change was made. */ | |
2881 | ||
2882 | bool | |
355fe088 | 2883 | maybe_clean_eh_stmt_fn (struct function *ifun, gimple *stmt) |
1d65f45c RH |
2884 | { |
2885 | if (stmt_could_throw_p (stmt)) | |
2886 | return false; | |
2887 | return remove_stmt_from_eh_lp_fn (ifun, stmt); | |
6de9cd9a DN |
2888 | } |
2889 | ||
1d65f45c RH |
/* Likewise, but always use the current function.  */

bool
maybe_clean_eh_stmt (gimple *stmt)
{
  return maybe_clean_eh_stmt_fn (cfun, stmt);
}
6de9cd9a | 2897 | |
af47810a RH |
2898 | /* Given a statement OLD_STMT and a new statement NEW_STMT that has replaced |
2899 | OLD_STMT in the function, remove OLD_STMT from the EH table and put NEW_STMT | |
2900 | in the table if it should be in there. Return TRUE if a replacement was | |
2901 | done that my require an EH edge purge. */ | |
2902 | ||
1d65f45c | 2903 | bool |
355fe088 | 2904 | maybe_clean_or_replace_eh_stmt (gimple *old_stmt, gimple *new_stmt) |
1eaba2f2 | 2905 | { |
1d65f45c | 2906 | int lp_nr = lookup_stmt_eh_lp (old_stmt); |
af47810a | 2907 | |
1d65f45c | 2908 | if (lp_nr != 0) |
af47810a | 2909 | { |
726a989a | 2910 | bool new_stmt_could_throw = stmt_could_throw_p (new_stmt); |
af47810a RH |
2911 | |
2912 | if (new_stmt == old_stmt && new_stmt_could_throw) | |
2913 | return false; | |
2914 | ||
1d65f45c | 2915 | remove_stmt_from_eh_lp (old_stmt); |
af47810a RH |
2916 | if (new_stmt_could_throw) |
2917 | { | |
1d65f45c | 2918 | add_stmt_to_eh_lp (new_stmt, lp_nr); |
af47810a RH |
2919 | return false; |
2920 | } | |
2921 | else | |
2922 | return true; | |
2923 | } | |
2924 | ||
1eaba2f2 RH |
2925 | return false; |
2926 | } | |
1d65f45c | 2927 | |
/* Given a statement OLD_STMT in OLD_FUN and a duplicate statement NEW_STMT
   in NEW_FUN, copy the EH table data from OLD_STMT to NEW_STMT.  The MAP
   operand is the return value of duplicate_eh_regions.  DEFAULT_LP_NR is
   used when OLD_STMT has no EH entry of its own.  Returns true if an
   entry was added for NEW_STMT.  */

bool
maybe_duplicate_eh_stmt_fn (struct function *new_fun, gimple *new_stmt,
			    struct function *old_fun, gimple *old_stmt,
			    hash_map<void *, void *> *map,
			    int default_lp_nr)
{
  int old_lp_nr, new_lp_nr;

  if (!stmt_could_throw_p (new_stmt))
    return false;

  old_lp_nr = lookup_stmt_eh_lp_fn (old_fun, old_stmt);
  if (old_lp_nr == 0)
    {
      if (default_lp_nr == 0)
	return false;
      new_lp_nr = default_lp_nr;
    }
  else if (old_lp_nr > 0)
    {
      /* Positive numbers are landing-pad indices; translate through MAP.  */
      eh_landing_pad old_lp, new_lp;

      old_lp = (*old_fun->eh->lp_array)[old_lp_nr];
      new_lp = static_cast<eh_landing_pad> (*map->get (old_lp));
      new_lp_nr = new_lp->index;
    }
  else
    {
      /* Negative numbers denote MUST_NOT_THROW regions; translate the
	 region through MAP and negate its index again.  */
      eh_region old_r, new_r;

      old_r = (*old_fun->eh->region_array)[-old_lp_nr];
      new_r = static_cast<eh_region> (*map->get (old_r));
      new_lp_nr = -new_r->index;
    }

  add_stmt_to_eh_lp_fn (new_fun, new_stmt, new_lp_nr);
  return true;
}
2970 | ||
2971 | /* Similar, but both OLD_STMT and NEW_STMT are within the current function, | |
2972 | and thus no remapping is required. */ | |
2973 | ||
2974 | bool | |
355fe088 | 2975 | maybe_duplicate_eh_stmt (gimple *new_stmt, gimple *old_stmt) |
1d65f45c RH |
2976 | { |
2977 | int lp_nr; | |
2978 | ||
2979 | if (!stmt_could_throw_p (new_stmt)) | |
2980 | return false; | |
2981 | ||
2982 | lp_nr = lookup_stmt_eh_lp (old_stmt); | |
2983 | if (lp_nr == 0) | |
2984 | return false; | |
2985 | ||
2986 | add_stmt_to_eh_lp (new_stmt, lp_nr); | |
2987 | return true; | |
2988 | } | |
a24549d4 | 2989 | \f |
726a989a RB |
/* Returns TRUE if oneh and twoh are exception handlers (gimple_try_cleanup of
   GIMPLE_TRY) that are similar enough to be considered the same.  Currently
   this only handles handlers consisting of a single call, as that's the
   important case for C++: a destructor call for a particular object showing
   up in multiple handlers.  */

static bool
same_handler_p (gimple_seq oneh, gimple_seq twoh)
{
  gimple_stmt_iterator gsi;
  gimple *ones, *twos;
  unsigned int ai;

  /* Each handler must consist of exactly one statement.  */
  gsi = gsi_start (oneh);
  if (!gsi_one_before_end_p (gsi))
    return false;
  ones = gsi_stmt (gsi);

  gsi = gsi_start (twoh);
  if (!gsi_one_before_end_p (gsi))
    return false;
  twos = gsi_stmt (gsi);

  /* Both must be calls with no lhs and no static chain, calling the
     same target with the same number of arguments.  */
  if (!is_gimple_call (ones)
      || !is_gimple_call (twos)
      || gimple_call_lhs (ones)
      || gimple_call_lhs (twos)
      || gimple_call_chain (ones)
      || gimple_call_chain (twos)
      || !gimple_call_same_target_p (ones, twos)
      || gimple_call_num_args (ones) != gimple_call_num_args (twos))
    return false;

  /* All arguments must be pairwise equal.  */
  for (ai = 0; ai < gimple_call_num_args (ones); ++ai)
    if (!operand_equal_p (gimple_call_arg (ones, ai),
			  gimple_call_arg (twos, ai), 0))
      return false;

  return true;
}
3030 | ||
/* Optimize
    try { A() } finally { try { ~B() } catch { ~A() } }
    try { ... } finally { ~A() }
   into
    try { A() } catch { ~B() }
    try { ~B() ... } finally { ~A() }

   This occurs frequently in C++, where A is a local variable and B is a
   temporary used in the initializer for A.  */

static void
optimize_double_finally (gtry *one, gtry *two)
{
  gimple *oneh;
  gimple_stmt_iterator gsi;
  gimple_seq cleanup;

  /* ONE's cleanup must be a single statement ...  */
  cleanup = gimple_try_cleanup (one);
  gsi = gsi_start (cleanup);
  if (!gsi_one_before_end_p (gsi))
    return;

  /* ... and that statement must itself be a try/catch.  */
  oneh = gsi_stmt (gsi);
  if (gimple_code (oneh) != GIMPLE_TRY
      || gimple_try_kind (oneh) != GIMPLE_TRY_CATCH)
    return;

  if (same_handler_p (gimple_try_cleanup (oneh), gimple_try_cleanup (two)))
    {
      gimple_seq seq = gimple_try_eval (oneh);

      /* Turn ONE into try/catch around the inner eval, and prepend a
	 copy of that eval (with fresh locals) to TWO's body.  */
      gimple_try_set_cleanup (one, seq);
      gimple_try_set_kind (one, GIMPLE_TRY_CATCH);
      seq = copy_gimple_seq_and_replace_locals (seq);
      gimple_seq_add_seq (&seq, gimple_try_eval (two));
      gimple_try_set_eval (two, seq);
    }
}
3069 | ||
/* Perform EH refactoring optimizations that are simpler to do when code
   flow has been lowered but EH structures haven't.  Walks SEQ keeping a
   sliding window of two consecutive statements (ONE, TWO) so that
   adjacent try/finally pairs can be fed to optimize_double_finally,
   recursing into every nested sequence.  */

static void
refactor_eh_r (gimple_seq seq)
{
  gimple_stmt_iterator gsi;
  gimple *one, *two;

  one = NULL;
  two = NULL;
  gsi = gsi_start (seq);
  while (1)
    {
      /* Slide the window: ONE is the previous statement, TWO the
	 current one (NULL at end of sequence).  */
      one = two;
      if (gsi_end_p (gsi))
	two = NULL;
      else
	two = gsi_stmt (gsi);
      if (one && two)
	if (gtry *try_one = dyn_cast <gtry *> (one))
	  if (gtry *try_two = dyn_cast <gtry *> (two))
	    if (gimple_try_kind (try_one) == GIMPLE_TRY_FINALLY
		&& gimple_try_kind (try_two) == GIMPLE_TRY_FINALLY)
	      optimize_double_finally (try_one, try_two);
      /* Recurse into any sequences nested inside ONE.  */
      if (one)
	switch (gimple_code (one))
	  {
	  case GIMPLE_TRY:
	    refactor_eh_r (gimple_try_eval (one));
	    refactor_eh_r (gimple_try_cleanup (one));
	    break;
	  case GIMPLE_CATCH:
	    refactor_eh_r (gimple_catch_handler (as_a <gcatch *> (one)));
	    break;
	  case GIMPLE_EH_FILTER:
	    refactor_eh_r (gimple_eh_filter_failure (one));
	    break;
	  case GIMPLE_EH_ELSE:
	    {
	      geh_else *eh_else_stmt = as_a <geh_else *> (one);
	      refactor_eh_r (gimple_eh_else_n_body (eh_else_stmt));
	      refactor_eh_r (gimple_eh_else_e_body (eh_else_stmt));
	    }
	    break;
	  default:
	    break;
	  }
      if (two)
	gsi_next (&gsi);
      else
	break;
    }
}
3124 | ||
17795822 TS |
/* Pass machinery for the "ehopt" pass: runs refactor_eh_r over the
   whole function body whenever exceptions are enabled.  */

namespace {

const pass_data pass_data_refactor_eh =
{
  GIMPLE_PASS, /* type */
  "ehopt", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_TREE_EH, /* tv_id */
  PROP_gimple_lcf, /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};

class pass_refactor_eh : public gimple_opt_pass
{
public:
  pass_refactor_eh (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_refactor_eh, ctxt)
  {}

  /* opt_pass methods: */
  virtual bool gate (function *) { return flag_exceptions != 0; }
  virtual unsigned int execute (function *)
  {
    refactor_eh_r (gimple_body (current_function_decl));
    return 0;
  }

}; // class pass_refactor_eh

} // anon namespace

/* Factory used by the pass manager to instantiate the pass.  */

gimple_opt_pass *
make_pass_refactor_eh (gcc::context *ctxt)
{
  return new pass_refactor_eh (ctxt);
}
1d65f45c RH |
3164 | \f |
3165 | /* At the end of gimple optimization, we can lower RESX. */ | |
a8da523f | 3166 | |
/* Lower the GIMPLE_RESX statement STMT, which must be the last statement
   of BB.  MNT_MAP caches, per MUST_NOT_THROW region, the label of the
   block that calls the region's failure routine, so several resumes into
   the same region share a single call block.

   Returns true when edges were redirected (the caller uses this to know
   that dominance information has been invalidated).  */

static bool
lower_resx (basic_block bb, gresx *stmt,
	    hash_map<eh_region, tree> *mnt_map)
{
  int lp_nr;
  eh_region src_r, dst_r;
  gimple_stmt_iterator gsi;
  gimple *x;
  tree fn, src_nr;
  bool ret = false;

  /* A nonzero landing-pad number tells us which region, if any, the
     exception resumes into; zero means it escapes the function.  */
  lp_nr = lookup_stmt_eh_lp (stmt);
  if (lp_nr != 0)
    dst_r = get_eh_region_from_lp_number (lp_nr);
  else
    dst_r = NULL;

  src_r = get_eh_region_from_number (gimple_resx_region (stmt));
  gsi = gsi_last_bb (bb);

  if (src_r == NULL)
    {
      /* We can wind up with no source region when pass_cleanup_eh shows
	 that there are no entries into an eh region and deletes it, but
	 then the block that contains the resx isn't removed.  This can
	 happen without optimization when the switch statement created by
	 lower_try_finally_switch isn't simplified to remove the eh case.

	 Resolve this by expanding the resx node to an abort.  */

      fn = builtin_decl_implicit (BUILT_IN_TRAP);
      x = gimple_build_call (fn, 0);
      gsi_insert_before (&gsi, x, GSI_SAME_STMT);

      /* The trap does not return; drop all outgoing edges.  */
      while (EDGE_COUNT (bb->succs) > 0)
	remove_edge (EDGE_SUCC (bb, 0));
    }
  else if (dst_r)
    {
      /* When we have a destination region, we resolve this by copying
	 the exception pointer and filter values into place, and changing
	 the edge to immediately after the landing pad.  */
      edge e;

      if (lp_nr < 0)
	{
	  basic_block new_bb;
	  tree lab;

	  /* We are resuming into a MUST_NOT_CALL region.  Expand a call to
	     the failure decl into a new block, if needed.  */
	  gcc_assert (dst_r->type == ERT_MUST_NOT_THROW);

	  /* Reuse a previously created failure block for this region,
	     if one exists.  */
	  tree *slot = mnt_map->get (dst_r);
	  if (slot == NULL)
	    {
	      gimple_stmt_iterator gsi2;

	      new_bb = create_empty_bb (bb);
	      add_bb_to_loop (new_bb, bb->loop_father);
	      lab = gimple_block_label (new_bb);
	      gsi2 = gsi_start_bb (new_bb);

	      /* The failure routine (e.g. std::terminate for C++) is
	         recorded in the region itself.  */
	      fn = dst_r->u.must_not_throw.failure_decl;
	      x = gimple_build_call (fn, 0);
	      gimple_set_location (x, dst_r->u.must_not_throw.failure_loc);
	      gsi_insert_after (&gsi2, x, GSI_CONTINUE_LINKING);

	      mnt_map->put (dst_r, lab);
	    }
	  else
	    {
	      lab = *slot;
	      new_bb = label_to_block (lab);
	    }

	  gcc_assert (EDGE_COUNT (bb->succs) == 0);
	  e = make_edge (bb, new_bb, EDGE_FALLTHRU);
	  e->count = bb->count;
	  e->probability = REG_BR_PROB_BASE;
	}
      else
	{
	  edge_iterator ei;
	  tree dst_nr = build_int_cst (integer_type_node, dst_r->index);

	  /* Copy the exception values from the source region's slots to
	     the destination region's slots.  */
	  fn = builtin_decl_implicit (BUILT_IN_EH_COPY_VALUES);
	  src_nr = build_int_cst (integer_type_node, src_r->index);
	  x = gimple_build_call (fn, 2, dst_nr, src_nr);
	  gsi_insert_before (&gsi, x, GSI_SAME_STMT);

	  /* Update the flags for the outgoing edge.  */
	  e = single_succ_edge (bb);
	  gcc_assert (e->flags & EDGE_EH);
	  e->flags = (e->flags & ~EDGE_EH) | EDGE_FALLTHRU;

	  /* If there are no more EH users of the landing pad, delete it.  */
	  FOR_EACH_EDGE (e, ei, e->dest->preds)
	    if (e->flags & EDGE_EH)
	      break;
	  if (e == NULL)
	    {
	      eh_landing_pad lp = get_eh_landing_pad_from_number (lp_nr);
	      remove_eh_landing_pad (lp);
	    }
	}

      ret = true;
    }
  else
    {
      tree var;

      /* When we don't have a destination region, this exception escapes
	 up the call chain.  We resolve this by generating a call to the
	 _Unwind_Resume library function.  */

      /* The ARM EABI redefines _Unwind_Resume as __cxa_end_cleanup
	 with no arguments for C++ and Java.  Check for that.  */
      if (src_r->use_cxa_end_cleanup)
	{
	  fn = builtin_decl_implicit (BUILT_IN_CXA_END_CLEANUP);
	  x = gimple_build_call (fn, 0);
	  gsi_insert_before (&gsi, x, GSI_SAME_STMT);
	}
      else
	{
	  /* Fetch the exception pointer for the source region and pass
	     it to _Unwind_Resume.  */
	  fn = builtin_decl_implicit (BUILT_IN_EH_POINTER);
	  src_nr = build_int_cst (integer_type_node, src_r->index);
	  x = gimple_build_call (fn, 1, src_nr);
	  var = create_tmp_var (ptr_type_node);
	  var = make_ssa_name (var, x);
	  gimple_call_set_lhs (x, var);
	  gsi_insert_before (&gsi, x, GSI_SAME_STMT);

	  fn = builtin_decl_implicit (BUILT_IN_UNWIND_RESUME);
	  x = gimple_build_call (fn, 1, var);
	  gsi_insert_before (&gsi, x, GSI_SAME_STMT);
	}

      gcc_assert (EDGE_COUNT (bb->succs) == 0);
    }

  /* Finally, drop the now-lowered GIMPLE_RESX itself.  */
  gsi_remove (&gsi, true);

  return ret;
}
3314 | ||
17795822 TS |
namespace {

/* Pass descriptor for the GIMPLE_RESX lowering pass ("resx").
   Requires lowered control flow (PROP_gimple_lcf); provides and
   destroys nothing.  */

const pass_data pass_data_lower_resx =
{
  GIMPLE_PASS, /* type */
  "resx", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_TREE_EH, /* tv_id */
  PROP_gimple_lcf, /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};
3329 | ||
17795822 | 3330 | class pass_lower_resx : public gimple_opt_pass |
27a4cd48 DM |
3331 | { |
3332 | public: | |
c3284718 RS |
3333 | pass_lower_resx (gcc::context *ctxt) |
3334 | : gimple_opt_pass (pass_data_lower_resx, ctxt) | |
27a4cd48 DM |
3335 | {} |
3336 | ||
3337 | /* opt_pass methods: */ | |
1a3d085c | 3338 | virtual bool gate (function *) { return flag_exceptions != 0; } |
be55bfe6 | 3339 | virtual unsigned int execute (function *); |
27a4cd48 DM |
3340 | |
3341 | }; // class pass_lower_resx | |
3342 | ||
be55bfe6 TS |
3343 | unsigned |
3344 | pass_lower_resx::execute (function *fun) | |
3345 | { | |
3346 | basic_block bb; | |
be55bfe6 TS |
3347 | bool dominance_invalidated = false; |
3348 | bool any_rewritten = false; | |
3349 | ||
b787e7a2 | 3350 | hash_map<eh_region, tree> mnt_map; |
be55bfe6 TS |
3351 | |
3352 | FOR_EACH_BB_FN (bb, fun) | |
3353 | { | |
355fe088 | 3354 | gimple *last = last_stmt (bb); |
be55bfe6 TS |
3355 | if (last && is_gimple_resx (last)) |
3356 | { | |
538dd0b7 DM |
3357 | dominance_invalidated |= |
3358 | lower_resx (bb, as_a <gresx *> (last), &mnt_map); | |
be55bfe6 TS |
3359 | any_rewritten = true; |
3360 | } | |
3361 | } | |
3362 | ||
be55bfe6 TS |
3363 | if (dominance_invalidated) |
3364 | { | |
3365 | free_dominance_info (CDI_DOMINATORS); | |
3366 | free_dominance_info (CDI_POST_DOMINATORS); | |
3367 | } | |
3368 | ||
3369 | return any_rewritten ? TODO_update_ssa_only_virtuals : 0; | |
3370 | } | |
3371 | ||
17795822 TS |
3372 | } // anon namespace |
3373 | ||
27a4cd48 DM |
3374 | gimple_opt_pass * |
3375 | make_pass_lower_resx (gcc::context *ctxt) | |
3376 | { | |
3377 | return new pass_lower_resx (ctxt); | |
3378 | } | |
3379 | ||
960f0c9d JJ |
/* Try to optimize var = {v} {CLOBBER} stmts followed just by
   external throw.

   If BB (which the caller guarantees ends in a resx) contains nothing
   but clobbers, debug stmts, labels and at most one
   __builtin_stack_restore, and is reached by an EH edge, then the
   clobbers are dead on the external-throw path and can be deleted.  */

static void
optimize_clobbers (basic_block bb)
{
  gimple_stmt_iterator gsi = gsi_last_bb (bb);
  bool any_clobbers = false;
  bool seen_stack_restore = false;
  edge_iterator ei;
  edge e;

  /* Only optimize anything if the bb contains at least one clobber,
     ends with resx (checked by caller), optionally contains some
     debug stmts or labels, or at most one __builtin_stack_restore
     call, and has an incoming EH edge.  */
  for (gsi_prev (&gsi); !gsi_end_p (gsi); gsi_prev (&gsi))
    {
      gimple *stmt = gsi_stmt (gsi);
      if (is_gimple_debug (stmt))
	continue;
      if (gimple_clobber_p (stmt))
	{
	  any_clobbers = true;
	  continue;
	}
      if (!seen_stack_restore
	  && gimple_call_builtin_p (stmt, BUILT_IN_STACK_RESTORE))
	{
	  seen_stack_restore = true;
	  continue;
	}
      if (gimple_code (stmt) == GIMPLE_LABEL)
	break;
      /* Any other statement disqualifies the whole block.  */
      return;
    }
  if (!any_clobbers)
    return;

  /* Require an incoming EH edge; otherwise leave the block alone.  */
  FOR_EACH_EDGE (e, ei, bb->preds)
    if (e->flags & EDGE_EH)
      break;
  if (e == NULL)
    return;

  /* Second backward walk: actually delete the clobbers.  Note that
     gsi_remove repositions the iterator, so the loop's gsi_prev still
     visits every remaining statement.  */
  gsi = gsi_last_bb (bb);
  for (gsi_prev (&gsi); !gsi_end_p (gsi); gsi_prev (&gsi))
    {
      gimple *stmt = gsi_stmt (gsi);
      if (!gimple_clobber_p (stmt))
	continue;
      unlink_stmt_vdef (stmt);
      gsi_remove (&gsi, true);
      release_defs (stmt);
    }
}
1d65f45c | 3434 | |
ea85edfe JJ |
/* Try to sink var = {v} {CLOBBER} stmts followed just by
   internal throw to successor BB.

   Returns a TODO_* mask (possibly 0) to merge into the caller's todo
   flags; nonzero means virtual operands must be re-renamed.  */

static int
sink_clobbers (basic_block bb)
{
  edge e;
  edge_iterator ei;
  gimple_stmt_iterator gsi, dgsi;
  basic_block succbb;
  bool any_clobbers = false;
  unsigned todo = 0;

  /* Only optimize if BB has a single EH successor and
     all predecessor edges are EH too.  */
  if (!single_succ_p (bb)
      || (single_succ_edge (bb)->flags & EDGE_EH) == 0)
    return 0;

  FOR_EACH_EDGE (e, ei, bb->preds)
    {
      if ((e->flags & EDGE_EH) == 0)
	return 0;
    }

  /* And BB contains only CLOBBER stmts before the final
     RESX.  */
  gsi = gsi_last_bb (bb);
  for (gsi_prev (&gsi); !gsi_end_p (gsi); gsi_prev (&gsi))
    {
      gimple *stmt = gsi_stmt (gsi);
      if (is_gimple_debug (stmt))
	continue;
      if (gimple_code (stmt) == GIMPLE_LABEL)
	break;
      if (!gimple_clobber_p (stmt))
	return 0;
      any_clobbers = true;
    }
  if (!any_clobbers)
    return 0;

  edge succe = single_succ_edge (bb);
  succbb = succe->dest;

  /* See if there is a virtual PHI node to take an updated virtual
     operand from.  */
  gphi *vphi = NULL;
  tree vuse = NULL_TREE;
  for (gphi_iterator gpi = gsi_start_phis (succbb);
       !gsi_end_p (gpi); gsi_next (&gpi))
    {
      tree res = gimple_phi_result (gpi.phi ());
      if (virtual_operand_p (res))
	{
	  vphi = gpi.phi ();
	  vuse = res;
	  break;
	}
    }

  /* Move each clobber (walking backward, so insertion before DGSI
     preserves original statement order) into the successor block.  */
  dgsi = gsi_after_labels (succbb);
  gsi = gsi_last_bb (bb);
  for (gsi_prev (&gsi); !gsi_end_p (gsi); gsi_prev (&gsi))
    {
      gimple *stmt = gsi_stmt (gsi);
      tree lhs;
      if (is_gimple_debug (stmt))
	continue;
      if (gimple_code (stmt) == GIMPLE_LABEL)
	break;
      lhs = gimple_assign_lhs (stmt);
      /* Unfortunately we don't have dominance info updated at this
	 point, so checking if
	 dominated_by_p (CDI_DOMINATORS, succbb,
			 gimple_bb (SSA_NAME_DEF_STMT (TREE_OPERAND (lhs, 0)))
	 would be too costly.  Thus, avoid sinking any clobbers that
	 refer to non-(D) SSA_NAMEs.  */
      if (TREE_CODE (lhs) == MEM_REF
	  && TREE_CODE (TREE_OPERAND (lhs, 0)) == SSA_NAME
	  && !SSA_NAME_IS_DEFAULT_DEF (TREE_OPERAND (lhs, 0)))
	{
	  /* Such a clobber cannot be sunk safely; drop it instead.  */
	  unlink_stmt_vdef (stmt);
	  gsi_remove (&gsi, true);
	  release_defs (stmt);
	  continue;
	}

      /* As we do not change stmt order when sinking across a
	 forwarder edge we can keep virtual operands in place.  */
      gsi_remove (&gsi, false);
      gsi_insert_before (&dgsi, stmt, GSI_NEW_STMT);

      /* But adjust virtual operands if we sunk across a PHI node.  */
      if (vuse)
	{
	  gimple *use_stmt;
	  imm_use_iterator iter;
	  use_operand_p use_p;
	  /* Redirect all uses of the PHI result to this stmt's VDEF.  */
	  FOR_EACH_IMM_USE_STMT (use_stmt, iter, vuse)
	    FOR_EACH_IMM_USE_ON_STMT (use_p, iter)
	      SET_USE (use_p, gimple_vdef (stmt));
	  if (SSA_NAME_OCCURS_IN_ABNORMAL_PHI (vuse))
	    {
	      SSA_NAME_OCCURS_IN_ABNORMAL_PHI (gimple_vdef (stmt)) = 1;
	      SSA_NAME_OCCURS_IN_ABNORMAL_PHI (vuse) = 0;
	    }
	  /* Adjust the incoming virtual operand.  */
	  SET_USE (PHI_ARG_DEF_PTR_FROM_EDGE (vphi, succe), gimple_vuse (stmt));
	  SET_USE (gimple_vuse_op (stmt), vuse);
	}
      /* If there isn't a single predecessor but no virtual PHI node
	 arrange for virtual operands to be renamed.  */
      else if (gimple_vuse_op (stmt) != NULL_USE_OPERAND_P
	       && !single_pred_p (succbb))
	{
	  /* In this case there will be no use of the VDEF of this stmt.
	     ??? Unless this is a secondary opportunity and we have not
	     removed unreachable blocks yet, so we cannot assert this.
	     Which also means we will end up renaming too many times.  */
	  SET_USE (gimple_vuse_op (stmt), gimple_vop (cfun));
	  mark_virtual_operands_for_renaming (cfun);
	  todo |= TODO_update_ssa_only_virtuals;
	}
    }

  return todo;
}
3563 | ||
9f698956 AB |
/* At the end of inlining, we can lower EH_DISPATCH.  Return true when
   we have found some duplicate labels and removed some edges.

   SRC is the block ending in STMT, the GIMPLE_EH_DISPATCH to lower.
   An ERT_TRY dispatch becomes a switch on __builtin_eh_filter (or a
   plain fallthrough when only a default case exists); an
   ERT_ALLOWED_EXCEPTIONS dispatch becomes a conditional comparing the
   filter value.  */

static bool
lower_eh_dispatch (basic_block src, geh_dispatch *stmt)
{
  gimple_stmt_iterator gsi;
  int region_nr;
  eh_region r;
  tree filter, fn;
  gimple *x;
  bool redirected = false;

  region_nr = gimple_eh_dispatch_region (stmt);
  r = get_eh_region_from_number (region_nr);

  gsi = gsi_last_bb (src);

  switch (r->type)
    {
    case ERT_TRY:
      {
	auto_vec<tree> labels;
	tree default_label = NULL;
	eh_catch c;
	edge_iterator ei;
	edge e;
	hash_set<tree> seen_values;

	/* Collect the labels for a switch.  Zero the post_landing_pad
	   field because we'll no longer have anything keeping these labels
	   in existence and the optimizer will be free to merge these
	   blocks at will.  */
	for (c = r->u.eh_try.first_catch; c ; c = c->next_catch)
	  {
	    tree tp_node, flt_node, lab = c->label;
	    bool have_label = false;

	    c->label = NULL;
	    tp_node = c->type_list;
	    flt_node = c->filter_list;

	    /* A handler without a type list is a catch-all; it ends the
	       scan since later handlers are shadowed.  */
	    if (tp_node == NULL)
	      {
		default_label = lab;
		break;
	      }
	    do
	      {
		/* Filter out duplicate labels that arise when this handler
		   is shadowed by an earlier one.  When no labels are
		   attached to the handler anymore, we remove
		   the corresponding edge and then we delete unreachable
		   blocks at the end of this pass.  */
		if (! seen_values.contains (TREE_VALUE (flt_node)))
		  {
		    tree t = build_case_label (TREE_VALUE (flt_node),
					       NULL, lab);
		    labels.safe_push (t);
		    seen_values.add (TREE_VALUE (flt_node));
		    have_label = true;
		  }

		tp_node = TREE_CHAIN (tp_node);
		flt_node = TREE_CHAIN (flt_node);
	      }
	    while (tp_node);
	    if (! have_label)
	      {
		remove_edge (find_edge (src, label_to_block (lab)));
		redirected = true;
	      }
	  }

	/* Clean up the edge flags.  */
	FOR_EACH_EDGE (e, ei, src->succs)
	  {
	    if (e->flags & EDGE_FALLTHRU)
	      {
		/* If there was no catch-all, use the fallthru edge.  */
		if (default_label == NULL)
		  default_label = gimple_block_label (e->dest);
		e->flags &= ~EDGE_FALLTHRU;
	      }
	  }
	gcc_assert (default_label != NULL);

	/* Don't generate a switch if there's only a default case.
	   This is common in the form of try { A; } catch (...) { B; }.  */
	if (!labels.exists ())
	  {
	    e = single_succ_edge (src);
	    e->flags |= EDGE_FALLTHRU;
	  }
	else
	  {
	    /* Fetch the runtime filter value for this region...  */
	    fn = builtin_decl_implicit (BUILT_IN_EH_FILTER);
	    x = gimple_build_call (fn, 1, build_int_cst (integer_type_node,
							 region_nr));
	    filter = create_tmp_var (TREE_TYPE (TREE_TYPE (fn)));
	    filter = make_ssa_name (filter, x);
	    gimple_call_set_lhs (x, filter);
	    gsi_insert_before (&gsi, x, GSI_SAME_STMT);

	    /* Turn the default label into a default case.  */
	    default_label = build_case_label (NULL, NULL, default_label);
	    sort_case_labels (labels);

	    /* ...and dispatch on it with a switch.  */
	    x = gimple_build_switch (filter, default_label, labels);
	    gsi_insert_before (&gsi, x, GSI_SAME_STMT);
	  }
      }
      break;

    case ERT_ALLOWED_EXCEPTIONS:
      {
	edge b_e = BRANCH_EDGE (src);
	edge f_e = FALLTHRU_EDGE (src);

	/* Compare the runtime filter value against the region's allowed
	   filter; equal means the exception is not allowed and we branch
	   to the handler.  */
	fn = builtin_decl_implicit (BUILT_IN_EH_FILTER);
	x = gimple_build_call (fn, 1, build_int_cst (integer_type_node,
						     region_nr));
	filter = create_tmp_var (TREE_TYPE (TREE_TYPE (fn)));
	filter = make_ssa_name (filter, x);
	gimple_call_set_lhs (x, filter);
	gsi_insert_before (&gsi, x, GSI_SAME_STMT);

	r->u.allowed.label = NULL;
	x = gimple_build_cond (EQ_EXPR, filter,
			       build_int_cst (TREE_TYPE (filter),
					      r->u.allowed.filter),
			       NULL_TREE, NULL_TREE);
	gsi_insert_before (&gsi, x, GSI_SAME_STMT);

	b_e->flags = b_e->flags | EDGE_TRUE_VALUE;
	f_e->flags = (f_e->flags & ~EDGE_FALLTHRU) | EDGE_FALSE_VALUE;
      }
      break;

    default:
      gcc_unreachable ();
    }

  /* Replace the EH_DISPATCH with the SWITCH or COND generated above.  */
  gsi_remove (&gsi, true);
  return redirected;
}
3711 | ||
17795822 TS |
namespace {

/* Pass descriptor for the EH_DISPATCH lowering pass ("ehdisp").
   Requires lowered control flow (PROP_gimple_lcf); provides and
   destroys nothing.  */

const pass_data pass_data_lower_eh_dispatch =
{
  GIMPLE_PASS, /* type */
  "ehdisp", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_TREE_EH, /* tv_id */
  PROP_gimple_lcf, /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};
3726 | ||
17795822 | 3727 | class pass_lower_eh_dispatch : public gimple_opt_pass |
be55bfe6 TS |
3728 | { |
3729 | public: | |
3730 | pass_lower_eh_dispatch (gcc::context *ctxt) | |
3731 | : gimple_opt_pass (pass_data_lower_eh_dispatch, ctxt) | |
3732 | {} | |
3733 | ||
3734 | /* opt_pass methods: */ | |
3735 | virtual bool gate (function *fun) { return fun->eh->region_tree != NULL; } | |
3736 | virtual unsigned int execute (function *); | |
3737 | ||
3738 | }; // class pass_lower_eh_dispatch | |
3739 | ||
3740 | unsigned | |
3741 | pass_lower_eh_dispatch::execute (function *fun) | |
1d65f45c RH |
3742 | { |
3743 | basic_block bb; | |
ea85edfe | 3744 | int flags = 0; |
9f698956 | 3745 | bool redirected = false; |
4e6d1743 | 3746 | |
1d65f45c | 3747 | assign_filter_values (); |
496a4ef5 | 3748 | |
be55bfe6 | 3749 | FOR_EACH_BB_FN (bb, fun) |
1d65f45c | 3750 | { |
355fe088 | 3751 | gimple *last = last_stmt (bb); |
960f0c9d JJ |
3752 | if (last == NULL) |
3753 | continue; | |
3754 | if (gimple_code (last) == GIMPLE_EH_DISPATCH) | |
1d65f45c | 3755 | { |
538dd0b7 DM |
3756 | redirected |= lower_eh_dispatch (bb, |
3757 | as_a <geh_dispatch *> (last)); | |
ea85edfe JJ |
3758 | flags |= TODO_update_ssa_only_virtuals; |
3759 | } | |
3760 | else if (gimple_code (last) == GIMPLE_RESX) | |
3761 | { | |
3762 | if (stmt_can_throw_external (last)) | |
3763 | optimize_clobbers (bb); | |
3764 | else | |
3765 | flags |= sink_clobbers (bb); | |
1d65f45c RH |
3766 | } |
3767 | } | |
3768 | ||
9f698956 AB |
3769 | if (redirected) |
3770 | delete_unreachable_blocks (); | |
ea85edfe | 3771 | return flags; |
1d65f45c RH |
3772 | } |
3773 | ||
17795822 TS |
3774 | } // anon namespace |
3775 | ||
27a4cd48 DM |
3776 | gimple_opt_pass * |
3777 | make_pass_lower_eh_dispatch (gcc::context *ctxt) | |
3778 | { | |
3779 | return new pass_lower_eh_dispatch (ctxt); | |
3780 | } | |
1d65f45c | 3781 | \f |
d273b176 SB |
/* Walk statements, see what regions and, optionally, landing pads
   are really referenced.

   Returns in R_REACHABLEP an sbitmap with bits set for reachable regions,
   and in LP_REACHABLE an sbitmap with bits set for reachable landing pads.

   Passing NULL for LP_REACHABLE is valid, in this case only reachable
   regions are marked.

   The caller is responsible for freeing the returned sbitmaps.  */

static void
mark_reachable_handlers (sbitmap *r_reachablep, sbitmap *lp_reachablep)
{
  sbitmap r_reachable, lp_reachable;
  basic_block bb;
  bool mark_landing_pads = (lp_reachablep != NULL);
  gcc_checking_assert (r_reachablep != NULL);

  /* Allocate and clear the output bitmaps up front.  */
  r_reachable = sbitmap_alloc (cfun->eh->region_array->length ());
  bitmap_clear (r_reachable);
  *r_reachablep = r_reachable;

  if (mark_landing_pads)
    {
      lp_reachable = sbitmap_alloc (cfun->eh->lp_array->length ());
      bitmap_clear (lp_reachable);
      *lp_reachablep = lp_reachable;
    }
  else
    lp_reachable = NULL;

  FOR_EACH_BB_FN (bb, cfun)
    {
      gimple_stmt_iterator gsi;

      for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
	{
	  gimple *stmt = gsi_stmt (gsi);

	  if (mark_landing_pads)
	    {
	      int lp_nr = lookup_stmt_eh_lp (stmt);

	      /* Negative LP numbers are MUST_NOT_THROW regions which
		 are not considered BB enders.  */
	      if (lp_nr < 0)
		bitmap_set_bit (r_reachable, -lp_nr);

	      /* Positive LP numbers are real landing pads, and BB enders.  */
	      else if (lp_nr > 0)
		{
		  gcc_assert (gsi_one_before_end_p (gsi));
		  eh_region region = get_eh_region_from_lp_number (lp_nr);
		  bitmap_set_bit (r_reachable, region->index);
		  bitmap_set_bit (lp_reachable, lp_nr);
		}
	    }

	  /* Avoid removing regions referenced from RESX/EH_DISPATCH.  */
	  switch (gimple_code (stmt))
	    {
	    case GIMPLE_RESX:
	      bitmap_set_bit (r_reachable,
			      gimple_resx_region (as_a <gresx *> (stmt)));
	      break;
	    case GIMPLE_EH_DISPATCH:
	      bitmap_set_bit (r_reachable,
			      gimple_eh_dispatch_region (
				as_a <geh_dispatch *> (stmt)));
	      break;
	    case GIMPLE_CALL:
	      /* __builtin_eh_copy_values names two regions by number;
		 both stay reachable.  */
	      if (gimple_call_builtin_p (stmt, BUILT_IN_EH_COPY_VALUES))
		for (int i = 0; i < 2; ++i)
		  {
		    tree rt = gimple_call_arg (stmt, i);
		    HOST_WIDE_INT ri = tree_to_shwi (rt);

		    gcc_assert (ri == (int)ri);
		    bitmap_set_bit (r_reachable, ri);
		  }
	      break;
	    default:
	      break;
	    }
	}
    }
}
3870 | ||
/* Remove unreachable handlers and unreachable landing pads.

   Uses mark_reachable_handlers to compute the reachable sets, dumps
   before/after state when dumping is enabled, and verifies the EH tree
   afterwards under flag_checking.  */

static void
remove_unreachable_handlers (void)
{
  sbitmap r_reachable, lp_reachable;
  eh_region region;
  eh_landing_pad lp;
  unsigned i;

  mark_reachable_handlers (&r_reachable, &lp_reachable);

  if (dump_file)
    {
      fprintf (dump_file, "Before removal of unreachable regions:\n");
      dump_eh_tree (dump_file, cfun);
      fprintf (dump_file, "Reachable regions: ");
      dump_bitmap_file (dump_file, r_reachable);
      fprintf (dump_file, "Reachable landing pads: ");
      dump_bitmap_file (dump_file, lp_reachable);
    }

  /* Announce the regions about to go away (dump only; the actual
     removal happens in remove_unreachable_eh_regions below).  */
  if (dump_file)
    {
      FOR_EACH_VEC_SAFE_ELT (cfun->eh->region_array, i, region)
	if (region && !bitmap_bit_p (r_reachable, region->index))
	  fprintf (dump_file,
		   "Removing unreachable region %d\n",
		   region->index);
    }

  remove_unreachable_eh_regions (r_reachable);

  /* Landing pads are removed one by one.  */
  FOR_EACH_VEC_SAFE_ELT (cfun->eh->lp_array, i, lp)
    if (lp && !bitmap_bit_p (lp_reachable, lp->index))
      {
	if (dump_file)
	  fprintf (dump_file,
		   "Removing unreachable landing pad %d\n",
		   lp->index);
	remove_eh_landing_pad (lp);
      }

  if (dump_file)
    {
      fprintf (dump_file, "\n\nAfter removal of unreachable regions:\n");
      dump_eh_tree (dump_file, cfun);
      fprintf (dump_file, "\n\n");
    }

  sbitmap_free (r_reachable);
  sbitmap_free (lp_reachable);

  if (flag_checking)
    verify_eh_tree (cfun);
}
3927 | ||
99d8763e JJ |
3928 | /* Remove unreachable handlers if any landing pads have been removed after |
3929 | last ehcleanup pass (due to gimple_purge_dead_eh_edges). */ | |
3930 | ||
3931 | void | |
3932 | maybe_remove_unreachable_handlers (void) | |
3933 | { | |
3934 | eh_landing_pad lp; | |
d273b176 | 3935 | unsigned i; |
99d8763e JJ |
3936 | |
3937 | if (cfun->eh == NULL) | |
3938 | return; | |
d273b176 SB |
3939 | |
3940 | FOR_EACH_VEC_SAFE_ELT (cfun->eh->lp_array, i, lp) | |
99d8763e JJ |
3941 | if (lp && lp->post_landing_pad) |
3942 | { | |
3943 | if (label_to_block (lp->post_landing_pad) == NULL) | |
3944 | { | |
3945 | remove_unreachable_handlers (); | |
3946 | return; | |
3947 | } | |
3948 | } | |
3949 | } | |
3950 | ||
1d65f45c RH |
3951 | /* Remove regions that do not have landing pads. This assumes |
3952 | that remove_unreachable_handlers has already been run, and | |
d273b176 SB |
3953 | that we've just manipulated the landing pads since then. |
3954 | ||
3955 | Preserve regions with landing pads and regions that prevent | |
3956 | exceptions from propagating further, even if these regions | |
3957 | are not reachable. */ | |
1d65f45c RH |
3958 | |
3959 | static void | |
3960 | remove_unreachable_handlers_no_lp (void) | |
3961 | { | |
d273b176 | 3962 | eh_region region; |
1a47f99c | 3963 | sbitmap r_reachable; |
d273b176 | 3964 | unsigned i; |
1a47f99c | 3965 | |
d273b176 | 3966 | mark_reachable_handlers (&r_reachable, /*lp_reachablep=*/NULL); |
1a47f99c | 3967 | |
d273b176 | 3968 | FOR_EACH_VEC_SAFE_ELT (cfun->eh->region_array, i, region) |
1a47f99c | 3969 | { |
d273b176 SB |
3970 | if (! region) |
3971 | continue; | |
3972 | ||
3973 | if (region->landing_pads != NULL | |
3974 | || region->type == ERT_MUST_NOT_THROW) | |
3975 | bitmap_set_bit (r_reachable, region->index); | |
3976 | ||
3977 | if (dump_file | |
3978 | && !bitmap_bit_p (r_reachable, region->index)) | |
3979 | fprintf (dump_file, | |
3980 | "Removing unreachable region %d\n", | |
3981 | region->index); | |
1a47f99c | 3982 | } |
1d65f45c | 3983 | |
d273b176 | 3984 | remove_unreachable_eh_regions (r_reachable); |
1a47f99c MM |
3985 | |
3986 | sbitmap_free (r_reachable); | |
4e6d1743 JH |
3987 | } |
3988 | ||
1d65f45c RH |
3989 | /* Undo critical edge splitting on an EH landing pad. Earlier, we |
3990 | optimisticaly split all sorts of edges, including EH edges. The | |
3991 | optimization passes in between may not have needed them; if not, | |
3992 | we should undo the split. | |
3993 | ||
3994 | Recognize this case by having one EH edge incoming to the BB and | |
3995 | one normal edge outgoing; BB should be empty apart from the | |
3996 | post_landing_pad label. | |
3997 | ||
3998 | Note that this is slightly different from the empty handler case | |
3999 | handled by cleanup_empty_eh, in that the actual handler may yet | |
4000 | have actual code but the landing pad has been separated from the | |
4001 | handler. As such, cleanup_empty_eh relies on this transformation | |
4002 | having been done first. */ | |
a8da523f JH |
4003 | |
static bool
unsplit_eh (eh_landing_pad lp)
{
  basic_block bb = label_to_block (lp->post_landing_pad);
  gimple_stmt_iterator gsi;
  edge e_in, e_out;

  /* Quickly check the edge counts on BB for singularity.  */
  if (!single_pred_p (bb) || !single_succ_p (bb))
    return false;
  e_in = single_pred_edge (bb);
  e_out = single_succ_edge (bb);

  /* Input edge must be EH and output edge must be normal.  */
  if ((e_in->flags & EDGE_EH) == 0 || (e_out->flags & EDGE_EH) != 0)
    return false;

  /* The block must be empty except for the labels and debug insns.  */
  gsi = gsi_after_labels (bb);
  if (!gsi_end_p (gsi) && is_gimple_debug (gsi_stmt (gsi)))
    gsi_next_nondebug (&gsi);
  if (!gsi_end_p (gsi))
    return false;

  /* The destination block must not already have a landing pad
     for a different region.  Labels come first in a block, so the
     scan stops at the first non-label statement.  */
  for (gsi = gsi_start_bb (e_out->dest); !gsi_end_p (gsi); gsi_next (&gsi))
    {
      glabel *label_stmt = dyn_cast <glabel *> (gsi_stmt (gsi));
      tree lab;
      int lp_nr;

      if (!label_stmt)
        break;
      lab = gimple_label_label (label_stmt);
      lp_nr = EH_LANDING_PAD_NR (lab);
      if (lp_nr && get_eh_region_from_lp_number (lp_nr) != lp->region)
        return false;
    }

  /* The new destination block must not already be a destination of
     the source block, lest we merge fallthru and eh edges and get
     all sorts of confused.  */
  if (find_edge (e_in->src, e_out->dest))
    return false;

  /* ??? We can get degenerate phis due to cfg cleanups.  I would have
     thought this should have been cleaned up by a phicprop pass, but
     that doesn't appear to handle virtuals.  Propagate by hand.  */
  if (!gimple_seq_empty_p (phi_nodes (bb)))
    {
      for (gphi_iterator gpi = gsi_start_phis (bb); !gsi_end_p (gpi); )
        {
          gimple *use_stmt;
          gphi *phi = gpi.phi ();
          tree lhs = gimple_phi_result (phi);
          /* BB has a single predecessor (checked above), so each PHI
             has exactly one argument; replace all uses of the result
             with it.  */
          tree rhs = gimple_phi_arg_def (phi, 0);
          use_operand_p use_p;
          imm_use_iterator iter;

          FOR_EACH_IMM_USE_STMT (use_stmt, iter, lhs)
            {
              FOR_EACH_IMM_USE_ON_STMT (use_p, iter)
                SET_USE (use_p, rhs);
            }

          /* Preserve the abnormal-PHI marking on the replacement name.  */
          if (SSA_NAME_OCCURS_IN_ABNORMAL_PHI (lhs))
            SSA_NAME_OCCURS_IN_ABNORMAL_PHI (rhs) = 1;

          remove_phi_node (&gpi, true);
        }
    }

  if (dump_file && (dump_flags & TDF_DETAILS))
    fprintf (dump_file, "Unsplit EH landing pad %d to block %i.\n",
             lp->index, e_out->dest->index);

  /* Redirect the edge.  Since redirect_eh_edge_1 expects to be moving
     a successor edge, humor it.  But do the real CFG change with the
     predecessor of E_OUT in order to preserve the ordering of arguments
     to the PHI nodes in E_OUT->DEST.  */
  redirect_eh_edge_1 (e_in, e_out->dest, false);
  redirect_edge_pred (e_out, e_in->src);
  e_out->flags = e_in->flags;
  e_out->probability = e_in->probability;
  e_out->count = e_in->count;
  remove_edge (e_in);

  return true;
}
496a4ef5 | 4094 | |
1d65f45c | 4095 | /* Examine each landing pad block and see if it matches unsplit_eh. */ |
496a4ef5 | 4096 | |
1d65f45c RH |
4097 | static bool |
4098 | unsplit_all_eh (void) | |
4099 | { | |
4100 | bool changed = false; | |
4101 | eh_landing_pad lp; | |
4102 | int i; | |
496a4ef5 | 4103 | |
9771b263 | 4104 | for (i = 1; vec_safe_iterate (cfun->eh->lp_array, i, &lp); ++i) |
1d65f45c RH |
4105 | if (lp) |
4106 | changed |= unsplit_eh (lp); | |
4107 | ||
4108 | return changed; | |
4109 | } | |
4110 | ||
4111 | /* A subroutine of cleanup_empty_eh. Redirect all EH edges incoming | |
4112 | to OLD_BB to NEW_BB; return true on success, false on failure. | |
4113 | ||
4114 | OLD_BB_OUT is the edge into NEW_BB from OLD_BB, so if we miss any | |
4115 | PHI variables from OLD_BB we can pick them up from OLD_BB_OUT. | |
4116 | Virtual PHIs may be deleted and marked for renaming. */ | |
4117 | ||
static bool
cleanup_empty_eh_merge_phis (basic_block new_bb, basic_block old_bb,
                             edge old_bb_out, bool change_region)
{
  gphi_iterator ngsi, ogsi;
  edge_iterator ei;
  edge e;
  bitmap ophi_handled;

  /* The destination block must not be a regular successor for any
     of the preds of the landing pad.  Thus, avoid turning
        <..>
        |  \ EH
        |  <..>
        |  /
        <..>
     into
        <..>
        |  | EH
        <..>
     which CFG verification would choke on.  See PR45172 and PR51089.  */
  FOR_EACH_EDGE (e, ei, old_bb->preds)
    if (find_edge (e->src, new_bb))
      return false;

  /* Start from a clean slate of edge var-map redirection data.  */
  FOR_EACH_EDGE (e, ei, old_bb->preds)
    redirect_edge_var_map_clear (e);

  /* Bitmap of SSA name versions of OLD_BB's PHI results that the loop
     below managed to account for.  */
  ophi_handled = BITMAP_ALLOC (NULL);

  /* First, iterate through the PHIs on NEW_BB and set up the edge_var_map
     for the edges we're going to move.  */
  for (ngsi = gsi_start_phis (new_bb); !gsi_end_p (ngsi); gsi_next (&ngsi))
    {
      gphi *ophi, *nphi = ngsi.phi ();
      tree nresult, nop;

      nresult = gimple_phi_result (nphi);
      nop = gimple_phi_arg_def (nphi, old_bb_out->dest_idx);

      /* Find the corresponding PHI in OLD_BB so we can forward-propagate
         the source ssa_name.  */
      ophi = NULL;
      for (ogsi = gsi_start_phis (old_bb); !gsi_end_p (ogsi); gsi_next (&ogsi))
        {
          ophi = ogsi.phi ();
          if (gimple_phi_result (ophi) == nop)
            break;
          ophi = NULL;
        }

      /* If we did find the corresponding PHI, copy those inputs.  */
      if (ophi)
        {
          /* If NOP is used somewhere else beyond phis in new_bb, give up.  */
          if (!has_single_use (nop))
            {
              imm_use_iterator imm_iter;
              use_operand_p use_p;

              FOR_EACH_IMM_USE_FAST (use_p, imm_iter, nop)
                {
                  if (!gimple_debug_bind_p (USE_STMT (use_p))
                      && (gimple_code (USE_STMT (use_p)) != GIMPLE_PHI
                          || gimple_bb (USE_STMT (use_p)) != new_bb))
                    goto fail;
                }
            }
          bitmap_set_bit (ophi_handled, SSA_NAME_VERSION (nop));
          /* For each incoming EH edge, record the value OLD_BB's PHI
             carried on that edge; flush_pending_stmts turns these
             records into PHI arguments on NEW_BB after redirection.  */
          FOR_EACH_EDGE (e, ei, old_bb->preds)
            {
              location_t oloc;
              tree oop;

              if ((e->flags & EDGE_EH) == 0)
                continue;
              oop = gimple_phi_arg_def (ophi, e->dest_idx);
              oloc = gimple_phi_arg_location (ophi, e->dest_idx);
              redirect_edge_var_map_add (e, nresult, oop, oloc);
            }
        }
      /* If we didn't find the PHI, if it's a real variable or a VOP, we know
         from the fact that OLD_BB is tree_empty_eh_handler_p that the
         variable is unchanged from input to the block and we can simply
         re-use the input to NEW_BB from the OLD_BB_OUT edge.  */
      else
        {
          location_t nloc
            = gimple_phi_arg_location (nphi, old_bb_out->dest_idx);
          FOR_EACH_EDGE (e, ei, old_bb->preds)
            redirect_edge_var_map_add (e, nresult, nop, nloc);
        }
    }

  /* Second, verify that all PHIs from OLD_BB have been handled.  If not,
     we don't know what values from the other edges into NEW_BB to use.  */
  for (ogsi = gsi_start_phis (old_bb); !gsi_end_p (ogsi); gsi_next (&ogsi))
    {
      gphi *ophi = ogsi.phi ();
      tree oresult = gimple_phi_result (ophi);
      if (!bitmap_bit_p (ophi_handled, SSA_NAME_VERSION (oresult)))
        goto fail;
    }

  /* Finally, move the edges and update the PHIs.  */
  for (ei = ei_start (old_bb->preds); (e = ei_safe_edge (ei)); )
    if (e->flags & EDGE_EH)
      {
        /* ??? CFG manipulation routines do not try to update loop
           form on edge redirection.  Do so manually here for now.  */
        /* If we redirect a loop entry or latch edge that will either create
           a multiple entry loop or rotate the loop.  If the loops merge
           we may have created a loop with multiple latches.
           All of this isn't easily fixed thus cancel the affected loop
           and mark the other loop as possibly having multiple latches.  */
        if (e->dest == e->dest->loop_father->header)
          {
            mark_loop_for_removal (e->dest->loop_father);
            new_bb->loop_father->latch = NULL;
            loops_state_set (LOOPS_MAY_HAVE_MULTIPLE_LATCHES);
          }
        redirect_eh_edge_1 (e, new_bb, change_region);
        redirect_edge_succ (e, new_bb);
        flush_pending_stmts (e);
      }
    else
      ei_next (&ei);

  BITMAP_FREE (ophi_handled);
  return true;

 fail:
  /* Undo the partial var-map setup before reporting failure.  */
  FOR_EACH_EDGE (e, ei, old_bb->preds)
    redirect_edge_var_map_clear (e);
  BITMAP_FREE (ophi_handled);
  return false;
}
4255 | ||
4256 | /* A subroutine of cleanup_empty_eh. Move a landing pad LP from its | |
4257 | old region to NEW_REGION at BB. */ | |
4258 | ||
static void
cleanup_empty_eh_move_lp (basic_block bb, edge e_out,
                          eh_landing_pad lp, eh_region new_region)
{
  gimple_stmt_iterator gsi;
  eh_landing_pad *pp;

  /* Unlink LP from the singly-linked landing-pad list of its
     current region.  */
  for (pp = &lp->region->landing_pads; *pp != lp; pp = &(*pp)->next_lp)
    continue;
  *pp = lp->next_lp;

  /* Link LP at the head of NEW_REGION's landing-pad list.  */
  lp->region = new_region;
  lp->next_lp = new_region->landing_pads;
  new_region->landing_pads = lp;

  /* Delete the RESX that was matched within the empty handler block.  */
  gsi = gsi_last_bb (bb);
  unlink_stmt_vdef (gsi_stmt (gsi));
  gsi_remove (&gsi, true);

  /* Clean up E_OUT for the fallthru.  */
  e_out->flags = (e_out->flags & ~EDGE_EH) | EDGE_FALLTHRU;
  e_out->probability = REG_BR_PROB_BASE;
}
4283 | ||
4284 | /* A subroutine of cleanup_empty_eh. Handle more complex cases of | |
b8698a0f | 4285 | unsplitting than unsplit_eh was prepared to handle, e.g. when |
1d65f45c RH |
4286 | multiple incoming edges and phis are involved. */ |
4287 | ||
static bool
cleanup_empty_eh_unsplit (basic_block bb, edge e_out, eh_landing_pad lp)
{
  gimple_stmt_iterator gsi;
  tree lab;

  /* We really ought not have totally lost everything following
     a landing pad label.  Given that BB is empty, there had better
     be a successor.  */
  gcc_assert (e_out != NULL);

  /* The destination block must not already have a landing pad
     for a different region.  Labels come first in a block, so the
     scan stops at the first non-label statement.  */
  lab = NULL;
  for (gsi = gsi_start_bb (e_out->dest); !gsi_end_p (gsi); gsi_next (&gsi))
    {
      glabel *stmt = dyn_cast <glabel *> (gsi_stmt (gsi));
      int lp_nr;

      if (!stmt)
        break;
      lab = gimple_label_label (stmt);
      lp_nr = EH_LANDING_PAD_NR (lab);
      if (lp_nr && get_eh_region_from_lp_number (lp_nr) != lp->region)
        return false;
    }

  /* Attempt to move the PHIs into the successor block.  */
  if (cleanup_empty_eh_merge_phis (e_out->dest, bb, e_out, false))
    {
      if (dump_file && (dump_flags & TDF_DETAILS))
        fprintf (dump_file,
                 "Unsplit EH landing pad %d to block %i "
                 "(via cleanup_empty_eh).\n",
                 lp->index, e_out->dest->index);
      return true;
    }

  return false;
}
4328 | ||
afaaa67d JJ |
4329 | /* Return true if edge E_FIRST is part of an empty infinite loop |
4330 | or leads to such a loop through a series of single successor | |
4331 | empty bbs. */ | |
4332 | ||
static bool
infinite_empty_loop_p (edge e_first)
{
  bool inf_loop = false;
  edge e;

  /* A direct self-loop is trivially infinite.  */
  if (e_first->dest == e_first->src)
    return true;

  /* Walk the chain of single-successor blocks, marking each visited
     block through its AUX field; reaching a marked block means the
     walk closed a cycle of empty blocks.  */
  e_first->src->aux = (void *) 1;
  for (e = e_first; single_succ_p (e->dest); e = single_succ_edge (e->dest))
    {
      gimple_stmt_iterator gsi;
      if (e->dest->aux)
        {
          inf_loop = true;
          break;
        }
      e->dest->aux = (void *) 1;
      /* Stop at the first block containing a real (non-label,
         non-debug) statement -- the chain is no longer empty.  */
      gsi = gsi_after_labels (e->dest);
      if (!gsi_end_p (gsi) && is_gimple_debug (gsi_stmt (gsi)))
        gsi_next_nondebug (&gsi);
      if (!gsi_end_p (gsi))
        break;
    }
  /* Clear the AUX marks set above, following the same chain.  */
  e_first->src->aux = NULL;
  for (e = e_first; e->dest->aux; e = single_succ_edge (e->dest))
    e->dest->aux = NULL;

  return inf_loop;
}
4364 | ||
1d65f45c RH |
4365 | /* Examine the block associated with LP to determine if it's an empty |
4366 | handler for its EH region. If so, attempt to redirect EH edges to | |
4367 | an outer region. Return true if the CFG was updated in any way. This | |
4368 | is similar to jump forwarding, just across EH edges. */ | |
4369 | ||
static bool
cleanup_empty_eh (eh_landing_pad lp)
{
  basic_block bb = label_to_block (lp->post_landing_pad);
  gimple_stmt_iterator gsi;
  gimple *resx;
  eh_region new_region;
  edge_iterator ei;
  edge e, e_out;
  bool has_non_eh_pred;
  bool ret = false;
  int new_lp_nr;

  /* There can be zero or one edges out of BB.  This is the quickest test.  */
  switch (EDGE_COUNT (bb->succs))
    {
    case 0:
      e_out = NULL;
      break;
    case 1:
      e_out = single_succ_edge (bb);
      break;
    default:
      return false;
    }

  /* If the block ends in a RESX, try to get rid of clobber statements
     first; sinking them counts as a change even if nothing else below
     succeeds.  */
  resx = last_stmt (bb);
  if (resx && is_gimple_resx (resx))
    {
      if (stmt_can_throw_external (resx))
        optimize_clobbers (bb);
      else if (sink_clobbers (bb))
        ret = true;
    }

  gsi = gsi_after_labels (bb);

  /* Make sure to skip debug statements.  */
  if (!gsi_end_p (gsi) && is_gimple_debug (gsi_stmt (gsi)))
    gsi_next_nondebug (&gsi);

  /* If the block is totally empty, look for more unsplitting cases.  */
  if (gsi_end_p (gsi))
    {
      /* For the degenerate case of an infinite loop bail out.
         If bb has no successors and is totally empty, which can happen e.g.
         because of incorrect noreturn attribute, bail out too.  */
      if (e_out == NULL
          || infinite_empty_loop_p (e_out))
        return ret;

      /* Deliberately bitwise |, not ||: the unsplit attempt must run
         even when RET is already true from sink_clobbers above.  */
      return ret | cleanup_empty_eh_unsplit (bb, e_out, lp);
    }

  /* The block should consist only of a single RESX statement, modulo a
     preceding call to __builtin_stack_restore if there is no outgoing
     edge, since the call can be eliminated in this case.  */
  resx = gsi_stmt (gsi);
  if (!e_out && gimple_call_builtin_p (resx, BUILT_IN_STACK_RESTORE))
    {
      gsi_next (&gsi);
      resx = gsi_stmt (gsi);
    }
  if (!is_gimple_resx (resx))
    return ret;
  gcc_assert (gsi_one_before_end_p (gsi));

  /* Determine if there are non-EH edges, or resx edges into the handler.  */
  has_non_eh_pred = false;
  FOR_EACH_EDGE (e, ei, bb->preds)
    if (!(e->flags & EDGE_EH))
      has_non_eh_pred = true;

  /* Find the handler that's outer of the empty handler by looking at
     where the RESX instruction was vectored.  */
  new_lp_nr = lookup_stmt_eh_lp (resx);
  new_region = get_eh_region_from_lp_number (new_lp_nr);

  /* If there's no destination region within the current function,
     redirection is trivial via removing the throwing statements from
     the EH region, removing the EH edges, and allowing the block
     to go unreachable.  */
  if (new_region == NULL)
    {
      gcc_assert (e_out == NULL);
      for (ei = ei_start (bb->preds); (e = ei_safe_edge (ei)); )
        if (e->flags & EDGE_EH)
          {
            gimple *stmt = last_stmt (e->src);
            remove_stmt_from_eh_lp (stmt);
            remove_edge (e);
          }
        else
          ei_next (&ei);
      goto succeed;
    }

  /* If the destination region is a MUST_NOT_THROW, allow the runtime
     to handle the abort and allow the blocks to go unreachable.  */
  if (new_region->type == ERT_MUST_NOT_THROW)
    {
      for (ei = ei_start (bb->preds); (e = ei_safe_edge (ei)); )
        if (e->flags & EDGE_EH)
          {
            gimple *stmt = last_stmt (e->src);
            remove_stmt_from_eh_lp (stmt);
            /* Re-associate the throwing statement with the outer
               MUST_NOT_THROW landing pad instead.  */
            add_stmt_to_eh_lp (stmt, new_lp_nr);
            remove_edge (e);
          }
        else
          ei_next (&ei);
      goto succeed;
    }

  /* Try to redirect the EH edges and merge the PHIs into the destination
     landing pad block.  If the merge succeeds, we'll already have redirected
     all the EH edges.  The handler itself will go unreachable if there were
     no normal edges.  */
  if (cleanup_empty_eh_merge_phis (e_out->dest, bb, e_out, true))
    goto succeed;

  /* Finally, if all input edges are EH edges, then we can (potentially)
     reduce the number of transfers from the runtime by moving the landing
     pad from the original region to the new region.  This is a win when
     we remove the last CLEANUP region along a particular exception
     propagation path.  Since nothing changes except for the region with
     which the landing pad is associated, the PHI nodes do not need to be
     adjusted at all.  */
  if (!has_non_eh_pred)
    {
      cleanup_empty_eh_move_lp (bb, e_out, lp, new_region);
      if (dump_file && (dump_flags & TDF_DETAILS))
        fprintf (dump_file, "Empty EH handler %i moved to EH region %i.\n",
                 lp->index, new_region->index);

      /* ??? The CFG didn't change, but we may have rendered the
         old EH region unreachable.  Trigger a cleanup there.  */
      return true;
    }

  return ret;

 succeed:
  if (dump_file && (dump_flags & TDF_DETAILS))
    fprintf (dump_file, "Empty EH handler %i removed.\n", lp->index);
  remove_eh_landing_pad (lp);
  return true;
}
4518 | ||
1d65f45c RH |
4519 | /* Do a post-order traversal of the EH region tree. Examine each |
4520 | post_landing_pad block and see if we can eliminate it as empty. */ | |
4521 | ||
4522 | static bool | |
4523 | cleanup_all_empty_eh (void) | |
4524 | { | |
4525 | bool changed = false; | |
4526 | eh_landing_pad lp; | |
4527 | int i; | |
4528 | ||
9771b263 | 4529 | for (i = 1; vec_safe_iterate (cfun->eh->lp_array, i, &lp); ++i) |
1d65f45c RH |
4530 | if (lp) |
4531 | changed |= cleanup_empty_eh (lp); | |
4532 | ||
4533 | return changed; | |
4534 | } | |
a8da523f JH |
4535 | |
4536 | /* Perform cleanups and lowering of exception handling | |
4537 | 1) cleanups regions with handlers doing nothing are optimized out | |
4538 | 2) MUST_NOT_THROW regions that became dead because of 1) are optimized out | |
4539 | 3) Info about regions that are containing instructions, and regions | |
4540 | reachable via local EH edges is collected | |
c0d18c6c | 4541 | 4) Eh tree is pruned for regions no longer necessary. |
1d65f45c RH |
4542 | |
4543 | TODO: Push MUST_NOT_THROW regions to the root of the EH tree. | |
4544 | Unify those that have the same failure decl and locus. | |
4545 | */ | |
a8da523f JH |
4546 | |
static unsigned int
execute_cleanup_eh_1 (void)
{
  /* Do this first: unsplit_all_eh and cleanup_all_empty_eh can die
     looking up unreachable landing pads.  */
  remove_unreachable_handlers ();

  /* Watch out for the region tree vanishing due to all unreachable.  */
  if (cfun->eh->region_tree)
    {
      bool changed = false;

      /* Unsplitting is purely an optimization; skip it when not
         optimizing.  */
      if (optimize)
        changed |= unsplit_all_eh ();
      changed |= cleanup_all_empty_eh ();

      if (changed)
        {
          /* The CFG was altered above; cached dominance info is stale.  */
          free_dominance_info (CDI_DOMINATORS);
          free_dominance_info (CDI_POST_DOMINATORS);

          /* We delayed all basic block deletion, as we may have performed
             cleanups on EH edges while non-EH edges were still present.  */
          delete_unreachable_blocks ();

          /* We manipulated the landing pads.  Remove any region that no
             longer has a landing pad.  */
          remove_unreachable_handlers_no_lp ();

          return TODO_cleanup_cfg | TODO_update_ssa_only_virtuals;
        }
    }

  return 0;
}
4582 | ||
17795822 TS |
4583 | namespace { |
4584 | ||
/* Pass descriptor for the "ehcleanup" GIMPLE pass.  */

const pass_data pass_data_cleanup_eh =
{
  GIMPLE_PASS, /* type */
  "ehcleanup", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_TREE_EH, /* tv_id */
  PROP_gimple_lcf, /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};
27a4cd48 | 4597 | |
/* GIMPLE pass wrapper around execute_cleanup_eh_1.  */

class pass_cleanup_eh : public gimple_opt_pass
{
public:
  pass_cleanup_eh (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_cleanup_eh, ctxt)
  {}

  /* opt_pass methods: */
  opt_pass * clone () { return new pass_cleanup_eh (m_ctxt); }

  /* Run only for functions that actually have an EH region tree.  */
  virtual bool gate (function *fun)
    {
      return fun->eh != NULL && fun->eh->region_tree != NULL;
    }

  virtual unsigned int execute (function *);

}; // class pass_cleanup_eh
4615 | ||
be55bfe6 TS |
unsigned int
pass_cleanup_eh::execute (function *fun)
{
  /* TODO flags from the cleanup proper (0 when nothing changed).  */
  int ret = execute_cleanup_eh_1 ();

  /* If the function no longer needs an EH personality routine
     clear it.  This exposes cross-language inlining opportunities
     and avoids references to a never defined personality routine.  */
  if (DECL_FUNCTION_PERSONALITY (current_function_decl)
      && function_needs_eh_personality (fun) != eh_personality_lang)
    DECL_FUNCTION_PERSONALITY (current_function_decl) = NULL_TREE;

  return ret;
}
4630 | ||
17795822 TS |
4631 | } // anon namespace |
4632 | ||
27a4cd48 DM |
4633 | gimple_opt_pass * |
4634 | make_pass_cleanup_eh (gcc::context *ctxt) | |
4635 | { | |
4636 | return new pass_cleanup_eh (ctxt); | |
4637 | } | |
1d65f45c RH |
4638 | \f |
4639 | /* Verify that BB containing STMT as the last statement, has precisely the | |
4640 | edge that make_eh_edges would create. */ | |
4641 | ||
DEBUG_FUNCTION bool
verify_eh_edges (gimple *stmt)
{
  basic_block bb = gimple_bb (stmt);
  eh_landing_pad lp = NULL;
  int lp_nr;
  edge_iterator ei;
  edge e, eh_edge;

  /* Look up the landing pad, if any, that STMT is recorded to throw to.  */
  lp_nr = lookup_stmt_eh_lp (stmt);
  if (lp_nr > 0)
    lp = get_eh_landing_pad_from_number (lp_nr);

  /* Find the (at most one) EH edge out of BB, diagnosing duplicates.
     Errors make this return true.  */
  eh_edge = NULL;
  FOR_EACH_EDGE (e, ei, bb->succs)
    {
      if (e->flags & EDGE_EH)
        {
          if (eh_edge)
            {
              error ("BB %i has multiple EH edges", bb->index);
              return true;
            }
          else
            eh_edge = e;
        }
    }

  /* Without a landing pad there must be no EH edge at all.  */
  if (lp == NULL)
    {
      if (eh_edge)
        {
          error ("BB %i can not throw but has an EH edge", bb->index);
          return true;
        }
      return false;
    }

  /* A landing pad number on a statement that cannot throw is bogus.  */
  if (!stmt_could_throw_p (stmt))
    {
      error ("BB %i last statement has incorrectly set lp", bb->index);
      return true;
    }

  if (eh_edge == NULL)
    {
      error ("BB %i is missing an EH edge", bb->index);
      return true;
    }

  /* The EH edge must lead to LP's post-landing-pad block.  */
  if (eh_edge->dest != label_to_block (lp->post_landing_pad))
    {
      error ("Incorrect EH edge %i->%i", bb->index, eh_edge->dest->index);
      return true;
    }

  return false;
}
4700 | ||
4701 | /* Similarly, but handle GIMPLE_EH_DISPATCH specifically. */ | |
4702 | ||
DEBUG_FUNCTION bool
verify_eh_dispatch_edge (geh_dispatch *stmt)
{
  eh_region r;
  eh_catch c;
  basic_block src, dst;
  bool want_fallthru = true;
  edge_iterator ei;
  edge e, fall_edge;

  r = get_eh_region_from_number (gimple_eh_dispatch_region (stmt));
  src = gimple_bb (stmt);

  /* The AUX fields are used below to mark the edges we expect to see;
     they must start out clear.  */
  FOR_EACH_EDGE (e, ei, src->succs)
    gcc_assert (e->aux == NULL);

  switch (r->type)
    {
    case ERT_TRY:
      /* Each catch handler label must be reached by an edge from SRC;
         mark each such edge as accounted for.  */
      for (c = r->u.eh_try.first_catch; c ; c = c->next_catch)
        {
          dst = label_to_block (c->label);
          e = find_edge (src, dst);
          if (e == NULL)
            {
              error ("BB %i is missing an edge", src->index);
              return true;
            }
          e->aux = (void *)e;

          /* A catch-all handler doesn't have a fallthru.  */
          if (c->type_list == NULL)
            {
              want_fallthru = false;
              break;
            }
        }
      break;

    case ERT_ALLOWED_EXCEPTIONS:
      dst = label_to_block (r->u.allowed.label);
      e = find_edge (src, dst);
      if (e == NULL)
        {
          error ("BB %i is missing an edge", src->index);
          return true;
        }
      e->aux = (void *)e;
      break;

    default:
      gcc_unreachable ();
    }

  /* Every outgoing edge must be either one marked above or the single
     fallthru edge; clear the marks as we go.  */
  fall_edge = NULL;
  FOR_EACH_EDGE (e, ei, src->succs)
    {
      if (e->flags & EDGE_FALLTHRU)
        {
          if (fall_edge != NULL)
            {
              error ("BB %i too many fallthru edges", src->index);
              return true;
            }
          fall_edge = e;
        }
      else if (e->aux)
        e->aux = NULL;
      else
        {
          error ("BB %i has incorrect edge", src->index);
          return true;
        }
    }
  /* The presence of a fallthru edge must match what the region type
     requires (no fallthru after a catch-all handler).  */
  if ((fall_edge != NULL) ^ want_fallthru)
    {
      error ("BB %i has incorrect fallthru edge", src->index);
      return true;
    }

  return false;
}