]>
Commit | Line | Data |
---|---|---|
4ee9c684 | 1 | /* Exception handling semantics and decomposition for trees. |
fbd26352 | 2 | Copyright (C) 2003-2019 Free Software Foundation, Inc. |
4ee9c684 | 3 | |
4 | This file is part of GCC. | |
5 | ||
6 | GCC is free software; you can redistribute it and/or modify | |
7 | it under the terms of the GNU General Public License as published by | |
8c4c00c1 | 8 | the Free Software Foundation; either version 3, or (at your option) |
4ee9c684 | 9 | any later version. |
10 | ||
11 | GCC is distributed in the hope that it will be useful, | |
12 | but WITHOUT ANY WARRANTY; without even the implied warranty of | |
13 | MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the | |
14 | GNU General Public License for more details. | |
15 | ||
16 | You should have received a copy of the GNU General Public License | |
8c4c00c1 | 17 | along with GCC; see the file COPYING3. If not see |
18 | <http://www.gnu.org/licenses/>. */ | |
4ee9c684 | 19 | |
20 | #include "config.h" | |
21 | #include "system.h" | |
22 | #include "coretypes.h" | |
9ef16211 | 23 | #include "backend.h" |
7c29e30e | 24 | #include "rtl.h" |
4ee9c684 | 25 | #include "tree.h" |
9ef16211 | 26 | #include "gimple.h" |
7c29e30e | 27 | #include "cfghooks.h" |
28 | #include "tree-pass.h" | |
9ef16211 | 29 | #include "ssa.h" |
7c29e30e | 30 | #include "cgraph.h" |
31 | #include "diagnostic-core.h" | |
9ef16211 | 32 | #include "fold-const.h" |
d53441c8 | 33 | #include "calls.h" |
4ee9c684 | 34 | #include "except.h" |
94ea8568 | 35 | #include "cfganal.h" |
36 | #include "cfgcleanup.h" | |
bc61cadb | 37 | #include "tree-eh.h" |
dcf1a1ec | 38 | #include "gimple-iterator.h" |
073c1fd5 | 39 | #include "tree-cfg.h" |
073c1fd5 | 40 | #include "tree-into-ssa.h" |
69ee5dbb | 41 | #include "tree-ssa.h" |
4ee9c684 | 42 | #include "tree-inline.h" |
4ee9c684 | 43 | #include "langhooks.h" |
79f958cb | 44 | #include "cfgloop.h" |
424a4a92 | 45 | #include "gimple-low.h" |
30a86690 | 46 | #include "stringpool.h" |
47 | #include "attribs.h" | |
5b64e274 | 48 | #include "asan.h" |
04936b7c | 49 | #include "gimplify.h" |
75a70cf9 | 50 | |
/* In some instances a tree and a gimple need to be stored in a same table,
   i.e. in hash tables. This is a structure to do this.
   TP addresses a tree operand slot, T holds a tree (e.g. a LABEL_DECL),
   and G holds a gimple statement; which member is active depends on
   context at each use site.  */
typedef union {tree *tp; tree t; gimple *g;} treemple;
4ee9c684 | 54 | |
4ee9c684 | 55 | /* Misc functions used in this file. */ |
56 | ||
e38def9c | 57 | /* Remember and lookup EH landing pad data for arbitrary statements. |
4ee9c684 | 58 | Really this means any statement that could_throw_p. We could |
59 | stuff this information into the stmt_ann data structure, but: | |
60 | ||
61 | (1) We absolutely rely on this information being kept until | |
62 | we get to rtl. Once we're done with lowering here, if we lose | |
63 | the information there's no way to recover it! | |
64 | ||
ac13e8d9 | 65 | (2) There are many more statements that *cannot* throw as |
4ee9c684 | 66 | compared to those that can. We should be saving some amount |
67 | of space by only allocating memory for those that can throw. */ | |
68 | ||
e38def9c | 69 | /* Add statement T in function IFUN to landing pad NUM. */ |
75a70cf9 | 70 | |
4e57e76d | 71 | static void |
42acab1c | 72 | add_stmt_to_eh_lp_fn (struct function *ifun, gimple *t, int num) |
4ee9c684 | 73 | { |
e38def9c | 74 | gcc_assert (num != 0); |
4ee9c684 | 75 | |
0de999f1 | 76 | if (!get_eh_throw_stmt_table (ifun)) |
42acab1c | 77 | set_eh_throw_stmt_table (ifun, hash_map<gimple *, int>::create_ggc (31)); |
0de999f1 | 78 | |
8f359205 | 79 | gcc_assert (!get_eh_throw_stmt_table (ifun)->put (t, num)); |
4ee9c684 | 80 | } |
35c15734 | 81 | |
/* Add statement T in the current function (cfun) to EH landing pad NUM.
   Convenience wrapper around add_stmt_to_eh_lp_fn.  */

void
add_stmt_to_eh_lp (gimple *t, int num)
{
  add_stmt_to_eh_lp_fn (cfun, t, num);
}
89 | ||
90 | /* Add statement T to the single EH landing pad in REGION. */ | |
91 | ||
92 | static void | |
42acab1c | 93 | record_stmt_eh_region (eh_region region, gimple *t) |
e38def9c | 94 | { |
95 | if (region == NULL) | |
96 | return; | |
97 | if (region->type == ERT_MUST_NOT_THROW) | |
98 | add_stmt_to_eh_lp_fn (cfun, t, -region->index); | |
99 | else | |
100 | { | |
101 | eh_landing_pad lp = region->landing_pads; | |
102 | if (lp == NULL) | |
103 | lp = gen_eh_landing_pad (region); | |
104 | else | |
105 | gcc_assert (lp->next_lp == NULL); | |
106 | add_stmt_to_eh_lp_fn (cfun, t, lp->index); | |
107 | } | |
b3f1469f | 108 | } |
109 | ||
75a70cf9 | 110 | |
e38def9c | 111 | /* Remove statement T in function IFUN from its EH landing pad. */ |
75a70cf9 | 112 | |
35c15734 | 113 | bool |
42acab1c | 114 | remove_stmt_from_eh_lp_fn (struct function *ifun, gimple *t) |
35c15734 | 115 | { |
b3f1469f | 116 | if (!get_eh_throw_stmt_table (ifun)) |
35c15734 | 117 | return false; |
118 | ||
8f359205 | 119 | if (!get_eh_throw_stmt_table (ifun)->get (t)) |
35c15734 | 120 | return false; |
8f359205 | 121 | |
122 | get_eh_throw_stmt_table (ifun)->remove (t); | |
123 | return true; | |
35c15734 | 124 | } |
125 | ||
75a70cf9 | 126 | |
/* Remove statement T in the current function (cfun) from its
   EH landing pad.  Returns true if T was recorded and removed,
   false if it was not in the table.  */

bool
remove_stmt_from_eh_lp (gimple *t)
{
  return remove_stmt_from_eh_lp_fn (cfun, t);
}
135 | ||
75a70cf9 | 136 | /* Determine if statement T is inside an EH region in function IFUN. |
e38def9c | 137 | Positive numbers indicate a landing pad index; negative numbers |
138 | indicate a MUST_NOT_THROW region index; zero indicates that the | |
139 | statement is not recorded in the region table. */ | |
75a70cf9 | 140 | |
4ee9c684 | 141 | int |
42acab1c | 142 | lookup_stmt_eh_lp_fn (struct function *ifun, gimple *t) |
4ee9c684 | 143 | { |
e38def9c | 144 | if (ifun->eh->throw_stmt_table == NULL) |
145 | return 0; | |
4ee9c684 | 146 | |
8f359205 | 147 | int *lp_nr = ifun->eh->throw_stmt_table->get (t); |
148 | return lp_nr ? *lp_nr : 0; | |
4ee9c684 | 149 | } |
150 | ||
/* Likewise, but always use the current function.  Returns 0 when no
   function context is available.  */

int
lookup_stmt_eh_lp (gimple *t)
{
  /* We can get called from initialized data when -fnon-call-exceptions
     is on; prevent crash.  */
  if (!cfun)
    return 0;
  return lookup_stmt_eh_lp_fn (cfun, t);
}
4ee9c684 | 162 | |
/* First pass of EH node decomposition.  Build up a tree of GIMPLE_TRY_FINALLY
   nodes and LABEL_DECL nodes.  We will use this during the second phase to
   determine if a goto leaves the body of a TRY_FINALLY_EXPR node.  */

struct finally_tree_node
{
  /* When storing a GIMPLE_TRY, we have to record a gimple.  However
     when deciding whether a GOTO to a certain LABEL_DECL (which is a
     tree) leaves the TRY block, its necessary to record a tree in
     this field.  Thus a treemple is used. */
  treemple child;
  /* The GIMPLE_TRY_FINALLY statement that immediately encloses CHILD
     (as recorded by record_in_finally_tree).  */
  gtry *parent;
};
176 | ||
/* Hashtable helpers: finally_tree_node entries are hashed and compared
   by the CHILD member's pointer value alone.  */

struct finally_tree_hasher : free_ptr_hash <finally_tree_node>
{
  static inline hashval_t hash (const finally_tree_node *);
  static inline bool equal (const finally_tree_node *,
			    const finally_tree_node *);
};

/* Hash on the child pointer; the low bits are discarded since pointer
   alignment makes them carry little information.  */

inline hashval_t
finally_tree_hasher::hash (const finally_tree_node *v)
{
  return (intptr_t)v->child.t >> 4;
}

/* Two nodes are equal iff they record the same child pointer.  */

inline bool
finally_tree_hasher::equal (const finally_tree_node *v,
			    const finally_tree_node *c)
{
  return v->child.t == c->child.t;
}
198 | ||
4ee9c684 | 199 | /* Note that this table is *not* marked GTY. It is short-lived. */ |
c1f445d2 | 200 | static hash_table<finally_tree_hasher> *finally_tree; |
4ee9c684 | 201 | |
202 | static void | |
1a91d914 | 203 | record_in_finally_tree (treemple child, gtry *parent) |
4ee9c684 | 204 | { |
205 | struct finally_tree_node *n; | |
d9dd21a8 | 206 | finally_tree_node **slot; |
4ee9c684 | 207 | |
680a19b9 | 208 | n = XNEW (struct finally_tree_node); |
4ee9c684 | 209 | n->child = child; |
210 | n->parent = parent; | |
211 | ||
c1f445d2 | 212 | slot = finally_tree->find_slot (n, INSERT); |
8c0963c4 | 213 | gcc_assert (!*slot); |
4ee9c684 | 214 | *slot = n; |
215 | } | |
216 | ||
217 | static void | |
42acab1c | 218 | collect_finally_tree (gimple *stmt, gtry *region); |
75a70cf9 | 219 | |
e38def9c | 220 | /* Go through the gimple sequence. Works with collect_finally_tree to |
75a70cf9 | 221 | record all GIMPLE_LABEL and GIMPLE_TRY statements. */ |
222 | ||
223 | static void | |
1a91d914 | 224 | collect_finally_tree_1 (gimple_seq seq, gtry *region) |
4ee9c684 | 225 | { |
75a70cf9 | 226 | gimple_stmt_iterator gsi; |
4ee9c684 | 227 | |
75a70cf9 | 228 | for (gsi = gsi_start (seq); !gsi_end_p (gsi); gsi_next (&gsi)) |
229 | collect_finally_tree (gsi_stmt (gsi), region); | |
230 | } | |
4ee9c684 | 231 | |
/* Record STMT in the finally tree as a child of REGION, and recurse
   into any statement sub-sequences it contains.  Labels and
   TRY_FINALLY statements become finally-tree entries; TRY_FINALLY
   additionally becomes the enclosing region for its own eval
   sequence.  */

static void
collect_finally_tree (gimple *stmt, gtry *region)
{
  treemple temp;

  switch (gimple_code (stmt))
    {
    case GIMPLE_LABEL:
      /* Labels are recorded as trees (LABEL_DECLs).  */
      temp.t = gimple_label_label (as_a <glabel *> (stmt));
      record_in_finally_tree (temp, region);
      break;

    case GIMPLE_TRY:
      if (gimple_try_kind (stmt) == GIMPLE_TRY_FINALLY)
	{
	  temp.g = stmt;
	  record_in_finally_tree (temp, region);
	  /* The eval part is enclosed by this TRY_FINALLY; the cleanup
	     part still belongs to the outer REGION.  */
	  collect_finally_tree_1 (gimple_try_eval (stmt),
				  as_a <gtry *> (stmt));
	  collect_finally_tree_1 (gimple_try_cleanup (stmt), region);
	}
      else if (gimple_try_kind (stmt) == GIMPLE_TRY_CATCH)
	{
	  /* TRY_CATCH introduces no new finally region; both parts
	     stay in REGION.  */
	  collect_finally_tree_1 (gimple_try_eval (stmt), region);
	  collect_finally_tree_1 (gimple_try_cleanup (stmt), region);
	}
      break;

    case GIMPLE_CATCH:
      collect_finally_tree_1 (gimple_catch_handler (
				 as_a <gcatch *> (stmt)),
			      region);
      break;

    case GIMPLE_EH_FILTER:
      collect_finally_tree_1 (gimple_eh_filter_failure (stmt), region);
      break;

    case GIMPLE_EH_ELSE:
      {
	geh_else *eh_else_stmt = as_a <geh_else *> (stmt);
	collect_finally_tree_1 (gimple_eh_else_n_body (eh_else_stmt), region);
	collect_finally_tree_1 (gimple_eh_else_e_body (eh_else_stmt), region);
      }
      break;

    default:
      /* A type, a decl, or some kind of statement that we're not
	 interested in.  Don't walk them.  */
      break;
    }
}
284 | ||
75a70cf9 | 285 | |
4ee9c684 | 286 | /* Use the finally tree to determine if a jump from START to TARGET |
287 | would leave the try_finally node that START lives in. */ | |
288 | ||
289 | static bool | |
42acab1c | 290 | outside_finally_tree (treemple start, gimple *target) |
4ee9c684 | 291 | { |
292 | struct finally_tree_node n, *p; | |
293 | ||
294 | do | |
295 | { | |
296 | n.child = start; | |
c1f445d2 | 297 | p = finally_tree->find (&n); |
4ee9c684 | 298 | if (!p) |
299 | return true; | |
75a70cf9 | 300 | start.g = p->parent; |
4ee9c684 | 301 | } |
75a70cf9 | 302 | while (start.g != target); |
4ee9c684 | 303 | |
304 | return false; | |
305 | } | |
75a70cf9 | 306 | |
/* Second pass of EH node decomposition.  Actually transform the GIMPLE_TRY
   nodes into a set of gotos, magic labels, and eh regions.
   The eh region creation is straight-forward, but frobbing all the gotos
   and such into shape isn't.  */

/* The sequence into which we record all EH stuff.  This will be
   placed at the end of the function when we're all done.  */
static gimple_seq eh_seq;

/* Record whether an EH region contains something that can throw,
   indexed by EH region number.  Maintained by
   note_eh_region_may_contain_throw and queried by
   eh_region_may_contain_throw.  */
static bitmap eh_region_may_contain_throw_map;
e38def9c | 319 | |
/* The GOTO_QUEUE is an array of GIMPLE_GOTO and GIMPLE_RETURN
   statements that are seen to escape this GIMPLE_TRY_FINALLY node.
   The idea is to record a gimple statement for everything except for
   the conditionals, which get their labels recorded. Since labels are
   of type 'tree', we need this node to store both gimple and tree
   objects.  REPL_STMT is the sequence used to replace the goto/return
   statement.  CONT_STMT is used to store the statement that allows
   the return/goto to jump to the original destination. */

struct goto_queue_node
{
  /* The escaping goto/return statement, or (for conditionals) the
     address of the label operand.  */
  treemple stmt;
  /* Source location of the original statement; applied to the new
     branches built during redirection.  */
  location_t location;
  gimple_seq repl_stmt;
  gimple *cont_stmt;
  /* Index into leh_tf_state::dest_array for gotos; -1 for returns.  */
  int index;
  /* This is used when index >= 0 to indicate that stmt is a label (as
     opposed to a goto stmt).  */
  int is_label;
};
340 | ||
/* State of the world while lowering.  */

struct leh_state
{
  /* What's "current" while constructing the eh region tree.  These
     correspond to variables of the same name in cfun->eh, which we
     don't have easy access to.  */
  eh_region cur_region;

  /* What's "current" for the purposes of __builtin_eh_pointer.  For
     a CATCH, this is the associated TRY.  For an EH_FILTER, this is
     the associated ALLOWED_EXCEPTIONS, etc.  */
  eh_region ehp_region;

  /* Processing of TRY_FINALLY requires a bit more state.  This is
     split out into a separate structure so that we don't have to
     copy so much when processing other nodes.  NULL when we are not
     within a TRY_FINALLY.  */
  struct leh_tf_state *tf;
};
360 | ||
struct leh_tf_state
{
  /* Pointer to the GIMPLE_TRY_FINALLY node under discussion.  The
     try_finally_expr is the original GIMPLE_TRY_FINALLY.  We need to retain
     this so that outside_finally_tree can reliably reference the tree used
     in the collect_finally_tree data structures.  */
  gtry *try_finally_expr;
  gtry *top_p;

  /* While lowering a top_p usually it is expanded into multiple statements,
     thus we need the following field to store them.  */
  gimple_seq top_p_seq;

  /* The state outside this try_finally node.  */
  struct leh_state *outer;

  /* The exception region created for it.  */
  eh_region region;

  /* The goto queue.  GOTO_QUEUE_SIZE is the allocated capacity;
     GOTO_QUEUE_ACTIVE the number of entries in use.  */
  struct goto_queue_node *goto_queue;
  size_t goto_queue_size;
  size_t goto_queue_active;

  /* Pointer map to help in searching goto_queue when it is large.
     Built lazily by find_goto_replacement once the queue holds at
     least LARGE_GOTO_QUEUE entries.  */
  hash_map<gimple *, goto_queue_node *> *goto_queue_map;

  /* The set of unique labels seen as entries in the goto queue.  */
  vec<tree> dest_array;

  /* A label to be added at the end of the completed transformed
     sequence.  It will be set if may_fallthru was true *at one time*,
     though subsequent transformations may have cleared that flag.  */
  tree fallthru_label;

  /* True if it is possible to fall out the bottom of the try block.
     Cleared if the fallthru is converted to a goto.  */
  bool may_fallthru;

  /* True if any entry in goto_queue is a GIMPLE_RETURN.  */
  bool may_return;

  /* True if the finally block can receive an exception edge.
     Cleared if the exception case is handled by code duplication.  */
  bool may_throw;
};
407 | ||
1a91d914 | 408 | static gimple_seq lower_eh_must_not_throw (struct leh_state *, gtry *); |
4ee9c684 | 409 | |
4ee9c684 | 410 | /* Search for STMT in the goto queue. Return the replacement, |
411 | or null if the statement isn't in the queue. */ | |
412 | ||
46699809 | 413 | #define LARGE_GOTO_QUEUE 20 |
414 | ||
e3a19533 | 415 | static void lower_eh_constructs_1 (struct leh_state *state, gimple_seq *seq); |
75a70cf9 | 416 | |
417 | static gimple_seq | |
418 | find_goto_replacement (struct leh_tf_state *tf, treemple stmt) | |
4ee9c684 | 419 | { |
46699809 | 420 | unsigned int i; |
46699809 | 421 | |
422 | if (tf->goto_queue_active < LARGE_GOTO_QUEUE) | |
423 | { | |
424 | for (i = 0; i < tf->goto_queue_active; i++) | |
75a70cf9 | 425 | if ( tf->goto_queue[i].stmt.g == stmt.g) |
46699809 | 426 | return tf->goto_queue[i].repl_stmt; |
427 | return NULL; | |
428 | } | |
429 | ||
430 | /* If we have a large number of entries in the goto_queue, create a | |
431 | pointer map and use that for searching. */ | |
432 | ||
433 | if (!tf->goto_queue_map) | |
434 | { | |
42acab1c | 435 | tf->goto_queue_map = new hash_map<gimple *, goto_queue_node *>; |
46699809 | 436 | for (i = 0; i < tf->goto_queue_active; i++) |
437 | { | |
06ecf488 | 438 | bool existed = tf->goto_queue_map->put (tf->goto_queue[i].stmt.g, |
439 | &tf->goto_queue[i]); | |
440 | gcc_assert (!existed); | |
46699809 | 441 | } |
442 | } | |
443 | ||
06ecf488 | 444 | goto_queue_node **slot = tf->goto_queue_map->get (stmt.g); |
46699809 | 445 | if (slot != NULL) |
06ecf488 | 446 | return ((*slot)->repl_stmt); |
46699809 | 447 | |
448 | return NULL; | |
4ee9c684 | 449 | } |
450 | ||
451 | /* A subroutine of replace_goto_queue_1. Handles the sub-clauses of a | |
75a70cf9 | 452 | lowered GIMPLE_COND. If, by chance, the replacement is a simple goto, |
4ee9c684 | 453 | then we can just splat it in, otherwise we add the new stmts immediately |
75a70cf9 | 454 | after the GIMPLE_COND and redirect. */ |
4ee9c684 | 455 | |
456 | static void | |
457 | replace_goto_queue_cond_clause (tree *tp, struct leh_tf_state *tf, | |
75a70cf9 | 458 | gimple_stmt_iterator *gsi) |
4ee9c684 | 459 | { |
75a70cf9 | 460 | tree label; |
f4e36c33 | 461 | gimple_seq new_seq; |
75a70cf9 | 462 | treemple temp; |
e60a6f7b | 463 | location_t loc = gimple_location (gsi_stmt (*gsi)); |
4ee9c684 | 464 | |
75a70cf9 | 465 | temp.tp = tp; |
f4e36c33 | 466 | new_seq = find_goto_replacement (tf, temp); |
467 | if (!new_seq) | |
4ee9c684 | 468 | return; |
469 | ||
f4e36c33 | 470 | if (gimple_seq_singleton_p (new_seq) |
471 | && gimple_code (gimple_seq_first_stmt (new_seq)) == GIMPLE_GOTO) | |
4ee9c684 | 472 | { |
f4e36c33 | 473 | *tp = gimple_goto_dest (gimple_seq_first_stmt (new_seq)); |
4ee9c684 | 474 | return; |
475 | } | |
476 | ||
e60a6f7b | 477 | label = create_artificial_label (loc); |
75a70cf9 | 478 | /* Set the new label for the GIMPLE_COND */ |
479 | *tp = label; | |
4ee9c684 | 480 | |
75a70cf9 | 481 | gsi_insert_after (gsi, gimple_build_label (label), GSI_CONTINUE_LINKING); |
f4e36c33 | 482 | gsi_insert_seq_after (gsi, gimple_seq_copy (new_seq), GSI_CONTINUE_LINKING); |
4ee9c684 | 483 | } |
484 | ||
/* The real work of replace_goto_queue.  Returns with TSI updated to
   point to the next statement.  */

static void replace_goto_queue_stmt_list (gimple_seq *, struct leh_tf_state *);

/* Replace STMT (or its escaping labels) with the queued replacement
   sequence, recursing into any nested statement sequences.  On return
   GSI points at the statement after STMT (or after its replacement).  */

static void
replace_goto_queue_1 (gimple *stmt, struct leh_tf_state *tf,
		      gimple_stmt_iterator *gsi)
{
  gimple_seq seq;
  treemple temp;
  temp.g = NULL;

  switch (gimple_code (stmt))
    {
    case GIMPLE_GOTO:
    case GIMPLE_RETURN:
      temp.g = stmt;
      seq = find_goto_replacement (tf, temp);
      if (seq)
	{
	  gimple_stmt_iterator i;
	  /* Insert a copy of the replacement, carrying the original
	     statement's location, and delete the original.  GSI is
	     left at the next statement, so we must NOT advance it
	     again below.  */
	  seq = gimple_seq_copy (seq);
	  for (i = gsi_start (seq); !gsi_end_p (i); gsi_next (&i))
	    gimple_set_location (gsi_stmt (i), gimple_location (stmt));
	  gsi_insert_seq_before (gsi, seq, GSI_SAME_STMT);
	  gsi_remove (gsi, false);
	  return;
	}
      break;

    case GIMPLE_COND:
      /* Operands 2 and 3 are the true and false labels.  */
      replace_goto_queue_cond_clause (gimple_op_ptr (stmt, 2), tf, gsi);
      replace_goto_queue_cond_clause (gimple_op_ptr (stmt, 3), tf, gsi);
      break;

    case GIMPLE_TRY:
      replace_goto_queue_stmt_list (gimple_try_eval_ptr (stmt), tf);
      replace_goto_queue_stmt_list (gimple_try_cleanup_ptr (stmt), tf);
      break;
    case GIMPLE_CATCH:
      replace_goto_queue_stmt_list (gimple_catch_handler_ptr (
				      as_a <gcatch *> (stmt)),
				    tf);
      break;
    case GIMPLE_EH_FILTER:
      replace_goto_queue_stmt_list (gimple_eh_filter_failure_ptr (stmt), tf);
      break;
    case GIMPLE_EH_ELSE:
      {
	geh_else *eh_else_stmt = as_a <geh_else *> (stmt);
	replace_goto_queue_stmt_list (gimple_eh_else_n_body_ptr (eh_else_stmt),
				      tf);
	replace_goto_queue_stmt_list (gimple_eh_else_e_body_ptr (eh_else_stmt),
				      tf);
      }
      break;

    default:
      /* These won't have gotos in them.  */
      break;
    }

  gsi_next (gsi);
}
550 | ||
/* A subroutine of replace_goto_queue.  Handles GIMPLE_SEQ.  */

static void
replace_goto_queue_stmt_list (gimple_seq *seq, struct leh_tf_state *tf)
{
  gimple_stmt_iterator gsi = gsi_start (*seq);

  /* Deliberately no gsi_next here: replace_goto_queue_1 advances GSI
     itself, since it may remove the current statement.  */
  while (!gsi_end_p (gsi))
    replace_goto_queue_1 (gsi_stmt (gsi), tf, &gsi);
}
561 | ||
/* Replace all goto queue members.  Walks both the transformed body
   sequence and the global EH sequence.  */

static void
replace_goto_queue (struct leh_tf_state *tf)
{
  /* Nothing queued, nothing to do.  */
  if (tf->goto_queue_active == 0)
    return;
  replace_goto_queue_stmt_list (&tf->top_p_seq, tf);
  replace_goto_queue_stmt_list (&eh_seq, tf);
}
572 | ||
75a70cf9 | 573 | /* Add a new record to the goto queue contained in TF. NEW_STMT is the |
574 | data to be added, IS_LABEL indicates whether NEW_STMT is a label or | |
575 | a gimple return. */ | |
4ee9c684 | 576 | |
577 | static void | |
75a70cf9 | 578 | record_in_goto_queue (struct leh_tf_state *tf, |
579 | treemple new_stmt, | |
580 | int index, | |
d7ebacec | 581 | bool is_label, |
582 | location_t location) | |
4ee9c684 | 583 | { |
4ee9c684 | 584 | size_t active, size; |
75a70cf9 | 585 | struct goto_queue_node *q; |
4ee9c684 | 586 | |
46699809 | 587 | gcc_assert (!tf->goto_queue_map); |
588 | ||
4ee9c684 | 589 | active = tf->goto_queue_active; |
590 | size = tf->goto_queue_size; | |
591 | if (active >= size) | |
592 | { | |
593 | size = (size ? size * 2 : 32); | |
594 | tf->goto_queue_size = size; | |
595 | tf->goto_queue | |
680a19b9 | 596 | = XRESIZEVEC (struct goto_queue_node, tf->goto_queue, size); |
4ee9c684 | 597 | } |
598 | ||
599 | q = &tf->goto_queue[active]; | |
600 | tf->goto_queue_active = active + 1; | |
ac13e8d9 | 601 | |
4ee9c684 | 602 | memset (q, 0, sizeof (*q)); |
75a70cf9 | 603 | q->stmt = new_stmt; |
4ee9c684 | 604 | q->index = index; |
d7ebacec | 605 | q->location = location; |
75a70cf9 | 606 | q->is_label = is_label; |
607 | } | |
608 | ||
609 | /* Record the LABEL label in the goto queue contained in TF. | |
610 | TF is not null. */ | |
611 | ||
612 | static void | |
d7ebacec | 613 | record_in_goto_queue_label (struct leh_tf_state *tf, treemple stmt, tree label, |
614 | location_t location) | |
75a70cf9 | 615 | { |
616 | int index; | |
617 | treemple temp, new_stmt; | |
618 | ||
619 | if (!label) | |
620 | return; | |
621 | ||
622 | /* Computed and non-local gotos do not get processed. Given | |
623 | their nature we can neither tell whether we've escaped the | |
624 | finally block nor redirect them if we knew. */ | |
625 | if (TREE_CODE (label) != LABEL_DECL) | |
626 | return; | |
627 | ||
628 | /* No need to record gotos that don't leave the try block. */ | |
629 | temp.t = label; | |
630 | if (!outside_finally_tree (temp, tf->try_finally_expr)) | |
631 | return; | |
632 | ||
f1f41a6c | 633 | if (! tf->dest_array.exists ()) |
75a70cf9 | 634 | { |
f1f41a6c | 635 | tf->dest_array.create (10); |
636 | tf->dest_array.quick_push (label); | |
75a70cf9 | 637 | index = 0; |
638 | } | |
639 | else | |
640 | { | |
f1f41a6c | 641 | int n = tf->dest_array.length (); |
75a70cf9 | 642 | for (index = 0; index < n; ++index) |
f1f41a6c | 643 | if (tf->dest_array[index] == label) |
75a70cf9 | 644 | break; |
645 | if (index == n) | |
f1f41a6c | 646 | tf->dest_array.safe_push (label); |
75a70cf9 | 647 | } |
648 | ||
649 | /* In the case of a GOTO we want to record the destination label, | |
650 | since with a GIMPLE_COND we have an easy access to the then/else | |
651 | labels. */ | |
652 | new_stmt = stmt; | |
d7ebacec | 653 | record_in_goto_queue (tf, new_stmt, index, true, location); |
75a70cf9 | 654 | } |
655 | ||
/* For any GIMPLE_GOTO or GIMPLE_RETURN, decide whether it leaves a try_finally
   node, and if so record that fact in the goto queue associated with that
   try_finally node.  Conditionals contribute both of their branch labels.  */

static void
maybe_record_in_goto_queue (struct leh_state *state, gimple *stmt)
{
  struct leh_tf_state *tf = state->tf;
  treemple new_stmt;

  /* Not inside a TRY_FINALLY: nothing can escape one.  */
  if (!tf)
    return;

  switch (gimple_code (stmt))
    {
    case GIMPLE_COND:
      {
	gcond *cond_stmt = as_a <gcond *> (stmt);
	/* Operands 2 and 3 are the true and false label operands;
	   record the *address* of each so it can be redirected.  */
	new_stmt.tp = gimple_op_ptr (cond_stmt, 2);
	record_in_goto_queue_label (tf, new_stmt,
				    gimple_cond_true_label (cond_stmt),
				    EXPR_LOCATION (*new_stmt.tp));
	new_stmt.tp = gimple_op_ptr (cond_stmt, 3);
	record_in_goto_queue_label (tf, new_stmt,
				    gimple_cond_false_label (cond_stmt),
				    EXPR_LOCATION (*new_stmt.tp));
      }
      break;
    case GIMPLE_GOTO:
      new_stmt.g = stmt;
      record_in_goto_queue_label (tf, new_stmt, gimple_goto_dest (stmt),
				  gimple_location (stmt));
      break;

    case GIMPLE_RETURN:
      /* Returns always leave the try block; index -1 marks them.  */
      tf->may_return = true;
      new_stmt.g = stmt;
      record_in_goto_queue (tf, new_stmt, -1, false, gimple_location (stmt));
      break;

    default:
      gcc_unreachable ();
    }
}
700 | ||
75a70cf9 | 701 | |
#if CHECKING_P
/* We do not process GIMPLE_SWITCHes for now.  As long as the original source
   was in fact structured, and we've not yet done jump threading, then none
   of the labels will leave outer GIMPLE_TRY_FINALLY nodes. Verify this.  */

static void
verify_norecord_switch_expr (struct leh_state *state,
			     gswitch *switch_expr)
{
  struct leh_tf_state *tf = state->tf;
  size_t i, n;

  /* No enclosing TRY_FINALLY means nothing to escape from.  */
  if (!tf)
    return;

  n = gimple_switch_num_labels (switch_expr);

  /* Every case label must stay within the current try_finally.  */
  for (i = 0; i < n; ++i)
    {
      treemple temp;
      tree lab = CASE_LABEL (gimple_switch_label (switch_expr, i));
      temp.t = lab;
      gcc_assert (!outside_finally_tree (temp, tf->try_finally_expr));
    }
}
#else
#define verify_norecord_switch_expr(state, switch_expr)
#endif
730 | ||
/* Redirect a RETURN_EXPR pointed to by Q to FINLAB.  If MOD is
   non-null, insert it before the new branch.  */

static void
do_return_redirection (struct goto_queue_node *q, tree finlab, gimple_seq mod)
{
  gimple *x;

  /* In the case of a return, the queue node must be a gimple statement.  */
  gcc_assert (!q->is_label);

  /* Note that the return value may have already been computed, e.g.,

	int x;
	int foo (void)
	{
	  x = 0;
	  try {
	    return x;
	  } finally {
	    x++;
	  }
	}

     should return 0, not 1.  We don't have to do anything to make
     this happens because the return value has been placed in the
     RESULT_DECL already.  */

  /* The original return itself serves as the continuation: after the
     finally code runs, control re-executes it.  */
  q->cont_stmt = q->stmt.g;

  if (mod)
    gimple_seq_add_seq (&q->repl_stmt, mod);

  /* Replace the return with a jump to the finally label, tagged with
     the original statement's location.  */
  x = gimple_build_goto (finlab);
  gimple_set_location (x, q->location);
  gimple_seq_add_stmt (&q->repl_stmt, x);
}
768 | ||
75a70cf9 | 769 | /* Similar, but easier, for GIMPLE_GOTO. */ |
4ee9c684 | 770 | |
771 | static void | |
75a70cf9 | 772 | do_goto_redirection (struct goto_queue_node *q, tree finlab, gimple_seq mod, |
773 | struct leh_tf_state *tf) | |
4ee9c684 | 774 | { |
1a91d914 | 775 | ggoto *x; |
75a70cf9 | 776 | |
777 | gcc_assert (q->is_label); | |
75a70cf9 | 778 | |
f1f41a6c | 779 | q->cont_stmt = gimple_build_goto (tf->dest_array[q->index]); |
4ee9c684 | 780 | |
4ee9c684 | 781 | if (mod) |
75a70cf9 | 782 | gimple_seq_add_seq (&q->repl_stmt, mod); |
4ee9c684 | 783 | |
75a70cf9 | 784 | x = gimple_build_goto (finlab); |
ed4d69dc | 785 | gimple_set_location (x, q->location); |
75a70cf9 | 786 | gimple_seq_add_stmt (&q->repl_stmt, x); |
4ee9c684 | 787 | } |
788 | ||
e38def9c | 789 | /* Emit a standard landing pad sequence into SEQ for REGION. */ |
790 | ||
791 | static void | |
792 | emit_post_landing_pad (gimple_seq *seq, eh_region region) | |
793 | { | |
794 | eh_landing_pad lp = region->landing_pads; | |
1a91d914 | 795 | glabel *x; |
e38def9c | 796 | |
797 | if (lp == NULL) | |
798 | lp = gen_eh_landing_pad (region); | |
799 | ||
800 | lp->post_landing_pad = create_artificial_label (UNKNOWN_LOCATION); | |
801 | EH_LANDING_PAD_NR (lp->post_landing_pad) = lp->index; | |
802 | ||
803 | x = gimple_build_label (lp->post_landing_pad); | |
804 | gimple_seq_add_stmt (seq, x); | |
805 | } | |
806 | ||
807 | /* Emit a RESX statement into SEQ for REGION. */ | |
808 | ||
809 | static void | |
810 | emit_resx (gimple_seq *seq, eh_region region) | |
811 | { | |
1a91d914 | 812 | gresx *x = gimple_build_resx (region->index); |
e38def9c | 813 | gimple_seq_add_stmt (seq, x); |
814 | if (region->outer) | |
815 | record_stmt_eh_region (region->outer, x); | |
816 | } | |
817 | ||
e38def9c | 818 | /* Note that the current EH region may contain a throw, or a |
819 | call to a function which itself may contain a throw. */ | |
820 | ||
821 | static void | |
822 | note_eh_region_may_contain_throw (eh_region region) | |
823 | { | |
6ef9bbe0 | 824 | while (bitmap_set_bit (eh_region_may_contain_throw_map, region->index)) |
e38def9c | 825 | { |
39efead8 | 826 | if (region->type == ERT_MUST_NOT_THROW) |
827 | break; | |
e38def9c | 828 | region = region->outer; |
829 | if (region == NULL) | |
830 | break; | |
831 | } | |
832 | } | |
833 | ||
55d6d4e4 | 834 | /* Check if REGION has been marked as containing a throw. If REGION is |
835 | NULL, this predicate is false. */ | |
836 | ||
837 | static inline bool | |
838 | eh_region_may_contain_throw (eh_region r) | |
839 | { | |
840 | return r && bitmap_bit_p (eh_region_may_contain_throw_map, r->index); | |
841 | } | |
842 | ||
4ee9c684 | 843 | /* We want to transform |
844 | try { body; } catch { stuff; } | |
845 | to | |
4422041b | 846 | normal_sequence: |
e38def9c | 847 | body; |
848 | over: | |
4422041b | 849 | eh_sequence: |
e38def9c | 850 | landing_pad: |
851 | stuff; | |
852 | goto over; | |
853 | ||
854 | TP is a GIMPLE_TRY node. REGION is the region whose post_landing_pad | |
4ee9c684 | 855 | should be placed before the second operand, or NULL. OVER is |
856 | an existing label that should be put at the exit, or NULL. */ | |
857 | ||
75a70cf9 | 858 | static gimple_seq |
1a91d914 | 859 | frob_into_branch_around (gtry *tp, eh_region region, tree over) |
4ee9c684 | 860 | { |
42acab1c | 861 | gimple *x; |
75a70cf9 | 862 | gimple_seq cleanup, result; |
e60a6f7b | 863 | location_t loc = gimple_location (tp); |
4ee9c684 | 864 | |
75a70cf9 | 865 | cleanup = gimple_try_cleanup (tp); |
866 | result = gimple_try_eval (tp); | |
4ee9c684 | 867 | |
e38def9c | 868 | if (region) |
869 | emit_post_landing_pad (&eh_seq, region); | |
870 | ||
871 | if (gimple_seq_may_fallthru (cleanup)) | |
4ee9c684 | 872 | { |
873 | if (!over) | |
e60a6f7b | 874 | over = create_artificial_label (loc); |
75a70cf9 | 875 | x = gimple_build_goto (over); |
ed4d69dc | 876 | gimple_set_location (x, loc); |
e38def9c | 877 | gimple_seq_add_stmt (&cleanup, x); |
4ee9c684 | 878 | } |
e38def9c | 879 | gimple_seq_add_seq (&eh_seq, cleanup); |
4ee9c684 | 880 | |
881 | if (over) | |
882 | { | |
75a70cf9 | 883 | x = gimple_build_label (over); |
884 | gimple_seq_add_stmt (&result, x); | |
4ee9c684 | 885 | } |
75a70cf9 | 886 | return result; |
4ee9c684 | 887 | } |
888 | ||
889 | /* A subroutine of lower_try_finally. Duplicate the tree rooted at T. | |
890 | Make sure to record all new labels found. */ | |
891 | ||
75a70cf9 | 892 | static gimple_seq |
d7ebacec | 893 | lower_try_finally_dup_block (gimple_seq seq, struct leh_state *outer_state, |
894 | location_t loc) | |
4ee9c684 | 895 | { |
1a91d914 | 896 | gtry *region = NULL; |
75a70cf9 | 897 | gimple_seq new_seq; |
d7ebacec | 898 | gimple_stmt_iterator gsi; |
4ee9c684 | 899 | |
75a70cf9 | 900 | new_seq = copy_gimple_seq_and_replace_locals (seq); |
4ee9c684 | 901 | |
d7ebacec | 902 | for (gsi = gsi_start (new_seq); !gsi_end_p (gsi); gsi_next (&gsi)) |
cc9f317f | 903 | { |
42acab1c | 904 | gimple *stmt = gsi_stmt (gsi); |
bb8dfc4e | 905 | /* We duplicate __builtin_stack_restore at -O0 in the hope of eliminating |
906 | it on the EH paths. When it is not eliminated, make it transparent in | |
907 | the debug info. */ | |
908 | if (gimple_call_builtin_p (stmt, BUILT_IN_STACK_RESTORE)) | |
909 | gimple_set_location (stmt, UNKNOWN_LOCATION); | |
910 | else if (LOCATION_LOCUS (gimple_location (stmt)) == UNKNOWN_LOCATION) | |
cc9f317f | 911 | { |
912 | tree block = gimple_block (stmt); | |
913 | gimple_set_location (stmt, loc); | |
914 | gimple_set_block (stmt, block); | |
915 | } | |
916 | } | |
d7ebacec | 917 | |
4ee9c684 | 918 | if (outer_state->tf) |
919 | region = outer_state->tf->try_finally_expr; | |
75a70cf9 | 920 | collect_finally_tree_1 (new_seq, region); |
4ee9c684 | 921 | |
75a70cf9 | 922 | return new_seq; |
4ee9c684 | 923 | } |
924 | ||
925 | /* A subroutine of lower_try_finally. Create a fallthru label for | |
926 | the given try_finally state. The only tricky bit here is that | |
927 | we have to make sure to record the label in our outer context. */ | |
928 | ||
929 | static tree | |
930 | lower_try_finally_fallthru_label (struct leh_tf_state *tf) | |
931 | { | |
932 | tree label = tf->fallthru_label; | |
75a70cf9 | 933 | treemple temp; |
934 | ||
4ee9c684 | 935 | if (!label) |
936 | { | |
e60a6f7b | 937 | label = create_artificial_label (gimple_location (tf->try_finally_expr)); |
4ee9c684 | 938 | tf->fallthru_label = label; |
939 | if (tf->outer->tf) | |
75a70cf9 | 940 | { |
941 | temp.t = label; | |
942 | record_in_finally_tree (temp, tf->outer->tf->try_finally_expr); | |
943 | } | |
4ee9c684 | 944 | } |
945 | return label; | |
946 | } | |
947 | ||
4c0315d0 | 948 | /* A subroutine of lower_try_finally. If FINALLY consits of a |
949 | GIMPLE_EH_ELSE node, return it. */ | |
950 | ||
1a91d914 | 951 | static inline geh_else * |
4c0315d0 | 952 | get_eh_else (gimple_seq finally) |
953 | { | |
42acab1c | 954 | gimple *x = gimple_seq_first_stmt (finally); |
4c0315d0 | 955 | if (gimple_code (x) == GIMPLE_EH_ELSE) |
956 | { | |
957 | gcc_assert (gimple_seq_singleton_p (finally)); | |
1a91d914 | 958 | return as_a <geh_else *> (x); |
4c0315d0 | 959 | } |
960 | return NULL; | |
961 | } | |
962 | ||
/* A subroutine of lower_try_finally.  If the eh_protect_cleanup_actions
   langhook returns non-null, then the language requires that the exception
   path out of a try_finally be treated specially.  To wit: the code within
   the finally block may not itself throw an exception.  We have two choices
   here.  First we can duplicate the finally block and wrap it in a
   must_not_throw region.  Second, we can generate code like

	try {
	  finally_block;
	} catch {
	  if (fintmp == eh_edge)
	    protect_cleanup_actions;
	}

   where "fintmp" is the temporary used in the switch statement generation
   alternative considered below.  For the nonce, we always choose the first
   option.

   THIS_STATE may be null if this is a try-cleanup, not a try-finally.  */

static void
honor_protect_cleanup_actions (struct leh_state *outer_state,
			       struct leh_state *this_state,
			       struct leh_tf_state *tf)
{
  gimple_seq finally = gimple_try_cleanup (tf->top_p);

  /* EH_ELSE doesn't come from user code; only compiler generated stuff.
     It does need to be handled here, so as to separate the (different)
     EH path from the normal path.  But we should not attempt to wrap
     it with a must-not-throw node (which indeed gets in the way).  */
  if (geh_else *eh_else = get_eh_else (finally))
    {
      /* Keep only the normal path as TP's cleanup; the EH path is
	 lowered separately below.  */
      gimple_try_set_cleanup (tf->top_p, gimple_eh_else_n_body (eh_else));
      finally = gimple_eh_else_e_body (eh_else);

      /* Let the ELSE see the exception that's being processed, but
	 since the cleanup is outside the try block, process it with
	 outer_state, otherwise it may be used as a cleanup for
	 itself, and Bad Things (TM) ensue.  */
      eh_region save_ehp = outer_state->ehp_region;
      outer_state->ehp_region = this_state->cur_region;
      lower_eh_constructs_1 (outer_state, &finally);
      outer_state->ehp_region = save_ehp;
    }
  else
    {
      /* First check for nothing to do.  */
      if (lang_hooks.eh_protect_cleanup_actions == NULL)
	return;
      tree actions = lang_hooks.eh_protect_cleanup_actions ();
      if (actions == NULL)
	return;

      /* Duplicate the cleanup for the EH path so the normal path keeps
	 the original; skipped for try-cleanups (THIS_STATE == NULL).  */
      if (this_state)
	finally = lower_try_finally_dup_block (finally, outer_state,
	  gimple_location (tf->try_finally_expr));

      /* If this cleanup consists of a TRY_CATCH_EXPR with TRY_CATCH_IS_CLEANUP
	 set, the handler of the TRY_CATCH_EXPR is another cleanup which ought
	 to be in an enclosing scope, but needs to be implemented at this level
	 to avoid a nesting violation (see wrap_temporary_cleanups in
	 cp/decl.c).  Since it's logically at an outer level, we should call
	 terminate before we get to it, so strip it away before adding the
	 MUST_NOT_THROW filter.  */
      gimple_stmt_iterator gsi = gsi_start (finally);
      gimple *x = gsi_stmt (gsi);
      if (gimple_code (x) == GIMPLE_TRY
	  && gimple_try_kind (x) == GIMPLE_TRY_CATCH
	  && gimple_try_catch_is_cleanup (x))
	{
	  gsi_insert_seq_before (&gsi, gimple_try_eval (x), GSI_SAME_STMT);
	  gsi_remove (&gsi, false);
	}

      /* Wrap the block with protect_cleanup_actions as the action.  */
      geh_mnt *eh_mnt = gimple_build_eh_must_not_throw (actions);
      gtry *try_stmt = gimple_build_try (finally,
					 gimple_seq_alloc_with_stmt (eh_mnt),
					 GIMPLE_TRY_CATCH);
      finally = lower_eh_must_not_throw (outer_state, try_stmt);
    }

  /* Drop all of this into the exception sequence.  */
  emit_post_landing_pad (&eh_seq, tf->region);
  gimple_seq_add_seq (&eh_seq, finally);
  if (gimple_seq_may_fallthru (finally))
    emit_resx (&eh_seq, tf->region);

  /* Having now been handled, EH isn't to be considered with
     the rest of the outgoing edges.  */
  tf->may_throw = false;
}
1056 | ||
/* A subroutine of lower_try_finally.  We have determined that there is
   no fallthru edge out of the finally block.  This means that there is
   no outgoing edge corresponding to any incoming edge.  Restructure the
   try_finally node for this special case.  */

static void
lower_try_finally_nofallthru (struct leh_state *state,
			      struct leh_tf_state *tf)
{
  tree lab;
  gimple *x;
  geh_else *eh_else;
  gimple_seq finally;
  struct goto_queue_node *q, *qe;

  /* All incoming edges (returns and gotos) are redirected to a single
     new label placed at the end of the TRY body, where the finally
     block will be appended.  */
  lab = create_artificial_label (gimple_location (tf->try_finally_expr));

  /* We expect that tf->top_p is a GIMPLE_TRY. */
  finally = gimple_try_cleanup (tf->top_p);
  tf->top_p_seq = gimple_try_eval (tf->top_p);

  x = gimple_build_label (lab);
  gimple_seq_add_stmt (&tf->top_p_seq, x);

  q = tf->goto_queue;
  qe = q + tf->goto_queue_active;
  for (; q < qe; ++q)
    if (q->index < 0)
      do_return_redirection (q, lab, NULL);
    else
      do_goto_redirection (q, lab, NULL, tf);

  replace_goto_queue (tf);

  /* Emit the finally block into the stream.  Lower EH_ELSE at this time.  */
  eh_else = get_eh_else (finally);
  if (eh_else)
    {
      /* Normal path of the EH_ELSE goes after the redirect label ...  */
      finally = gimple_eh_else_n_body (eh_else);
      lower_eh_constructs_1 (state, &finally);
      gimple_seq_add_seq (&tf->top_p_seq, finally);

      /* ... while the EH path goes on the exception sequence, after the
	 landing pad.  */
      if (tf->may_throw)
	{
	  finally = gimple_eh_else_e_body (eh_else);
	  lower_eh_constructs_1 (state, &finally);

	  emit_post_landing_pad (&eh_seq, tf->region);
	  gimple_seq_add_seq (&eh_seq, finally);
	}
    }
  else
    {
      lower_eh_constructs_1 (state, &finally);
      gimple_seq_add_seq (&tf->top_p_seq, finally);

      /* The EH path simply jumps to the same single copy of the
	 finally block.  */
      if (tf->may_throw)
	{
	  emit_post_landing_pad (&eh_seq, tf->region);

	  x = gimple_build_goto (lab);
	  gimple_set_location (x, gimple_location (tf->try_finally_expr));
	  gimple_seq_add_stmt (&eh_seq, x);
	}
    }
}
1123 | ||
/* A subroutine of lower_try_finally.  We have determined that there is
   exactly one destination of the finally block.  Restructure the
   try_finally node for this special case.  */

static void
lower_try_finally_onedest (struct leh_state *state, struct leh_tf_state *tf)
{
  struct goto_queue_node *q, *qe;
  geh_else *eh_else;
  glabel *label_stmt;
  gimple *x;
  gimple_seq finally;
  gimple_stmt_iterator gsi;
  tree finally_label;
  location_t loc = gimple_location (tf->try_finally_expr);

  finally = gimple_try_cleanup (tf->top_p);
  tf->top_p_seq = gimple_try_eval (tf->top_p);

  /* Since there's only one destination, and the destination edge can only
     either be EH or non-EH, that implies that all of our incoming edges
     are of the same type.  Therefore we can lower EH_ELSE immediately.  */
  eh_else = get_eh_else (finally);
  if (eh_else)
    {
      if (tf->may_throw)
	finally = gimple_eh_else_e_body (eh_else);
      else
	finally = gimple_eh_else_n_body (eh_else);
    }

  lower_eh_constructs_1 (state, &finally);

  /* Give location-less statements in the cleanup the location of the
     TRY_FINALLY, preserving their lexical block (which setting the
     location would otherwise clobber).  */
  for (gsi = gsi_start (finally); !gsi_end_p (gsi); gsi_next (&gsi))
    {
      gimple *stmt = gsi_stmt (gsi);
      if (LOCATION_LOCUS (gimple_location (stmt)) == UNKNOWN_LOCATION)
	{
	  tree block = gimple_block (stmt);
	  gimple_set_location (stmt, gimple_location (tf->try_finally_expr));
	  gimple_set_block (stmt, block);
	}
    }

  if (tf->may_throw)
    {
      /* Only reachable via the exception edge.  Add the given label to
         the head of the FINALLY block.  Append a RESX at the end.  */
      emit_post_landing_pad (&eh_seq, tf->region);
      gimple_seq_add_seq (&eh_seq, finally);
      emit_resx (&eh_seq, tf->region);
      return;
    }

  if (tf->may_fallthru)
    {
      /* Only reachable via the fallthru edge.  Do nothing but let
	 the two blocks run together; we'll fall out the bottom.  */
      gimple_seq_add_seq (&tf->top_p_seq, finally);
      return;
    }

  /* Reachable only via return or goto: label the finally block and
     redirect the single class of incoming edges at it.  */
  finally_label = create_artificial_label (loc);
  label_stmt = gimple_build_label (finally_label);
  gimple_seq_add_stmt (&tf->top_p_seq, label_stmt);

  gimple_seq_add_seq (&tf->top_p_seq, finally);

  q = tf->goto_queue;
  qe = q + tf->goto_queue_active;

  if (tf->may_return)
    {
      /* Reachable by return expressions only.  Redirect them.  */
      for (; q < qe; ++q)
	do_return_redirection (q, finally_label, NULL);
      replace_goto_queue (tf);
    }
  else
    {
      /* Reachable by goto expressions only.  Redirect them.  */
      for (; q < qe; ++q)
	do_goto_redirection (q, finally_label, NULL, tf);
      replace_goto_queue (tf);

      if (tf->dest_array[0] == tf->fallthru_label)
	{
	  /* Reachable by goto to fallthru label only.  Redirect it
	     to the new label (already created, sadly), and do not
	     emit the final branch out, or the fallthru label.  */
	  tf->fallthru_label = NULL;
	  return;
	}
    }

  /* Place the original return/goto to the original destination
     immediately after the finally block.  */
  x = tf->goto_queue[0].cont_stmt;
  gimple_seq_add_stmt (&tf->top_p_seq, x);
  maybe_record_in_goto_queue (state, x);
}
1225 | ||
/* A subroutine of lower_try_finally.  There are multiple edges incoming
   and outgoing from the finally block.  Implement this by duplicating the
   finally block for every destination.  */

static void
lower_try_finally_copy (struct leh_state *state, struct leh_tf_state *tf)
{
  gimple_seq finally;
  gimple_seq new_stmt;
  gimple_seq seq;
  gimple *x;
  geh_else *eh_else;
  tree tmp;
  location_t tf_loc = gimple_location (tf->try_finally_expr);

  finally = gimple_try_cleanup (tf->top_p);

  /* Notice EH_ELSE, and simplify some of the remaining code
     by considering FINALLY to be the normal return path only.  */
  eh_else = get_eh_else (finally);
  if (eh_else)
    finally = gimple_eh_else_n_body (eh_else);

  tf->top_p_seq = gimple_try_eval (tf->top_p);
  new_stmt = NULL;

  if (tf->may_fallthru)
    {
      /* One copy of the finally block for the fallthru edge, ending in
	 a jump to the shared fallthru label.  */
      seq = lower_try_finally_dup_block (finally, state, tf_loc);
      lower_eh_constructs_1 (state, &seq);
      gimple_seq_add_seq (&new_stmt, seq);

      tmp = lower_try_finally_fallthru_label (tf);
      x = gimple_build_goto (tmp);
      gimple_set_location (x, tf_loc);
      gimple_seq_add_stmt (&new_stmt, x);
    }

  if (tf->may_throw)
    {
      /* We don't need to copy the EH path of EH_ELSE,
	 since it is only emitted once.  */
      if (eh_else)
	seq = gimple_eh_else_e_body (eh_else);
      else
	seq = lower_try_finally_dup_block (finally, state, tf_loc);
      lower_eh_constructs_1 (state, &seq);

      emit_post_landing_pad (&eh_seq, tf->region);
      gimple_seq_add_seq (&eh_seq, seq);
      emit_resx (&eh_seq, tf->region);
    }

  if (tf->goto_queue)
    {
      struct goto_queue_node *q, *qe;
      int return_index, index;
      struct labels_s
      {
	struct goto_queue_node *q;
	tree label;
      } *labels;

      /* Slot RETURN_INDEX holds the return destination; slots 0..n-1
	 hold the goto destinations from dest_array.  */
      return_index = tf->dest_array.length ();
      labels = XCNEWVEC (struct labels_s, return_index + 1);

      q = tf->goto_queue;
      qe = q + tf->goto_queue_active;
      for (; q < qe; q++)
	{
	  index = q->index < 0 ? return_index : q->index;

	  /* Remember the first queue entry for each destination.  */
	  if (!labels[index].q)
	    labels[index].q = q;
	}

      /* First pass: for each destination with at least one incoming
	 edge, create a label, redirect the representative edge, and
	 emit one copy of the finally block followed by the original
	 continuation statement.  */
      for (index = 0; index < return_index + 1; index++)
	{
	  tree lab;

	  q = labels[index].q;
	  if (! q)
	    continue;

	  lab = labels[index].label
	    = create_artificial_label (tf_loc);

	  if (index == return_index)
	    do_return_redirection (q, lab, NULL);
	  else
	    do_goto_redirection (q, lab, NULL, tf);

	  x = gimple_build_label (lab);
	  gimple_seq_add_stmt (&new_stmt, x);

	  seq = lower_try_finally_dup_block (finally, state, q->location);
	  lower_eh_constructs_1 (state, &seq);
	  gimple_seq_add_seq (&new_stmt, seq);

	  gimple_seq_add_stmt (&new_stmt, q->cont_stmt);
	  maybe_record_in_goto_queue (state, q->cont_stmt);
	}

      /* Second pass: redirect the remaining queue entries to the label
	 already created for their destination.  */
      for (q = tf->goto_queue; q < qe; q++)
	{
	  tree lab;

	  index = q->index < 0 ? return_index : q->index;

	  if (labels[index].q == q)
	    continue;

	  lab = labels[index].label;

	  if (index == return_index)
	    do_return_redirection (q, lab, NULL);
	  else
	    do_goto_redirection (q, lab, NULL, tf);
	}

      replace_goto_queue (tf);
      free (labels);
    }

  /* Need to link new stmts after running replace_goto_queue due
     to not wanting to process the same goto stmts twice.  */
  gimple_seq_add_seq (&tf->top_p_seq, new_stmt);
}
1354 | ||
/* A subroutine of lower_try_finally.  There are multiple edges incoming
   and outgoing from the finally block.  Implement this by instrumenting
   each incoming edge and creating a switch statement at the end of the
   finally block that branches to the appropriate destination.  */

static void
lower_try_finally_switch (struct leh_state *state, struct leh_tf_state *tf)
{
  struct goto_queue_node *q, *qe;
  tree finally_tmp, finally_label;
  int return_index, eh_index, fallthru_index;
  int nlabels, ndests, j, last_case_index;
  tree last_case;
  auto_vec<tree> case_label_vec;
  gimple_seq switch_body = NULL;
  gimple *x;
  geh_else *eh_else;
  tree tmp;
  gimple *switch_stmt;
  gimple_seq finally;
  hash_map<tree, gimple *> *cont_map = NULL;
  /* The location of the TRY_FINALLY stmt.  */
  location_t tf_loc = gimple_location (tf->try_finally_expr);
  /* The location of the finally block.  */
  location_t finally_loc;

  finally = gimple_try_cleanup (tf->top_p);
  eh_else = get_eh_else (finally);

  /* Mash the TRY block to the head of the chain.  */
  tf->top_p_seq = gimple_try_eval (tf->top_p);

  /* The location of the finally is either the last stmt in the finally
     block or the location of the TRY_FINALLY itself.  */
  x = gimple_seq_last_stmt (finally);
  finally_loc = x ? gimple_location (x) : tf_loc;

  /* Prepare for switch statement generation.  Destinations 0..nlabels-1
     are the goto targets; the return, EH, and fallthru destinations
     (when present) get the next indices in that order.  */
  nlabels = tf->dest_array.length ();
  return_index = nlabels;
  eh_index = return_index + tf->may_return;
  fallthru_index = eh_index + (tf->may_throw && !eh_else);
  ndests = fallthru_index + tf->may_fallthru;

  finally_tmp = create_tmp_var (integer_type_node, "finally_tmp");
  finally_label = create_artificial_label (finally_loc);

  /* We use vec::quick_push on case_label_vec throughout this function,
     since we know the size in advance and allocate precisely as much
     space as needed.  */
  case_label_vec.create (ndests);
  last_case = NULL;
  last_case_index = 0;

  /* Begin inserting code for getting to the finally block.  Things
     are done in this order to correspond to the sequence the code is
     laid out.  */

  if (tf->may_fallthru)
    {
      /* Falling into the finally block sets finally_tmp to
	 FALLTHRU_INDEX; the corresponding case jumps to the shared
	 fallthru label.  */
      x = gimple_build_assign (finally_tmp,
			       build_int_cst (integer_type_node,
					      fallthru_index));
      gimple_set_location (x, finally_loc);
      gimple_seq_add_stmt (&tf->top_p_seq, x);

      tmp = build_int_cst (integer_type_node, fallthru_index);
      last_case = build_case_label (tmp, NULL,
				    create_artificial_label (finally_loc));
      case_label_vec.quick_push (last_case);
      last_case_index++;

      x = gimple_build_label (CASE_LABEL (last_case));
      gimple_seq_add_stmt (&switch_body, x);

      tmp = lower_try_finally_fallthru_label (tf);
      x = gimple_build_goto (tmp);
      gimple_set_location (x, finally_loc);
      gimple_seq_add_stmt (&switch_body, x);
    }

  /* For EH_ELSE, emit the exception path (plus resx) now, then
     subsequently we only need consider the normal path.  */
  if (eh_else)
    {
      if (tf->may_throw)
	{
	  finally = gimple_eh_else_e_body (eh_else);
	  lower_eh_constructs_1 (state, &finally);

	  emit_post_landing_pad (&eh_seq, tf->region);
	  gimple_seq_add_seq (&eh_seq, finally);
	  emit_resx (&eh_seq, tf->region);
	}

      finally = gimple_eh_else_n_body (eh_else);
    }
  else if (tf->may_throw)
    {
      /* The EH path sets finally_tmp to EH_INDEX, jumps into the finally
	 block, and its case resumes exception propagation via RESX.  */
      emit_post_landing_pad (&eh_seq, tf->region);

      x = gimple_build_assign (finally_tmp,
			       build_int_cst (integer_type_node, eh_index));
      gimple_seq_add_stmt (&eh_seq, x);

      x = gimple_build_goto (finally_label);
      gimple_set_location (x, tf_loc);
      gimple_seq_add_stmt (&eh_seq, x);

      tmp = build_int_cst (integer_type_node, eh_index);
      last_case = build_case_label (tmp, NULL,
				    create_artificial_label (tf_loc));
      case_label_vec.quick_push (last_case);
      last_case_index++;

      x = gimple_build_label (CASE_LABEL (last_case));
      gimple_seq_add_stmt (&eh_seq, x);
      emit_resx (&eh_seq, tf->region);
    }

  x = gimple_build_label (finally_label);
  gimple_seq_add_stmt (&tf->top_p_seq, x);

  lower_eh_constructs_1 (state, &finally);
  gimple_seq_add_seq (&tf->top_p_seq, finally);

  /* Redirect each incoming goto edge.  */
  q = tf->goto_queue;
  qe = q + tf->goto_queue_active;
  j = last_case_index + tf->may_return;
  /* Prepare the assignments to finally_tmp that are executed upon the
     entrance through a particular edge. */
  for (; q < qe; ++q)
    {
      gimple_seq mod = NULL;
      int switch_id;
      unsigned int case_index;

      if (q->index < 0)
	{
	  x = gimple_build_assign (finally_tmp,
				   build_int_cst (integer_type_node,
						  return_index));
	  gimple_seq_add_stmt (&mod, x);
	  do_return_redirection (q, finally_label, mod);
	  switch_id = return_index;
	}
      else
	{
	  x = gimple_build_assign (finally_tmp,
				   build_int_cst (integer_type_node, q->index));
	  gimple_seq_add_stmt (&mod, x);
	  do_goto_redirection (q, finally_label, mod, tf);
	  switch_id = q->index;
	}

      /* Create the case label lazily, the first time an edge for this
	 destination is seen.  */
      case_index = j + q->index;
      if (case_label_vec.length () <= case_index || !case_label_vec[case_index])
	{
	  tree case_lab;
	  tmp = build_int_cst (integer_type_node, switch_id);
	  case_lab = build_case_label (tmp, NULL,
				       create_artificial_label (tf_loc));
	  /* We store the cont_stmt in the pointer map, so that we can recover
	     it in the loop below.  */
	  if (!cont_map)
	    cont_map = new hash_map<tree, gimple *>;
	  cont_map->put (case_lab, q->cont_stmt);
	  case_label_vec.quick_push (case_lab);
	}
    }
  /* Emit the case bodies: each case label followed by the recorded
     continuation statement.  */
  for (j = last_case_index; j < last_case_index + nlabels; j++)
    {
      gimple *cont_stmt;

      last_case = case_label_vec[j];

      gcc_assert (last_case);
      gcc_assert (cont_map);

      cont_stmt = *cont_map->get (last_case);

      x = gimple_build_label (CASE_LABEL (last_case));
      gimple_seq_add_stmt (&switch_body, x);
      gimple_seq_add_stmt (&switch_body, cont_stmt);
      maybe_record_in_goto_queue (state, cont_stmt);
    }
  if (cont_map)
    delete cont_map;

  replace_goto_queue (tf);

  /* Make sure that the last case is the default label, as one is required.
     Then sort the labels, which is also required in GIMPLE.  */
  CASE_LOW (last_case) = NULL;
  tree tem = case_label_vec.pop ();
  gcc_assert (tem == last_case);
  sort_case_labels (case_label_vec);

  /* Build the switch statement, setting last_case to be the default
     label.  */
  switch_stmt = gimple_build_switch (finally_tmp, last_case,
				     case_label_vec);
  gimple_set_location (switch_stmt, finally_loc);

  /* Need to link SWITCH_STMT after running replace_goto_queue
     due to not wanting to process the same goto stmts twice.  */
  gimple_seq_add_stmt (&tf->top_p_seq, switch_stmt);
  gimple_seq_add_seq (&tf->top_p_seq, switch_body);
}
1565 | ||
/* Decide whether or not we are going to duplicate the finally block.
   There are several considerations.

   Mainly, we'd like to prevent egregious code growth.  One way to
   do this is to estimate the size of the finally block, multiply
   that by the number of copies we'd need to make, and compare against
   the estimate of the size of the switch machinery we'd have to add.

   NDESTS is the number of distinct destinations reached through the
   finally block; MAY_THROW says whether one of those destinations is
   the exception path; FINALLY is the cleanup sequence itself.  Returns
   true to duplicate the finally block per destination, false to use
   the switch-based single-copy scheme.  */

static bool
decide_copy_try_finally (int ndests, bool may_throw, gimple_seq finally)
{
  int f_estimate, sw_estimate;
  geh_else *eh_else;

  /* If there's an EH_ELSE involved, the exception path is separate
     and really doesn't come into play for this computation.  */
  eh_else = get_eh_else (finally);
  if (eh_else)
    {
      ndests -= may_throw;
      finally = gimple_eh_else_n_body (eh_else);
    }

  if (!optimize)
    {
      gimple_stmt_iterator gsi;

      /* A single destination needs no switch machinery at all.  */
      if (ndests == 1)
	return true;

      for (gsi = gsi_start (finally); !gsi_end_p (gsi); gsi_next (&gsi))
	{
	  /* Duplicate __builtin_stack_restore in the hope of eliminating it
	     on the EH paths and, consequently, useless cleanups.  */
	  gimple *stmt = gsi_stmt (gsi);
	  if (!is_gimple_debug (stmt)
	      && !gimple_clobber_p (stmt)
	      && !gimple_call_builtin_p (stmt, BUILT_IN_STACK_RESTORE))
	    return false;
	}
      return true;
    }

  /* Finally estimate N times, plus N gotos.  */
  f_estimate = estimate_num_insns_seq (finally, &eni_size_weights);
  f_estimate = (f_estimate + 1) * ndests;

  /* Switch statement (cost 10), N variable assignments, N gotos.  */
  sw_estimate = 10 + 2 * ndests;

  /* Optimize for size clearly wants our best guess.  */
  if (optimize_function_for_size_p (cfun))
    return f_estimate < sw_estimate;

  /* ??? These numbers are completely made up so far.  */
  if (optimize > 1)
    return f_estimate < 100 || f_estimate < sw_estimate * 2;
  else
    return f_estimate < 40 || f_estimate * 2 < sw_estimate * 3;
}
1626 | ||
f340b9ff | 1627 | /* REG is the enclosing region for a possible cleanup region, or the region |
1628 | itself. Returns TRUE if such a region would be unreachable. | |
1629 | ||
1630 | Cleanup regions within a must-not-throw region aren't actually reachable | |
1631 | even if there are throwing stmts within them, because the personality | |
1632 | routine will call terminate before unwinding. */ | |
1633 | ||
1634 | static bool | |
1635 | cleanup_is_dead_in (eh_region reg) | |
1636 | { | |
1637 | while (reg && reg->type == ERT_CLEANUP) | |
1638 | reg = reg->outer; | |
1639 | return (reg && reg->type == ERT_MUST_NOT_THROW); | |
1640 | } | |
75a70cf9 | 1641 | |
/* A subroutine of lower_eh_constructs_1.  Lower a GIMPLE_TRY_FINALLY nodes
   to a sequence of labels and blocks, plus the exception region trees
   that record all the magic.  This is complicated by the need to
   arrange for the FINALLY block to be executed on all exits.  */

static gimple_seq
lower_try_finally (struct leh_state *state, gtry *tp)
{
  struct leh_tf_state this_tf;
  struct leh_state this_state;
  int ndests;
  gimple_seq old_eh_seq;

  /* Process the try block.  */

  memset (&this_tf, 0, sizeof (this_tf));
  this_tf.try_finally_expr = tp;
  this_tf.top_p = tp;
  this_tf.outer = state;
  /* Only wrap the try body in a cleanup EH region when cleanups are
     actually reachable here (see cleanup_is_dead_in).  */
  if (using_eh_for_cleanups_p () && !cleanup_is_dead_in (state->cur_region))
    {
      this_tf.region = gen_eh_region_cleanup (state->cur_region);
      this_state.cur_region = this_tf.region;
    }
  else
    {
      this_tf.region = NULL;
      this_state.cur_region = state->cur_region;
    }

  this_state.ehp_region = state->ehp_region;
  this_state.tf = &this_tf;

  /* Collect the EH sequence produced while lowering the try body
     separately from any outer one; it is merged back at the end.  */
  old_eh_seq = eh_seq;
  eh_seq = NULL;

  lower_eh_constructs_1 (&this_state, gimple_try_eval_ptr (tp));

  /* Determine if the try block is escaped through the bottom.  */
  this_tf.may_fallthru = gimple_seq_may_fallthru (gimple_try_eval (tp));

  /* Determine if any exceptions are possible within the try block.  */
  if (this_tf.region)
    this_tf.may_throw = eh_region_may_contain_throw (this_tf.region);
  if (this_tf.may_throw)
    honor_protect_cleanup_actions (state, &this_state, &this_tf);

  /* Determine how many edges (still) reach the finally block.  Or rather,
     how many destinations are reached by the finally block.  Use this to
     determine how we process the finally block itself.  */

  ndests = this_tf.dest_array.length ();
  ndests += this_tf.may_fallthru;
  ndests += this_tf.may_return;
  ndests += this_tf.may_throw;

  /* If the FINALLY block is not reachable, dike it out.  */
  if (ndests == 0)
    {
      gimple_seq_add_seq (&this_tf.top_p_seq, gimple_try_eval (tp));
      gimple_try_set_cleanup (tp, NULL);
    }
  /* If the finally block doesn't fall through, then any destination
     we might try to impose there isn't reached either.  There may be
     some minor amount of cleanup and redirection still needed.  */
  else if (!gimple_seq_may_fallthru (gimple_try_cleanup (tp)))
    lower_try_finally_nofallthru (state, &this_tf);

  /* We can easily special-case redirection to a single destination.  */
  else if (ndests == 1)
    lower_try_finally_onedest (state, &this_tf);
  /* Otherwise choose between duplicating the finally block per
     destination or dispatching through a switch on a temporary.  */
  else if (decide_copy_try_finally (ndests, this_tf.may_throw,
				    gimple_try_cleanup (tp)))
    lower_try_finally_copy (state, &this_tf);
  else
    lower_try_finally_switch (state, &this_tf);

  /* If someone requested we add a label at the end of the transformed
     block, do so.  */
  if (this_tf.fallthru_label)
    {
      /* This must be reached only if ndests == 0.  */
      gimple *x = gimple_build_label (this_tf.fallthru_label);
      gimple_seq_add_stmt (&this_tf.top_p_seq, x);
    }

  this_tf.dest_array.release ();
  free (this_tf.goto_queue);
  if (this_tf.goto_queue_map)
    delete this_tf.goto_queue_map;

  /* If there was an old (aka outer) eh_seq, append the current eh_seq.
     If there was no old eh_seq, then the append is trivially already done.  */
  if (old_eh_seq)
    {
      if (eh_seq == NULL)
	eh_seq = old_eh_seq;
      else
	{
	  gimple_seq new_eh_seq = eh_seq;
	  eh_seq = old_eh_seq;
	  gimple_seq_add_seq (&eh_seq, new_eh_seq);
	}
    }

  return this_tf.top_p_seq;
}
1749 | ||
/* A subroutine of lower_eh_constructs_1.  Lower a GIMPLE_TRY_CATCH with a
   list of GIMPLE_CATCH to a sequence of labels and blocks, plus the
   exception region trees that records all the magic.  */

static gimple_seq
lower_catch (struct leh_state *state, gtry *tp)
{
  eh_region try_region = NULL;
  struct leh_state this_state = *state;
  gimple_stmt_iterator gsi;
  tree out_label;
  gimple_seq new_seq, cleanup;
  gimple *x;
  geh_dispatch *eh_dispatch;
  location_t try_catch_loc = gimple_location (tp);
  location_t catch_loc = UNKNOWN_LOCATION;

  if (flag_exceptions)
    {
      try_region = gen_eh_region_try (state->cur_region);
      this_state.cur_region = try_region;
    }

  lower_eh_constructs_1 (&this_state, gimple_try_eval_ptr (tp));

  /* If nothing in the try body can throw, the handlers are dead code;
     just return the lowered body.  */
  if (!eh_region_may_contain_throw (try_region))
    return gimple_try_eval (tp);

  /* The new cleanup starts with the dispatch that selects a handler,
     followed by a resx for when no handler matches.  */
  new_seq = NULL;
  eh_dispatch = gimple_build_eh_dispatch (try_region->index);
  gimple_seq_add_stmt (&new_seq, eh_dispatch);
  emit_resx (&new_seq, try_region);

  this_state.cur_region = state->cur_region;
  this_state.ehp_region = try_region;

  /* Add eh_seq from lowering EH in the cleanup sequence after the cleanup
     itself, so that e.g. for coverage purposes the nested cleanups don't
     appear before the cleanup body.  See PR64634 for details.  */
  gimple_seq old_eh_seq = eh_seq;
  eh_seq = NULL;

  out_label = NULL;
  cleanup = gimple_try_cleanup (tp);
  for (gsi = gsi_start (cleanup);
       !gsi_end_p (gsi);
       gsi_next (&gsi))
    {
      eh_catch c;
      gcatch *catch_stmt;
      gimple_seq handler;

      catch_stmt = as_a <gcatch *> (gsi_stmt (gsi));
      /* Remember the first handler's location for the dispatch stmt.  */
      if (catch_loc == UNKNOWN_LOCATION)
	catch_loc = gimple_location (catch_stmt);
      c = gen_eh_region_catch (try_region, gimple_catch_types (catch_stmt));

      handler = gimple_catch_handler (catch_stmt);
      lower_eh_constructs_1 (&this_state, &handler);

      c->label = create_artificial_label (UNKNOWN_LOCATION);
      x = gimple_build_label (c->label);
      gimple_seq_add_stmt (&new_seq, x);

      gimple_seq_add_seq (&new_seq, handler);

      if (gimple_seq_may_fallthru (new_seq))
	{
	  if (!out_label)
	    out_label = create_artificial_label (try_catch_loc);

	  x = gimple_build_goto (out_label);
	  gimple_seq_add_stmt (&new_seq, x);
	}
      /* A catch-all handler (no type list) subsumes anything after it.  */
      if (!c->type_list)
	break;
    }

  /* Try to set a location on the dispatching construct to avoid inheriting
     the location of the previous statement.  */
  gimple_set_location (eh_dispatch, catch_loc);

  gimple_try_set_cleanup (tp, new_seq);

  /* Restore the outer eh_seq, then append the one produced above.  */
  gimple_seq new_eh_seq = eh_seq;
  eh_seq = old_eh_seq;
  gimple_seq ret_seq = frob_into_branch_around (tp, try_region, out_label);
  gimple_seq_add_seq (&eh_seq, new_eh_seq);
  return ret_seq;
}
1840 | ||
/* A subroutine of lower_eh_constructs_1.  Lower a GIMPLE_TRY with a
   GIMPLE_EH_FILTER to a sequence of labels and blocks, plus the exception
   region trees that record all the magic.  */

static gimple_seq
lower_eh_filter (struct leh_state *state, gtry *tp)
{
  struct leh_state this_state = *state;
  eh_region this_region = NULL;
  gimple *inner, *x;
  gimple_seq new_seq;

  inner = gimple_seq_first_stmt (gimple_try_cleanup (tp));

  if (flag_exceptions)
    {
      this_region = gen_eh_region_allowed (state->cur_region,
					   gimple_eh_filter_types (inner));
      this_state.cur_region = this_region;
    }

  lower_eh_constructs_1 (&this_state, gimple_try_eval_ptr (tp));

  /* If the protected body cannot throw, the filter is dead code.  */
  if (!eh_region_may_contain_throw (this_region))
    return gimple_try_eval (tp);

  this_state.cur_region = state->cur_region;
  this_state.ehp_region = this_region;

  /* Build the dispatch + resx pair, then the filter-failure code under
     its own label.  */
  new_seq = NULL;
  x = gimple_build_eh_dispatch (this_region->index);
  gimple_set_location (x, gimple_location (tp));
  gimple_seq_add_stmt (&new_seq, x);
  emit_resx (&new_seq, this_region);

  this_region->u.allowed.label = create_artificial_label (UNKNOWN_LOCATION);
  x = gimple_build_label (this_region->u.allowed.label);
  gimple_seq_add_stmt (&new_seq, x);

  lower_eh_constructs_1 (&this_state, gimple_eh_filter_failure_ptr (inner));
  gimple_seq_add_seq (&new_seq, gimple_eh_filter_failure (inner));

  gimple_try_set_cleanup (tp, new_seq);

  return frob_into_branch_around (tp, this_region, NULL);
}
1887 | ||
/* A subroutine of lower_eh_constructs_1.  Lower a GIMPLE_TRY with
   an GIMPLE_EH_MUST_NOT_THROW to a sequence of labels and blocks,
   plus the exception region trees that record all the magic.  */

static gimple_seq
lower_eh_must_not_throw (struct leh_state *state, gtry *tp)
{
  struct leh_state this_state = *state;

  if (flag_exceptions)
    {
      gimple *inner = gimple_seq_first_stmt (gimple_try_cleanup (tp));
      eh_region this_region;

      this_region = gen_eh_region_must_not_throw (state->cur_region);
      /* Record which function to call (e.g. std::terminate) and where,
	 should an exception escape this region.  */
      this_region->u.must_not_throw.failure_decl
	= gimple_eh_must_not_throw_fndecl (
	    as_a <geh_mnt *> (inner));
      this_region->u.must_not_throw.failure_loc
	= LOCATION_LOCUS (gimple_location (tp));

      /* In order to get mangling applied to this decl, we must mark it
	 used now.  Otherwise, pass_ipa_free_lang_data won't think it
	 needs to happen.  */
      TREE_USED (this_region->u.must_not_throw.failure_decl) = 1;

      this_state.cur_region = this_region;
    }

  lower_eh_constructs_1 (&this_state, gimple_try_eval_ptr (tp));

  /* The MUST_NOT_THROW stmt itself is dropped; only the body remains.  */
  return gimple_try_eval (tp);
}
1921 | ||
/* Implement a cleanup expression.  This is similar to try-finally,
   except that we only execute the cleanup block for exception edges.  */

static gimple_seq
lower_cleanup (struct leh_state *state, gtry *tp)
{
  struct leh_state this_state = *state;
  eh_region this_region = NULL;
  struct leh_tf_state fake_tf;
  gimple_seq result;
  bool cleanup_dead = cleanup_is_dead_in (state->cur_region);

  if (flag_exceptions && !cleanup_dead)
    {
      this_region = gen_eh_region_cleanup (state->cur_region);
      this_state.cur_region = this_region;
    }

  lower_eh_constructs_1 (&this_state, gimple_try_eval_ptr (tp));

  /* If nothing can throw (or the cleanup is dead), drop the cleanup.  */
  if (cleanup_dead || !eh_region_may_contain_throw (this_region))
    return gimple_try_eval (tp);

  /* Build enough of a try-finally state so that we can reuse
     honor_protect_cleanup_actions.  */
  memset (&fake_tf, 0, sizeof (fake_tf));
  fake_tf.top_p = fake_tf.try_finally_expr = tp;
  fake_tf.outer = state;
  fake_tf.region = this_region;
  fake_tf.may_fallthru = gimple_seq_may_fallthru (gimple_try_eval (tp));
  fake_tf.may_throw = true;

  honor_protect_cleanup_actions (state, NULL, &fake_tf);

  if (fake_tf.may_throw)
    {
      /* In this case honor_protect_cleanup_actions had nothing to do,
	 and we should process this normally.  */
      lower_eh_constructs_1 (state, gimple_try_cleanup_ptr (tp));
      result = frob_into_branch_around (tp, this_region,
					fake_tf.fallthru_label);
    }
  else
    {
      /* In this case honor_protect_cleanup_actions did nearly all of
	 the work.  All we have left is to append the fallthru_label.  */

      result = gimple_try_eval (tp);
      if (fake_tf.fallthru_label)
	{
	  gimple *x = gimple_build_label (fake_tf.fallthru_label);
	  gimple_seq_add_stmt (&result, x);
	}
    }
  return result;
}
1978 | ||
/* Main loop for lowering eh constructs.  Also moves gsi to the next
   statement.  Dispatches on the statement's GIMPLE code: calls and
   assignments are recorded against the current EH region, control
   transfers go in the goto queue, and GIMPLE_TRY statements are
   replaced in place by their lowered sequence.  */

static void
lower_eh_constructs_2 (struct leh_state *state, gimple_stmt_iterator *gsi)
{
  gimple_seq replace;
  gimple *x;
  gimple *stmt = gsi_stmt (*gsi);

  switch (gimple_code (stmt))
    {
    case GIMPLE_CALL:
      {
	tree fndecl = gimple_call_fndecl (stmt);
	tree rhs, lhs;

	if (fndecl && fndecl_built_in_p (fndecl, BUILT_IN_NORMAL))
	  switch (DECL_FUNCTION_CODE (fndecl))
	    {
	    case BUILT_IN_EH_POINTER:
	      /* The front end may have generated a call to
		 __builtin_eh_pointer (0) within a catch region.  Replace
		 this zero argument with the current catch region number.  */
	      if (state->ehp_region)
		{
		  tree nr = build_int_cst (integer_type_node,
					   state->ehp_region->index);
		  gimple_call_set_arg (stmt, 0, nr);
		}
	      else
		{
		  /* The user has done something silly.  Remove it.  */
		  rhs = null_pointer_node;
		  goto do_replace;
		}
	      break;

	    case BUILT_IN_EH_FILTER:
	      /* ??? This should never appear, but since it's a builtin it
		 is accessible to abuse by users.  Just remove it and
		 replace the use with the arbitrary value zero.  */
	      rhs = build_int_cst (TREE_TYPE (TREE_TYPE (fndecl)), 0);
	    do_replace:
	      lhs = gimple_call_lhs (stmt);
	      x = gimple_build_assign (lhs, rhs);
	      gsi_insert_before (gsi, x, GSI_SAME_STMT);
	      /* FALLTHRU */

	    case BUILT_IN_EH_COPY_VALUES:
	      /* Likewise this should not appear.  Remove it.  */
	      gsi_remove (gsi, true);
	      return;

	    default:
	      break;
	    }
      }
      /* FALLTHRU */

    case GIMPLE_ASSIGN:
      /* If the stmt can throw, use a new temporary for the assignment
	 to a LHS.  This makes sure the old value of the LHS is
	 available on the EH edge.  Only do so for statements that
	 potentially fall through (no noreturn calls e.g.), otherwise
	 this new assignment might create fake fallthru regions.  */
      if (stmt_could_throw_p (cfun, stmt)
	  && gimple_has_lhs (stmt)
	  && gimple_stmt_may_fallthru (stmt)
	  && !tree_could_throw_p (gimple_get_lhs (stmt))
	  && is_gimple_reg_type (TREE_TYPE (gimple_get_lhs (stmt))))
	{
	  tree lhs = gimple_get_lhs (stmt);
	  tree tmp = create_tmp_var (TREE_TYPE (lhs));
	  /* Insert "lhs = tmp" after the throwing stmt, and make the
	     stmt itself write into the temporary instead.  */
	  gimple *s = gimple_build_assign (lhs, tmp);
	  gimple_set_location (s, gimple_location (stmt));
	  gimple_set_block (s, gimple_block (stmt));
	  gimple_set_lhs (stmt, tmp);
	  if (TREE_CODE (TREE_TYPE (tmp)) == COMPLEX_TYPE
	      || TREE_CODE (TREE_TYPE (tmp)) == VECTOR_TYPE)
	    DECL_GIMPLE_REG_P (tmp) = 1;
	  gsi_insert_after (gsi, s, GSI_SAME_STMT);
	}
      /* Look for things that can throw exceptions, and record them.  */
      if (state->cur_region && stmt_could_throw_p (cfun, stmt))
	{
	  record_stmt_eh_region (state->cur_region, stmt);
	  note_eh_region_may_contain_throw (state->cur_region);
	}
      break;

    case GIMPLE_COND:
    case GIMPLE_GOTO:
    case GIMPLE_RETURN:
      maybe_record_in_goto_queue (state, stmt);
      break;

    case GIMPLE_SWITCH:
      verify_norecord_switch_expr (state, as_a <gswitch *> (stmt));
      break;

    case GIMPLE_TRY:
      {
	gtry *try_stmt = as_a <gtry *> (stmt);
	if (gimple_try_kind (try_stmt) == GIMPLE_TRY_FINALLY)
	  replace = lower_try_finally (state, try_stmt);
	else
	  {
	    /* TRY_CATCH: the first cleanup stmt identifies the flavor.  */
	    x = gimple_seq_first_stmt (gimple_try_cleanup (try_stmt));
	    if (!x)
	      {
		replace = gimple_try_eval (try_stmt);
		lower_eh_constructs_1 (state, &replace);
	      }
	    else
	      switch (gimple_code (x))
		{
		case GIMPLE_CATCH:
		  replace = lower_catch (state, try_stmt);
		  break;
		case GIMPLE_EH_FILTER:
		  replace = lower_eh_filter (state, try_stmt);
		  break;
		case GIMPLE_EH_MUST_NOT_THROW:
		  replace = lower_eh_must_not_throw (state, try_stmt);
		  break;
		case GIMPLE_EH_ELSE:
		  /* This code is only valid with GIMPLE_TRY_FINALLY.  */
		  gcc_unreachable ();
		default:
		  replace = lower_cleanup (state, try_stmt);
		  break;
		}
	  }
      }

      /* Remove the old stmt and insert the transformed sequence
	 instead. */
      gsi_insert_seq_before (gsi, replace, GSI_SAME_STMT);
      gsi_remove (gsi, true);

      /* Return since we don't want gsi_next () */
      return;

    case GIMPLE_EH_ELSE:
      /* We should be eliminating this in lower_try_finally et al.  */
      gcc_unreachable ();

    default:
      /* A type, a decl, or some kind of statement that we're not
	 interested in.  Don't walk them.  */
      break;
    }

  gsi_next (gsi);
}
2135 | ||
2136 | /* A helper to unwrap a gimple_seq and feed stmts to lower_eh_constructs_2. */ | |
2137 | ||
2138 | static void | |
e3a19533 | 2139 | lower_eh_constructs_1 (struct leh_state *state, gimple_seq *pseq) |
75a70cf9 | 2140 | { |
2141 | gimple_stmt_iterator gsi; | |
e3a19533 | 2142 | for (gsi = gsi_start (*pseq); !gsi_end_p (gsi);) |
75a70cf9 | 2143 | lower_eh_constructs_2 (state, &gsi); |
4ee9c684 | 2144 | } |
2145 | ||
7620bc82 | 2146 | namespace { |
2147 | ||
/* Pass descriptor for the "eh" lowering pass.  */

const pass_data pass_data_lower_eh =
{
  GIMPLE_PASS, /* type */
  "eh", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_TREE_EH, /* tv_id */
  PROP_gimple_lcf, /* properties_required */
  PROP_gimple_leh, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};
2160 | ||
/* Pass object wrapping the EH lowering machinery above; the real work
   is in the execute method.  */

class pass_lower_eh : public gimple_opt_pass
{
public:
  pass_lower_eh (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_lower_eh, ctxt)
  {}

  /* opt_pass methods: */
  virtual unsigned int execute (function *);

}; // class pass_lower_eh
2172 | ||
/* Entry point of the EH lowering pass.  Lowers all EH constructs in
   the body of the current function and appends the collected EH
   sequence (eh_seq) at the end of the body.  */

unsigned int
pass_lower_eh::execute (function *fun)
{
  struct leh_state null_state;
  gimple_seq bodyp;

  bodyp = gimple_body (current_function_decl);
  if (bodyp == NULL)
    return 0;

  /* Set up the per-function global state consumed by the lowering
     routines above.  */
  finally_tree = new hash_table<finally_tree_hasher> (31);
  eh_region_may_contain_throw_map = BITMAP_ALLOC (NULL);
  memset (&null_state, 0, sizeof (null_state));

  collect_finally_tree_1 (bodyp, NULL);
  lower_eh_constructs_1 (&null_state, &bodyp);
  gimple_set_body (current_function_decl, bodyp);

  /* We assume there's a return statement, or something, at the end of
     the function, and thus plopping the EH sequence afterward won't
     change anything.  */
  gcc_assert (!gimple_seq_may_fallthru (bodyp));
  gimple_seq_add_seq (&bodyp, eh_seq);

  /* We assume that since BODYP already existed, adding EH_SEQ to it
     didn't change its value, and we don't have to re-set the function.  */
  gcc_assert (bodyp == gimple_body (current_function_decl));

  /* Tear down the global state again.  */
  delete finally_tree;
  finally_tree = NULL;
  BITMAP_FREE (eh_region_may_contain_throw_map);
  eh_seq = NULL;

  /* If this function needs a language specific EH personality routine
     and the frontend didn't already set one do so now.  */
  if (function_needs_eh_personality (fun) == eh_personality_lang
      && !DECL_FUNCTION_PERSONALITY (current_function_decl))
    DECL_FUNCTION_PERSONALITY (current_function_decl)
      = lang_hooks.eh_personality ();

  return 0;
}
2215 | ||
7620bc82 | 2216 | } // anon namespace |
2217 | ||
cbe8bda8 | 2218 | gimple_opt_pass * |
2219 | make_pass_lower_eh (gcc::context *ctxt) | |
2220 | { | |
2221 | return new pass_lower_eh (ctxt); | |
2222 | } | |
4ee9c684 | 2223 | \f |
/* Create the multiple edges from an EH_DISPATCH statement to all of
   the possible handlers for its EH region.  Return true if there may
   be a fallthru edge; false if there is none (a catch-all handler was
   seen, which terminates the search).  */

bool
make_eh_dispatch_edges (geh_dispatch *stmt)
{
  eh_region r;
  eh_catch c;
  basic_block src, dst;

  r = get_eh_region_from_number (gimple_eh_dispatch_region (stmt));
  src = gimple_bb (stmt);

  switch (r->type)
    {
    case ERT_TRY:
      /* One edge per catch handler, in order.  */
      for (c = r->u.eh_try.first_catch; c ; c = c->next_catch)
	{
	  dst = label_to_block (cfun, c->label);
	  make_edge (src, dst, 0);

	  /* A catch-all handler doesn't have a fallthru.  */
	  if (c->type_list == NULL)
	    return false;
	}
      break;

    case ERT_ALLOWED_EXCEPTIONS:
      /* An exception specification has a single failure label.  */
      dst = label_to_block (cfun, r->u.allowed.label);
      make_edge (src, dst, 0);
      break;

    default:
      gcc_unreachable ();
    }

  return true;
}
2263 | ||
e38def9c | 2264 | /* Create the single EH edge from STMT to its nearest landing pad, |
2265 | if there is such a landing pad within the current function. */ | |
2266 | ||
4ee9c684 | 2267 | void |
42acab1c | 2268 | make_eh_edges (gimple *stmt) |
4ee9c684 | 2269 | { |
e38def9c | 2270 | basic_block src, dst; |
2271 | eh_landing_pad lp; | |
2272 | int lp_nr; | |
4ee9c684 | 2273 | |
e38def9c | 2274 | lp_nr = lookup_stmt_eh_lp (stmt); |
2275 | if (lp_nr <= 0) | |
2276 | return; | |
4ee9c684 | 2277 | |
e38def9c | 2278 | lp = get_eh_landing_pad_from_number (lp_nr); |
2279 | gcc_assert (lp != NULL); | |
d6d5ab2d | 2280 | |
e38def9c | 2281 | src = gimple_bb (stmt); |
0fb4f2ce | 2282 | dst = label_to_block (cfun, lp->post_landing_pad); |
e38def9c | 2283 | make_edge (src, dst, EDGE_EH); |
4ee9c684 | 2284 | } |
2285 | ||
/* Do the work in redirecting EDGE_IN to NEW_BB within the EH region tree;
   do not actually perform the final edge redirection.

   CHANGE_REGION is true when we're being called from cleanup_empty_eh and
   we intend to change the destination EH region as well; this means
   EH_LANDING_PAD_NR must already be set on the destination block label.
   If false, we're being called from generic cfg manipulation code and we
   should preserve our place within the region tree.  */

static void
redirect_eh_edge_1 (edge edge_in, basic_block new_bb, bool change_region)
{
  eh_landing_pad old_lp, new_lp;
  basic_block old_bb;
  gimple *throw_stmt;
  int old_lp_nr, new_lp_nr;
  tree old_label, new_label;
  edge_iterator ei;
  edge e;

  old_bb = edge_in->dest;
  old_label = gimple_block_label (old_bb);
  old_lp_nr = EH_LANDING_PAD_NR (old_label);
  gcc_assert (old_lp_nr > 0);
  old_lp = get_eh_landing_pad_from_number (old_lp_nr);

  /* The throwing statement is the last one in the source block; it
     must currently be associated with the old landing pad.  */
  throw_stmt = last_stmt (edge_in->src);
  gcc_assert (lookup_stmt_eh_lp (throw_stmt) == old_lp_nr);

  new_label = gimple_block_label (new_bb);

  /* Look for an existing region that might be using NEW_BB already.  */
  new_lp_nr = EH_LANDING_PAD_NR (new_label);
  if (new_lp_nr)
    {
      new_lp = get_eh_landing_pad_from_number (new_lp_nr);
      gcc_assert (new_lp);

      /* Unless CHANGE_REGION is true, the new and old landing pad
	 had better be associated with the same EH region.  */
      gcc_assert (change_region || new_lp->region == old_lp->region);
    }
  else
    {
      new_lp = NULL;
      gcc_assert (!change_region);
    }

  /* Notice when we redirect the last EH edge away from OLD_BB.
     E is left NULL iff no other EH edge remains.  */
  FOR_EACH_EDGE (e, ei, old_bb->preds)
    if (e != edge_in && (e->flags & EDGE_EH))
      break;

  if (new_lp)
    {
      /* NEW_LP already exists.  If there are still edges into OLD_LP,
	 there's nothing to do with the EH tree.  If there are no more
	 edges into OLD_LP, then we want to remove OLD_LP as it is unused.
	 If CHANGE_REGION is true, then our caller is expecting to remove
	 the landing pad.  */
      if (e == NULL && !change_region)
	remove_eh_landing_pad (old_lp);
    }
  else
    {
      /* No correct landing pad exists.  If there are no more edges
	 into OLD_LP, then we can simply re-use the existing landing pad.
	 Otherwise, we have to create a new landing pad.  */
      if (e == NULL)
	{
	  EH_LANDING_PAD_NR (old_lp->post_landing_pad) = 0;
	  new_lp = old_lp;
	}
      else
	new_lp = gen_eh_landing_pad (old_lp->region);
      new_lp->post_landing_pad = new_label;
      EH_LANDING_PAD_NR (new_label) = new_lp->index;
    }

  /* Maybe move the throwing statement to the new region.  */
  if (old_lp != new_lp)
    {
      remove_stmt_from_eh_lp (throw_stmt);
      add_stmt_to_eh_lp (throw_stmt, new_lp->index);
    }
}
2372 | ||
/* Redirect EH edge E to NEW_BB.  Updates the EH region tree first
   (preserving region membership; see redirect_eh_edge_1), then performs
   the actual CFG/SSA edge redirection and returns the redirected edge.  */

edge
redirect_eh_edge (edge edge_in, basic_block new_bb)
{
  redirect_eh_edge_1 (edge_in, new_bb, false);
  return ssa_redirect_edge (edge_in, new_bb);
}
b4ba5e9d | 2381 | |
/* This is a subroutine of gimple_redirect_edge_and_branch.  Update the
   labels for redirecting a non-fallthru EH_DISPATCH edge E to NEW_BB.
   The actual edge update will happen in the caller.  */

void
redirect_eh_dispatch_edge (geh_dispatch *stmt, edge e, basic_block new_bb)
{
  tree new_lab = gimple_block_label (new_bb);
  /* Track that we actually rewrote at least one label; it is a bug to be
     asked to redirect an edge this dispatch does not reference.  */
  bool any_changed = false;
  basic_block old_bb;
  eh_region r;
  eh_catch c;

  r = get_eh_region_from_number (gimple_eh_dispatch_region (stmt));
  switch (r->type)
    {
    case ERT_TRY:
      /* A try region may have several catch handlers; retarget every
	 one whose label currently points at E->dest.  */
      for (c = r->u.eh_try.first_catch; c ; c = c->next_catch)
	{
	  old_bb = label_to_block (cfun, c->label);
	  if (old_bb == e->dest)
	    {
	      c->label = new_lab;
	      any_changed = true;
	    }
	}
      break;

    case ERT_ALLOWED_EXCEPTIONS:
      /* An allowed-exceptions region has exactly one failure label.  */
      old_bb = label_to_block (cfun, r->u.allowed.label);
      gcc_assert (old_bb == e->dest);
      r->u.allowed.label = new_lab;
      any_changed = true;
      break;

    default:
      gcc_unreachable ();
    }

  gcc_assert (any_changed);
}
4ee9c684 | 2423 | \f |
/* Helper function for operation_could_trap_p and stmt_could_throw_p.
   Returns true if tree code OP could trap given the flag settings:
   FP_OPERATION - OP operates on floating-point values;
   HONOR_TRAPV - integer overflow traps (-ftrapv semantics);
   HONOR_NANS / HONOR_SNANS - quiet / signaling NaNs must be honored;
   DIVISOR - for division/modulus codes, the second operand.
   Sets *HANDLED to false (and returns false) when OP is not one of the
   codes this function knows about, so the caller can fall back to
   inspecting the operands itself.  */

bool
operation_could_trap_helper_p (enum tree_code op,
			       bool fp_operation,
			       bool honor_trapv,
			       bool honor_nans,
			       bool honor_snans,
			       tree divisor,
			       bool *handled)
{
  *handled = true;
  switch (op)
    {
    case TRUNC_DIV_EXPR:
    case CEIL_DIV_EXPR:
    case FLOOR_DIV_EXPR:
    case ROUND_DIV_EXPR:
    case EXACT_DIV_EXPR:
    case CEIL_MOD_EXPR:
    case FLOOR_MOD_EXPR:
    case ROUND_MOD_EXPR:
    case TRUNC_MOD_EXPR:
    case RDIV_EXPR:
      if (honor_snans)
	return true;
      if (fp_operation)
	return flag_trapping_math;
      /* Integer division traps when the divisor is not provably
	 a nonzero constant.  */
      if (!TREE_CONSTANT (divisor) || integer_zerop (divisor))
	return true;
      return false;

    case LT_EXPR:
    case LE_EXPR:
    case GT_EXPR:
    case GE_EXPR:
    case LTGT_EXPR:
      /* Some floating point comparisons may trap.  */
      return honor_nans;

    case EQ_EXPR:
    case NE_EXPR:
    case UNORDERED_EXPR:
    case ORDERED_EXPR:
    case UNLT_EXPR:
    case UNLE_EXPR:
    case UNGT_EXPR:
    case UNGE_EXPR:
    case UNEQ_EXPR:
      /* These comparisons only trap on signaling NaNs.  */
      return honor_snans;

    case NEGATE_EXPR:
    case ABS_EXPR:
    case CONJ_EXPR:
      /* These operations don't trap with floating point.  */
      if (honor_trapv)
	return true;
      return false;

    case ABSU_EXPR:
      /* ABSU_EXPR never traps.  */
      return false;

    case PLUS_EXPR:
    case MINUS_EXPR:
    case MULT_EXPR:
      /* Any floating arithmetic may trap.  */
      if (fp_operation && flag_trapping_math)
	return true;
      if (honor_trapv)
	return true;
      return false;

    case COMPLEX_EXPR:
    case CONSTRUCTOR:
      /* Constructing an object cannot trap.  */
      return false;

    default:
      /* Any floating arithmetic may trap.  */
      if (fp_operation && flag_trapping_math)
	return true;

      *handled = false;
      return false;
    }
}
2511 | ||
/* Return true if operation OP may trap.  FP_OPERATION is true if OP is applied
   on floating-point values.  HONOR_TRAPV is true if OP is applied on integer
   type operands that may trap.  If OP is a division operator, DIVISOR contains
   the value of the divisor.  */

bool
operation_could_trap_p (enum tree_code op, bool fp_operation, bool honor_trapv,
			tree divisor)
{
  /* Derive the NaN-honoring flags from the global math options; they
     only matter for floating-point operations.  */
  bool honor_nans = (fp_operation && flag_trapping_math
		     && !flag_finite_math_only);
  bool honor_snans = fp_operation && flag_signaling_nans != 0;
  bool handled;

  /* This routine is only meaningful for comparison, unary and binary
     expression codes; anything else cannot trap by itself.  */
  if (TREE_CODE_CLASS (op) != tcc_comparison
      && TREE_CODE_CLASS (op) != tcc_unary
      && TREE_CODE_CLASS (op) != tcc_binary)
    return false;

  /* HANDLED is deliberately ignored: for the code classes accepted
     above the helper always gives a definitive answer.  */
  return operation_could_trap_helper_p (op, fp_operation, honor_trapv,
					honor_nans, honor_snans, divisor,
					&handled);
}
2535 | ||
0e80b01d | 2536 | |
2537 | /* Returns true if it is possible to prove that the index of | |
2538 | an array access REF (an ARRAY_REF expression) falls into the | |
2539 | array bounds. */ | |
2540 | ||
2541 | static bool | |
2542 | in_array_bounds_p (tree ref) | |
2543 | { | |
2544 | tree idx = TREE_OPERAND (ref, 1); | |
2545 | tree min, max; | |
2546 | ||
2547 | if (TREE_CODE (idx) != INTEGER_CST) | |
2548 | return false; | |
2549 | ||
2550 | min = array_ref_low_bound (ref); | |
2551 | max = array_ref_up_bound (ref); | |
2552 | if (!min | |
2553 | || !max | |
2554 | || TREE_CODE (min) != INTEGER_CST | |
2555 | || TREE_CODE (max) != INTEGER_CST) | |
2556 | return false; | |
2557 | ||
2558 | if (tree_int_cst_lt (idx, min) | |
2559 | || tree_int_cst_lt (max, idx)) | |
2560 | return false; | |
2561 | ||
2562 | return true; | |
2563 | } | |
2564 | ||
2565 | /* Returns true if it is possible to prove that the range of | |
2566 | an array access REF (an ARRAY_RANGE_REF expression) falls | |
2567 | into the array bounds. */ | |
2568 | ||
2569 | static bool | |
2570 | range_in_array_bounds_p (tree ref) | |
2571 | { | |
2572 | tree domain_type = TYPE_DOMAIN (TREE_TYPE (ref)); | |
2573 | tree range_min, range_max, min, max; | |
2574 | ||
2575 | range_min = TYPE_MIN_VALUE (domain_type); | |
2576 | range_max = TYPE_MAX_VALUE (domain_type); | |
2577 | if (!range_min | |
2578 | || !range_max | |
2579 | || TREE_CODE (range_min) != INTEGER_CST | |
2580 | || TREE_CODE (range_max) != INTEGER_CST) | |
2581 | return false; | |
2582 | ||
2583 | min = array_ref_low_bound (ref); | |
2584 | max = array_ref_up_bound (ref); | |
2585 | if (!min | |
2586 | || !max | |
2587 | || TREE_CODE (min) != INTEGER_CST | |
2588 | || TREE_CODE (max) != INTEGER_CST) | |
2589 | return false; | |
2590 | ||
2591 | if (tree_int_cst_lt (range_min, min) | |
2592 | || tree_int_cst_lt (max, range_max)) | |
2593 | return false; | |
2594 | ||
2595 | return true; | |
2596 | } | |
2597 | ||
/* Return true if EXPR can trap, as in dereferencing an invalid pointer
   location or floating point arithmetic.  C.f. the rtl version, may_trap_p.
   This routine expects only GIMPLE lhs or rhs input.  */

bool
tree_could_trap_p (tree expr)
{
  enum tree_code code;
  bool fp_operation = false;
  bool honor_trapv = false;
  tree t, base, div = NULL_TREE;

  if (!expr)
    return false;

  code = TREE_CODE (expr);
  t = TREE_TYPE (expr);

  if (t)
    {
      /* For comparisons the interesting type is that of the operands,
	 not the (boolean) result type.  */
      if (COMPARISON_CLASS_P (expr))
	fp_operation = FLOAT_TYPE_P (TREE_TYPE (TREE_OPERAND (expr, 0)));
      else
	fp_operation = FLOAT_TYPE_P (t);
      honor_trapv = INTEGRAL_TYPE_P (t) && TYPE_OVERFLOW_TRAPS (t);
    }

  /* First dispose of trapping arithmetic; the memory-reference cases
     are handled by the switch below.  */
  if (TREE_CODE_CLASS (code) == tcc_binary)
    div = TREE_OPERAND (expr, 1);
  if (operation_could_trap_p (code, fp_operation, honor_trapv, div))
    return true;

 restart:
  switch (code)
    {
    case COMPONENT_REF:
    case REALPART_EXPR:
    case IMAGPART_EXPR:
    case BIT_FIELD_REF:
    case VIEW_CONVERT_EXPR:
    case WITH_SIZE_EXPR:
      /* These wrappers cannot trap themselves; look through to the
	 underlying reference.  */
      expr = TREE_OPERAND (expr, 0);
      code = TREE_CODE (expr);
      goto restart;

    case ARRAY_RANGE_REF:
      base = TREE_OPERAND (expr, 0);
      if (tree_could_trap_p (base))
	return true;
      if (TREE_THIS_NOTRAP (expr))
	return false;
      return !range_in_array_bounds_p (expr);

    case ARRAY_REF:
      base = TREE_OPERAND (expr, 0);
      if (tree_could_trap_p (base))
	return true;
      if (TREE_THIS_NOTRAP (expr))
	return false;
      return !in_array_bounds_p (expr);

    case TARGET_MEM_REF:
    case MEM_REF:
      if (TREE_CODE (TREE_OPERAND (expr, 0)) == ADDR_EXPR
	  && tree_could_trap_p (TREE_OPERAND (TREE_OPERAND (expr, 0), 0)))
	return true;
      if (TREE_THIS_NOTRAP (expr))
	return false;
      /* We cannot prove that the access is in-bounds when we have
         variable-index TARGET_MEM_REFs.  */
      if (code == TARGET_MEM_REF
	  && (TMR_INDEX (expr) || TMR_INDEX2 (expr)))
	return true;
      if (TREE_CODE (TREE_OPERAND (expr, 0)) == ADDR_EXPR)
	{
	  /* Access to a known object at a constant offset: check that
	     the offset is non-negative and within the object's size.  */
	  tree base = TREE_OPERAND (TREE_OPERAND (expr, 0), 0);
	  poly_offset_int off = mem_ref_offset (expr);
	  if (maybe_lt (off, 0))
	    return true;
	  if (TREE_CODE (base) == STRING_CST)
	    return maybe_le (TREE_STRING_LENGTH (base), off);
	  tree size = DECL_SIZE_UNIT (base);
	  if (size == NULL_TREE
	      || !poly_int_tree_p (size)
	      || maybe_le (wi::to_poly_offset (size), off))
	    return true;
	  /* Now we are sure the first byte of the access is inside
	     the object.  */
	  return false;
	}
      return true;

    case INDIRECT_REF:
      return !TREE_THIS_NOTRAP (expr);

    case ASM_EXPR:
      return TREE_THIS_VOLATILE (expr);

    case CALL_EXPR:
      t = get_callee_fndecl (expr);
      /* Assume that calls to weak functions may trap.  */
      if (!t || !DECL_P (t))
	return true;
      if (DECL_WEAK (t))
	return tree_could_trap_p (t);
      return false;

    case FUNCTION_DECL:
      /* Assume that accesses to weak functions may trap, unless we know
	 they are certainly defined in current TU or in some other
	 LTO partition.  */
      if (DECL_WEAK (expr) && !DECL_COMDAT (expr) && DECL_EXTERNAL (expr))
	{
	  cgraph_node *node = cgraph_node::get (expr);
	  if (node)
	    node = node->function_symbol ();
	  return !(node && node->in_other_partition);
	}
      return false;

    case VAR_DECL:
      /* Assume that accesses to weak vars may trap, unless we know
	 they are certainly defined in current TU or in some other
	 LTO partition.  */
      if (DECL_WEAK (expr) && !DECL_COMDAT (expr) && DECL_EXTERNAL (expr))
	{
	  varpool_node *node = varpool_node::get (expr);
	  if (node)
	    node = node->ultimate_alias_target ();
	  return !(node && node->in_other_partition);
	}
      return false;

    default:
      return false;
    }
}
35c15734 | 2735 | |
/* Return non-NULL if there is an integer operation with trapping overflow
   we can rewrite into non-trapping.  Called via walk_tree from
   rewrite_to_non_trapping_overflow.  DATA is the walk's pset (a
   hash_map used as the seen-set); it is NULL only on the second,
   replacement walk, where SAVE_EXPRs must still be descended into.  */

static tree
find_trapping_overflow (tree *tp, int *walk_subtrees, void *data)
{
  /* Report the first integral expression whose code can overflow with
     trapping semantics; returning non-NULL stops the walk.  */
  if (EXPR_P (*tp)
      && ANY_INTEGRAL_TYPE_P (TREE_TYPE (*tp))
      && !operation_no_trapping_overflow (TREE_TYPE (*tp), TREE_CODE (*tp)))
    return *tp;
  /* Types and decls contain nothing to rewrite; skip their subtrees.  */
  if (IS_TYPE_OR_DECL_P (*tp)
      || (TREE_CODE (*tp) == SAVE_EXPR && data == NULL))
    *walk_subtrees = 0;
  return NULL_TREE;
}
2752 | ||
/* Rewrite selected operations into unsigned arithmetics, so that they
   don't trap on overflow.  Called via walk_tree; DATA is the walk's
   pset (hash_set<tree> *) and is forwarded to recursive walks.  */

static tree
replace_trapping_overflow (tree *tp, int *walk_subtrees, void *data)
{
  if (find_trapping_overflow (tp, walk_subtrees, data))
    {
      tree type = TREE_TYPE (*tp);
      tree utype = unsigned_type_for (type);
      /* We rewrite this node ourselves (including recursing into its
	 operands below), so stop the outer walk from descending.  */
      *walk_subtrees = 0;
      int len = TREE_OPERAND_LENGTH (*tp);
      /* First rewrite any nested trapping operations.  */
      for (int i = 0; i < len; ++i)
	walk_tree (&TREE_OPERAND (*tp, i), replace_trapping_overflow,
		   data, (hash_set<tree> *) data);

      if (TREE_CODE (*tp) == ABS_EXPR)
	{
	  /* ABS on a signed type becomes ABSU (which never traps)
	     converted back to the original type.  */
	  TREE_SET_CODE (*tp, ABSU_EXPR);
	  TREE_TYPE (*tp) = utype;
	  *tp = fold_convert (type, *tp);
	}
      else
	{
	  /* Perform the operation in the unsigned type (where overflow
	     wraps) and convert the result back.  */
	  TREE_TYPE (*tp) = utype;
	  len = TREE_OPERAND_LENGTH (*tp);
	  for (int i = 0; i < len; ++i)
	    TREE_OPERAND (*tp, i)
	      = fold_convert (utype, TREE_OPERAND (*tp, i));
	  *tp = fold_convert (type, *tp);
	}
    }
  return NULL_TREE;
}
2787 | ||
/* If any subexpression of EXPR can trap due to -ftrapv, rewrite it
   using unsigned arithmetics to avoid traps in it.  Returns EXPR
   unchanged when -ftrapv is off or nothing in it can trap; otherwise
   returns a rewritten unshared copy, leaving the original intact.  */

tree
rewrite_to_non_trapping_overflow (tree expr)
{
  if (!flag_trapv)
    return expr;
  hash_set<tree> pset;
  /* Cheap scan first: if nothing can trap, avoid unsharing.  */
  if (!walk_tree (&expr, find_trapping_overflow, &pset, &pset))
    return expr;
  /* Unshare before mutating so shared subtrees elsewhere are safe.  */
  expr = unshare_expr (expr);
  pset.empty ();
  walk_tree (&expr, replace_trapping_overflow, &pset, &pset);
  return expr;
}
35c15734 | 2804 | |
/* Helper for stmt_could_throw_p.  Return true if STMT (assumed to be a
   an assignment or a conditional) may throw.  */

static bool
stmt_could_throw_1_p (gassign *stmt)
{
  enum tree_code code = gimple_assign_rhs_code (stmt);
  bool honor_nans = false;
  bool honor_snans = false;
  bool fp_operation = false;
  bool honor_trapv = false;
  tree t;
  size_t i;
  bool handled, ret;

  if (TREE_CODE_CLASS (code) == tcc_comparison
      || TREE_CODE_CLASS (code) == tcc_unary
      || TREE_CODE_CLASS (code) == tcc_binary)
    {
      /* For comparisons, the relevant type is that of the compared
	 operands, not the boolean result.  */
      if (TREE_CODE_CLASS (code) == tcc_comparison)
	t = TREE_TYPE (gimple_assign_rhs1 (stmt));
      else
	t = gimple_expr_type (stmt);
      fp_operation = FLOAT_TYPE_P (t);
      if (fp_operation)
	{
	  honor_nans = flag_trapping_math && !flag_finite_math_only;
	  honor_snans = flag_signaling_nans != 0;
	}
      else if (INTEGRAL_TYPE_P (t) && TYPE_OVERFLOW_TRAPS (t))
	honor_trapv = true;
    }

  /* First check the LHS.  */
  if (tree_could_trap_p (gimple_assign_lhs (stmt)))
    return true;

  /* Check if the main expression may trap.  */
  ret = operation_could_trap_helper_p (code, fp_operation, honor_trapv,
				       honor_nans, honor_snans,
				       gimple_assign_rhs2 (stmt),
				       &handled);
  if (handled)
    return ret;

  /* If the expression does not trap, see if any of the individual operands may
     trap.  Operand 0 is the LHS, already checked above.  */
  for (i = 1; i < gimple_num_ops (stmt); i++)
    if (tree_could_trap_p (gimple_op (stmt, i)))
      return true;

  return false;
}
2858 | ||
2859 | ||
/* Return true if statement STMT within FUN could throw an exception.
   FUN may be NULL, in which case non-call exceptions are conservatively
   assumed possible.  */

bool
stmt_could_throw_p (function *fun, gimple *stmt)
{
  if (!flag_exceptions)
    return false;

  /* The only statements that can throw an exception are assignments,
     conditionals, calls, resx, and asms.  */
  switch (gimple_code (stmt))
    {
    case GIMPLE_RESX:
      return true;

    case GIMPLE_CALL:
      return !gimple_call_nothrow_p (as_a <gcall *> (stmt));

    case GIMPLE_COND:
      {
	if (fun && !fun->can_throw_non_call_exceptions)
	  return false;
	gcond *cond = as_a <gcond *> (stmt);
	tree lhs = gimple_cond_lhs (cond);
	return operation_could_trap_p (gimple_cond_code (cond),
				       FLOAT_TYPE_P (TREE_TYPE (lhs)),
				       false, NULL_TREE);
      }

    case GIMPLE_ASSIGN:
      /* Clobbers are compiler artifacts and never throw.  */
      if ((fun && !fun->can_throw_non_call_exceptions)
	  || gimple_clobber_p (stmt))
	return false;
      return stmt_could_throw_1_p (as_a <gassign *> (stmt));

    case GIMPLE_ASM:
      if (fun && !fun->can_throw_non_call_exceptions)
	return false;
      return gimple_asm_volatile_p (as_a <gasm *> (stmt));

    default:
      return false;
    }
}
2904 | ||
75a70cf9 | 2905 | |
/* Return true if expression T could throw an exception.  */

bool
tree_could_throw_p (tree t)
{
  if (!flag_exceptions)
    return false;
  if (TREE_CODE (t) == MODIFY_EXPR)
    {
      /* With non-call exceptions the store's LHS may itself trap.  */
      if (cfun->can_throw_non_call_exceptions
	  && tree_could_trap_p (TREE_OPERAND (t, 0)))
	return true;
      /* Otherwise only the RHS matters.  */
      t = TREE_OPERAND (t, 1);
    }

  if (TREE_CODE (t) == WITH_SIZE_EXPR)
    t = TREE_OPERAND (t, 0);
  if (TREE_CODE (t) == CALL_EXPR)
    return (call_expr_flags (t) & ECF_NOTHROW) == 0;
  if (cfun->can_throw_non_call_exceptions)
    return tree_could_trap_p (t);
  return false;
}
2929 | ||
aac19106 | 2930 | /* Return true if STMT can throw an exception that is not caught within its |
2931 | function FUN. FUN can be NULL but the function is extra conservative | |
2932 | then. */ | |
b5cebd44 | 2933 | |
2934 | bool | |
aac19106 | 2935 | stmt_can_throw_external (function *fun, gimple *stmt) |
b5cebd44 | 2936 | { |
e38def9c | 2937 | int lp_nr; |
b5cebd44 | 2938 | |
aac19106 | 2939 | if (!stmt_could_throw_p (fun, stmt)) |
b5cebd44 | 2940 | return false; |
aac19106 | 2941 | if (!fun) |
2942 | return true; | |
b5cebd44 | 2943 | |
aac19106 | 2944 | lp_nr = lookup_stmt_eh_lp_fn (fun, stmt); |
e38def9c | 2945 | return lp_nr == 0; |
b5cebd44 | 2946 | } |
75a70cf9 | 2947 | |
aac19106 | 2948 | /* Return true if STMT can throw an exception that is caught within its |
2949 | function FUN. */ | |
75a70cf9 | 2950 | |
4ee9c684 | 2951 | bool |
aac19106 | 2952 | stmt_can_throw_internal (function *fun, gimple *stmt) |
4ee9c684 | 2953 | { |
e38def9c | 2954 | int lp_nr; |
75a70cf9 | 2955 | |
aac19106 | 2956 | gcc_checking_assert (fun); |
2957 | if (!stmt_could_throw_p (fun, stmt)) | |
4ee9c684 | 2958 | return false; |
75a70cf9 | 2959 | |
aac19106 | 2960 | lp_nr = lookup_stmt_eh_lp_fn (fun, stmt); |
e38def9c | 2961 | return lp_nr > 0; |
2962 | } | |
2963 | ||
/* Given a statement STMT in IFUN, if STMT can no longer throw, then
   remove any entry it might have from the EH table.  Return true if
   any change was made.  */

bool
maybe_clean_eh_stmt_fn (struct function *ifun, gimple *stmt)
{
  /* Still throwing: the EH table entry stays.  */
  if (stmt_could_throw_p (ifun, stmt))
    return false;
  return remove_stmt_from_eh_lp_fn (ifun, stmt);
}
2975 | ||
/* Likewise, but always use the current function.  */

bool
maybe_clean_eh_stmt (gimple *stmt)
{
  return maybe_clean_eh_stmt_fn (cfun, stmt);
}
4ee9c684 | 2983 | |
/* Given a statement OLD_STMT and a new statement NEW_STMT that has replaced
   OLD_STMT in the function, remove OLD_STMT from the EH table and put NEW_STMT
   in the table if it should be in there.  Return TRUE if a replacement was
   done that my require an EH edge purge.  */

bool
maybe_clean_or_replace_eh_stmt (gimple *old_stmt, gimple *new_stmt)
{
  int lp_nr = lookup_stmt_eh_lp (old_stmt);

  if (lp_nr != 0)
    {
      bool new_stmt_could_throw = stmt_could_throw_p (cfun, new_stmt);

      /* In-place update of a still-throwing statement: nothing to do.  */
      if (new_stmt == old_stmt && new_stmt_could_throw)
	return false;

      remove_stmt_from_eh_lp (old_stmt);
      if (new_stmt_could_throw)
	{
	  /* NEW_STMT inherits OLD_STMT's landing pad; the CFG's EH
	     edges remain valid, so no purge is needed.  */
	  add_stmt_to_eh_lp (new_stmt, lp_nr);
	  return false;
	}
      else
	/* The statement no longer throws: caller may need to purge
	   the now-dead EH edges.  */
	return true;
    }

  return false;
}
e38def9c | 3013 | |
/* Given a statement OLD_STMT in OLD_FUN and a duplicate statement NEW_STMT
   in NEW_FUN, copy the EH table data from OLD_STMT to NEW_STMT.  The MAP
   operand is the return value of duplicate_eh_regions.  DEFAULT_LP_NR is
   used when OLD_STMT had no EH entry (0 means "leave NEW_STMT without
   one").  Returns true iff NEW_STMT was entered into NEW_FUN's EH table.  */

bool
maybe_duplicate_eh_stmt_fn (struct function *new_fun, gimple *new_stmt,
			    struct function *old_fun, gimple *old_stmt,
			    hash_map<void *, void *> *map,
			    int default_lp_nr)
{
  int old_lp_nr, new_lp_nr;

  if (!stmt_could_throw_p (new_fun, new_stmt))
    return false;

  old_lp_nr = lookup_stmt_eh_lp_fn (old_fun, old_stmt);
  if (old_lp_nr == 0)
    {
      if (default_lp_nr == 0)
	return false;
      new_lp_nr = default_lp_nr;
    }
  else if (old_lp_nr > 0)
    {
      /* Positive numbers are landing-pad indices; translate through
	 the region-duplication MAP.  */
      eh_landing_pad old_lp, new_lp;

      old_lp = (*old_fun->eh->lp_array)[old_lp_nr];
      new_lp = static_cast<eh_landing_pad> (*map->get (old_lp));
      new_lp_nr = new_lp->index;
    }
  else
    {
      /* Negative numbers encode must-not-throw regions; translate the
	 region and keep the negative encoding.  */
      eh_region old_r, new_r;

      old_r = (*old_fun->eh->region_array)[-old_lp_nr];
      new_r = static_cast<eh_region> (*map->get (old_r));
      new_lp_nr = -new_r->index;
    }

  add_stmt_to_eh_lp_fn (new_fun, new_stmt, new_lp_nr);
  return true;
}
3056 | ||
3057 | /* Similar, but both OLD_STMT and NEW_STMT are within the current function, | |
3058 | and thus no remapping is required. */ | |
3059 | ||
3060 | bool | |
42acab1c | 3061 | maybe_duplicate_eh_stmt (gimple *new_stmt, gimple *old_stmt) |
e38def9c | 3062 | { |
3063 | int lp_nr; | |
3064 | ||
aac19106 | 3065 | if (!stmt_could_throw_p (cfun, new_stmt)) |
e38def9c | 3066 | return false; |
3067 | ||
3068 | lp_nr = lookup_stmt_eh_lp (old_stmt); | |
3069 | if (lp_nr == 0) | |
3070 | return false; | |
3071 | ||
3072 | add_stmt_to_eh_lp (new_stmt, lp_nr); | |
3073 | return true; | |
3074 | } | |
4888ab9a | 3075 | \f |
/* Returns TRUE if oneh and twoh are exception handlers (gimple_try_cleanup of
   GIMPLE_TRY) that are similar enough to be considered the same.  Currently
   this only handles handlers consisting of a single call, as that's the
   important case for C++: a destructor call for a particular object showing
   up in multiple handlers.  */

static bool
same_handler_p (gimple_seq oneh, gimple_seq twoh)
{
  gimple_stmt_iterator gsi;
  gimple *ones, *twos;
  unsigned int ai;

  /* Each handler must consist of exactly one statement.  */
  gsi = gsi_start (oneh);
  if (!gsi_one_before_end_p (gsi))
    return false;
  ones = gsi_stmt (gsi);

  gsi = gsi_start (twoh);
  if (!gsi_one_before_end_p (gsi))
    return false;
  twos = gsi_stmt (gsi);

  /* Both must be plain calls to the same target: no LHS, no static
     chain, same argument count.  */
  if (!is_gimple_call (ones)
      || !is_gimple_call (twos)
      || gimple_call_lhs (ones)
      || gimple_call_lhs (twos)
      || gimple_call_chain (ones)
      || gimple_call_chain (twos)
      || !gimple_call_same_target_p (ones, twos)
      || gimple_call_num_args (ones) != gimple_call_num_args (twos))
    return false;

  /* And every argument must be structurally identical.  */
  for (ai = 0; ai < gimple_call_num_args (ones); ++ai)
    if (!operand_equal_p (gimple_call_arg (ones, ai),
			  gimple_call_arg (twos, ai), 0))
      return false;

  return true;
}
3116 | ||
/* Optimize
    try { A() } finally { try { ~B() } catch { ~A() } }
    try { ... } finally { ~A() }
   into
    try { A() } catch { ~B() }
    try { ~B() ... } finally { ~A() }

   This occurs frequently in C++, where A is a local variable and B is a
   temporary used in the initializer for A.  */

static void
optimize_double_finally (gtry *one, gtry *two)
{
  gimple *oneh;
  gimple_stmt_iterator gsi;
  gimple_seq cleanup;

  /* ONE's cleanup must be exactly one statement ...  */
  cleanup = gimple_try_cleanup (one);
  gsi = gsi_start (cleanup);
  if (!gsi_one_before_end_p (gsi))
    return;

  /* ... and that statement must be a try/catch.  */
  oneh = gsi_stmt (gsi);
  if (gimple_code (oneh) != GIMPLE_TRY
      || gimple_try_kind (oneh) != GIMPLE_TRY_CATCH)
    return;

  if (same_handler_p (gimple_try_cleanup (oneh), gimple_try_cleanup (two)))
    {
      gimple_seq seq = gimple_try_eval (oneh);

      /* ONE becomes try/catch with the inner body as its cleanup; a
	 fresh copy of that body (with locals replaced) is prepended
	 to TWO's body so it still runs on the normal path.  */
      gimple_try_set_cleanup (one, seq);
      gimple_try_set_kind (one, GIMPLE_TRY_CATCH);
      seq = copy_gimple_seq_and_replace_locals (seq);
      gimple_seq_add_seq (&seq, gimple_try_eval (two));
      gimple_try_set_eval (two, seq);
    }
}
3155 | ||
/* Perform EH refactoring optimizations that are simpler to do when code
   flow has been lowered but EH structures haven't.  Walks SEQ keeping a
   sliding window of two adjacent statements (ONE, TWO) so consecutive
   try/finally pairs can be fed to optimize_double_finally, and recurses
   into every nested sequence.  */

static void
refactor_eh_r (gimple_seq seq)
{
  gimple_stmt_iterator gsi;
  gimple *one, *two;

  one = NULL;
  two = NULL;
  gsi = gsi_start (seq);
  while (1)
    {
      /* Slide the window: ONE is the previous statement, TWO the
	 current one (NULL once past the end).  */
      one = two;
      if (gsi_end_p (gsi))
	two = NULL;
      else
	two = gsi_stmt (gsi);
      if (one && two)
	if (gtry *try_one = dyn_cast <gtry *> (one))
	  if (gtry *try_two = dyn_cast <gtry *> (two))
	    if (gimple_try_kind (try_one) == GIMPLE_TRY_FINALLY
		&& gimple_try_kind (try_two) == GIMPLE_TRY_FINALLY)
	      optimize_double_finally (try_one, try_two);
      /* Recurse into any sequences nested within ONE.  */
      if (one)
	switch (gimple_code (one))
	  {
	  case GIMPLE_TRY:
	    refactor_eh_r (gimple_try_eval (one));
	    refactor_eh_r (gimple_try_cleanup (one));
	    break;
	  case GIMPLE_CATCH:
	    refactor_eh_r (gimple_catch_handler (as_a <gcatch *> (one)));
	    break;
	  case GIMPLE_EH_FILTER:
	    refactor_eh_r (gimple_eh_filter_failure (one));
	    break;
	  case GIMPLE_EH_ELSE:
	    {
	      geh_else *eh_else_stmt = as_a <geh_else *> (one);
	      refactor_eh_r (gimple_eh_else_n_body (eh_else_stmt));
	      refactor_eh_r (gimple_eh_else_e_body (eh_else_stmt));
	    }
	    break;
	  default:
	    break;
	  }
      if (two)
	gsi_next (&gsi);
      else
	break;
    }
}
3210 | ||
7620bc82 | 3211 | namespace { |
3212 | ||
3213 | const pass_data pass_data_refactor_eh = | |
4888ab9a | 3214 | { |
cbe8bda8 | 3215 | GIMPLE_PASS, /* type */ |
3216 | "ehopt", /* name */ | |
3217 | OPTGROUP_NONE, /* optinfo_flags */ | |
cbe8bda8 | 3218 | TV_TREE_EH, /* tv_id */ |
3219 | PROP_gimple_lcf, /* properties_required */ | |
3220 | 0, /* properties_provided */ | |
3221 | 0, /* properties_destroyed */ | |
3222 | 0, /* todo_flags_start */ | |
3223 | 0, /* todo_flags_finish */ | |
4888ab9a | 3224 | }; |
cbe8bda8 | 3225 | |
7620bc82 | 3226 | class pass_refactor_eh : public gimple_opt_pass |
cbe8bda8 | 3227 | { |
3228 | public: | |
9af5ce0c | 3229 | pass_refactor_eh (gcc::context *ctxt) |
3230 | : gimple_opt_pass (pass_data_refactor_eh, ctxt) | |
cbe8bda8 | 3231 | {} |
3232 | ||
3233 | /* opt_pass methods: */ | |
31315c24 | 3234 | virtual bool gate (function *) { return flag_exceptions != 0; } |
65b0537f | 3235 | virtual unsigned int execute (function *) |
3236 | { | |
3237 | refactor_eh_r (gimple_body (current_function_decl)); | |
3238 | return 0; | |
3239 | } | |
cbe8bda8 | 3240 | |
3241 | }; // class pass_refactor_eh | |
3242 | ||
7620bc82 | 3243 | } // anon namespace |
3244 | ||
cbe8bda8 | 3245 | gimple_opt_pass * |
3246 | make_pass_refactor_eh (gcc::context *ctxt) | |
3247 | { | |
3248 | return new pass_refactor_eh (ctxt); | |
3249 | } | |
e38def9c | 3250 | \f |
3251 | /* At the end of gimple optimization, we can lower RESX. */ | |
4c5fcca6 | 3252 | |
e38def9c | 3253 | static bool |
1a91d914 | 3254 | lower_resx (basic_block bb, gresx *stmt, |
3255 | hash_map<eh_region, tree> *mnt_map) | |
4c5fcca6 | 3256 | { |
e38def9c | 3257 | int lp_nr; |
3258 | eh_region src_r, dst_r; | |
3259 | gimple_stmt_iterator gsi; | |
42acab1c | 3260 | gimple *x; |
e38def9c | 3261 | tree fn, src_nr; |
3262 | bool ret = false; | |
4c5fcca6 | 3263 | |
e38def9c | 3264 | lp_nr = lookup_stmt_eh_lp (stmt); |
3265 | if (lp_nr != 0) | |
3266 | dst_r = get_eh_region_from_lp_number (lp_nr); | |
3267 | else | |
3268 | dst_r = NULL; | |
4c5fcca6 | 3269 | |
e38def9c | 3270 | src_r = get_eh_region_from_number (gimple_resx_region (stmt)); |
e38def9c | 3271 | gsi = gsi_last_bb (bb); |
4c5fcca6 | 3272 | |
395fc2bb | 3273 | if (src_r == NULL) |
3274 | { | |
3275 | /* We can wind up with no source region when pass_cleanup_eh shows | |
3276 | that there are no entries into an eh region and deletes it, but | |
3277 | then the block that contains the resx isn't removed. This can | |
3278 | happen without optimization when the switch statement created by | |
3279 | lower_try_finally_switch isn't simplified to remove the eh case. | |
3280 | ||
3281 | Resolve this by expanding the resx node to an abort. */ | |
3282 | ||
b9a16870 | 3283 | fn = builtin_decl_implicit (BUILT_IN_TRAP); |
395fc2bb | 3284 | x = gimple_build_call (fn, 0); |
3285 | gsi_insert_before (&gsi, x, GSI_SAME_STMT); | |
3286 | ||
3287 | while (EDGE_COUNT (bb->succs) > 0) | |
3288 | remove_edge (EDGE_SUCC (bb, 0)); | |
3289 | } | |
3290 | else if (dst_r) | |
e38def9c | 3291 | { |
3292 | /* When we have a destination region, we resolve this by copying | |
3293 | the excptr and filter values into place, and changing the edge | |
3294 | to immediately after the landing pad. */ | |
3295 | edge e; | |
4c5fcca6 | 3296 | |
e38def9c | 3297 | if (lp_nr < 0) |
3298 | { | |
3299 | basic_block new_bb; | |
e38def9c | 3300 | tree lab; |
3d1eacdb | 3301 | |
e38def9c | 3302 | /* We are resuming into a MUST_NOT_CALL region. Expand a call to |
3303 | the failure decl into a new block, if needed. */ | |
3304 | gcc_assert (dst_r->type == ERT_MUST_NOT_THROW); | |
4c5fcca6 | 3305 | |
06ecf488 | 3306 | tree *slot = mnt_map->get (dst_r); |
e38def9c | 3307 | if (slot == NULL) |
3308 | { | |
3309 | gimple_stmt_iterator gsi2; | |
4c5fcca6 | 3310 | |
e38def9c | 3311 | new_bb = create_empty_bb (bb); |
205ce1aa | 3312 | new_bb->count = bb->count; |
b3083327 | 3313 | add_bb_to_loop (new_bb, bb->loop_father); |
e38def9c | 3314 | lab = gimple_block_label (new_bb); |
3315 | gsi2 = gsi_start_bb (new_bb); | |
4c5fcca6 | 3316 | |
e38def9c | 3317 | fn = dst_r->u.must_not_throw.failure_decl; |
3318 | x = gimple_build_call (fn, 0); | |
3319 | gimple_set_location (x, dst_r->u.must_not_throw.failure_loc); | |
3320 | gsi_insert_after (&gsi2, x, GSI_CONTINUE_LINKING); | |
3bd82487 | 3321 | |
06ecf488 | 3322 | mnt_map->put (dst_r, lab); |
e38def9c | 3323 | } |
3324 | else | |
3325 | { | |
06ecf488 | 3326 | lab = *slot; |
0fb4f2ce | 3327 | new_bb = label_to_block (cfun, lab); |
e38def9c | 3328 | } |
4c5fcca6 | 3329 | |
e38def9c | 3330 | gcc_assert (EDGE_COUNT (bb->succs) == 0); |
720cfc43 | 3331 | e = make_single_succ_edge (bb, new_bb, EDGE_FALLTHRU); |
e38def9c | 3332 | } |
3333 | else | |
3334 | { | |
3335 | edge_iterator ei; | |
bad12c62 | 3336 | tree dst_nr = build_int_cst (integer_type_node, dst_r->index); |
4c5fcca6 | 3337 | |
b9a16870 | 3338 | fn = builtin_decl_implicit (BUILT_IN_EH_COPY_VALUES); |
bad12c62 | 3339 | src_nr = build_int_cst (integer_type_node, src_r->index); |
e38def9c | 3340 | x = gimple_build_call (fn, 2, dst_nr, src_nr); |
3341 | gsi_insert_before (&gsi, x, GSI_SAME_STMT); | |
4c5fcca6 | 3342 | |
e38def9c | 3343 | /* Update the flags for the outgoing edge. */ |
3344 | e = single_succ_edge (bb); | |
3345 | gcc_assert (e->flags & EDGE_EH); | |
3346 | e->flags = (e->flags & ~EDGE_EH) | EDGE_FALLTHRU; | |
720cfc43 | 3347 | e->probability = profile_probability::always (); |
4c5fcca6 | 3348 | |
e38def9c | 3349 | /* If there are no more EH users of the landing pad, delete it. */ |
3350 | FOR_EACH_EDGE (e, ei, e->dest->preds) | |
3351 | if (e->flags & EDGE_EH) | |
3352 | break; | |
3353 | if (e == NULL) | |
3354 | { | |
3355 | eh_landing_pad lp = get_eh_landing_pad_from_number (lp_nr); | |
3356 | remove_eh_landing_pad (lp); | |
3357 | } | |
3358 | } | |
4c5fcca6 | 3359 | |
e38def9c | 3360 | ret = true; |
3361 | } | |
3362 | else | |
3363 | { | |
3364 | tree var; | |
4c5fcca6 | 3365 | |
e38def9c | 3366 | /* When we don't have a destination region, this exception escapes |
3367 | up the call chain. We resolve this by generating a call to the | |
3368 | _Unwind_Resume library function. */ | |
4c5fcca6 | 3369 | |
471eff36 | 3370 | /* The ARM EABI redefines _Unwind_Resume as __cxa_end_cleanup |
61b3ef70 | 3371 | with no arguments for C++. Check for that. */ |
471eff36 | 3372 | if (src_r->use_cxa_end_cleanup) |
3373 | { | |
b9a16870 | 3374 | fn = builtin_decl_implicit (BUILT_IN_CXA_END_CLEANUP); |
471eff36 | 3375 | x = gimple_build_call (fn, 0); |
3376 | gsi_insert_before (&gsi, x, GSI_SAME_STMT); | |
3377 | } | |
3378 | else | |
3bd82487 | 3379 | { |
b9a16870 | 3380 | fn = builtin_decl_implicit (BUILT_IN_EH_POINTER); |
bad12c62 | 3381 | src_nr = build_int_cst (integer_type_node, src_r->index); |
e38def9c | 3382 | x = gimple_build_call (fn, 1, src_nr); |
f9e245b2 | 3383 | var = create_tmp_var (ptr_type_node); |
e38def9c | 3384 | var = make_ssa_name (var, x); |
3385 | gimple_call_set_lhs (x, var); | |
3386 | gsi_insert_before (&gsi, x, GSI_SAME_STMT); | |
3387 | ||
5b64e274 | 3388 | /* When exception handling is delegated to a caller function, we |
3389 | have to guarantee that shadow memory variables living on stack | |
3390 | will be cleaner before control is given to a parent function. */ | |
3391 | if (sanitize_flags_p (SANITIZE_ADDRESS)) | |
3392 | { | |
3393 | tree decl | |
3394 | = builtin_decl_implicit (BUILT_IN_ASAN_HANDLE_NO_RETURN); | |
3395 | gimple *g = gimple_build_call (decl, 0); | |
3396 | gimple_set_location (g, gimple_location (stmt)); | |
3397 | gsi_insert_before (&gsi, g, GSI_SAME_STMT); | |
3398 | } | |
3399 | ||
b9a16870 | 3400 | fn = builtin_decl_implicit (BUILT_IN_UNWIND_RESUME); |
e38def9c | 3401 | x = gimple_build_call (fn, 1, var); |
3402 | gsi_insert_before (&gsi, x, GSI_SAME_STMT); | |
3bd82487 | 3403 | } |
4c5fcca6 | 3404 | |
e38def9c | 3405 | gcc_assert (EDGE_COUNT (bb->succs) == 0); |
3bd82487 | 3406 | } |
3d1eacdb | 3407 | |
e38def9c | 3408 | gsi_remove (&gsi, true); |
3409 | ||
3410 | return ret; | |
3bd82487 | 3411 | } |
3412 | ||
7620bc82 | 3413 | namespace { |
3414 | ||
3415 | const pass_data pass_data_lower_resx = | |
3bd82487 | 3416 | { |
cbe8bda8 | 3417 | GIMPLE_PASS, /* type */ |
3418 | "resx", /* name */ | |
3419 | OPTGROUP_NONE, /* optinfo_flags */ | |
cbe8bda8 | 3420 | TV_TREE_EH, /* tv_id */ |
3421 | PROP_gimple_lcf, /* properties_required */ | |
3422 | 0, /* properties_provided */ | |
3423 | 0, /* properties_destroyed */ | |
3424 | 0, /* todo_flags_start */ | |
8b88439e | 3425 | 0, /* todo_flags_finish */ |
3bd82487 | 3426 | }; |
3427 | ||
7620bc82 | 3428 | class pass_lower_resx : public gimple_opt_pass |
cbe8bda8 | 3429 | { |
3430 | public: | |
9af5ce0c | 3431 | pass_lower_resx (gcc::context *ctxt) |
3432 | : gimple_opt_pass (pass_data_lower_resx, ctxt) | |
cbe8bda8 | 3433 | {} |
3434 | ||
3435 | /* opt_pass methods: */ | |
31315c24 | 3436 | virtual bool gate (function *) { return flag_exceptions != 0; } |
65b0537f | 3437 | virtual unsigned int execute (function *); |
cbe8bda8 | 3438 | |
3439 | }; // class pass_lower_resx | |
3440 | ||
65b0537f | 3441 | unsigned |
3442 | pass_lower_resx::execute (function *fun) | |
e38def9c | 3443 | { |
3444 | basic_block bb; | |
e38def9c | 3445 | bool dominance_invalidated = false; |
3446 | bool any_rewritten = false; | |
3bd82487 | 3447 | |
06ecf488 | 3448 | hash_map<eh_region, tree> mnt_map; |
3bd82487 | 3449 | |
65b0537f | 3450 | FOR_EACH_BB_FN (bb, fun) |
e38def9c | 3451 | { |
42acab1c | 3452 | gimple *last = last_stmt (bb); |
e38def9c | 3453 | if (last && is_gimple_resx (last)) |
3454 | { | |
1a91d914 | 3455 | dominance_invalidated |= |
3456 | lower_resx (bb, as_a <gresx *> (last), &mnt_map); | |
e38def9c | 3457 | any_rewritten = true; |
3458 | } | |
3459 | } | |
3460 | ||
e38def9c | 3461 | if (dominance_invalidated) |
3462 | { | |
3463 | free_dominance_info (CDI_DOMINATORS); | |
3464 | free_dominance_info (CDI_POST_DOMINATORS); | |
3bd82487 | 3465 | } |
4c5fcca6 | 3466 | |
e38def9c | 3467 | return any_rewritten ? TODO_update_ssa_only_virtuals : 0; |
3468 | } | |
4c5fcca6 | 3469 | |
7620bc82 | 3470 | } // anon namespace |
3471 | ||
cbe8bda8 | 3472 | gimple_opt_pass * |
3473 | make_pass_lower_resx (gcc::context *ctxt) | |
3474 | { | |
3475 | return new pass_lower_resx (ctxt); | |
3476 | } | |
3477 | ||
1227a337 | 3478 | /* Try to optimize var = {v} {CLOBBER} stmts followed just by |
3479 | external throw. */ | |
3480 | ||
3481 | static void | |
3482 | optimize_clobbers (basic_block bb) | |
3483 | { | |
3484 | gimple_stmt_iterator gsi = gsi_last_bb (bb); | |
896a0c42 | 3485 | bool any_clobbers = false; |
3486 | bool seen_stack_restore = false; | |
3487 | edge_iterator ei; | |
3488 | edge e; | |
3489 | ||
3490 | /* Only optimize anything if the bb contains at least one clobber, | |
3491 | ends with resx (checked by caller), optionally contains some | |
3492 | debug stmts or labels, or at most one __builtin_stack_restore | |
3493 | call, and has an incoming EH edge. */ | |
d1d905ee | 3494 | for (gsi_prev (&gsi); !gsi_end_p (gsi); gsi_prev (&gsi)) |
1227a337 | 3495 | { |
42acab1c | 3496 | gimple *stmt = gsi_stmt (gsi); |
1227a337 | 3497 | if (is_gimple_debug (stmt)) |
d1d905ee | 3498 | continue; |
896a0c42 | 3499 | if (gimple_clobber_p (stmt)) |
3500 | { | |
3501 | any_clobbers = true; | |
3502 | continue; | |
3503 | } | |
3504 | if (!seen_stack_restore | |
3505 | && gimple_call_builtin_p (stmt, BUILT_IN_STACK_RESTORE)) | |
3506 | { | |
3507 | seen_stack_restore = true; | |
3508 | continue; | |
3509 | } | |
3510 | if (gimple_code (stmt) == GIMPLE_LABEL) | |
3511 | break; | |
3512 | return; | |
3513 | } | |
3514 | if (!any_clobbers) | |
3515 | return; | |
3516 | FOR_EACH_EDGE (e, ei, bb->preds) | |
3517 | if (e->flags & EDGE_EH) | |
3518 | break; | |
3519 | if (e == NULL) | |
3520 | return; | |
3521 | gsi = gsi_last_bb (bb); | |
3522 | for (gsi_prev (&gsi); !gsi_end_p (gsi); gsi_prev (&gsi)) | |
3523 | { | |
42acab1c | 3524 | gimple *stmt = gsi_stmt (gsi); |
896a0c42 | 3525 | if (!gimple_clobber_p (stmt)) |
3526 | continue; | |
1227a337 | 3527 | unlink_stmt_vdef (stmt); |
3528 | gsi_remove (&gsi, true); | |
3529 | release_defs (stmt); | |
3530 | } | |
3531 | } | |
e38def9c | 3532 | |
07428872 | 3533 | /* Try to sink var = {v} {CLOBBER} stmts followed just by |
3534 | internal throw to successor BB. */ | |
3535 | ||
3536 | static int | |
3537 | sink_clobbers (basic_block bb) | |
3538 | { | |
3539 | edge e; | |
3540 | edge_iterator ei; | |
3541 | gimple_stmt_iterator gsi, dgsi; | |
3542 | basic_block succbb; | |
3543 | bool any_clobbers = false; | |
8aacb2c5 | 3544 | unsigned todo = 0; |
07428872 | 3545 | |
3546 | /* Only optimize if BB has a single EH successor and | |
3547 | all predecessor edges are EH too. */ | |
3548 | if (!single_succ_p (bb) | |
3549 | || (single_succ_edge (bb)->flags & EDGE_EH) == 0) | |
3550 | return 0; | |
3551 | ||
3552 | FOR_EACH_EDGE (e, ei, bb->preds) | |
3553 | { | |
3554 | if ((e->flags & EDGE_EH) == 0) | |
3555 | return 0; | |
3556 | } | |
3557 | ||
3558 | /* And BB contains only CLOBBER stmts before the final | |
3559 | RESX. */ | |
3560 | gsi = gsi_last_bb (bb); | |
3561 | for (gsi_prev (&gsi); !gsi_end_p (gsi); gsi_prev (&gsi)) | |
3562 | { | |
42acab1c | 3563 | gimple *stmt = gsi_stmt (gsi); |
07428872 | 3564 | if (is_gimple_debug (stmt)) |
3565 | continue; | |
3566 | if (gimple_code (stmt) == GIMPLE_LABEL) | |
3567 | break; | |
896a0c42 | 3568 | if (!gimple_clobber_p (stmt)) |
07428872 | 3569 | return 0; |
3570 | any_clobbers = true; | |
3571 | } | |
3572 | if (!any_clobbers) | |
3573 | return 0; | |
3574 | ||
0ba38440 | 3575 | edge succe = single_succ_edge (bb); |
3576 | succbb = succe->dest; | |
3577 | ||
3578 | /* See if there is a virtual PHI node to take an updated virtual | |
3579 | operand from. */ | |
1a91d914 | 3580 | gphi *vphi = NULL; |
0ba38440 | 3581 | tree vuse = NULL_TREE; |
1a91d914 | 3582 | for (gphi_iterator gpi = gsi_start_phis (succbb); |
3583 | !gsi_end_p (gpi); gsi_next (&gpi)) | |
0ba38440 | 3584 | { |
1a91d914 | 3585 | tree res = gimple_phi_result (gpi.phi ()); |
0ba38440 | 3586 | if (virtual_operand_p (res)) |
3587 | { | |
1a91d914 | 3588 | vphi = gpi.phi (); |
0ba38440 | 3589 | vuse = res; |
3590 | break; | |
3591 | } | |
3592 | } | |
3593 | ||
07428872 | 3594 | dgsi = gsi_after_labels (succbb); |
3595 | gsi = gsi_last_bb (bb); | |
3596 | for (gsi_prev (&gsi); !gsi_end_p (gsi); gsi_prev (&gsi)) | |
3597 | { | |
42acab1c | 3598 | gimple *stmt = gsi_stmt (gsi); |
896a0c42 | 3599 | tree lhs; |
07428872 | 3600 | if (is_gimple_debug (stmt)) |
3601 | continue; | |
3602 | if (gimple_code (stmt) == GIMPLE_LABEL) | |
3603 | break; | |
896a0c42 | 3604 | lhs = gimple_assign_lhs (stmt); |
3605 | /* Unfortunately we don't have dominance info updated at this | |
3606 | point, so checking if | |
3607 | dominated_by_p (CDI_DOMINATORS, succbb, | |
3608 | gimple_bb (SSA_NAME_DEF_STMT (TREE_OPERAND (lhs, 0))) | |
3609 | would be too costly. Thus, avoid sinking any clobbers that | |
3610 | refer to non-(D) SSA_NAMEs. */ | |
3611 | if (TREE_CODE (lhs) == MEM_REF | |
3612 | && TREE_CODE (TREE_OPERAND (lhs, 0)) == SSA_NAME | |
3613 | && !SSA_NAME_IS_DEFAULT_DEF (TREE_OPERAND (lhs, 0))) | |
3614 | { | |
0ba38440 | 3615 | unlink_stmt_vdef (stmt); |
896a0c42 | 3616 | gsi_remove (&gsi, true); |
3617 | release_defs (stmt); | |
3618 | continue; | |
3619 | } | |
0ba38440 | 3620 | |
3621 | /* As we do not change stmt order when sinking across a | |
3622 | forwarder edge we can keep virtual operands in place. */ | |
07428872 | 3623 | gsi_remove (&gsi, false); |
0ba38440 | 3624 | gsi_insert_before (&dgsi, stmt, GSI_NEW_STMT); |
3625 | ||
3626 | /* But adjust virtual operands if we sunk across a PHI node. */ | |
3627 | if (vuse) | |
3628 | { | |
42acab1c | 3629 | gimple *use_stmt; |
0ba38440 | 3630 | imm_use_iterator iter; |
3631 | use_operand_p use_p; | |
3632 | FOR_EACH_IMM_USE_STMT (use_stmt, iter, vuse) | |
3633 | FOR_EACH_IMM_USE_ON_STMT (use_p, iter) | |
3634 | SET_USE (use_p, gimple_vdef (stmt)); | |
587a19f1 | 3635 | if (SSA_NAME_OCCURS_IN_ABNORMAL_PHI (vuse)) |
3636 | { | |
3637 | SSA_NAME_OCCURS_IN_ABNORMAL_PHI (gimple_vdef (stmt)) = 1; | |
3638 | SSA_NAME_OCCURS_IN_ABNORMAL_PHI (vuse) = 0; | |
3639 | } | |
0ba38440 | 3640 | /* Adjust the incoming virtual operand. */ |
3641 | SET_USE (PHI_ARG_DEF_PTR_FROM_EDGE (vphi, succe), gimple_vuse (stmt)); | |
3642 | SET_USE (gimple_vuse_op (stmt), vuse); | |
3643 | } | |
8aacb2c5 | 3644 | /* If there isn't a single predecessor but no virtual PHI node |
3645 | arrange for virtual operands to be renamed. */ | |
3646 | else if (gimple_vuse_op (stmt) != NULL_USE_OPERAND_P | |
3647 | && !single_pred_p (succbb)) | |
3648 | { | |
3649 | /* In this case there will be no use of the VDEF of this stmt. | |
3650 | ??? Unless this is a secondary opportunity and we have not | |
3651 | removed unreachable blocks yet, so we cannot assert this. | |
3652 | Which also means we will end up renaming too many times. */ | |
3653 | SET_USE (gimple_vuse_op (stmt), gimple_vop (cfun)); | |
3654 | mark_virtual_operands_for_renaming (cfun); | |
3655 | todo |= TODO_update_ssa_only_virtuals; | |
3656 | } | |
07428872 | 3657 | } |
3658 | ||
8aacb2c5 | 3659 | return todo; |
07428872 | 3660 | } |
3661 | ||
778f5bdd | 3662 | /* At the end of inlining, we can lower EH_DISPATCH. Return true when |
3663 | we have found some duplicate labels and removed some edges. */ | |
3bd82487 | 3664 | |
778f5bdd | 3665 | static bool |
1a91d914 | 3666 | lower_eh_dispatch (basic_block src, geh_dispatch *stmt) |
3bd82487 | 3667 | { |
e38def9c | 3668 | gimple_stmt_iterator gsi; |
3669 | int region_nr; | |
3670 | eh_region r; | |
3671 | tree filter, fn; | |
42acab1c | 3672 | gimple *x; |
778f5bdd | 3673 | bool redirected = false; |
3bd82487 | 3674 | |
e38def9c | 3675 | region_nr = gimple_eh_dispatch_region (stmt); |
3676 | r = get_eh_region_from_number (region_nr); | |
3bd82487 | 3677 | |
e38def9c | 3678 | gsi = gsi_last_bb (src); |
3bd82487 | 3679 | |
e38def9c | 3680 | switch (r->type) |
3bd82487 | 3681 | { |
e38def9c | 3682 | case ERT_TRY: |
3683 | { | |
c2078b80 | 3684 | auto_vec<tree> labels; |
e38def9c | 3685 | tree default_label = NULL; |
3686 | eh_catch c; | |
3687 | edge_iterator ei; | |
3688 | edge e; | |
431205b7 | 3689 | hash_set<tree> seen_values; |
e38def9c | 3690 | |
3691 | /* Collect the labels for a switch. Zero the post_landing_pad | |
3692 | field becase we'll no longer have anything keeping these labels | |
9d75589a | 3693 | in existence and the optimizer will be free to merge these |
e38def9c | 3694 | blocks at will. */ |
3695 | for (c = r->u.eh_try.first_catch; c ; c = c->next_catch) | |
3696 | { | |
3697 | tree tp_node, flt_node, lab = c->label; | |
778f5bdd | 3698 | bool have_label = false; |
3bd82487 | 3699 | |
e38def9c | 3700 | c->label = NULL; |
3701 | tp_node = c->type_list; | |
3702 | flt_node = c->filter_list; | |
3703 | ||
3704 | if (tp_node == NULL) | |
3705 | { | |
3706 | default_label = lab; | |
3707 | break; | |
3708 | } | |
3709 | do | |
3710 | { | |
778f5bdd | 3711 | /* Filter out duplicate labels that arise when this handler |
3712 | is shadowed by an earlier one. When no labels are | |
3713 | attached to the handler anymore, we remove | |
3714 | the corresponding edge and then we delete unreachable | |
3715 | blocks at the end of this pass. */ | |
431205b7 | 3716 | if (! seen_values.contains (TREE_VALUE (flt_node))) |
778f5bdd | 3717 | { |
b6e3dd65 | 3718 | tree t = build_case_label (TREE_VALUE (flt_node), |
3719 | NULL, lab); | |
f1f41a6c | 3720 | labels.safe_push (t); |
431205b7 | 3721 | seen_values.add (TREE_VALUE (flt_node)); |
778f5bdd | 3722 | have_label = true; |
3723 | } | |
e38def9c | 3724 | |
3725 | tp_node = TREE_CHAIN (tp_node); | |
3726 | flt_node = TREE_CHAIN (flt_node); | |
3727 | } | |
3728 | while (tp_node); | |
778f5bdd | 3729 | if (! have_label) |
3730 | { | |
0fb4f2ce | 3731 | remove_edge (find_edge (src, label_to_block (cfun, lab))); |
778f5bdd | 3732 | redirected = true; |
3733 | } | |
e38def9c | 3734 | } |
3735 | ||
3736 | /* Clean up the edge flags. */ | |
3737 | FOR_EACH_EDGE (e, ei, src->succs) | |
3738 | { | |
3739 | if (e->flags & EDGE_FALLTHRU) | |
3740 | { | |
3741 | /* If there was no catch-all, use the fallthru edge. */ | |
3742 | if (default_label == NULL) | |
3743 | default_label = gimple_block_label (e->dest); | |
3744 | e->flags &= ~EDGE_FALLTHRU; | |
3745 | } | |
3746 | } | |
3747 | gcc_assert (default_label != NULL); | |
3748 | ||
3749 | /* Don't generate a switch if there's only a default case. | |
3750 | This is common in the form of try { A; } catch (...) { B; }. */ | |
f1f41a6c | 3751 | if (!labels.exists ()) |
e38def9c | 3752 | { |
3753 | e = single_succ_edge (src); | |
3754 | e->flags |= EDGE_FALLTHRU; | |
3755 | } | |
3756 | else | |
3757 | { | |
b9a16870 | 3758 | fn = builtin_decl_implicit (BUILT_IN_EH_FILTER); |
bad12c62 | 3759 | x = gimple_build_call (fn, 1, build_int_cst (integer_type_node, |
3760 | region_nr)); | |
f9e245b2 | 3761 | filter = create_tmp_var (TREE_TYPE (TREE_TYPE (fn))); |
e38def9c | 3762 | filter = make_ssa_name (filter, x); |
3763 | gimple_call_set_lhs (x, filter); | |
fe02bede | 3764 | gimple_set_location (x, gimple_location (stmt)); |
e38def9c | 3765 | gsi_insert_before (&gsi, x, GSI_SAME_STMT); |
3766 | ||
3767 | /* Turn the default label into a default case. */ | |
b6e3dd65 | 3768 | default_label = build_case_label (NULL, NULL, default_label); |
e38def9c | 3769 | sort_case_labels (labels); |
3770 | ||
49a70175 | 3771 | x = gimple_build_switch (filter, default_label, labels); |
fe02bede | 3772 | gimple_set_location (x, gimple_location (stmt)); |
e38def9c | 3773 | gsi_insert_before (&gsi, x, GSI_SAME_STMT); |
e38def9c | 3774 | } |
3775 | } | |
3776 | break; | |
3777 | ||
3778 | case ERT_ALLOWED_EXCEPTIONS: | |
3779 | { | |
3780 | edge b_e = BRANCH_EDGE (src); | |
3781 | edge f_e = FALLTHRU_EDGE (src); | |
3782 | ||
b9a16870 | 3783 | fn = builtin_decl_implicit (BUILT_IN_EH_FILTER); |
bad12c62 | 3784 | x = gimple_build_call (fn, 1, build_int_cst (integer_type_node, |
3785 | region_nr)); | |
f9e245b2 | 3786 | filter = create_tmp_var (TREE_TYPE (TREE_TYPE (fn))); |
e38def9c | 3787 | filter = make_ssa_name (filter, x); |
3788 | gimple_call_set_lhs (x, filter); | |
fe02bede | 3789 | gimple_set_location (x, gimple_location (stmt)); |
e38def9c | 3790 | gsi_insert_before (&gsi, x, GSI_SAME_STMT); |
3791 | ||
3792 | r->u.allowed.label = NULL; | |
3793 | x = gimple_build_cond (EQ_EXPR, filter, | |
3794 | build_int_cst (TREE_TYPE (filter), | |
3795 | r->u.allowed.filter), | |
3796 | NULL_TREE, NULL_TREE); | |
3797 | gsi_insert_before (&gsi, x, GSI_SAME_STMT); | |
3798 | ||
3799 | b_e->flags = b_e->flags | EDGE_TRUE_VALUE; | |
3800 | f_e->flags = (f_e->flags & ~EDGE_FALLTHRU) | EDGE_FALSE_VALUE; | |
3801 | } | |
3802 | break; | |
3803 | ||
3804 | default: | |
3805 | gcc_unreachable (); | |
3bd82487 | 3806 | } |
e38def9c | 3807 | |
3808 | /* Replace the EH_DISPATCH with the SWITCH or COND generated above. */ | |
3809 | gsi_remove (&gsi, true); | |
778f5bdd | 3810 | return redirected; |
3bd82487 | 3811 | } |
3812 | ||
7620bc82 | 3813 | namespace { |
3814 | ||
3815 | const pass_data pass_data_lower_eh_dispatch = | |
65b0537f | 3816 | { |
3817 | GIMPLE_PASS, /* type */ | |
3818 | "ehdisp", /* name */ | |
3819 | OPTGROUP_NONE, /* optinfo_flags */ | |
65b0537f | 3820 | TV_TREE_EH, /* tv_id */ |
3821 | PROP_gimple_lcf, /* properties_required */ | |
3822 | 0, /* properties_provided */ | |
3823 | 0, /* properties_destroyed */ | |
3824 | 0, /* todo_flags_start */ | |
8b88439e | 3825 | 0, /* todo_flags_finish */ |
65b0537f | 3826 | }; |
3827 | ||
7620bc82 | 3828 | class pass_lower_eh_dispatch : public gimple_opt_pass |
65b0537f | 3829 | { |
3830 | public: | |
3831 | pass_lower_eh_dispatch (gcc::context *ctxt) | |
3832 | : gimple_opt_pass (pass_data_lower_eh_dispatch, ctxt) | |
3833 | {} | |
3834 | ||
3835 | /* opt_pass methods: */ | |
3836 | virtual bool gate (function *fun) { return fun->eh->region_tree != NULL; } | |
3837 | virtual unsigned int execute (function *); | |
3838 | ||
3839 | }; // class pass_lower_eh_dispatch | |
3840 | ||
3841 | unsigned | |
3842 | pass_lower_eh_dispatch::execute (function *fun) | |
e38def9c | 3843 | { |
3844 | basic_block bb; | |
07428872 | 3845 | int flags = 0; |
778f5bdd | 3846 | bool redirected = false; |
3bd82487 | 3847 | |
e38def9c | 3848 | assign_filter_values (); |
3d1eacdb | 3849 | |
65b0537f | 3850 | FOR_EACH_BB_FN (bb, fun) |
e38def9c | 3851 | { |
42acab1c | 3852 | gimple *last = last_stmt (bb); |
1227a337 | 3853 | if (last == NULL) |
3854 | continue; | |
3855 | if (gimple_code (last) == GIMPLE_EH_DISPATCH) | |
e38def9c | 3856 | { |
1a91d914 | 3857 | redirected |= lower_eh_dispatch (bb, |
3858 | as_a <geh_dispatch *> (last)); | |
07428872 | 3859 | flags |= TODO_update_ssa_only_virtuals; |
3860 | } | |
3861 | else if (gimple_code (last) == GIMPLE_RESX) | |
3862 | { | |
aac19106 | 3863 | if (stmt_can_throw_external (cfun, last)) |
07428872 | 3864 | optimize_clobbers (bb); |
3865 | else | |
3866 | flags |= sink_clobbers (bb); | |
e38def9c | 3867 | } |
3868 | } | |
3869 | ||
778f5bdd | 3870 | if (redirected) |
3a2c20a1 | 3871 | { |
3872 | free_dominance_info (CDI_DOMINATORS); | |
3873 | delete_unreachable_blocks (); | |
3874 | } | |
07428872 | 3875 | return flags; |
e38def9c | 3876 | } |
3877 | ||
7620bc82 | 3878 | } // anon namespace |
3879 | ||
cbe8bda8 | 3880 | gimple_opt_pass * |
3881 | make_pass_lower_eh_dispatch (gcc::context *ctxt) | |
3882 | { | |
3883 | return new pass_lower_eh_dispatch (ctxt); | |
3884 | } | |
e38def9c | 3885 | \f |
390f4a4b | 3886 | /* Walk statements, see what regions and, optionally, landing pads |
3887 | are really referenced. | |
3888 | ||
3889 | Returns in R_REACHABLEP an sbitmap with bits set for reachable regions, | |
3890 | and in LP_REACHABLE an sbitmap with bits set for reachable landing pads. | |
3891 | ||
3892 | Passing NULL for LP_REACHABLE is valid, in this case only reachable | |
3893 | regions are marked. | |
3894 | ||
3895 | The caller is responsible for freeing the returned sbitmaps. */ | |
e38def9c | 3896 | |
3897 | static void | |
390f4a4b | 3898 | mark_reachable_handlers (sbitmap *r_reachablep, sbitmap *lp_reachablep) |
e38def9c | 3899 | { |
3900 | sbitmap r_reachable, lp_reachable; | |
e38def9c | 3901 | basic_block bb; |
390f4a4b | 3902 | bool mark_landing_pads = (lp_reachablep != NULL); |
3903 | gcc_checking_assert (r_reachablep != NULL); | |
3bd82487 | 3904 | |
f1f41a6c | 3905 | r_reachable = sbitmap_alloc (cfun->eh->region_array->length ()); |
53c5d9d4 | 3906 | bitmap_clear (r_reachable); |
390f4a4b | 3907 | *r_reachablep = r_reachable; |
3908 | ||
3909 | if (mark_landing_pads) | |
3910 | { | |
3911 | lp_reachable = sbitmap_alloc (cfun->eh->lp_array->length ()); | |
3912 | bitmap_clear (lp_reachable); | |
3913 | *lp_reachablep = lp_reachable; | |
3914 | } | |
3915 | else | |
3916 | lp_reachable = NULL; | |
3bd82487 | 3917 | |
fc00614f | 3918 | FOR_EACH_BB_FN (bb, cfun) |
3bd82487 | 3919 | { |
d0ac3b8a | 3920 | gimple_stmt_iterator gsi; |
e38def9c | 3921 | |
3922 | for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi)) | |
3923 | { | |
42acab1c | 3924 | gimple *stmt = gsi_stmt (gsi); |
e38def9c | 3925 | |
390f4a4b | 3926 | if (mark_landing_pads) |
e38def9c | 3927 | { |
390f4a4b | 3928 | int lp_nr = lookup_stmt_eh_lp (stmt); |
3929 | ||
3930 | /* Negative LP numbers are MUST_NOT_THROW regions which | |
3931 | are not considered BB enders. */ | |
3932 | if (lp_nr < 0) | |
3933 | bitmap_set_bit (r_reachable, -lp_nr); | |
3934 | ||
3935 | /* Positive LP numbers are real landing pads, and BB enders. */ | |
3936 | else if (lp_nr > 0) | |
3937 | { | |
3938 | gcc_assert (gsi_one_before_end_p (gsi)); | |
3939 | eh_region region = get_eh_region_from_lp_number (lp_nr); | |
3940 | bitmap_set_bit (r_reachable, region->index); | |
3941 | bitmap_set_bit (lp_reachable, lp_nr); | |
3942 | } | |
e38def9c | 3943 | } |
4e392ca1 | 3944 | |
3945 | /* Avoid removing regions referenced from RESX/EH_DISPATCH. */ | |
3946 | switch (gimple_code (stmt)) | |
3947 | { | |
3948 | case GIMPLE_RESX: | |
1a91d914 | 3949 | bitmap_set_bit (r_reachable, |
3950 | gimple_resx_region (as_a <gresx *> (stmt))); | |
4e392ca1 | 3951 | break; |
3952 | case GIMPLE_EH_DISPATCH: | |
1a91d914 | 3953 | bitmap_set_bit (r_reachable, |
3954 | gimple_eh_dispatch_region ( | |
3955 | as_a <geh_dispatch *> (stmt))); | |
4e392ca1 | 3956 | break; |
9eb0aee3 | 3957 | case GIMPLE_CALL: |
3958 | if (gimple_call_builtin_p (stmt, BUILT_IN_EH_COPY_VALUES)) | |
3959 | for (int i = 0; i < 2; ++i) | |
3960 | { | |
3961 | tree rt = gimple_call_arg (stmt, i); | |
3962 | HOST_WIDE_INT ri = tree_to_shwi (rt); | |
3963 | ||
dd0b0596 | 3964 | gcc_assert (ri == (int)ri); |
9eb0aee3 | 3965 | bitmap_set_bit (r_reachable, ri); |
3966 | } | |
3967 | break; | |
4e392ca1 | 3968 | default: |
3969 | break; | |
3970 | } | |
e38def9c | 3971 | } |
3bd82487 | 3972 | } |
390f4a4b | 3973 | } |
3974 | ||
3975 | /* Remove unreachable handlers and unreachable landing pads. */ | |
3976 | ||
3977 | static void | |
3978 | remove_unreachable_handlers (void) | |
3979 | { | |
3980 | sbitmap r_reachable, lp_reachable; | |
3981 | eh_region region; | |
3982 | eh_landing_pad lp; | |
3983 | unsigned i; | |
3984 | ||
3985 | mark_reachable_handlers (&r_reachable, &lp_reachable); | |
e38def9c | 3986 | |
3987 | if (dump_file) | |
3bd82487 | 3988 | { |
e38def9c | 3989 | fprintf (dump_file, "Before removal of unreachable regions:\n"); |
3990 | dump_eh_tree (dump_file, cfun); | |
3991 | fprintf (dump_file, "Reachable regions: "); | |
53c5d9d4 | 3992 | dump_bitmap_file (dump_file, r_reachable); |
e38def9c | 3993 | fprintf (dump_file, "Reachable landing pads: "); |
53c5d9d4 | 3994 | dump_bitmap_file (dump_file, lp_reachable); |
3bd82487 | 3995 | } |
3996 | ||
390f4a4b | 3997 | if (dump_file) |
3998 | { | |
3999 | FOR_EACH_VEC_SAFE_ELT (cfun->eh->region_array, i, region) | |
4000 | if (region && !bitmap_bit_p (r_reachable, region->index)) | |
4001 | fprintf (dump_file, | |
4002 | "Removing unreachable region %d\n", | |
4003 | region->index); | |
4004 | } | |
4005 | ||
4006 | remove_unreachable_eh_regions (r_reachable); | |
3bd82487 | 4007 | |
390f4a4b | 4008 | FOR_EACH_VEC_SAFE_ELT (cfun->eh->lp_array, i, lp) |
4009 | if (lp && !bitmap_bit_p (lp_reachable, lp->index)) | |
e38def9c | 4010 | { |
4011 | if (dump_file) | |
390f4a4b | 4012 | fprintf (dump_file, |
4013 | "Removing unreachable landing pad %d\n", | |
4014 | lp->index); | |
e38def9c | 4015 | remove_eh_landing_pad (lp); |
4016 | } | |
48e1416a | 4017 | |
e38def9c | 4018 | if (dump_file) |
3bd82487 | 4019 | { |
e38def9c | 4020 | fprintf (dump_file, "\n\nAfter removal of unreachable regions:\n"); |
4021 | dump_eh_tree (dump_file, cfun); | |
4022 | fprintf (dump_file, "\n\n"); | |
3bd82487 | 4023 | } |
4024 | ||
e38def9c | 4025 | sbitmap_free (r_reachable); |
4026 | sbitmap_free (lp_reachable); | |
4027 | ||
382ecba7 | 4028 | if (flag_checking) |
4029 | verify_eh_tree (cfun); | |
e38def9c | 4030 | } |
4031 | ||
b00b0dc4 | 4032 | /* Remove unreachable handlers if any landing pads have been removed after |
4033 | last ehcleanup pass (due to gimple_purge_dead_eh_edges). */ | |
4034 | ||
4035 | void | |
4036 | maybe_remove_unreachable_handlers (void) | |
4037 | { | |
4038 | eh_landing_pad lp; | |
390f4a4b | 4039 | unsigned i; |
b00b0dc4 | 4040 | |
4041 | if (cfun->eh == NULL) | |
4042 | return; | |
390f4a4b | 4043 | |
4044 | FOR_EACH_VEC_SAFE_ELT (cfun->eh->lp_array, i, lp) | |
b00b0dc4 | 4045 | if (lp && lp->post_landing_pad) |
4046 | { | |
0fb4f2ce | 4047 | if (label_to_block (cfun, lp->post_landing_pad) == NULL) |
b00b0dc4 | 4048 | { |
4049 | remove_unreachable_handlers (); | |
4050 | return; | |
4051 | } | |
4052 | } | |
4053 | } | |
4054 | ||
e38def9c | 4055 | /* Remove regions that do not have landing pads. This assumes |
4056 | that remove_unreachable_handlers has already been run, and | |
390f4a4b | 4057 | that we've just manipulated the landing pads since then. |
4058 | ||
4059 | Preserve regions with landing pads and regions that prevent | |
4060 | exceptions from propagating further, even if these regions | |
4061 | are not reachable. */ | |
e38def9c | 4062 | |
4063 | static void | |
4064 | remove_unreachable_handlers_no_lp (void) | |
4065 | { | |
390f4a4b | 4066 | eh_region region; |
4b393c71 | 4067 | sbitmap r_reachable; |
390f4a4b | 4068 | unsigned i; |
4b393c71 | 4069 | |
390f4a4b | 4070 | mark_reachable_handlers (&r_reachable, /*lp_reachablep=*/NULL); |
4b393c71 | 4071 | |
390f4a4b | 4072 | FOR_EACH_VEC_SAFE_ELT (cfun->eh->region_array, i, region) |
4b393c71 | 4073 | { |
390f4a4b | 4074 | if (! region) |
4075 | continue; | |
4076 | ||
4077 | if (region->landing_pads != NULL | |
4078 | || region->type == ERT_MUST_NOT_THROW) | |
4079 | bitmap_set_bit (r_reachable, region->index); | |
4080 | ||
4081 | if (dump_file | |
4082 | && !bitmap_bit_p (r_reachable, region->index)) | |
4083 | fprintf (dump_file, | |
4084 | "Removing unreachable region %d\n", | |
4085 | region->index); | |
4b393c71 | 4086 | } |
e38def9c | 4087 | |
390f4a4b | 4088 | remove_unreachable_eh_regions (r_reachable); |
4b393c71 | 4089 | |
4090 | sbitmap_free (r_reachable); | |
3bd82487 | 4091 | } |
4092 | ||
/* Undo critical edge splitting on an EH landing pad.  Earlier, we
   optimistically split all sorts of edges, including EH edges.  The
   optimization passes in between may not have needed them; if not,
   we should undo the split.

   Recognize this case by having one EH edge incoming to the BB and
   one normal edge outgoing; BB should be empty apart from the
   post_landing_pad label.

   Note that this is slightly different from the empty handler case
   handled by cleanup_empty_eh, in that the actual handler may yet
   have actual code but the landing pad has been separated from the
   handler.  As such, cleanup_empty_eh relies on this transformation
   having been done first.

   Returns true if the CFG was changed.  */

static bool
unsplit_eh (eh_landing_pad lp)
{
  basic_block bb = label_to_block (cfun, lp->post_landing_pad);
  gimple_stmt_iterator gsi;
  edge e_in, e_out;

  /* Quickly check the edge counts on BB for singularity.  */
  if (!single_pred_p (bb) || !single_succ_p (bb))
    return false;
  e_in = single_pred_edge (bb);
  e_out = single_succ_edge (bb);

  /* Input edge must be EH and output edge must be normal.  */
  if ((e_in->flags & EDGE_EH) == 0 || (e_out->flags & EDGE_EH) != 0)
    return false;

  /* The block must be empty except for the labels and debug insns.  */
  gsi = gsi_after_labels (bb);
  if (!gsi_end_p (gsi) && is_gimple_debug (gsi_stmt (gsi)))
    gsi_next_nondebug (&gsi);
  if (!gsi_end_p (gsi))
    return false;

  /* The destination block must not already have a landing pad
     for a different region.  Labels come first in a block, so we
     stop scanning at the first non-label statement.  */
  for (gsi = gsi_start_bb (e_out->dest); !gsi_end_p (gsi); gsi_next (&gsi))
    {
      glabel *label_stmt = dyn_cast <glabel *> (gsi_stmt (gsi));
      tree lab;
      int lp_nr;

      if (!label_stmt)
	break;
      lab = gimple_label_label (label_stmt);
      lp_nr = EH_LANDING_PAD_NR (lab);
      if (lp_nr && get_eh_region_from_lp_number (lp_nr) != lp->region)
	return false;
    }

  /* The new destination block must not already be a destination of
     the source block, lest we merge fallthru and eh edges and get
     all sorts of confused.  */
  if (find_edge (e_in->src, e_out->dest))
    return false;

  /* ??? We can get degenerate phis due to cfg cleanups.  I would have
     thought this should have been cleaned up by a phicprop pass, but
     that doesn't appear to handle virtuals.  Propagate by hand.  */
  if (!gimple_seq_empty_p (phi_nodes (bb)))
    {
      for (gphi_iterator gpi = gsi_start_phis (bb); !gsi_end_p (gpi); )
	{
	  gimple *use_stmt;
	  gphi *phi = gpi.phi ();
	  tree lhs = gimple_phi_result (phi);
	  tree rhs = gimple_phi_arg_def (phi, 0);
	  use_operand_p use_p;
	  imm_use_iterator iter;

	  /* Replace every use of the PHI result with its single
	     (degenerate) argument.  */
	  FOR_EACH_IMM_USE_STMT (use_stmt, iter, lhs)
	    {
	      FOR_EACH_IMM_USE_ON_STMT (use_p, iter)
		SET_USE (use_p, rhs);
	    }

	  if (SSA_NAME_OCCURS_IN_ABNORMAL_PHI (lhs))
	    SSA_NAME_OCCURS_IN_ABNORMAL_PHI (rhs) = 1;

	  remove_phi_node (&gpi, true);
	}
    }

  if (dump_file && (dump_flags & TDF_DETAILS))
    fprintf (dump_file, "Unsplit EH landing pad %d to block %i.\n",
	     lp->index, e_out->dest->index);

  /* Redirect the edge.  Since redirect_eh_edge_1 expects to be moving
     a successor edge, humor it.  But do the real CFG change with the
     predecessor of E_OUT in order to preserve the ordering of arguments
     to the PHI nodes in E_OUT->DEST.  */
  redirect_eh_edge_1 (e_in, e_out->dest, false);
  redirect_edge_pred (e_out, e_in->src);
  e_out->flags = e_in->flags;
  e_out->probability = e_in->probability;
  remove_edge (e_in);

  return true;
}
3d1eacdb | 4197 | |
e38def9c | 4198 | /* Examine each landing pad block and see if it matches unsplit_eh. */ |
3d1eacdb | 4199 | |
e38def9c | 4200 | static bool |
4201 | unsplit_all_eh (void) | |
4202 | { | |
4203 | bool changed = false; | |
4204 | eh_landing_pad lp; | |
4205 | int i; | |
3d1eacdb | 4206 | |
f1f41a6c | 4207 | for (i = 1; vec_safe_iterate (cfun->eh->lp_array, i, &lp); ++i) |
e38def9c | 4208 | if (lp) |
4209 | changed |= unsplit_eh (lp); | |
4210 | ||
4211 | return changed; | |
4212 | } | |
4213 | ||
/* A subroutine of cleanup_empty_eh.  Redirect all EH edges incoming
   to OLD_BB to NEW_BB; return true on success, false on failure.

   OLD_BB_OUT is the edge into NEW_BB from OLD_BB, so if we miss any
   PHI variables from OLD_BB we can pick them up from OLD_BB_OUT.
   Virtual PHIs may be deleted and marked for renaming.  */

static bool
cleanup_empty_eh_merge_phis (basic_block new_bb, basic_block old_bb,
			     edge old_bb_out, bool change_region)
{
  gphi_iterator ngsi, ogsi;
  edge_iterator ei;
  edge e;
  /* SSA versions of OLD_BB PHI results we managed to match up.  */
  bitmap ophi_handled;

  /* The destination block must not be a regular successor for any
     of the preds of the landing pad.  Thus, avoid turning
        <..>
	|  \ EH
	|  <..>
	|  /
	<..>
     into
        <..>
	|  | EH
	<..>
     which CFG verification would choke on.  See PR45172 and PR51089.  */
  FOR_EACH_EDGE (e, ei, old_bb->preds)
    if (find_edge (e->src, new_bb))
      return false;

  /* Start from a clean slate of pending PHI argument maps.  */
  FOR_EACH_EDGE (e, ei, old_bb->preds)
    redirect_edge_var_map_clear (e);

  ophi_handled = BITMAP_ALLOC (NULL);

  /* First, iterate through the PHIs on NEW_BB and set up the edge_var_map
     for the edges we're going to move.  */
  for (ngsi = gsi_start_phis (new_bb); !gsi_end_p (ngsi); gsi_next (&ngsi))
    {
      gphi *ophi, *nphi = ngsi.phi ();
      tree nresult, nop;

      nresult = gimple_phi_result (nphi);
      nop = gimple_phi_arg_def (nphi, old_bb_out->dest_idx);

      /* Find the corresponding PHI in OLD_BB so we can forward-propagate
	 the source ssa_name.  */
      ophi = NULL;
      for (ogsi = gsi_start_phis (old_bb); !gsi_end_p (ogsi); gsi_next (&ogsi))
	{
	  ophi = ogsi.phi ();
	  if (gimple_phi_result (ophi) == nop)
	    break;
	  ophi = NULL;
	}

      /* If we did find the corresponding PHI, copy those inputs.  */
      if (ophi)
	{
	  /* If NOP is used somewhere else beyond phis in new_bb, give up.  */
	  if (!has_single_use (nop))
	    {
	      imm_use_iterator imm_iter;
	      use_operand_p use_p;

	      FOR_EACH_IMM_USE_FAST (use_p, imm_iter, nop)
		{
		  if (!gimple_debug_bind_p (USE_STMT (use_p))
		      && (gimple_code (USE_STMT (use_p)) != GIMPLE_PHI
			  || gimple_bb (USE_STMT (use_p)) != new_bb))
		    goto fail;
		}
	    }
	  bitmap_set_bit (ophi_handled, SSA_NAME_VERSION (nop));
	  /* Queue, for each incoming EH edge, the OLD_BB argument of
	     OPHI as the value NRESULT should receive over that edge.  */
	  FOR_EACH_EDGE (e, ei, old_bb->preds)
	    {
	      location_t oloc;
	      tree oop;

	      if ((e->flags & EDGE_EH) == 0)
		continue;
	      oop = gimple_phi_arg_def (ophi, e->dest_idx);
	      oloc = gimple_phi_arg_location (ophi, e->dest_idx);
	      redirect_edge_var_map_add (e, nresult, oop, oloc);
	    }
	}
      /* If we didn't find the PHI, if it's a real variable or a VOP, we know
	 from the fact that OLD_BB is tree_empty_eh_handler_p that the
	 variable is unchanged from input to the block and we can simply
	 re-use the input to NEW_BB from the OLD_BB_OUT edge.  */
      else
	{
	  location_t nloc
	    = gimple_phi_arg_location (nphi, old_bb_out->dest_idx);
	  FOR_EACH_EDGE (e, ei, old_bb->preds)
	    redirect_edge_var_map_add (e, nresult, nop, nloc);
	}
    }

  /* Second, verify that all PHIs from OLD_BB have been handled.  If not,
     we don't know what values from the other edges into NEW_BB to use.  */
  for (ogsi = gsi_start_phis (old_bb); !gsi_end_p (ogsi); gsi_next (&ogsi))
    {
      gphi *ophi = ogsi.phi ();
      tree oresult = gimple_phi_result (ophi);
      if (!bitmap_bit_p (ophi_handled, SSA_NAME_VERSION (oresult)))
	goto fail;
    }

  /* Finally, move the edges and update the PHIs.  */
  for (ei = ei_start (old_bb->preds); (e = ei_safe_edge (ei)); )
    if (e->flags & EDGE_EH)
      {
	/* ??? CFG manipulation routines do not try to update loop
	   form on edge redirection.  Do so manually here for now.  */
	/* If we redirect a loop entry or latch edge that will either create
	   a multiple entry loop or rotate the loop.  If the loops merge
	   we may have created a loop with multiple latches.
	   All of this isn't easily fixed thus cancel the affected loop
	   and mark the other loop as possibly having multiple latches.  */
	if (e->dest == e->dest->loop_father->header)
	  {
	    mark_loop_for_removal (e->dest->loop_father);
	    new_bb->loop_father->latch = NULL;
	    loops_state_set (LOOPS_MAY_HAVE_MULTIPLE_LATCHES);
	  }
	redirect_eh_edge_1 (e, new_bb, change_region);
	redirect_edge_succ (e, new_bb);
	flush_pending_stmts (e);
      }
    else
      ei_next (&ei);

  BITMAP_FREE (ophi_handled);
  return true;

 fail:
  /* Undo the partially-built edge_var_maps and release the bitmap.  */
  FOR_EACH_EDGE (e, ei, old_bb->preds)
    redirect_edge_var_map_clear (e);
  BITMAP_FREE (ophi_handled);
  return false;
}
4358 | ||
4359 | /* A subroutine of cleanup_empty_eh. Move a landing pad LP from its | |
4360 | old region to NEW_REGION at BB. */ | |
4361 | ||
4362 | static void | |
4363 | cleanup_empty_eh_move_lp (basic_block bb, edge e_out, | |
4364 | eh_landing_pad lp, eh_region new_region) | |
4365 | { | |
4366 | gimple_stmt_iterator gsi; | |
4367 | eh_landing_pad *pp; | |
4368 | ||
4369 | for (pp = &lp->region->landing_pads; *pp != lp; pp = &(*pp)->next_lp) | |
4370 | continue; | |
4371 | *pp = lp->next_lp; | |
4372 | ||
4373 | lp->region = new_region; | |
4374 | lp->next_lp = new_region->landing_pads; | |
4375 | new_region->landing_pads = lp; | |
4376 | ||
4377 | /* Delete the RESX that was matched within the empty handler block. */ | |
4378 | gsi = gsi_last_bb (bb); | |
bc8a8451 | 4379 | unlink_stmt_vdef (gsi_stmt (gsi)); |
e38def9c | 4380 | gsi_remove (&gsi, true); |
4381 | ||
4382 | /* Clean up E_OUT for the fallthru. */ | |
4383 | e_out->flags = (e_out->flags & ~EDGE_EH) | EDGE_FALLTHRU; | |
720cfc43 | 4384 | e_out->probability = profile_probability::always (); |
e38def9c | 4385 | } |
4386 | ||
4387 | /* A subroutine of cleanup_empty_eh. Handle more complex cases of | |
48e1416a | 4388 | unsplitting than unsplit_eh was prepared to handle, e.g. when |
e38def9c | 4389 | multiple incoming edges and phis are involved. */ |
4390 | ||
4391 | static bool | |
c57e3b9d | 4392 | cleanup_empty_eh_unsplit (basic_block bb, edge e_out, eh_landing_pad lp) |
e38def9c | 4393 | { |
4394 | gimple_stmt_iterator gsi; | |
e38def9c | 4395 | tree lab; |
4396 | ||
4397 | /* We really ought not have totally lost everything following | |
4398 | a landing pad label. Given that BB is empty, there had better | |
4399 | be a successor. */ | |
4400 | gcc_assert (e_out != NULL); | |
4401 | ||
c57e3b9d | 4402 | /* The destination block must not already have a landing pad |
4403 | for a different region. */ | |
e38def9c | 4404 | lab = NULL; |
4405 | for (gsi = gsi_start_bb (e_out->dest); !gsi_end_p (gsi); gsi_next (&gsi)) | |
4406 | { | |
1a91d914 | 4407 | glabel *stmt = dyn_cast <glabel *> (gsi_stmt (gsi)); |
c57e3b9d | 4408 | int lp_nr; |
4409 | ||
1a91d914 | 4410 | if (!stmt) |
e38def9c | 4411 | break; |
4412 | lab = gimple_label_label (stmt); | |
c57e3b9d | 4413 | lp_nr = EH_LANDING_PAD_NR (lab); |
4414 | if (lp_nr && get_eh_region_from_lp_number (lp_nr) != lp->region) | |
4415 | return false; | |
e38def9c | 4416 | } |
e38def9c | 4417 | |
4418 | /* Attempt to move the PHIs into the successor block. */ | |
c57e3b9d | 4419 | if (cleanup_empty_eh_merge_phis (e_out->dest, bb, e_out, false)) |
e38def9c | 4420 | { |
4421 | if (dump_file && (dump_flags & TDF_DETAILS)) | |
4422 | fprintf (dump_file, | |
c57e3b9d | 4423 | "Unsplit EH landing pad %d to block %i " |
4424 | "(via cleanup_empty_eh).\n", | |
4425 | lp->index, e_out->dest->index); | |
e38def9c | 4426 | return true; |
4427 | } | |
4428 | ||
4429 | return false; | |
4430 | } | |
4431 | ||
/* Return true if edge E_FIRST is part of an empty infinite loop
   or leads to such a loop through a series of single successor
   empty bbs.  */

static bool
infinite_empty_loop_p (edge e_first)
{
  bool inf_loop = false;
  edge e;

  /* A self-loop is trivially such a loop.  */
  if (e_first->dest == e_first->src)
    return true;

  /* Walk the chain of single-successor blocks, using the aux field
     as a visited marker; hitting a marked block means we found a
     cycle of empty blocks.  */
  e_first->src->aux = (void *) 1;
  for (e = e_first; single_succ_p (e->dest); e = single_succ_edge (e->dest))
    {
      gimple_stmt_iterator gsi;
      if (e->dest->aux)
	{
	  inf_loop = true;
	  break;
	}
      e->dest->aux = (void *) 1;
      /* Stop at the first block containing a real (non-label,
	 non-debug) statement: the chain is not empty past here.  */
      gsi = gsi_after_labels (e->dest);
      if (!gsi_end_p (gsi) && is_gimple_debug (gsi_stmt (gsi)))
	gsi_next_nondebug (&gsi);
      if (!gsi_end_p (gsi))
	break;
    }
  /* Clear the visited markers again before returning.  */
  e_first->src->aux = NULL;
  for (e = e_first; e->dest->aux; e = single_succ_edge (e->dest))
    e->dest->aux = NULL;

  return inf_loop;
}
4467 | ||
/* Examine the block associated with LP to determine if it's an empty
   handler for its EH region.  If so, attempt to redirect EH edges to
   an outer region.  Return true if the CFG was updated in any way.
   This is similar to jump forwarding, just across EH edges.  */

static bool
cleanup_empty_eh (eh_landing_pad lp)
{
  basic_block bb = label_to_block (cfun, lp->post_landing_pad);
  gimple_stmt_iterator gsi;
  gimple *resx;
  eh_region new_region;
  edge_iterator ei;
  edge e, e_out;
  bool has_non_eh_pred;
  bool ret = false;
  int new_lp_nr;

  /* There can be zero or one edges out of BB.  This is the quickest test.  */
  switch (EDGE_COUNT (bb->succs))
    {
    case 0:
      e_out = NULL;
      break;
    case 1:
      e_out = single_succ_edge (bb);
      break;
    default:
      return false;
    }

  /* If the block ends in a RESX, try to get rid of clobber statements
     first: either prove them dead (external throw) or sink them.  */
  gsi = gsi_last_nondebug_bb (bb);
  resx = gsi_stmt (gsi);
  if (resx && is_gimple_resx (resx))
    {
      if (stmt_can_throw_external (cfun, resx))
	optimize_clobbers (bb);
      else if (sink_clobbers (bb))
	ret = true;
    }

  gsi = gsi_after_labels (bb);

  /* Make sure to skip debug statements.  */
  if (!gsi_end_p (gsi) && is_gimple_debug (gsi_stmt (gsi)))
    gsi_next_nondebug (&gsi);

  /* If the block is totally empty, look for more unsplitting cases.  */
  if (gsi_end_p (gsi))
    {
      /* For the degenerate case of an infinite loop bail out.
	 If bb has no successors and is totally empty, which can happen e.g.
	 because of incorrect noreturn attribute, bail out too.  */
      if (e_out == NULL
	  || infinite_empty_loop_p (e_out))
	return ret;

      return ret | cleanup_empty_eh_unsplit (bb, e_out, lp);
    }

  /* The block should consist only of a single RESX statement, modulo a
     preceding call to __builtin_stack_restore if there is no outgoing
     edge, since the call can be eliminated in this case.  */
  resx = gsi_stmt (gsi);
  if (!e_out && gimple_call_builtin_p (resx, BUILT_IN_STACK_RESTORE))
    {
      gsi_next_nondebug (&gsi);
      resx = gsi_stmt (gsi);
    }
  if (!is_gimple_resx (resx))
    return ret;
  gcc_assert (gsi_one_nondebug_before_end_p (gsi));

  /* Determine if there are non-EH edges, or resx edges into the handler.  */
  has_non_eh_pred = false;
  FOR_EACH_EDGE (e, ei, bb->preds)
    if (!(e->flags & EDGE_EH))
      has_non_eh_pred = true;

  /* Find the handler that's outer of the empty handler by looking at
     where the RESX instruction was vectored.  */
  new_lp_nr = lookup_stmt_eh_lp (resx);
  new_region = get_eh_region_from_lp_number (new_lp_nr);

  /* If there's no destination region within the current function,
     redirection is trivial via removing the throwing statements from
     the EH region, removing the EH edges, and allowing the block
     to go unreachable.  */
  if (new_region == NULL)
    {
      gcc_assert (e_out == NULL);
      for (ei = ei_start (bb->preds); (e = ei_safe_edge (ei)); )
	if (e->flags & EDGE_EH)
	  {
	    gimple *stmt = last_stmt (e->src);
	    remove_stmt_from_eh_lp (stmt);
	    remove_edge (e);
	  }
	else
	  ei_next (&ei);
      goto succeed;
    }

  /* If the destination region is a MUST_NOT_THROW, allow the runtime
     to handle the abort and allow the blocks to go unreachable.  */
  if (new_region->type == ERT_MUST_NOT_THROW)
    {
      for (ei = ei_start (bb->preds); (e = ei_safe_edge (ei)); )
	if (e->flags & EDGE_EH)
	  {
	    gimple *stmt = last_stmt (e->src);
	    remove_stmt_from_eh_lp (stmt);
	    /* Re-home the throwing statement to the outer region.  */
	    add_stmt_to_eh_lp (stmt, new_lp_nr);
	    remove_edge (e);
	  }
	else
	  ei_next (&ei);
      goto succeed;
    }

  /* Try to redirect the EH edges and merge the PHIs into the destination
     landing pad block.  If the merge succeeds, we'll already have redirected
     all the EH edges.  The handler itself will go unreachable if there were
     no normal edges.  */
  if (cleanup_empty_eh_merge_phis (e_out->dest, bb, e_out, true))
    goto succeed;

  /* Finally, if all input edges are EH edges, then we can (potentially)
     reduce the number of transfers from the runtime by moving the landing
     pad from the original region to the new region.  This is a win when
     we remove the last CLEANUP region along a particular exception
     propagation path.  Since nothing changes except for the region with
     which the landing pad is associated, the PHI nodes do not need to be
     adjusted at all.  */
  if (!has_non_eh_pred)
    {
      cleanup_empty_eh_move_lp (bb, e_out, lp, new_region);
      if (dump_file && (dump_flags & TDF_DETAILS))
	fprintf (dump_file, "Empty EH handler %i moved to EH region %i.\n",
		 lp->index, new_region->index);

      /* ??? The CFG didn't change, but we may have rendered the
	 old EH region unreachable.  Trigger a cleanup there.  */
      return true;
    }

  return ret;

 succeed:
  if (dump_file && (dump_flags & TDF_DETAILS))
    fprintf (dump_file, "Empty EH handler %i removed.\n", lp->index);
  remove_eh_landing_pad (lp);
  return true;
}
4622 | ||
e38def9c | 4623 | /* Do a post-order traversal of the EH region tree. Examine each |
4624 | post_landing_pad block and see if we can eliminate it as empty. */ | |
4625 | ||
4626 | static bool | |
4627 | cleanup_all_empty_eh (void) | |
4628 | { | |
4629 | bool changed = false; | |
4630 | eh_landing_pad lp; | |
4631 | int i; | |
4632 | ||
f1f41a6c | 4633 | for (i = 1; vec_safe_iterate (cfun->eh->lp_array, i, &lp); ++i) |
e38def9c | 4634 | if (lp) |
4635 | changed |= cleanup_empty_eh (lp); | |
4636 | ||
4637 | return changed; | |
4638 | } | |
4c5fcca6 | 4639 | |
4640 | /* Perform cleanups and lowering of exception handling | |
4641 | 1) cleanups regions with handlers doing nothing are optimized out | |
4642 | 2) MUST_NOT_THROW regions that became dead because of 1) are optimized out | |
4643 | 3) Info about regions that are containing instructions, and regions | |
4644 | reachable via local EH edges is collected | |
c31fb425 | 4645 | 4) Eh tree is pruned for regions no longer necessary. |
e38def9c | 4646 | |
4647 | TODO: Push MUST_NOT_THROW regions to the root of the EH tree. | |
4648 | Unify those that have the same failure decl and locus. | |
4649 | */ | |
4c5fcca6 | 4650 | |
4651 | static unsigned int | |
15100018 | 4652 | execute_cleanup_eh_1 (void) |
4c5fcca6 | 4653 | { |
e38def9c | 4654 | /* Do this first: unsplit_all_eh and cleanup_all_empty_eh can die |
4655 | looking up unreachable landing pads. */ | |
4656 | remove_unreachable_handlers (); | |
4c5fcca6 | 4657 | |
e38def9c | 4658 | /* Watch out for the region tree vanishing due to all unreachable. */ |
6b3688f5 | 4659 | if (cfun->eh->region_tree) |
4c5fcca6 | 4660 | { |
e38def9c | 4661 | bool changed = false; |
4c5fcca6 | 4662 | |
6b3688f5 | 4663 | if (optimize) |
4664 | changed |= unsplit_all_eh (); | |
e38def9c | 4665 | changed |= cleanup_all_empty_eh (); |
4666 | ||
4667 | if (changed) | |
48d5ef93 | 4668 | { |
4669 | free_dominance_info (CDI_DOMINATORS); | |
4670 | free_dominance_info (CDI_POST_DOMINATORS); | |
4c5fcca6 | 4671 | |
e38def9c | 4672 | /* We delayed all basic block deletion, as we may have performed |
4673 | cleanups on EH edges while non-EH edges were still present. */ | |
4674 | delete_unreachable_blocks (); | |
4c5fcca6 | 4675 | |
e38def9c | 4676 | /* We manipulated the landing pads. Remove any region that no |
4677 | longer has a landing pad. */ | |
4678 | remove_unreachable_handlers_no_lp (); | |
4679 | ||
4680 | return TODO_cleanup_cfg | TODO_update_ssa_only_virtuals; | |
4681 | } | |
4c5fcca6 | 4682 | } |
4683 | ||
e38def9c | 4684 | return 0; |
4685 | } | |
4686 | ||
namespace {

/* Pass descriptor for the "ehcleanup" GIMPLE pass.  */

const pass_data pass_data_cleanup_eh =
{
  GIMPLE_PASS, /* type */
  "ehcleanup", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_TREE_EH, /* tv_id */
  PROP_gimple_lcf, /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};

class pass_cleanup_eh : public gimple_opt_pass
{
public:
  pass_cleanup_eh (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_cleanup_eh, ctxt)
  {}

  /* opt_pass methods: */
  opt_pass * clone () { return new pass_cleanup_eh (m_ctxt); }
  /* Only run when the function actually has an EH region tree.  */
  virtual bool gate (function *fun)
    {
      return fun->eh != NULL && fun->eh->region_tree != NULL;
    }

  virtual unsigned int execute (function *);

}; // class pass_cleanup_eh

/* Main entry point of the pass; delegates to execute_cleanup_eh_1 and
   then drops the EH personality routine if it is no longer needed.  */

unsigned int
pass_cleanup_eh::execute (function *fun)
{
  int ret = execute_cleanup_eh_1 ();

  /* If the function no longer needs an EH personality routine
     clear it.  This exposes cross-language inlining opportunities
     and avoids references to a never defined personality routine.  */
  if (DECL_FUNCTION_PERSONALITY (current_function_decl)
      && function_needs_eh_personality (fun) != eh_personality_lang)
    DECL_FUNCTION_PERSONALITY (current_function_decl) = NULL_TREE;

  return ret;
}

} // anon namespace

/* Factory function used by the pass manager to instantiate the pass.  */

gimple_opt_pass *
make_pass_cleanup_eh (gcc::context *ctxt)
{
  return new pass_cleanup_eh (ctxt);
}
e38def9c | 4742 | \f |
62c34df8 | 4743 | /* Disable warnings about missing quoting in GCC diagnostics for |
4744 | the verification errors. Their format strings don't follow GCC | |
4745 | diagnostic conventions but are only used for debugging. */ | |
4746 | #if __GNUC__ >= 10 | |
4747 | # pragma GCC diagnostic push | |
4748 | # pragma GCC diagnostic ignored "-Wformat-diag" | |
4749 | #endif | |
4750 | ||
/* Verify that BB containing STMT as the last statement, has precisely the
   edge that make_eh_edges would create.  Return true if a problem was
   found, false if everything checks out.  */

DEBUG_FUNCTION bool
verify_eh_edges (gimple *stmt)
{
  basic_block bb = gimple_bb (stmt);
  eh_landing_pad lp = NULL;
  int lp_nr;
  edge_iterator ei;
  edge e, eh_edge;

  lp_nr = lookup_stmt_eh_lp (stmt);
  if (lp_nr > 0)
    lp = get_eh_landing_pad_from_number (lp_nr);

  /* Locate the (at most one) EH edge leaving BB.  */
  eh_edge = NULL;
  FOR_EACH_EDGE (e, ei, bb->succs)
    {
      if (e->flags & EDGE_EH)
	{
	  if (eh_edge)
	    {
	      error ("BB %i has multiple EH edges", bb->index);
	      return true;
	    }
	  else
	    eh_edge = e;
	}
    }

  /* No landing pad: there must be no EH edge either.  */
  if (lp == NULL)
    {
      if (eh_edge)
	{
	  error ("BB %i cannot throw but has an EH edge", bb->index);
	  return true;
	}
      return false;
    }

  if (!stmt_could_throw_p (cfun, stmt))
    {
      error ("BB %i last statement has incorrectly set lp", bb->index);
      return true;
    }

  if (eh_edge == NULL)
    {
      error ("BB %i is missing an EH edge", bb->index);
      return true;
    }

  /* The EH edge must target the landing pad's post-landing-pad block.  */
  if (eh_edge->dest != label_to_block (cfun, lp->post_landing_pad))
    {
      error ("Incorrect EH edge %i->%i", bb->index, eh_edge->dest->index);
      return true;
    }

  return false;
}
4812 | ||
/* Similarly, but handle GIMPLE_EH_DISPATCH specifically.  Return true
   if a problem was found, false otherwise.  */

DEBUG_FUNCTION bool
verify_eh_dispatch_edge (geh_dispatch *stmt)
{
  eh_region r;
  eh_catch c;
  basic_block src, dst;
  bool want_fallthru = true;
  edge_iterator ei;
  edge e, fall_edge;

  r = get_eh_region_from_number (gimple_eh_dispatch_region (stmt));
  src = gimple_bb (stmt);

  /* The aux fields are used below as "expected edge" markers; they
     must start out clear.  */
  FOR_EACH_EDGE (e, ei, src->succs)
    gcc_assert (e->aux == NULL);

  switch (r->type)
    {
    case ERT_TRY:
      /* Each catch handler must be reachable by an edge; mark the
	 edges we expect via aux.  */
      for (c = r->u.eh_try.first_catch; c ; c = c->next_catch)
	{
	  dst = label_to_block (cfun, c->label);
	  e = find_edge (src, dst);
	  if (e == NULL)
	    {
	      error ("BB %i is missing an edge", src->index);
	      return true;
	    }
	  e->aux = (void *)e;

	  /* A catch-all handler doesn't have a fallthru.  */
	  if (c->type_list == NULL)
	    {
	      want_fallthru = false;
	      break;
	    }
	}
      break;

    case ERT_ALLOWED_EXCEPTIONS:
      dst = label_to_block (cfun, r->u.allowed.label);
      e = find_edge (src, dst);
      if (e == NULL)
	{
	  error ("BB %i is missing an edge", src->index);
	  return true;
	}
      e->aux = (void *)e;
      break;

    default:
      gcc_unreachable ();
    }

  /* Every outgoing edge must be either the single fallthru or one of
     the marked handler edges; clear the markers as we go.  */
  fall_edge = NULL;
  FOR_EACH_EDGE (e, ei, src->succs)
    {
      if (e->flags & EDGE_FALLTHRU)
	{
	  if (fall_edge != NULL)
	    {
	      error ("BB %i too many fallthru edges", src->index);
	      return true;
	    }
	  fall_edge = e;
	}
      else if (e->aux)
	e->aux = NULL;
      else
	{
	  error ("BB %i has incorrect edge", src->index);
	  return true;
	}
    }
  if ((fall_edge != NULL) ^ want_fallthru)
    {
      error ("BB %i has incorrect fallthru edge", src->index);
      return true;
    }

  return false;
}
62c34df8 | 4897 | |
4898 | #if __GNUC__ >= 10 | |
4899 | # pragma GCC diagnostic pop | |
4900 | #endif |