/* Exception handling semantics and decomposition for trees.
   Copyright (C) 2003-2019 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 3, or (at your option)
any later version.

GCC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "backend.h"
#include "rtl.h"
#include "tree.h"
#include "gimple.h"
#include "cfghooks.h"
#include "tree-pass.h"
#include "ssa.h"
#include "cgraph.h"
#include "diagnostic-core.h"
#include "fold-const.h"
#include "calls.h"
#include "except.h"
#include "cfganal.h"
#include "cfgcleanup.h"
#include "tree-eh.h"
#include "gimple-iterator.h"
#include "tree-cfg.h"
#include "tree-into-ssa.h"
#include "tree-ssa.h"
#include "tree-inline.h"
#include "langhooks.h"
#include "cfgloop.h"
#include "gimple-low.h"
#include "stringpool.h"
#include "attribs.h"
#include "asan.h"
#include "gimplify.h"

/* In some instances a tree and a gimple statement need to be stored in
   the same table, e.g. in hash tables.  This union makes that possible.  */
typedef union {tree *tp; tree t; gimple *g;} treemple;

/* Misc functions used in this file.  */

/* Remember and lookup EH landing pad data for arbitrary statements.
   Really this means any statement that could_throw_p.  We could
   stuff this information into the stmt_ann data structure, but:

   (1) We absolutely rely on this information being kept until
   we get to rtl.  Once we're done with lowering here, if we lose
   the information there's no way to recover it!

   (2) There are many more statements that *cannot* throw as
   compared to those that can.  We should be saving some amount
   of space by only allocating memory for those that can throw.  */

/* Add statement T in function IFUN to landing pad NUM.  */

static void
add_stmt_to_eh_lp_fn (struct function *ifun, gimple *t, int num)
{
  gcc_assert (num != 0);

  if (!get_eh_throw_stmt_table (ifun))
    set_eh_throw_stmt_table (ifun, hash_map<gimple *, int>::create_ggc (31));

  gcc_assert (!get_eh_throw_stmt_table (ifun)->put (t, num));
}

/* Add statement T in the current function (cfun) to EH landing pad NUM.  */

void
add_stmt_to_eh_lp (gimple *t, int num)
{
  add_stmt_to_eh_lp_fn (cfun, t, num);
}

/* Add statement T to the single EH landing pad in REGION.  */

static void
record_stmt_eh_region (eh_region region, gimple *t)
{
  if (region == NULL)
    return;
  if (region->type == ERT_MUST_NOT_THROW)
    add_stmt_to_eh_lp_fn (cfun, t, -region->index);
  else
    {
      eh_landing_pad lp = region->landing_pads;
      if (lp == NULL)
        lp = gen_eh_landing_pad (region);
      else
        gcc_assert (lp->next_lp == NULL);
      add_stmt_to_eh_lp_fn (cfun, t, lp->index);
    }
}


/* Remove statement T in function IFUN from its EH landing pad.  */

bool
remove_stmt_from_eh_lp_fn (struct function *ifun, gimple *t)
{
  if (!get_eh_throw_stmt_table (ifun))
    return false;

  if (!get_eh_throw_stmt_table (ifun)->get (t))
    return false;

  get_eh_throw_stmt_table (ifun)->remove (t);
  return true;
}


/* Remove statement T in the current function (cfun) from its
   EH landing pad.  */

bool
remove_stmt_from_eh_lp (gimple *t)
{
  return remove_stmt_from_eh_lp_fn (cfun, t);
}

/* Determine if statement T is inside an EH region in function IFUN.
   Positive numbers indicate a landing pad index; negative numbers
   indicate a MUST_NOT_THROW region index; zero indicates that the
   statement is not recorded in the region table.  */

int
lookup_stmt_eh_lp_fn (struct function *ifun, gimple *t)
{
  if (ifun->eh->throw_stmt_table == NULL)
    return 0;

  int *lp_nr = ifun->eh->throw_stmt_table->get (t);
  return lp_nr ? *lp_nr : 0;
}

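/* As an illustrative sketch (not code from this file), a caller
   typically decodes the result along these lines:

     int lp_nr = lookup_stmt_eh_lp (stmt);
     if (lp_nr > 0)
       ... stmt may throw to landing pad LP_NR ...
     else if (lp_nr < 0)
       ... stmt is inside MUST_NOT_THROW region -LP_NR ...
     else
       ... stmt is not recorded and cannot throw ...  */
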
/* Likewise, but always use the current function.  */

int
lookup_stmt_eh_lp (gimple *t)
{
  /* We can get called from initialized data when -fnon-call-exceptions
     is on; prevent crash.  */
  if (!cfun)
    return 0;
  return lookup_stmt_eh_lp_fn (cfun, t);
}
75a70cf9 163/* First pass of EH node decomposition. Build up a tree of GIMPLE_TRY_FINALLY
4ee9c684 164 nodes and LABEL_DECL nodes. We will use this during the second phase to
165 determine if a goto leaves the body of a TRY_FINALLY_EXPR node. */
166
167struct finally_tree_node
168{
75a70cf9 169 /* When storing a GIMPLE_TRY, we have to record a gimple. However
170 when deciding whether a GOTO to a certain LABEL_DECL (which is a
171 tree) leaves the TRY block, its necessary to record a tree in
172 this field. Thus a treemple is used. */
e38def9c 173 treemple child;
1a91d914 174 gtry *parent;
4ee9c684 175};
176
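/* An illustrative example: given

     try { ... goto over; ... } finally { cleanup (); }
     over:;

   the label "over" is recorded with the region enclosing the whole
   construct as its parent, so walking up the parent chain from "over"
   never reaches this GIMPLE_TRY.  That is how the second phase detects
   that the goto leaves the try/finally and must be routed through the
   cleanup.  */
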
/* Hashtable helpers.  */

struct finally_tree_hasher : free_ptr_hash <finally_tree_node>
{
  static inline hashval_t hash (const finally_tree_node *);
  static inline bool equal (const finally_tree_node *,
                            const finally_tree_node *);
};

inline hashval_t
finally_tree_hasher::hash (const finally_tree_node *v)
{
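  /* Hash on the stored pointer.  Shift away the low-order bits, which
     carry little information because of pointer alignment.  */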
  return (intptr_t)v->child.t >> 4;
}

inline bool
finally_tree_hasher::equal (const finally_tree_node *v,
                            const finally_tree_node *c)
{
  return v->child.t == c->child.t;
}

/* Note that this table is *not* marked GTY.  It is short-lived.  */
static hash_table<finally_tree_hasher> *finally_tree;

static void
record_in_finally_tree (treemple child, gtry *parent)
{
  struct finally_tree_node *n;
  finally_tree_node **slot;

  n = XNEW (struct finally_tree_node);
  n->child = child;
  n->parent = parent;

  slot = finally_tree->find_slot (n, INSERT);
  gcc_assert (!*slot);
  *slot = n;
}

static void
collect_finally_tree (gimple *stmt, gtry *region);

/* Go through the gimple sequence.  Works with collect_finally_tree to
   record all GIMPLE_LABEL and GIMPLE_TRY statements.  */

static void
collect_finally_tree_1 (gimple_seq seq, gtry *region)
{
  gimple_stmt_iterator gsi;

  for (gsi = gsi_start (seq); !gsi_end_p (gsi); gsi_next (&gsi))
    collect_finally_tree (gsi_stmt (gsi), region);
}

static void
collect_finally_tree (gimple *stmt, gtry *region)
{
  treemple temp;

  switch (gimple_code (stmt))
    {
    case GIMPLE_LABEL:
      temp.t = gimple_label_label (as_a <glabel *> (stmt));
      record_in_finally_tree (temp, region);
      break;

    case GIMPLE_TRY:
      if (gimple_try_kind (stmt) == GIMPLE_TRY_FINALLY)
        {
          temp.g = stmt;
          record_in_finally_tree (temp, region);
          collect_finally_tree_1 (gimple_try_eval (stmt),
                                  as_a <gtry *> (stmt));
          collect_finally_tree_1 (gimple_try_cleanup (stmt), region);
        }
      else if (gimple_try_kind (stmt) == GIMPLE_TRY_CATCH)
        {
          collect_finally_tree_1 (gimple_try_eval (stmt), region);
          collect_finally_tree_1 (gimple_try_cleanup (stmt), region);
        }
      break;

    case GIMPLE_CATCH:
      collect_finally_tree_1 (gimple_catch_handler (as_a <gcatch *> (stmt)),
                              region);
      break;

    case GIMPLE_EH_FILTER:
      collect_finally_tree_1 (gimple_eh_filter_failure (stmt), region);
      break;

    case GIMPLE_EH_ELSE:
      {
        geh_else *eh_else_stmt = as_a <geh_else *> (stmt);
        collect_finally_tree_1 (gimple_eh_else_n_body (eh_else_stmt), region);
        collect_finally_tree_1 (gimple_eh_else_e_body (eh_else_stmt), region);
      }
      break;

    default:
      /* A type, a decl, or some kind of statement that we're not
         interested in.  Don't walk them.  */
      break;
    }
}

/* Use the finally tree to determine if a jump from START to TARGET
   would leave the try_finally node that START lives in.  */

static bool
outside_finally_tree (treemple start, gimple *target)
{
  struct finally_tree_node n, *p;

  do
    {
      n.child = start;
      p = finally_tree->find (&n);
      if (!p)
        return true;
      start.g = p->parent;
    }
  while (start.g != target);

  return false;
}

/* Second pass of EH node decomposition.  Actually transform the GIMPLE_TRY
   nodes into a set of gotos, magic labels, and eh regions.
   The eh region creation is straightforward, but frobbing all the gotos
   and such into shape isn't.  */

/* The sequence into which we record all EH stuff.  This will be
   placed at the end of the function when we're all done.  */
static gimple_seq eh_seq;

/* Record whether an EH region contains something that can throw,
   indexed by EH region number.  */
static bitmap eh_region_may_contain_throw_map;

/* The GOTO_QUEUE is an array of GIMPLE_GOTO and GIMPLE_RETURN
   statements that are seen to escape this GIMPLE_TRY_FINALLY node.
   The idea is to record a gimple statement for everything except for
   the conditionals, which get their labels recorded.  Since labels are
   of type 'tree', we need this node to store both gimple and tree
   objects.  REPL_STMT is the sequence used to replace the goto/return
   statement.  CONT_STMT is used to store the statement that allows
   the return/goto to jump to the original destination.  */

struct goto_queue_node
{
  treemple stmt;
  location_t location;
  gimple_seq repl_stmt;
  gimple *cont_stmt;
  int index;
  /* This is used when index >= 0 to indicate that stmt is a label (as
     opposed to a goto stmt).  */
  int is_label;
};

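/* As an illustrative sketch: for

     try { ... return x; ... } finally { cleanup (); }

   the GIMPLE_RETURN gets a queue entry whose REPL_STMT becomes a goto
   to a label placed in front of the lowered cleanup, and whose
   CONT_STMT is the original return, re-emitted where control should
   resume once the cleanup has run.  */
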
/* State of the world while lowering.  */

struct leh_state
{
  /* What's "current" while constructing the eh region tree.  These
     correspond to variables of the same name in cfun->eh, which we
     don't have easy access to.  */
  eh_region cur_region;

  /* What's "current" for the purposes of __builtin_eh_pointer.  For
     a CATCH, this is the associated TRY.  For an EH_FILTER, this is
     the associated ALLOWED_EXCEPTIONS, etc.  */
  eh_region ehp_region;

  /* Processing of TRY_FINALLY requires a bit more state.  This is
     split out into a separate structure so that we don't have to
     copy so much when processing other nodes.  */
  struct leh_tf_state *tf;
};

struct leh_tf_state
{
  /* Pointer to the GIMPLE_TRY_FINALLY node under discussion.  The
     try_finally_expr is the original GIMPLE_TRY_FINALLY.  We need to retain
     this so that outside_finally_tree can reliably reference the tree used
     in the collect_finally_tree data structures.  */
  gtry *try_finally_expr;
  gtry *top_p;

  /* While lowering a top_p, it is usually expanded into multiple
     statements; the following field stores them.  */
  gimple_seq top_p_seq;

  /* The state outside this try_finally node.  */
  struct leh_state *outer;

  /* The exception region created for it.  */
  eh_region region;

  /* The goto queue.  */
  struct goto_queue_node *goto_queue;
  size_t goto_queue_size;
  size_t goto_queue_active;

  /* Pointer map to help in searching goto_queue when it is large.  */
  hash_map<gimple *, goto_queue_node *> *goto_queue_map;

  /* The set of unique labels seen as entries in the goto queue.  */
  vec<tree> dest_array;

  /* A label to be added at the end of the completed transformed
     sequence.  It will be set if may_fallthru was true *at one time*,
     though subsequent transformations may have cleared that flag.  */
  tree fallthru_label;

  /* True if it is possible to fall out the bottom of the try block.
     Cleared if the fallthru is converted to a goto.  */
  bool may_fallthru;

  /* True if any entry in goto_queue is a GIMPLE_RETURN.  */
  bool may_return;

  /* True if the finally block can receive an exception edge.
     Cleared if the exception case is handled by code duplication.  */
  bool may_throw;
};

static gimple_seq lower_eh_must_not_throw (struct leh_state *, gtry *);

/* Search for STMT in the goto queue.  Return the replacement,
   or null if the statement isn't in the queue.  */

#define LARGE_GOTO_QUEUE 20

static void lower_eh_constructs_1 (struct leh_state *state, gimple_seq *seq);

static gimple_seq
find_goto_replacement (struct leh_tf_state *tf, treemple stmt)
{
  unsigned int i;

  if (tf->goto_queue_active < LARGE_GOTO_QUEUE)
    {
      for (i = 0; i < tf->goto_queue_active; i++)
        if (tf->goto_queue[i].stmt.g == stmt.g)
          return tf->goto_queue[i].repl_stmt;
      return NULL;
    }

  /* If we have a large number of entries in the goto_queue, create a
     pointer map and use that for searching.  */

  if (!tf->goto_queue_map)
    {
      tf->goto_queue_map = new hash_map<gimple *, goto_queue_node *>;
      for (i = 0; i < tf->goto_queue_active; i++)
        {
          bool existed = tf->goto_queue_map->put (tf->goto_queue[i].stmt.g,
                                                  &tf->goto_queue[i]);
          gcc_assert (!existed);
        }
    }

  goto_queue_node **slot = tf->goto_queue_map->get (stmt.g);
  if (slot != NULL)
    return (*slot)->repl_stmt;

  return NULL;
}

/* A subroutine of replace_goto_queue_1.  Handles the sub-clauses of a
   lowered GIMPLE_COND.  If, by chance, the replacement is a simple goto,
   then we can just splat it in, otherwise we add the new stmts immediately
   after the GIMPLE_COND and redirect.  */

static void
replace_goto_queue_cond_clause (tree *tp, struct leh_tf_state *tf,
                                gimple_stmt_iterator *gsi)
{
  tree label;
  gimple_seq new_seq;
  treemple temp;
  location_t loc = gimple_location (gsi_stmt (*gsi));

  temp.tp = tp;
  new_seq = find_goto_replacement (tf, temp);
  if (!new_seq)
    return;

  if (gimple_seq_singleton_p (new_seq)
      && gimple_code (gimple_seq_first_stmt (new_seq)) == GIMPLE_GOTO)
    {
      *tp = gimple_goto_dest (gimple_seq_first_stmt (new_seq));
      return;
    }

  label = create_artificial_label (loc);
  /* Set the new label for the GIMPLE_COND.  */
  *tp = label;

  gsi_insert_after (gsi, gimple_build_label (label), GSI_CONTINUE_LINKING);
  gsi_insert_seq_after (gsi, gimple_seq_copy (new_seq), GSI_CONTINUE_LINKING);
}

/* The real work of replace_goto_queue.  Returns with GSI updated to
   point to the next statement.  */

static void replace_goto_queue_stmt_list (gimple_seq *, struct leh_tf_state *);

static void
replace_goto_queue_1 (gimple *stmt, struct leh_tf_state *tf,
                      gimple_stmt_iterator *gsi)
{
  gimple_seq seq;
  treemple temp;
  temp.g = NULL;

  switch (gimple_code (stmt))
    {
    case GIMPLE_GOTO:
    case GIMPLE_RETURN:
      temp.g = stmt;
      seq = find_goto_replacement (tf, temp);
      if (seq)
        {
          gsi_insert_seq_before (gsi, gimple_seq_copy (seq), GSI_SAME_STMT);
          gsi_remove (gsi, false);
          return;
        }
      break;

    case GIMPLE_COND:
      replace_goto_queue_cond_clause (gimple_op_ptr (stmt, 2), tf, gsi);
      replace_goto_queue_cond_clause (gimple_op_ptr (stmt, 3), tf, gsi);
      break;

    case GIMPLE_TRY:
      replace_goto_queue_stmt_list (gimple_try_eval_ptr (stmt), tf);
      replace_goto_queue_stmt_list (gimple_try_cleanup_ptr (stmt), tf);
      break;
    case GIMPLE_CATCH:
      replace_goto_queue_stmt_list (gimple_catch_handler_ptr (
                                      as_a <gcatch *> (stmt)),
                                    tf);
      break;
    case GIMPLE_EH_FILTER:
      replace_goto_queue_stmt_list (gimple_eh_filter_failure_ptr (stmt), tf);
      break;
    case GIMPLE_EH_ELSE:
      {
        geh_else *eh_else_stmt = as_a <geh_else *> (stmt);
        replace_goto_queue_stmt_list (gimple_eh_else_n_body_ptr (eh_else_stmt),
                                      tf);
        replace_goto_queue_stmt_list (gimple_eh_else_e_body_ptr (eh_else_stmt),
                                      tf);
      }
      break;

    default:
      /* These won't have gotos in them.  */
      break;
    }

  gsi_next (gsi);
}

/* A subroutine of replace_goto_queue.  Handles GIMPLE_SEQ.  */

static void
replace_goto_queue_stmt_list (gimple_seq *seq, struct leh_tf_state *tf)
{
  gimple_stmt_iterator gsi = gsi_start (*seq);

  while (!gsi_end_p (gsi))
    replace_goto_queue_1 (gsi_stmt (gsi), tf, &gsi);
}

/* Replace all goto queue members.  */

static void
replace_goto_queue (struct leh_tf_state *tf)
{
  if (tf->goto_queue_active == 0)
    return;
  replace_goto_queue_stmt_list (&tf->top_p_seq, tf);
  replace_goto_queue_stmt_list (&eh_seq, tf);
}

/* Add a new record to the goto queue contained in TF.  NEW_STMT is the
   data to be added; IS_LABEL indicates whether NEW_STMT is a label, as
   opposed to a goto or return statement.  */

static void
record_in_goto_queue (struct leh_tf_state *tf,
                      treemple new_stmt,
                      int index,
                      bool is_label,
                      location_t location)
{
  size_t active, size;
  struct goto_queue_node *q;

  gcc_assert (!tf->goto_queue_map);

  active = tf->goto_queue_active;
  size = tf->goto_queue_size;
  if (active >= size)
    {
      size = (size ? size * 2 : 32);
      tf->goto_queue_size = size;
      tf->goto_queue
        = XRESIZEVEC (struct goto_queue_node, tf->goto_queue, size);
    }

  q = &tf->goto_queue[active];
  tf->goto_queue_active = active + 1;

  memset (q, 0, sizeof (*q));
  q->stmt = new_stmt;
  q->index = index;
  q->location = location;
  q->is_label = is_label;
}

/* Record the LABEL label in the goto queue contained in TF.
   TF is not null.  */

static void
record_in_goto_queue_label (struct leh_tf_state *tf, treemple stmt, tree label,
                            location_t location)
{
  int index;
  treemple temp, new_stmt;

  if (!label)
    return;

  /* Computed and non-local gotos do not get processed.  Given
     their nature we can neither tell whether we've escaped the
     finally block nor redirect them even if we could tell.  */
  if (TREE_CODE (label) != LABEL_DECL)
    return;

  /* No need to record gotos that don't leave the try block.  */
  temp.t = label;
  if (!outside_finally_tree (temp, tf->try_finally_expr))
    return;

  if (! tf->dest_array.exists ())
    {
      tf->dest_array.create (10);
      tf->dest_array.quick_push (label);
      index = 0;
    }
  else
    {
      int n = tf->dest_array.length ();
      for (index = 0; index < n; ++index)
        if (tf->dest_array[index] == label)
          break;
      if (index == n)
        tf->dest_array.safe_push (label);
    }

  /* In the case of a GOTO we want to record the destination label,
     since with a GIMPLE_COND we have easy access to the then/else
     labels.  */
  new_stmt = stmt;
  record_in_goto_queue (tf, new_stmt, index, true, location);
}

/* For any GIMPLE_GOTO or GIMPLE_RETURN, decide whether it leaves a try_finally
   node, and if so record that fact in the goto queue associated with that
   try_finally node.  */

static void
maybe_record_in_goto_queue (struct leh_state *state, gimple *stmt)
{
  struct leh_tf_state *tf = state->tf;
  treemple new_stmt;

  if (!tf)
    return;

  switch (gimple_code (stmt))
    {
    case GIMPLE_COND:
      {
        gcond *cond_stmt = as_a <gcond *> (stmt);
        new_stmt.tp = gimple_op_ptr (cond_stmt, 2);
        record_in_goto_queue_label (tf, new_stmt,
                                    gimple_cond_true_label (cond_stmt),
                                    EXPR_LOCATION (*new_stmt.tp));
        new_stmt.tp = gimple_op_ptr (cond_stmt, 3);
        record_in_goto_queue_label (tf, new_stmt,
                                    gimple_cond_false_label (cond_stmt),
                                    EXPR_LOCATION (*new_stmt.tp));
      }
      break;
    case GIMPLE_GOTO:
      new_stmt.g = stmt;
      record_in_goto_queue_label (tf, new_stmt, gimple_goto_dest (stmt),
                                  gimple_location (stmt));
      break;

    case GIMPLE_RETURN:
      tf->may_return = true;
      new_stmt.g = stmt;
      record_in_goto_queue (tf, new_stmt, -1, false, gimple_location (stmt));
      break;

    default:
      gcc_unreachable ();
    }
}


#if CHECKING_P
/* We do not process GIMPLE_SWITCHes for now.  As long as the original source
   was in fact structured, and we've not yet done jump threading, then none
   of the labels will leave outer GIMPLE_TRY_FINALLY nodes.  Verify this.  */

static void
verify_norecord_switch_expr (struct leh_state *state,
                             gswitch *switch_expr)
{
  struct leh_tf_state *tf = state->tf;
  size_t i, n;

  if (!tf)
    return;

  n = gimple_switch_num_labels (switch_expr);

  for (i = 0; i < n; ++i)
    {
      treemple temp;
      tree lab = CASE_LABEL (gimple_switch_label (switch_expr, i));
      temp.t = lab;
      gcc_assert (!outside_finally_tree (temp, tf->try_finally_expr));
    }
}
#else
#define verify_norecord_switch_expr(state, switch_expr)
#endif

/* Redirect a RETURN_EXPR pointed to by Q to FINLAB.  If MOD is
   non-null, insert it before the new branch.  */

static void
do_return_redirection (struct goto_queue_node *q, tree finlab, gimple_seq mod)
{
  gimple *x;

  /* In the case of a return, the queue node must be a gimple statement.  */
  gcc_assert (!q->is_label);

  /* Note that the return value may have already been computed, e.g.,

       int x;
       int foo (void)
       {
         x = 0;
         try {
           return x;
         } finally {
           x++;
         }
       }

     should return 0, not 1.  We don't have to do anything to make
     this happen because the return value has been placed in the
     RESULT_DECL already.  */

  q->cont_stmt = q->stmt.g;

  if (mod)
    gimple_seq_add_seq (&q->repl_stmt, mod);

  x = gimple_build_goto (finlab);
  gimple_set_location (x, q->location);
  gimple_seq_add_stmt (&q->repl_stmt, x);
}

/* Similar, but easier, for GIMPLE_GOTO.  */

static void
do_goto_redirection (struct goto_queue_node *q, tree finlab, gimple_seq mod,
                     struct leh_tf_state *tf)
{
  ggoto *x;

  gcc_assert (q->is_label);

  q->cont_stmt = gimple_build_goto (tf->dest_array[q->index]);

  if (mod)
    gimple_seq_add_seq (&q->repl_stmt, mod);

  x = gimple_build_goto (finlab);
  gimple_set_location (x, q->location);
  gimple_seq_add_stmt (&q->repl_stmt, x);
}

/* Emit a standard landing pad sequence into SEQ for REGION.  */

static void
emit_post_landing_pad (gimple_seq *seq, eh_region region)
{
  eh_landing_pad lp = region->landing_pads;
  glabel *x;

  if (lp == NULL)
    lp = gen_eh_landing_pad (region);

  lp->post_landing_pad = create_artificial_label (UNKNOWN_LOCATION);
  EH_LANDING_PAD_NR (lp->post_landing_pad) = lp->index;

  x = gimple_build_label (lp->post_landing_pad);
  gimple_seq_add_stmt (seq, x);
}

/* Emit a RESX statement into SEQ for REGION.  */

static void
emit_resx (gimple_seq *seq, eh_region region)
{
  gresx *x = gimple_build_resx (region->index);
  gimple_seq_add_stmt (seq, x);
  if (region->outer)
    record_stmt_eh_region (region->outer, x);
}

/* Emit an EH_DISPATCH statement into SEQ for REGION.  */

static void
emit_eh_dispatch (gimple_seq *seq, eh_region region)
{
  geh_dispatch *x = gimple_build_eh_dispatch (region->index);
  gimple_seq_add_stmt (seq, x);
}

/* Note that the current EH region may contain a throw, or a
   call to a function which itself may contain a throw.  */

static void
note_eh_region_may_contain_throw (eh_region region)
{
  while (bitmap_set_bit (eh_region_may_contain_throw_map, region->index))
    {
      if (region->type == ERT_MUST_NOT_THROW)
        break;
      region = region->outer;
      if (region == NULL)
        break;
    }
}

/* Check if REGION has been marked as containing a throw.  If REGION is
   NULL, this predicate is false.  */

static inline bool
eh_region_may_contain_throw (eh_region r)
{
  return r && bitmap_bit_p (eh_region_may_contain_throw_map, r->index);
}

/* We want to transform
	try { body; } catch { stuff; }
   to
	normal_sequence:
	  body;
	  over:
	eh_sequence:
	  landing_pad:
	    stuff;
	    goto over;

   TP is a GIMPLE_TRY node.  REGION is the region whose post_landing_pad
   should be placed before the second operand, or NULL.  OVER is
   an existing label that should be put at the exit, or NULL.  */

static gimple_seq
frob_into_branch_around (gtry *tp, eh_region region, tree over)
{
  gimple *x;
  gimple_seq cleanup, result;
  location_t loc = gimple_location (tp);

  cleanup = gimple_try_cleanup (tp);
  result = gimple_try_eval (tp);

  if (region)
    emit_post_landing_pad (&eh_seq, region);

  if (gimple_seq_may_fallthru (cleanup))
    {
      if (!over)
        over = create_artificial_label (loc);
      x = gimple_build_goto (over);
      gimple_set_location (x, loc);
      gimple_seq_add_stmt (&cleanup, x);
    }
  gimple_seq_add_seq (&eh_seq, cleanup);

  if (over)
    {
      x = gimple_build_label (over);
      gimple_seq_add_stmt (&result, x);
    }
  return result;
}

/* A subroutine of lower_try_finally.  Duplicate the gimple sequence SEQ,
   recording any new labels found in the copy.  */

static gimple_seq
lower_try_finally_dup_block (gimple_seq seq, struct leh_state *outer_state,
                             location_t loc)
{
  gtry *region = NULL;
  gimple_seq new_seq;
  gimple_stmt_iterator gsi;

  new_seq = copy_gimple_seq_and_replace_locals (seq);

  for (gsi = gsi_start (new_seq); !gsi_end_p (gsi); gsi_next (&gsi))
    {
      gimple *stmt = gsi_stmt (gsi);
      /* We duplicate __builtin_stack_restore at -O0 in the hope of eliminating
         it on the EH paths.  When it is not eliminated, make it transparent in
         the debug info.  */
      if (gimple_call_builtin_p (stmt, BUILT_IN_STACK_RESTORE))
        gimple_set_location (stmt, UNKNOWN_LOCATION);
      else if (LOCATION_LOCUS (gimple_location (stmt)) == UNKNOWN_LOCATION)
        {
          tree block = gimple_block (stmt);
          gimple_set_location (stmt, loc);
          gimple_set_block (stmt, block);
        }
    }

  if (outer_state->tf)
    region = outer_state->tf->try_finally_expr;
  collect_finally_tree_1 (new_seq, region);

  return new_seq;
}

/* A subroutine of lower_try_finally.  Create a fallthru label for
   the given try_finally state.  The only tricky bit here is that
   we have to make sure to record the label in our outer context.  */

static tree
lower_try_finally_fallthru_label (struct leh_tf_state *tf)
{
  tree label = tf->fallthru_label;
  treemple temp;

  if (!label)
    {
      label = create_artificial_label (gimple_location (tf->try_finally_expr));
      tf->fallthru_label = label;
      if (tf->outer->tf)
        {
          temp.t = label;
          record_in_finally_tree (temp, tf->outer->tf->try_finally_expr);
        }
    }
  return label;
}

/* A subroutine of lower_try_finally.  If FINALLY consists of a
   GIMPLE_EH_ELSE node, return it.  */

static inline geh_else *
get_eh_else (gimple_seq finally)
{
  gimple *x = gimple_seq_first_stmt (finally);
  if (gimple_code (x) == GIMPLE_EH_ELSE)
    {
      gcc_assert (gimple_seq_singleton_p (finally));
      return as_a <geh_else *> (x);
    }
  return NULL;
}

/* A subroutine of lower_try_finally.  If the eh_protect_cleanup_actions
   langhook returns non-null, then the language requires that the exception
   path out of a try_finally be treated specially.  To wit: the code within
   the finally block may not itself throw an exception.  We have two choices
   here.  First we can duplicate the finally block and wrap it in a
   must_not_throw region.  Second, we can generate code like

	try {
	  finally_block;
	} catch {
	  if (fintmp == eh_edge)
	    protect_cleanup_actions;
	}

   where "fintmp" is the temporary used in the switch statement generation
   alternative considered below.  For the nonce, we always choose the first
   option.

   THIS_STATE may be null if this is a try-cleanup, not a try-finally.  */

static void
honor_protect_cleanup_actions (struct leh_state *outer_state,
                               struct leh_state *this_state,
                               struct leh_tf_state *tf)
{
  gimple_seq finally = gimple_try_cleanup (tf->top_p);

  /* EH_ELSE doesn't come from user code; only compiler generated stuff.
     It does need to be handled here, so as to separate the (different)
     EH path from the normal path.  But we should not attempt to wrap
     it with a must-not-throw node (which indeed gets in the way).  */
  if (geh_else *eh_else = get_eh_else (finally))
    {
      gimple_try_set_cleanup (tf->top_p, gimple_eh_else_n_body (eh_else));
      finally = gimple_eh_else_e_body (eh_else);

      /* Let the ELSE see the exception that's being processed.  */
      eh_region save_ehp = this_state->ehp_region;
      this_state->ehp_region = this_state->cur_region;
      lower_eh_constructs_1 (this_state, &finally);
      this_state->ehp_region = save_ehp;
    }
  else
    {
      /* First check for nothing to do.  */
      if (lang_hooks.eh_protect_cleanup_actions == NULL)
        return;
      tree actions = lang_hooks.eh_protect_cleanup_actions ();
      if (actions == NULL)
        return;

      if (this_state)
        finally = lower_try_finally_dup_block (finally, outer_state,
                                               gimple_location (tf->try_finally_expr));

      /* If this cleanup consists of a TRY_CATCH_EXPR with TRY_CATCH_IS_CLEANUP
         set, the handler of the TRY_CATCH_EXPR is another cleanup which ought
         to be in an enclosing scope, but needs to be implemented at this level
         to avoid a nesting violation (see wrap_temporary_cleanups in
         cp/decl.c).  Since it's logically at an outer level, we should call
         terminate before we get to it, so strip it away before adding the
         MUST_NOT_THROW filter.  */
      gimple_stmt_iterator gsi = gsi_start (finally);
      gimple *x = gsi_stmt (gsi);
      if (gimple_code (x) == GIMPLE_TRY
          && gimple_try_kind (x) == GIMPLE_TRY_CATCH
          && gimple_try_catch_is_cleanup (x))
        {
          gsi_insert_seq_before (&gsi, gimple_try_eval (x), GSI_SAME_STMT);
          gsi_remove (&gsi, false);
        }

      /* Wrap the block with protect_cleanup_actions as the action.  */
      geh_mnt *eh_mnt = gimple_build_eh_must_not_throw (actions);
      gtry *try_stmt = gimple_build_try (finally,
                                         gimple_seq_alloc_with_stmt (eh_mnt),
                                         GIMPLE_TRY_CATCH);
      finally = lower_eh_must_not_throw (outer_state, try_stmt);
    }

  /* Drop all of this into the exception sequence.  */
  emit_post_landing_pad (&eh_seq, tf->region);
  gimple_seq_add_seq (&eh_seq, finally);
  if (gimple_seq_may_fallthru (finally))
    emit_resx (&eh_seq, tf->region);

  /* Having now been handled, EH isn't to be considered with
     the rest of the outgoing edges.  */
  tf->may_throw = false;
}

/* A subroutine of lower_try_finally.  We have determined that there is
   no fallthru edge out of the finally block.  This means that there is
   no outgoing edge corresponding to any incoming edge.  Restructure the
   try_finally node for this special case.  */

static void
lower_try_finally_nofallthru (struct leh_state *state,
                              struct leh_tf_state *tf)
{
  tree lab;
  gimple *x;
  geh_else *eh_else;
  gimple_seq finally;
  struct goto_queue_node *q, *qe;

  lab = create_artificial_label (gimple_location (tf->try_finally_expr));

  /* We expect that tf->top_p is a GIMPLE_TRY.  */
  finally = gimple_try_cleanup (tf->top_p);
  tf->top_p_seq = gimple_try_eval (tf->top_p);

  x = gimple_build_label (lab);
  gimple_seq_add_stmt (&tf->top_p_seq, x);

  q = tf->goto_queue;
  qe = q + tf->goto_queue_active;
  for (; q < qe; ++q)
    if (q->index < 0)
      do_return_redirection (q, lab, NULL);
    else
      do_goto_redirection (q, lab, NULL, tf);

  replace_goto_queue (tf);

  /* Emit the finally block into the stream.  Lower EH_ELSE at this time.  */
  eh_else = get_eh_else (finally);
  if (eh_else)
    {
      finally = gimple_eh_else_n_body (eh_else);
      lower_eh_constructs_1 (state, &finally);
      gimple_seq_add_seq (&tf->top_p_seq, finally);

      if (tf->may_throw)
        {
          finally = gimple_eh_else_e_body (eh_else);
          lower_eh_constructs_1 (state, &finally);

          emit_post_landing_pad (&eh_seq, tf->region);
          gimple_seq_add_seq (&eh_seq, finally);
        }
    }
  else
    {
      lower_eh_constructs_1 (state, &finally);
      gimple_seq_add_seq (&tf->top_p_seq, finally);

      if (tf->may_throw)
        {
          emit_post_landing_pad (&eh_seq, tf->region);

          x = gimple_build_goto (lab);
          gimple_set_location (x, gimple_location (tf->try_finally_expr));
          gimple_seq_add_stmt (&eh_seq, x);
        }
    }
}

/* A subroutine of lower_try_finally.  We have determined that there is
   exactly one destination of the finally block.  Restructure the
   try_finally node for this special case.  */

static void
lower_try_finally_onedest (struct leh_state *state, struct leh_tf_state *tf)
{
  struct goto_queue_node *q, *qe;
  geh_else *eh_else;
  glabel *label_stmt;
  gimple *x;
  gimple_seq finally;
  gimple_stmt_iterator gsi;
  tree finally_label;
  location_t loc = gimple_location (tf->try_finally_expr);

  finally = gimple_try_cleanup (tf->top_p);
  tf->top_p_seq = gimple_try_eval (tf->top_p);

  /* Since there's only one destination, and the destination edge can only
     either be EH or non-EH, that implies that all of our incoming edges
     are of the same type.  Therefore we can lower EH_ELSE immediately.  */
  eh_else = get_eh_else (finally);
  if (eh_else)
    {
      if (tf->may_throw)
        finally = gimple_eh_else_e_body (eh_else);
      else
        finally = gimple_eh_else_n_body (eh_else);
    }

  lower_eh_constructs_1 (state, &finally);

  for (gsi = gsi_start (finally); !gsi_end_p (gsi); gsi_next (&gsi))
    {
      gimple *stmt = gsi_stmt (gsi);
      if (LOCATION_LOCUS (gimple_location (stmt)) == UNKNOWN_LOCATION)
        {
          tree block = gimple_block (stmt);
          gimple_set_location (stmt, gimple_location (tf->try_finally_expr));
          gimple_set_block (stmt, block);
        }
    }

  if (tf->may_throw)
    {
      /* Only reachable via the exception edge.  Add the given label to
         the head of the FINALLY block.  Append a RESX at the end.  */
      emit_post_landing_pad (&eh_seq, tf->region);
      gimple_seq_add_seq (&eh_seq, finally);
      emit_resx (&eh_seq, tf->region);
      return;
    }

  if (tf->may_fallthru)
    {
      /* Only reachable via the fallthru edge.  Do nothing but let
         the two blocks run together; we'll fall out the bottom.  */
      gimple_seq_add_seq (&tf->top_p_seq, finally);
      return;
    }

  finally_label = create_artificial_label (loc);
  label_stmt = gimple_build_label (finally_label);
  gimple_seq_add_stmt (&tf->top_p_seq, label_stmt);

  gimple_seq_add_seq (&tf->top_p_seq, finally);

  q = tf->goto_queue;
  qe = q + tf->goto_queue_active;

  if (tf->may_return)
    {
      /* Reachable by return expressions only.  Redirect them.  */
      for (; q < qe; ++q)
        do_return_redirection (q, finally_label, NULL);
      replace_goto_queue (tf);
    }
  else
    {
      /* Reachable by goto expressions only.  Redirect them.  */
      for (; q < qe; ++q)
        do_goto_redirection (q, finally_label, NULL, tf);
      replace_goto_queue (tf);

      if (tf->dest_array[0] == tf->fallthru_label)
        {
          /* Reachable by goto to fallthru label only.  Redirect it
             to the new label (already created, sadly), and do not
             emit the final branch out, or the fallthru label.  */
          tf->fallthru_label = NULL;
          return;
        }
    }

  /* Place the original return/goto to the original destination
     immediately after the finally block.  */
  x = tf->goto_queue[0].cont_stmt;
  gimple_seq_add_stmt (&tf->top_p_seq, x);
  maybe_record_in_goto_queue (state, x);
}

/* A subroutine of lower_try_finally.  There are multiple edges incoming
   and outgoing from the finally block.  Implement this by duplicating the
   finally block for every destination.  */

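/* Roughly (an illustrative sketch), for a fallthru plus two escaping
   gotos this produces

     body;
     copy of finally; goto fallthru_label;
     L1: copy of finally; goto dest1;
     L2: copy of finally; goto dest2;

   plus, if the region may throw, one more copy of the finally block on
   the EH path followed by a RESX.  */
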
static void
lower_try_finally_copy (struct leh_state *state, struct leh_tf_state *tf)
{
  gimple_seq finally;
  gimple_seq new_stmt;
  gimple_seq seq;
  gimple *x;
  geh_else *eh_else;
  tree tmp;
  location_t tf_loc = gimple_location (tf->try_finally_expr);

  finally = gimple_try_cleanup (tf->top_p);

  /* Notice EH_ELSE, and simplify some of the remaining code
     by considering FINALLY to be the normal return path only.  */
  eh_else = get_eh_else (finally);
  if (eh_else)
    finally = gimple_eh_else_n_body (eh_else);

  tf->top_p_seq = gimple_try_eval (tf->top_p);
  new_stmt = NULL;

  if (tf->may_fallthru)
    {
      seq = lower_try_finally_dup_block (finally, state, tf_loc);
      lower_eh_constructs_1 (state, &seq);
      gimple_seq_add_seq (&new_stmt, seq);

      tmp = lower_try_finally_fallthru_label (tf);
      x = gimple_build_goto (tmp);
      gimple_set_location (x, tf_loc);
      gimple_seq_add_stmt (&new_stmt, x);
    }

  if (tf->may_throw)
    {
      /* We don't need to copy the EH path of EH_ELSE,
         since it is only emitted once.  */
      if (eh_else)
        seq = gimple_eh_else_e_body (eh_else);
      else
        seq = lower_try_finally_dup_block (finally, state, tf_loc);
      lower_eh_constructs_1 (state, &seq);

      emit_post_landing_pad (&eh_seq, tf->region);
      gimple_seq_add_seq (&eh_seq, seq);
      emit_resx (&eh_seq, tf->region);
    }

  if (tf->goto_queue)
    {
      struct goto_queue_node *q, *qe;
      int return_index, index;
      struct labels_s
      {
        struct goto_queue_node *q;
        tree label;
      } *labels;

      return_index = tf->dest_array.length ();
      labels = XCNEWVEC (struct labels_s, return_index + 1);

      q = tf->goto_queue;
      qe = q + tf->goto_queue_active;
      for (; q < qe; q++)
        {
          index = q->index < 0 ? return_index : q->index;

          if (!labels[index].q)
            labels[index].q = q;
        }

      for (index = 0; index < return_index + 1; index++)
        {
          tree lab;

          q = labels[index].q;
          if (! q)
            continue;

          lab = labels[index].label
            = create_artificial_label (tf_loc);

          if (index == return_index)
            do_return_redirection (q, lab, NULL);
          else
            do_goto_redirection (q, lab, NULL, tf);

          x = gimple_build_label (lab);
          gimple_seq_add_stmt (&new_stmt, x);

          seq = lower_try_finally_dup_block (finally, state, q->location);
          lower_eh_constructs_1 (state, &seq);
          gimple_seq_add_seq (&new_stmt, seq);

          gimple_seq_add_stmt (&new_stmt, q->cont_stmt);
          maybe_record_in_goto_queue (state, q->cont_stmt);
        }

      for (q = tf->goto_queue; q < qe; q++)
        {
          tree lab;

          index = q->index < 0 ? return_index : q->index;

          if (labels[index].q == q)
            continue;

          lab = labels[index].label;

          if (index == return_index)
            do_return_redirection (q, lab, NULL);
          else
            do_goto_redirection (q, lab, NULL, tf);
        }

      replace_goto_queue (tf);
      free (labels);
    }

  /* Need to link new stmts after running replace_goto_queue due
     to not wanting to process the same goto stmts twice.  */
  gimple_seq_add_seq (&tf->top_p_seq, new_stmt);
}

/* A subroutine of lower_try_finally.  There are multiple edges incoming
   and outgoing from the finally block.  Implement this by instrumenting
   each incoming edge and creating a switch statement at the end of the
   finally block that branches to the appropriate destination.  */

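/* Schematically (an illustrative sketch), each incoming edge is
   replaced by

     finally_tmp = <unique index>; goto finally_label;

   and the single copy of the finally block is followed by

     switch (finally_tmp)
       {
       case fallthru_index: goto fallthru_label;
       case return_index: <original return>;
       case 0: goto dest0;
       ...
       }

   so the cleanup code is emitted exactly once.  */
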
static void
lower_try_finally_switch (struct leh_state *state, struct leh_tf_state *tf)
{
  struct goto_queue_node *q, *qe;
  tree finally_tmp, finally_label;
  int return_index, eh_index, fallthru_index;
  int nlabels, ndests, j, last_case_index;
  tree last_case;
  auto_vec<tree> case_label_vec;
  gimple_seq switch_body = NULL;
  gimple *x;
  geh_else *eh_else;
  tree tmp;
  gimple *switch_stmt;
  gimple_seq finally;
  hash_map<tree, gimple *> *cont_map = NULL;
  /* The location of the TRY_FINALLY stmt.  */
  location_t tf_loc = gimple_location (tf->try_finally_expr);
  /* The location of the finally block.  */
  location_t finally_loc;

  finally = gimple_try_cleanup (tf->top_p);
  eh_else = get_eh_else (finally);

  /* Mash the TRY block to the head of the chain.  */
  tf->top_p_seq = gimple_try_eval (tf->top_p);

  /* The location of the finally is either the last stmt in the finally
     block or the location of the TRY_FINALLY itself.  */
  x = gimple_seq_last_stmt (finally);
  finally_loc = x ? gimple_location (x) : tf_loc;

  /* Prepare for switch statement generation.  */
  nlabels = tf->dest_array.length ();
  return_index = nlabels;
  eh_index = return_index + tf->may_return;
  fallthru_index = eh_index + (tf->may_throw && !eh_else);
  ndests = fallthru_index + tf->may_fallthru;

  finally_tmp = create_tmp_var (integer_type_node, "finally_tmp");
  finally_label = create_artificial_label (finally_loc);

  /* We use vec::quick_push on case_label_vec throughout this function,
     since we know the size in advance and allocate precisely as much
     space as needed.  */
  case_label_vec.create (ndests);
  last_case = NULL;
  last_case_index = 0;

  /* Begin inserting code for getting to the finally block.  Things
     are done in this order to correspond to the order in which the
     code is laid out.  */

  if (tf->may_fallthru)
    {
      x = gimple_build_assign (finally_tmp,
                               build_int_cst (integer_type_node,
                                              fallthru_index));
      gimple_set_location (x, finally_loc);
      gimple_seq_add_stmt (&tf->top_p_seq, x);

      tmp = build_int_cst (integer_type_node, fallthru_index);
      last_case = build_case_label (tmp, NULL,
                                    create_artificial_label (finally_loc));
      case_label_vec.quick_push (last_case);
      last_case_index++;

      x = gimple_build_label (CASE_LABEL (last_case));
      gimple_seq_add_stmt (&switch_body, x);

      tmp = lower_try_finally_fallthru_label (tf);
      x = gimple_build_goto (tmp);
      gimple_set_location (x, finally_loc);
      gimple_seq_add_stmt (&switch_body, x);
    }

  /* For EH_ELSE, emit the exception path (plus resx) now, then
     subsequently we only need consider the normal path.  */
  if (eh_else)
    {
      if (tf->may_throw)
        {
          finally = gimple_eh_else_e_body (eh_else);
          lower_eh_constructs_1 (state, &finally);

          emit_post_landing_pad (&eh_seq, tf->region);
          gimple_seq_add_seq (&eh_seq, finally);
          emit_resx (&eh_seq, tf->region);
        }

      finally = gimple_eh_else_n_body (eh_else);
    }
  else if (tf->may_throw)
    {
      emit_post_landing_pad (&eh_seq, tf->region);

      x = gimple_build_assign (finally_tmp,
                               build_int_cst (integer_type_node, eh_index));
      gimple_seq_add_stmt (&eh_seq, x);

      x = gimple_build_goto (finally_label);
      gimple_set_location (x, tf_loc);
      gimple_seq_add_stmt (&eh_seq, x);

      tmp = build_int_cst (integer_type_node, eh_index);
      last_case = build_case_label (tmp, NULL,
                                    create_artificial_label (tf_loc));
      case_label_vec.quick_push (last_case);
      last_case_index++;

      x = gimple_build_label (CASE_LABEL (last_case));
      gimple_seq_add_stmt (&eh_seq, x);
      emit_resx (&eh_seq, tf->region);
    }

  x = gimple_build_label (finally_label);
  gimple_seq_add_stmt (&tf->top_p_seq, x);

  lower_eh_constructs_1 (state, &finally);
  gimple_seq_add_seq (&tf->top_p_seq, finally);

  /* Redirect each incoming goto edge.  */
  q = tf->goto_queue;
  qe = q + tf->goto_queue_active;
  j = last_case_index + tf->may_return;
  /* Prepare the assignments to finally_tmp that are executed upon the
     entrance through a particular edge.  */
  for (; q < qe; ++q)
    {
      gimple_seq mod = NULL;
      int switch_id;
      unsigned int case_index;

      if (q->index < 0)
        {
          x = gimple_build_assign (finally_tmp,
                                   build_int_cst (integer_type_node,
                                                  return_index));
          gimple_seq_add_stmt (&mod, x);
          do_return_redirection (q, finally_label, mod);
          switch_id = return_index;
        }
      else
        {
          x = gimple_build_assign (finally_tmp,
                                   build_int_cst (integer_type_node, q->index));
          gimple_seq_add_stmt (&mod, x);
          do_goto_redirection (q, finally_label, mod, tf);
          switch_id = q->index;
        }

      case_index = j + q->index;
      if (case_label_vec.length () <= case_index || !case_label_vec[case_index])
        {
          tree case_lab;
          tmp = build_int_cst (integer_type_node, switch_id);
          case_lab = build_case_label (tmp, NULL,
                                       create_artificial_label (tf_loc));
          /* We store the cont_stmt in the pointer map, so that we can recover
             it in the loop below.  */
          if (!cont_map)
            cont_map = new hash_map<tree, gimple *>;
          cont_map->put (case_lab, q->cont_stmt);
          case_label_vec.quick_push (case_lab);
        }
    }
  for (j = last_case_index; j < last_case_index + nlabels; j++)
    {
      gimple *cont_stmt;

      last_case = case_label_vec[j];

      gcc_assert (last_case);
      gcc_assert (cont_map);

      cont_stmt = *cont_map->get (last_case);

      x = gimple_build_label (CASE_LABEL (last_case));
      gimple_seq_add_stmt (&switch_body, x);
      gimple_seq_add_stmt (&switch_body, cont_stmt);
      maybe_record_in_goto_queue (state, cont_stmt);
    }
  if (cont_map)
    delete cont_map;

  replace_goto_queue (tf);

  /* Make sure that the last case is the default label, as one is required.
     Then sort the labels, which is also required in GIMPLE.  */
  CASE_LOW (last_case) = NULL;
  tree tem = case_label_vec.pop ();
  gcc_assert (tem == last_case);
  sort_case_labels (case_label_vec);

  /* Build the switch statement, setting last_case to be the default
     label.  */
  switch_stmt = gimple_build_switch (finally_tmp, last_case,
                                     case_label_vec);
  gimple_set_location (switch_stmt, finally_loc);

  /* Need to link SWITCH_STMT after running replace_goto_queue
     due to not wanting to process the same goto stmts twice.  */
  gimple_seq_add_stmt (&tf->top_p_seq, switch_stmt);
  gimple_seq_add_seq (&tf->top_p_seq, switch_body);
}

/* Decide whether or not we are going to duplicate the finally block.
   There are several considerations, chief among them preventing
   egregious code growth.  One way to do this is to estimate the size
   of the finally block, multiply that by the number of copies we'd
   need to make, and compare against the estimate of the size of the
   switch machinery we'd have to add.  */

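/* For example (illustrative numbers only): a finally block estimated
   at 8 insns with 3 destinations gives f_estimate = (8 + 1) * 3 = 27
   versus sw_estimate = 10 + 2 * 3 = 16, so when optimizing for size
   the switch is used; at -O2, f_estimate < 100 still favors copying.  */
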
static bool
decide_copy_try_finally (int ndests, bool may_throw, gimple_seq finally)
{
  int f_estimate, sw_estimate;
  geh_else *eh_else;

  /* If there's an EH_ELSE involved, the exception path is separate
     and really doesn't come into play for this computation.  */
  eh_else = get_eh_else (finally);
  if (eh_else)
    {
      ndests -= may_throw;
      finally = gimple_eh_else_n_body (eh_else);
    }

  if (!optimize)
    {
      gimple_stmt_iterator gsi;

      if (ndests == 1)
        return true;

      for (gsi = gsi_start (finally); !gsi_end_p (gsi); gsi_next (&gsi))
        {
          /* Duplicate __builtin_stack_restore in the hope of eliminating it
             on the EH paths and, consequently, useless cleanups.  */
          gimple *stmt = gsi_stmt (gsi);
          if (!is_gimple_debug (stmt)
              && !gimple_clobber_p (stmt)
              && !gimple_call_builtin_p (stmt, BUILT_IN_STACK_RESTORE))
            return false;
        }
      return true;
    }

  /* Finally estimate N times, plus N gotos.  */
  f_estimate = estimate_num_insns_seq (finally, &eni_size_weights);
  f_estimate = (f_estimate + 1) * ndests;

  /* Switch statement (cost 10), N variable assignments, N gotos.  */
  sw_estimate = 10 + 2 * ndests;

  /* Optimize for size clearly wants our best guess.  */
  if (optimize_function_for_size_p (cfun))
    return f_estimate < sw_estimate;

  /* ??? These numbers are completely made up so far.  */
  if (optimize > 1)
    return f_estimate < 100 || f_estimate < sw_estimate * 2;
  else
    return f_estimate < 40 || f_estimate * 2 < sw_estimate * 3;
}

/* REG is the enclosing region for a possible cleanup region, or the region
   itself.  Returns TRUE if such a region would be unreachable.

   Cleanup regions within a must-not-throw region aren't actually reachable
   even if there are throwing stmts within them, because the personality
   routine will call terminate before unwinding.  */

static bool
cleanup_is_dead_in (eh_region reg)
{
  while (reg && reg->type == ERT_CLEANUP)
    reg = reg->outer;
  return (reg && reg->type == ERT_MUST_NOT_THROW);
}
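
/* For example, a cleanup nested anywhere inside a MUST_NOT_THROW region
   (such as the one the C++ front end wraps around a noexcept function
   body) is dead: an exception would make the personality routine call
   terminate before the cleanup could ever run.  */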

/* A subroutine of lower_eh_constructs_1.  Lower a GIMPLE_TRY_FINALLY node
   to a sequence of labels and blocks, plus the exception region trees
   that record all the magic.  This is complicated by the need to
   arrange for the FINALLY block to be executed on all exits.  */

static gimple_seq
lower_try_finally (struct leh_state *state, gtry *tp)
{
  struct leh_tf_state this_tf;
  struct leh_state this_state;
  int ndests;
  gimple_seq old_eh_seq;

  /* Process the try block.  */

  memset (&this_tf, 0, sizeof (this_tf));
  this_tf.try_finally_expr = tp;
  this_tf.top_p = tp;
  this_tf.outer = state;
  if (using_eh_for_cleanups_p () && !cleanup_is_dead_in (state->cur_region))
    {
      this_tf.region = gen_eh_region_cleanup (state->cur_region);
      this_state.cur_region = this_tf.region;
    }
  else
    {
      this_tf.region = NULL;
      this_state.cur_region = state->cur_region;
    }

  this_state.ehp_region = state->ehp_region;
  this_state.tf = &this_tf;

  old_eh_seq = eh_seq;
  eh_seq = NULL;

  lower_eh_constructs_1 (&this_state, gimple_try_eval_ptr (tp));

  /* Determine if the try block is escaped through the bottom.  */
  this_tf.may_fallthru = gimple_seq_may_fallthru (gimple_try_eval (tp));

  /* Determine if any exceptions are possible within the try block.  */
  if (this_tf.region)
    this_tf.may_throw = eh_region_may_contain_throw (this_tf.region);
  if (this_tf.may_throw)
    honor_protect_cleanup_actions (state, &this_state, &this_tf);

  /* Determine how many edges (still) reach the finally block.  Or rather,
     how many destinations are reached by the finally block.  Use this to
     determine how we process the finally block itself.  */

  ndests = this_tf.dest_array.length ();
  ndests += this_tf.may_fallthru;
  ndests += this_tf.may_return;
  ndests += this_tf.may_throw;

  /* If the FINALLY block is not reachable, dike it out.  */
  if (ndests == 0)
    {
      gimple_seq_add_seq (&this_tf.top_p_seq, gimple_try_eval (tp));
      gimple_try_set_cleanup (tp, NULL);
    }
  /* If the finally block doesn't fall through, then any destination
     we might try to impose there isn't reached either.  There may be
     some minor amount of cleanup and redirection still needed.  */
  else if (!gimple_seq_may_fallthru (gimple_try_cleanup (tp)))
    lower_try_finally_nofallthru (state, &this_tf);

  /* We can easily special-case redirection to a single destination.  */
  else if (ndests == 1)
    lower_try_finally_onedest (state, &this_tf);
  else if (decide_copy_try_finally (ndests, this_tf.may_throw,
                                    gimple_try_cleanup (tp)))
    lower_try_finally_copy (state, &this_tf);
  else
    lower_try_finally_switch (state, &this_tf);

  /* If someone requested we add a label at the end of the transformed
     block, do so.  */
  if (this_tf.fallthru_label)
    {
      /* This must be reached only if ndests == 0.  */
      gimple *x = gimple_build_label (this_tf.fallthru_label);
      gimple_seq_add_stmt (&this_tf.top_p_seq, x);
    }

  this_tf.dest_array.release ();
  free (this_tf.goto_queue);
  if (this_tf.goto_queue_map)
    delete this_tf.goto_queue_map;

  /* If there was an old (aka outer) eh_seq, append the current eh_seq.
1736 If there was no old eh_seq, then the append is trivially already done. */
1737 if (old_eh_seq)
1738 {
1739 if (eh_seq == NULL)
1740 eh_seq = old_eh_seq;
1741 else
1742 {
1743 gimple_seq new_eh_seq = eh_seq;
1744 eh_seq = old_eh_seq;
9af5ce0c 1745 gimple_seq_add_seq (&eh_seq, new_eh_seq);
fa5d8988 1746 }
1747 }
1748
75a70cf9 1749 return this_tf.top_p_seq;
4ee9c684 1750}
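
For a feel of the NDESTS computation, consider this hypothetical C++ fragment, in which ~Cleanup plays the role of the finally block.  It is reached by the early return, by the goto leaving the scope, by falling off the end of the scope, and on the exception path, so ndests == 4 and decide_copy_try_finally arbitrates between copying and the switch:

struct Cleanup { ~Cleanup () {} };
extern int work (int);	/* Assumed to possibly throw.  */

int
f (int x)
{
  {
    Cleanup c;		/* ~Cleanup is the "finally" block.  */
    if (work (x))	/* May throw: the exception destination.  */
      return 1;		/* The return destination.  */
    if (x < 0)
      goto fail;	/* A goto destination leaving the scope.  */
    x = work (x + 1);
  }			/* Falling off the scope: the fallthru destination.  */
  return x;
fail:
  return -1;
}
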
1751
75a70cf9 1752/* A subroutine of lower_eh_constructs_1. Lower a GIMPLE_TRY_CATCH with a
1753 list of GIMPLE_CATCH to a sequence of labels and blocks, plus the
 1754   exception region trees that record all the magic.  */
4ee9c684 1755
75a70cf9 1756static gimple_seq
1a91d914 1757lower_catch (struct leh_state *state, gtry *tp)
4ee9c684 1758{
55d6d4e4 1759 eh_region try_region = NULL;
1760 struct leh_state this_state = *state;
75a70cf9 1761 gimple_stmt_iterator gsi;
4ee9c684 1762 tree out_label;
e3a19533 1763 gimple_seq new_seq, cleanup;
42acab1c 1764 gimple *x;
e60a6f7b 1765 location_t try_catch_loc = gimple_location (tp);
4ee9c684 1766
55d6d4e4 1767 if (flag_exceptions)
1768 {
1769 try_region = gen_eh_region_try (state->cur_region);
1770 this_state.cur_region = try_region;
1771 }
4ee9c684 1772
e3a19533 1773 lower_eh_constructs_1 (&this_state, gimple_try_eval_ptr (tp));
4ee9c684 1774
55d6d4e4 1775 if (!eh_region_may_contain_throw (try_region))
e38def9c 1776 return gimple_try_eval (tp);
1777
1778 new_seq = NULL;
1779 emit_eh_dispatch (&new_seq, try_region);
1780 emit_resx (&new_seq, try_region);
1781
1782 this_state.cur_region = state->cur_region;
1783 this_state.ehp_region = try_region;
4ee9c684 1784
4422041b 1785 /* Add eh_seq from lowering EH in the cleanup sequence after the cleanup
1786 itself, so that e.g. for coverage purposes the nested cleanups don't
1787 appear before the cleanup body. See PR64634 for details. */
1788 gimple_seq old_eh_seq = eh_seq;
1789 eh_seq = NULL;
1790
4ee9c684 1791 out_label = NULL;
e3a19533 1792 cleanup = gimple_try_cleanup (tp);
1793 for (gsi = gsi_start (cleanup);
e38def9c 1794 !gsi_end_p (gsi);
1795 gsi_next (&gsi))
4ee9c684 1796 {
e38def9c 1797 eh_catch c;
1a91d914 1798 gcatch *catch_stmt;
e38def9c 1799 gimple_seq handler;
4ee9c684 1800
1a91d914 1801 catch_stmt = as_a <gcatch *> (gsi_stmt (gsi));
1802 c = gen_eh_region_catch (try_region, gimple_catch_types (catch_stmt));
4ee9c684 1803
1a91d914 1804 handler = gimple_catch_handler (catch_stmt);
e3a19533 1805 lower_eh_constructs_1 (&this_state, &handler);
4ee9c684 1806
e38def9c 1807 c->label = create_artificial_label (UNKNOWN_LOCATION);
1808 x = gimple_build_label (c->label);
1809 gimple_seq_add_stmt (&new_seq, x);
4ee9c684 1810
e38def9c 1811 gimple_seq_add_seq (&new_seq, handler);
4ee9c684 1812
e38def9c 1813 if (gimple_seq_may_fallthru (new_seq))
4ee9c684 1814 {
1815 if (!out_label)
e60a6f7b 1816 out_label = create_artificial_label (try_catch_loc);
4ee9c684 1817
75a70cf9 1818 x = gimple_build_goto (out_label);
e38def9c 1819 gimple_seq_add_stmt (&new_seq, x);
4ee9c684 1820 }
3ded67b5 1821 if (!c->type_list)
1822 break;
4ee9c684 1823 }
1824
e38def9c 1825 gimple_try_set_cleanup (tp, new_seq);
1826
4422041b 1827 gimple_seq new_eh_seq = eh_seq;
1828 eh_seq = old_eh_seq;
1829 gimple_seq ret_seq = frob_into_branch_around (tp, try_region, out_label);
1830 gimple_seq_add_seq (&eh_seq, new_eh_seq);
1831 return ret_seq;
4ee9c684 1832}
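
Source-level picture of the loop above (hypothetical C++): each handler receives an artificial label in NEW_SEQ, handlers that can fall through branch to the shared OUT_LABEL, and a final catch-all handler has an empty type list, so the scan stops there:

extern void may_throw (), handle_int (), handle_any ();

void
f ()
{
  try { may_throw (); }
  catch (int) { handle_int (); }	/* Gets a label; then goto out_label.  */
  catch (...) { handle_any (); }	/* type_list is NULL: the loop breaks.  */
}
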
1833
75a70cf9 1834/* A subroutine of lower_eh_constructs_1. Lower a GIMPLE_TRY with a
1835 GIMPLE_EH_FILTER to a sequence of labels and blocks, plus the exception
4ee9c684 1836 region trees that record all the magic. */
1837
75a70cf9 1838static gimple_seq
1a91d914 1839lower_eh_filter (struct leh_state *state, gtry *tp)
4ee9c684 1840{
55d6d4e4 1841 struct leh_state this_state = *state;
1842 eh_region this_region = NULL;
42acab1c 1843 gimple *inner, *x;
e38def9c 1844 gimple_seq new_seq;
ac13e8d9 1845
75a70cf9 1846 inner = gimple_seq_first_stmt (gimple_try_cleanup (tp));
1847
55d6d4e4 1848 if (flag_exceptions)
1849 {
1850 this_region = gen_eh_region_allowed (state->cur_region,
1851 gimple_eh_filter_types (inner));
1852 this_state.cur_region = this_region;
1853 }
ac13e8d9 1854
e3a19533 1855 lower_eh_constructs_1 (&this_state, gimple_try_eval_ptr (tp));
4ee9c684 1856
55d6d4e4 1857 if (!eh_region_may_contain_throw (this_region))
e38def9c 1858 return gimple_try_eval (tp);
1859
1860 new_seq = NULL;
1861 this_state.cur_region = state->cur_region;
1862 this_state.ehp_region = this_region;
1863
1864 emit_eh_dispatch (&new_seq, this_region);
1865 emit_resx (&new_seq, this_region);
1866
1867 this_region->u.allowed.label = create_artificial_label (UNKNOWN_LOCATION);
1868 x = gimple_build_label (this_region->u.allowed.label);
1869 gimple_seq_add_stmt (&new_seq, x);
1870
e3a19533 1871 lower_eh_constructs_1 (&this_state, gimple_eh_filter_failure_ptr (inner));
e38def9c 1872 gimple_seq_add_seq (&new_seq, gimple_eh_filter_failure (inner));
1873
1874 gimple_try_set_cleanup (tp, new_seq);
4ee9c684 1875
e38def9c 1876 return frob_into_branch_around (tp, this_region, NULL);
1877}
1878
1879/* A subroutine of lower_eh_constructs_1. Lower a GIMPLE_TRY with
 1880   a GIMPLE_EH_MUST_NOT_THROW to a sequence of labels and blocks,
1881 plus the exception region trees that record all the magic. */
1882
1883static gimple_seq
1a91d914 1884lower_eh_must_not_throw (struct leh_state *state, gtry *tp)
e38def9c 1885{
55d6d4e4 1886 struct leh_state this_state = *state;
e38def9c 1887
55d6d4e4 1888 if (flag_exceptions)
1889 {
42acab1c 1890 gimple *inner = gimple_seq_first_stmt (gimple_try_cleanup (tp));
55d6d4e4 1891 eh_region this_region;
e38def9c 1892
55d6d4e4 1893 this_region = gen_eh_region_must_not_throw (state->cur_region);
1894 this_region->u.must_not_throw.failure_decl
1a91d914 1895 = gimple_eh_must_not_throw_fndecl (
1896 as_a <geh_mnt *> (inner));
0565e0b5 1897 this_region->u.must_not_throw.failure_loc
1898 = LOCATION_LOCUS (gimple_location (tp));
e38def9c 1899
55d6d4e4 1900 /* In order to get mangling applied to this decl, we must mark it
 1901	 used now.  Otherwise, pass_ipa_free_lang_data won't think the
 1902	 mangling needs to happen.  */
1903 TREE_USED (this_region->u.must_not_throw.failure_decl) = 1;
e38def9c 1904
55d6d4e4 1905 this_state.cur_region = this_region;
1906 }
4ee9c684 1907
e3a19533 1908 lower_eh_constructs_1 (&this_state, gimple_try_eval_ptr (tp));
4ee9c684 1909
e38def9c 1910 return gimple_try_eval (tp);
4ee9c684 1911}
1912
1913/* Implement a cleanup expression. This is similar to try-finally,
1914 except that we only execute the cleanup block for exception edges. */
1915
75a70cf9 1916static gimple_seq
1a91d914 1917lower_cleanup (struct leh_state *state, gtry *tp)
4ee9c684 1918{
55d6d4e4 1919 struct leh_state this_state = *state;
1920 eh_region this_region = NULL;
4ee9c684 1921 struct leh_tf_state fake_tf;
75a70cf9 1922 gimple_seq result;
f340b9ff 1923 bool cleanup_dead = cleanup_is_dead_in (state->cur_region);
4ee9c684 1924
f340b9ff 1925 if (flag_exceptions && !cleanup_dead)
4ee9c684 1926 {
55d6d4e4 1927 this_region = gen_eh_region_cleanup (state->cur_region);
1928 this_state.cur_region = this_region;
4ee9c684 1929 }
1930
e3a19533 1931 lower_eh_constructs_1 (&this_state, gimple_try_eval_ptr (tp));
4ee9c684 1932
f340b9ff 1933 if (cleanup_dead || !eh_region_may_contain_throw (this_region))
e38def9c 1934 return gimple_try_eval (tp);
4ee9c684 1935
1936 /* Build enough of a try-finally state so that we can reuse
1937 honor_protect_cleanup_actions. */
1938 memset (&fake_tf, 0, sizeof (fake_tf));
e60a6f7b 1939 fake_tf.top_p = fake_tf.try_finally_expr = tp;
4ee9c684 1940 fake_tf.outer = state;
1941 fake_tf.region = this_region;
75a70cf9 1942 fake_tf.may_fallthru = gimple_seq_may_fallthru (gimple_try_eval (tp));
4ee9c684 1943 fake_tf.may_throw = true;
1944
4ee9c684 1945 honor_protect_cleanup_actions (state, NULL, &fake_tf);
1946
1947 if (fake_tf.may_throw)
1948 {
1949 /* In this case honor_protect_cleanup_actions had nothing to do,
1950 and we should process this normally. */
e3a19533 1951 lower_eh_constructs_1 (state, gimple_try_cleanup_ptr (tp));
e38def9c 1952 result = frob_into_branch_around (tp, this_region,
1953 fake_tf.fallthru_label);
4ee9c684 1954 }
1955 else
1956 {
1957 /* In this case honor_protect_cleanup_actions did nearly all of
1958 the work. All we have left is to append the fallthru_label. */
1959
75a70cf9 1960 result = gimple_try_eval (tp);
4ee9c684 1961 if (fake_tf.fallthru_label)
1962 {
42acab1c 1963 gimple *x = gimple_build_label (fake_tf.fallthru_label);
75a70cf9 1964 gimple_seq_add_stmt (&result, x);
4ee9c684 1965 }
1966 }
75a70cf9 1967 return result;
4ee9c684 1968}
1969
e38def9c 1970/* Main loop for lowering eh constructs. Also moves gsi to the next
75a70cf9 1971 statement. */
4ee9c684 1972
1973static void
75a70cf9 1974lower_eh_constructs_2 (struct leh_state *state, gimple_stmt_iterator *gsi)
4ee9c684 1975{
75a70cf9 1976 gimple_seq replace;
42acab1c 1977 gimple *x;
1978 gimple *stmt = gsi_stmt (*gsi);
4ee9c684 1979
75a70cf9 1980 switch (gimple_code (stmt))
4ee9c684 1981 {
75a70cf9 1982 case GIMPLE_CALL:
e38def9c 1983 {
1984 tree fndecl = gimple_call_fndecl (stmt);
1985 tree rhs, lhs;
1986
a0e9bfbb 1987 if (fndecl && fndecl_built_in_p (fndecl, BUILT_IN_NORMAL))
e38def9c 1988 switch (DECL_FUNCTION_CODE (fndecl))
1989 {
1990 case BUILT_IN_EH_POINTER:
1991 /* The front end may have generated a call to
1992 __builtin_eh_pointer (0) within a catch region. Replace
1993 this zero argument with the current catch region number. */
1994 if (state->ehp_region)
1995 {
bad12c62 1996 tree nr = build_int_cst (integer_type_node,
1997 state->ehp_region->index);
e38def9c 1998 gimple_call_set_arg (stmt, 0, nr);
1999 }
2000 else
2001 {
 2002	      /* The user has done something silly.  Remove it.  */
2512209b 2003 rhs = null_pointer_node;
e38def9c 2004 goto do_replace;
2005 }
2006 break;
2007
2008 case BUILT_IN_EH_FILTER:
2009 /* ??? This should never appear, but since it's a builtin it
2010 is accessible to abuse by users. Just remove it and
2011 replace the use with the arbitrary value zero. */
2012 rhs = build_int_cst (TREE_TYPE (TREE_TYPE (fndecl)), 0);
2013 do_replace:
2014 lhs = gimple_call_lhs (stmt);
2015 x = gimple_build_assign (lhs, rhs);
2016 gsi_insert_before (gsi, x, GSI_SAME_STMT);
2017 /* FALLTHRU */
2018
2019 case BUILT_IN_EH_COPY_VALUES:
2020 /* Likewise this should not appear. Remove it. */
2021 gsi_remove (gsi, true);
2022 return;
2023
2024 default:
2025 break;
2026 }
2027 }
2028 /* FALLTHRU */
2029
75a70cf9 2030 case GIMPLE_ASSIGN:
580f57ec 2031 /* If the stmt can throw, use a new temporary for the assignment
47f11e84 2032 to a LHS. This makes sure the old value of the LHS is
fa916956 2033 available on the EH edge. Only do so for statements that
9d75589a 2034	 potentially fall through (e.g. not noreturn calls); otherwise
fa916956 2035 this new assignment might create fake fallthru regions. */
aac19106 2036 if (stmt_could_throw_p (cfun, stmt)
47f11e84 2037 && gimple_has_lhs (stmt)
fa916956 2038 && gimple_stmt_may_fallthru (stmt)
47f11e84 2039 && !tree_could_throw_p (gimple_get_lhs (stmt))
2040 && is_gimple_reg_type (TREE_TYPE (gimple_get_lhs (stmt))))
2041 {
2042 tree lhs = gimple_get_lhs (stmt);
f9e245b2 2043 tree tmp = create_tmp_var (TREE_TYPE (lhs));
42acab1c 2044 gimple *s = gimple_build_assign (lhs, tmp);
47f11e84 2045 gimple_set_location (s, gimple_location (stmt));
2046 gimple_set_block (s, gimple_block (stmt));
2047 gimple_set_lhs (stmt, tmp);
2048 if (TREE_CODE (TREE_TYPE (tmp)) == COMPLEX_TYPE
2049 || TREE_CODE (TREE_TYPE (tmp)) == VECTOR_TYPE)
2050 DECL_GIMPLE_REG_P (tmp) = 1;
2051 gsi_insert_after (gsi, s, GSI_SAME_STMT);
2052 }
4ee9c684 2053 /* Look for things that can throw exceptions, and record them. */
aac19106 2054 if (state->cur_region && stmt_could_throw_p (cfun, stmt))
4ee9c684 2055 {
75a70cf9 2056 record_stmt_eh_region (state->cur_region, stmt);
4ee9c684 2057 note_eh_region_may_contain_throw (state->cur_region);
4ee9c684 2058 }
2059 break;
2060
75a70cf9 2061 case GIMPLE_COND:
2062 case GIMPLE_GOTO:
2063 case GIMPLE_RETURN:
2064 maybe_record_in_goto_queue (state, stmt);
4ee9c684 2065 break;
2066
75a70cf9 2067 case GIMPLE_SWITCH:
1a91d914 2068 verify_norecord_switch_expr (state, as_a <gswitch *> (stmt));
4ee9c684 2069 break;
2070
75a70cf9 2071 case GIMPLE_TRY:
1a91d914 2072 {
2073 gtry *try_stmt = as_a <gtry *> (stmt);
2074 if (gimple_try_kind (try_stmt) == GIMPLE_TRY_FINALLY)
2075 replace = lower_try_finally (state, try_stmt);
2076 else
2077 {
2078 x = gimple_seq_first_stmt (gimple_try_cleanup (try_stmt));
2079 if (!x)
c90b5d40 2080 {
1a91d914 2081 replace = gimple_try_eval (try_stmt);
2082 lower_eh_constructs_1 (state, &replace);
2083 }
2084 else
2085 switch (gimple_code (x))
2086 {
c90b5d40 2087 case GIMPLE_CATCH:
1a91d914 2088 replace = lower_catch (state, try_stmt);
2089 break;
c90b5d40 2090 case GIMPLE_EH_FILTER:
1a91d914 2091 replace = lower_eh_filter (state, try_stmt);
2092 break;
c90b5d40 2093 case GIMPLE_EH_MUST_NOT_THROW:
1a91d914 2094 replace = lower_eh_must_not_throw (state, try_stmt);
2095 break;
4c0315d0 2096 case GIMPLE_EH_ELSE:
1a91d914 2097 /* This code is only valid with GIMPLE_TRY_FINALLY. */
2098 gcc_unreachable ();
c90b5d40 2099 default:
1a91d914 2100 replace = lower_cleanup (state, try_stmt);
2101 break;
2102 }
2103 }
2104 }
75a70cf9 2105
2106 /* Remove the old stmt and insert the transformed sequence
2107 instead. */
2108 gsi_insert_seq_before (gsi, replace, GSI_SAME_STMT);
2109 gsi_remove (gsi, true);
2110
 2111      /* Return since we don't want gsi_next ().  */
2112 return;
4ee9c684 2113
4c0315d0 2114 case GIMPLE_EH_ELSE:
2115 /* We should be eliminating this in lower_try_finally et al. */
2116 gcc_unreachable ();
2117
4ee9c684 2118 default:
2119 /* A type, a decl, or some kind of statement that we're not
2120 interested in. Don't walk them. */
2121 break;
2122 }
75a70cf9 2123
2124 gsi_next (gsi);
2125}
2126
2127/* A helper to unwrap a gimple_seq and feed stmts to lower_eh_constructs_2. */
2128
2129static void
e3a19533 2130lower_eh_constructs_1 (struct leh_state *state, gimple_seq *pseq)
75a70cf9 2131{
2132 gimple_stmt_iterator gsi;
e3a19533 2133 for (gsi = gsi_start (*pseq); !gsi_end_p (gsi);)
75a70cf9 2134 lower_eh_constructs_2 (state, &gsi);
4ee9c684 2135}
2136
7620bc82 2137namespace {
2138
2139const pass_data pass_data_lower_eh =
65b0537f 2140{
2141 GIMPLE_PASS, /* type */
2142 "eh", /* name */
2143 OPTGROUP_NONE, /* optinfo_flags */
65b0537f 2144 TV_TREE_EH, /* tv_id */
2145 PROP_gimple_lcf, /* properties_required */
2146 PROP_gimple_leh, /* properties_provided */
2147 0, /* properties_destroyed */
2148 0, /* todo_flags_start */
2149 0, /* todo_flags_finish */
2150};
2151
7620bc82 2152class pass_lower_eh : public gimple_opt_pass
65b0537f 2153{
2154public:
2155 pass_lower_eh (gcc::context *ctxt)
2156 : gimple_opt_pass (pass_data_lower_eh, ctxt)
2157 {}
2158
2159 /* opt_pass methods: */
2160 virtual unsigned int execute (function *);
2161
2162}; // class pass_lower_eh
2163
2164unsigned int
2165pass_lower_eh::execute (function *fun)
4ee9c684 2166{
2167 struct leh_state null_state;
e38def9c 2168 gimple_seq bodyp;
75a70cf9 2169
e38def9c 2170 bodyp = gimple_body (current_function_decl);
2171 if (bodyp == NULL)
2172 return 0;
4ee9c684 2173
c1f445d2 2174 finally_tree = new hash_table<finally_tree_hasher> (31);
55d6d4e4 2175 eh_region_may_contain_throw_map = BITMAP_ALLOC (NULL);
e38def9c 2176 memset (&null_state, 0, sizeof (null_state));
4ee9c684 2177
75a70cf9 2178 collect_finally_tree_1 (bodyp, NULL);
e3a19533 2179 lower_eh_constructs_1 (&null_state, &bodyp);
2180 gimple_set_body (current_function_decl, bodyp);
4ee9c684 2181
e38def9c 2182 /* We assume there's a return statement, or something, at the end of
 2183     the function, and thus plopping the EH sequence afterward won't
2184 change anything. */
2185 gcc_assert (!gimple_seq_may_fallthru (bodyp));
2186 gimple_seq_add_seq (&bodyp, eh_seq);
2187
2188 /* We assume that since BODYP already existed, adding EH_SEQ to it
2189 didn't change its value, and we don't have to re-set the function. */
2190 gcc_assert (bodyp == gimple_body (current_function_decl));
4ee9c684 2191
c1f445d2 2192 delete finally_tree;
2193 finally_tree = NULL;
55d6d4e4 2194 BITMAP_FREE (eh_region_may_contain_throw_map);
e38def9c 2195 eh_seq = NULL;
58d82cd0 2196
2197 /* If this function needs a language specific EH personality routine
2198 and the frontend didn't already set one do so now. */
65b0537f 2199 if (function_needs_eh_personality (fun) == eh_personality_lang
58d82cd0 2200 && !DECL_FUNCTION_PERSONALITY (current_function_decl))
2201 DECL_FUNCTION_PERSONALITY (current_function_decl)
2202 = lang_hooks.eh_personality ();
2203
2a1990e9 2204 return 0;
4ee9c684 2205}
2206
7620bc82 2207} // anon namespace
2208
cbe8bda8 2209gimple_opt_pass *
2210make_pass_lower_eh (gcc::context *ctxt)
2211{
2212 return new pass_lower_eh (ctxt);
2213}
4ee9c684 2214\f
e38def9c 2215/* Create the multiple edges from an EH_DISPATCH statement to all of
2216 the possible handlers for its EH region. Return true if there's
2217 no fallthru edge; false if there is. */
4ee9c684 2218
e38def9c 2219bool
1a91d914 2220make_eh_dispatch_edges (geh_dispatch *stmt)
4ee9c684 2221{
e38def9c 2222 eh_region r;
2223 eh_catch c;
4ee9c684 2224 basic_block src, dst;
2225
e38def9c 2226 r = get_eh_region_from_number (gimple_eh_dispatch_region (stmt));
75a70cf9 2227 src = gimple_bb (stmt);
4ee9c684 2228
e38def9c 2229 switch (r->type)
2230 {
2231 case ERT_TRY:
2232 for (c = r->u.eh_try.first_catch; c ; c = c->next_catch)
2233 {
0fb4f2ce 2234 dst = label_to_block (cfun, c->label);
e38def9c 2235 make_edge (src, dst, 0);
ac13e8d9 2236
e38def9c 2237 /* A catch-all handler doesn't have a fallthru. */
2238 if (c->type_list == NULL)
2239 return false;
2240 }
2241 break;
a5bfef5b 2242
e38def9c 2243 case ERT_ALLOWED_EXCEPTIONS:
0fb4f2ce 2244 dst = label_to_block (cfun, r->u.allowed.label);
e38def9c 2245 make_edge (src, dst, 0);
2246 break;
2247
2248 default:
2249 gcc_unreachable ();
2250 }
2251
2252 return true;
a5bfef5b 2253}
2254
e38def9c 2255/* Create the single EH edge from STMT to its nearest landing pad,
2256 if there is such a landing pad within the current function. */
2257
4ee9c684 2258void
42acab1c 2259make_eh_edges (gimple *stmt)
4ee9c684 2260{
e38def9c 2261 basic_block src, dst;
2262 eh_landing_pad lp;
2263 int lp_nr;
4ee9c684 2264
e38def9c 2265 lp_nr = lookup_stmt_eh_lp (stmt);
2266 if (lp_nr <= 0)
2267 return;
4ee9c684 2268
e38def9c 2269 lp = get_eh_landing_pad_from_number (lp_nr);
2270 gcc_assert (lp != NULL);
d6d5ab2d 2271
e38def9c 2272 src = gimple_bb (stmt);
0fb4f2ce 2273 dst = label_to_block (cfun, lp->post_landing_pad);
e38def9c 2274 make_edge (src, dst, EDGE_EH);
4ee9c684 2275}
2276
e38def9c 2277/* Do the work in redirecting EDGE_IN to NEW_BB within the EH region tree;
2278 do not actually perform the final edge redirection.
927a6b6b 2279
e38def9c 2280 CHANGE_REGION is true when we're being called from cleanup_empty_eh and
2281 we intend to change the destination EH region as well; this means
2282 EH_LANDING_PAD_NR must already be set on the destination block label.
2283 If false, we're being called from generic cfg manipulation code and we
2284 should preserve our place within the region tree. */
2285
2286static void
2287redirect_eh_edge_1 (edge edge_in, basic_block new_bb, bool change_region)
927a6b6b 2288{
e38def9c 2289 eh_landing_pad old_lp, new_lp;
2290 basic_block old_bb;
42acab1c 2291 gimple *throw_stmt;
e38def9c 2292 int old_lp_nr, new_lp_nr;
2293 tree old_label, new_label;
2294 edge_iterator ei;
2295 edge e;
2296
2297 old_bb = edge_in->dest;
2298 old_label = gimple_block_label (old_bb);
2299 old_lp_nr = EH_LANDING_PAD_NR (old_label);
2300 gcc_assert (old_lp_nr > 0);
2301 old_lp = get_eh_landing_pad_from_number (old_lp_nr);
2302
2303 throw_stmt = last_stmt (edge_in->src);
2304 gcc_assert (lookup_stmt_eh_lp (throw_stmt) == old_lp_nr);
2305
2306 new_label = gimple_block_label (new_bb);
927a6b6b 2307
e38def9c 2308 /* Look for an existing region that might be using NEW_BB already. */
2309 new_lp_nr = EH_LANDING_PAD_NR (new_label);
2310 if (new_lp_nr)
927a6b6b 2311 {
e38def9c 2312 new_lp = get_eh_landing_pad_from_number (new_lp_nr);
2313 gcc_assert (new_lp);
48e1416a 2314
e38def9c 2315 /* Unless CHANGE_REGION is true, the new and old landing pad
2316 had better be associated with the same EH region. */
2317 gcc_assert (change_region || new_lp->region == old_lp->region);
927a6b6b 2318 }
2319 else
2320 {
e38def9c 2321 new_lp = NULL;
2322 gcc_assert (!change_region);
927a6b6b 2323 }
2324
e38def9c 2325 /* Notice when we redirect the last EH edge away from OLD_BB. */
2326 FOR_EACH_EDGE (e, ei, old_bb->preds)
2327 if (e != edge_in && (e->flags & EDGE_EH))
2328 break;
b4ba5e9d 2329
e38def9c 2330 if (new_lp)
b4ba5e9d 2331 {
e38def9c 2332 /* NEW_LP already exists. If there are still edges into OLD_LP,
2333 there's nothing to do with the EH tree. If there are no more
2334 edges into OLD_LP, then we want to remove OLD_LP as it is unused.
2335 If CHANGE_REGION is true, then our caller is expecting to remove
2336 the landing pad. */
2337 if (e == NULL && !change_region)
2338 remove_eh_landing_pad (old_lp);
b4ba5e9d 2339 }
e38def9c 2340 else
b4ba5e9d 2341 {
e38def9c 2342 /* No correct landing pad exists. If there are no more edges
2343 into OLD_LP, then we can simply re-use the existing landing pad.
2344 Otherwise, we have to create a new landing pad. */
2345 if (e == NULL)
2346 {
2347 EH_LANDING_PAD_NR (old_lp->post_landing_pad) = 0;
2348 new_lp = old_lp;
2349 }
2350 else
2351 new_lp = gen_eh_landing_pad (old_lp->region);
2352 new_lp->post_landing_pad = new_label;
2353 EH_LANDING_PAD_NR (new_label) = new_lp->index;
b4ba5e9d 2354 }
e38def9c 2355
2356 /* Maybe move the throwing statement to the new region. */
2357 if (old_lp != new_lp)
b4ba5e9d 2358 {
e38def9c 2359 remove_stmt_from_eh_lp (throw_stmt);
2360 add_stmt_to_eh_lp (throw_stmt, new_lp->index);
b4ba5e9d 2361 }
b4ba5e9d 2362}
2363
e38def9c 2364/* Redirect EH edge E to NEW_BB. */
75a70cf9 2365
e38def9c 2366edge
2367redirect_eh_edge (edge edge_in, basic_block new_bb)
b4ba5e9d 2368{
e38def9c 2369 redirect_eh_edge_1 (edge_in, new_bb, false);
2370 return ssa_redirect_edge (edge_in, new_bb);
2371}
b4ba5e9d 2372
e38def9c 2373/* This is a subroutine of gimple_redirect_edge_and_branch. Update the
2374 labels for redirecting a non-fallthru EH_DISPATCH edge E to NEW_BB.
2375 The actual edge update will happen in the caller. */
b4ba5e9d 2376
e38def9c 2377void
1a91d914 2378redirect_eh_dispatch_edge (geh_dispatch *stmt, edge e, basic_block new_bb)
e38def9c 2379{
2380 tree new_lab = gimple_block_label (new_bb);
2381 bool any_changed = false;
2382 basic_block old_bb;
2383 eh_region r;
2384 eh_catch c;
2385
2386 r = get_eh_region_from_number (gimple_eh_dispatch_region (stmt));
2387 switch (r->type)
b4ba5e9d 2388 {
e38def9c 2389 case ERT_TRY:
2390 for (c = r->u.eh_try.first_catch; c ; c = c->next_catch)
b4ba5e9d 2391 {
0fb4f2ce 2392 old_bb = label_to_block (cfun, c->label);
e38def9c 2393 if (old_bb == e->dest)
2394 {
2395 c->label = new_lab;
2396 any_changed = true;
2397 }
b4ba5e9d 2398 }
e38def9c 2399 break;
2400
2401 case ERT_ALLOWED_EXCEPTIONS:
0fb4f2ce 2402 old_bb = label_to_block (cfun, r->u.allowed.label);
e38def9c 2403 gcc_assert (old_bb == e->dest);
2404 r->u.allowed.label = new_lab;
2405 any_changed = true;
2406 break;
2407
2408 default:
2409 gcc_unreachable ();
b4ba5e9d 2410 }
75a70cf9 2411
e38def9c 2412 gcc_assert (any_changed);
b4ba5e9d 2413}
4ee9c684 2414\f
75a70cf9 2415/* Helper function for operation_could_trap_p and stmt_could_throw_p. */
2416
2ac47fdf 2417bool
75a70cf9 2418operation_could_trap_helper_p (enum tree_code op,
2419 bool fp_operation,
2420 bool honor_trapv,
2421 bool honor_nans,
2422 bool honor_snans,
2423 tree divisor,
2424 bool *handled)
2425{
2426 *handled = true;
2427 switch (op)
2428 {
2429 case TRUNC_DIV_EXPR:
2430 case CEIL_DIV_EXPR:
2431 case FLOOR_DIV_EXPR:
2432 case ROUND_DIV_EXPR:
2433 case EXACT_DIV_EXPR:
2434 case CEIL_MOD_EXPR:
2435 case FLOOR_MOD_EXPR:
2436 case ROUND_MOD_EXPR:
2437 case TRUNC_MOD_EXPR:
2438 case RDIV_EXPR:
a7775217 2439 if (honor_snans)
75a70cf9 2440 return true;
2441 if (fp_operation)
2442 return flag_trapping_math;
2443 if (!TREE_CONSTANT (divisor) || integer_zerop (divisor))
2444 return true;
2445 return false;
2446
2447 case LT_EXPR:
2448 case LE_EXPR:
2449 case GT_EXPR:
2450 case GE_EXPR:
2451 case LTGT_EXPR:
2452 /* Some floating point comparisons may trap. */
2453 return honor_nans;
2454
2455 case EQ_EXPR:
2456 case NE_EXPR:
2457 case UNORDERED_EXPR:
2458 case ORDERED_EXPR:
2459 case UNLT_EXPR:
2460 case UNLE_EXPR:
2461 case UNGT_EXPR:
2462 case UNGE_EXPR:
2463 case UNEQ_EXPR:
2464 return honor_snans;
2465
75a70cf9 2466 case NEGATE_EXPR:
2467 case ABS_EXPR:
2468 case CONJ_EXPR:
2469 /* These operations don't trap with floating point. */
2470 if (honor_trapv)
2471 return true;
2472 return false;
2473
1c67942e 2474 case ABSU_EXPR:
2475 /* ABSU_EXPR never traps. */
2476 return false;
2477
75a70cf9 2478 case PLUS_EXPR:
2479 case MINUS_EXPR:
2480 case MULT_EXPR:
2481 /* Any floating arithmetic may trap. */
2482 if (fp_operation && flag_trapping_math)
2483 return true;
2484 if (honor_trapv)
2485 return true;
2486 return false;
2487
aa9d6f35 2488 case COMPLEX_EXPR:
2489 case CONSTRUCTOR:
2490 /* Constructing an object cannot trap. */
2491 return false;
2492
75a70cf9 2493 default:
2494 /* Any floating arithmetic may trap. */
2495 if (fp_operation && flag_trapping_math)
2496 return true;
2497
2498 *handled = false;
2499 return false;
2500 }
2501}
2502
2503/* Return true if operation OP may trap. FP_OPERATION is true if OP is applied
 2504   to floating-point values.  HONOR_TRAPV is true if OP is applied to integer
2505 type operands that may trap. If OP is a division operator, DIVISOR contains
2506 the value of the divisor. */
2507
2508bool
2509operation_could_trap_p (enum tree_code op, bool fp_operation, bool honor_trapv,
2510 tree divisor)
2511{
2512 bool honor_nans = (fp_operation && flag_trapping_math
2513 && !flag_finite_math_only);
2514 bool honor_snans = fp_operation && flag_signaling_nans != 0;
2515 bool handled;
2516
2517 if (TREE_CODE_CLASS (op) != tcc_comparison
2518 && TREE_CODE_CLASS (op) != tcc_unary
143c3c9a 2519 && TREE_CODE_CLASS (op) != tcc_binary)
75a70cf9 2520 return false;
2521
2522 return operation_could_trap_helper_p (op, fp_operation, honor_trapv,
2523 honor_nans, honor_snans, divisor,
2524 &handled);
2525}
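
Distilling the division branch above into a standalone predicate may help; this is a sketch with assumed parameter names, where the flags stand in for flag_signaling_nans, flag_trapping_math, and the TREE_CONSTANT/integer_zerop divisor test:

static bool
division_could_trap_p (bool fp_operation, bool trapping_math,
		       bool honor_snans, bool divisor_nonzero_const)
{
  if (honor_snans)
    return true;			/* Signaling NaN operands may always trap.  */
  if (fp_operation)
    return trapping_math;		/* FP division traps only if math can trap.  */
  return !divisor_nonzero_const;	/* Possible integer division by zero.  */
}
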
2526
0e80b01d 2527
2528/* Returns true if it is possible to prove that the index of
2529 an array access REF (an ARRAY_REF expression) falls into the
2530 array bounds. */
2531
2532static bool
2533in_array_bounds_p (tree ref)
2534{
2535 tree idx = TREE_OPERAND (ref, 1);
2536 tree min, max;
2537
2538 if (TREE_CODE (idx) != INTEGER_CST)
2539 return false;
2540
2541 min = array_ref_low_bound (ref);
2542 max = array_ref_up_bound (ref);
2543 if (!min
2544 || !max
2545 || TREE_CODE (min) != INTEGER_CST
2546 || TREE_CODE (max) != INTEGER_CST)
2547 return false;
2548
2549 if (tree_int_cst_lt (idx, min)
2550 || tree_int_cst_lt (max, idx))
2551 return false;
2552
2553 return true;
2554}
2555
2556/* Returns true if it is possible to prove that the range of
2557 an array access REF (an ARRAY_RANGE_REF expression) falls
2558 into the array bounds. */
2559
2560static bool
2561range_in_array_bounds_p (tree ref)
2562{
2563 tree domain_type = TYPE_DOMAIN (TREE_TYPE (ref));
2564 tree range_min, range_max, min, max;
2565
2566 range_min = TYPE_MIN_VALUE (domain_type);
2567 range_max = TYPE_MAX_VALUE (domain_type);
2568 if (!range_min
2569 || !range_max
2570 || TREE_CODE (range_min) != INTEGER_CST
2571 || TREE_CODE (range_max) != INTEGER_CST)
2572 return false;
2573
2574 min = array_ref_low_bound (ref);
2575 max = array_ref_up_bound (ref);
2576 if (!min
2577 || !max
2578 || TREE_CODE (min) != INTEGER_CST
2579 || TREE_CODE (max) != INTEGER_CST)
2580 return false;
2581
2582 if (tree_int_cst_lt (range_min, min)
2583 || tree_int_cst_lt (max, range_max))
2584 return false;
2585
2586 return true;
2587}
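
Both predicates succeed only when the index and both bounds are compile-time constants.  A hypothetical example:

int a[10];

int
f (int i)
{
  int x = a[3];	/* Index 3 is an INTEGER_CST within [0, 9]: cannot trap.  */
  int y = a[i];	/* Variable index: not provably in bounds, may trap.  */
  return x + y;
}
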
2588
75a70cf9 2589/* Return true if EXPR can trap, as in dereferencing an invalid pointer
35c15734 2590   location or floating-point arithmetic.  Cf. the RTL version, may_trap_p.
2591 This routine expects only GIMPLE lhs or rhs input. */
4ee9c684 2592
2593bool
2594tree_could_trap_p (tree expr)
2595{
75a70cf9 2596 enum tree_code code;
35c15734 2597 bool fp_operation = false;
db97ad41 2598 bool honor_trapv = false;
75a70cf9 2599 tree t, base, div = NULL_TREE;
4ee9c684 2600
75a70cf9 2601 if (!expr)
2602 return false;
e38def9c 2603
75a70cf9 2604 code = TREE_CODE (expr);
2605 t = TREE_TYPE (expr);
2606
2607 if (t)
35c15734 2608 {
7076cb5d 2609 if (COMPARISON_CLASS_P (expr))
2610 fp_operation = FLOAT_TYPE_P (TREE_TYPE (TREE_OPERAND (expr, 0)));
2611 else
2612 fp_operation = FLOAT_TYPE_P (t);
75a70cf9 2613 honor_trapv = INTEGRAL_TYPE_P (t) && TYPE_OVERFLOW_TRAPS (t);
35c15734 2614 }
2615
75a70cf9 2616 if (TREE_CODE_CLASS (code) == tcc_binary)
2617 div = TREE_OPERAND (expr, 1);
2618 if (operation_could_trap_p (code, fp_operation, honor_trapv, div))
2619 return true;
2620
80f06481 2621 restart:
4ee9c684 2622 switch (code)
2623 {
4ee9c684 2624 case COMPONENT_REF:
2625 case REALPART_EXPR:
2626 case IMAGPART_EXPR:
2627 case BIT_FIELD_REF:
26d2ad79 2628 case VIEW_CONVERT_EXPR:
80f06481 2629 case WITH_SIZE_EXPR:
2630 expr = TREE_OPERAND (expr, 0);
2631 code = TREE_CODE (expr);
2632 goto restart;
7d23383d 2633
2634 case ARRAY_RANGE_REF:
2100c228 2635 base = TREE_OPERAND (expr, 0);
2636 if (tree_could_trap_p (base))
7d23383d 2637 return true;
2100c228 2638 if (TREE_THIS_NOTRAP (expr))
2639 return false;
2100c228 2640 return !range_in_array_bounds_p (expr);
7d23383d 2641
2642 case ARRAY_REF:
2643 base = TREE_OPERAND (expr, 0);
7d23383d 2644 if (tree_could_trap_p (base))
2645 return true;
7d23383d 2646 if (TREE_THIS_NOTRAP (expr))
2647 return false;
7d23383d 2648 return !in_array_bounds_p (expr);
4ee9c684 2649
cdf2f9e4 2650 case TARGET_MEM_REF:
182cf5a9 2651 case MEM_REF:
cdf2f9e4 2652 if (TREE_CODE (TREE_OPERAND (expr, 0)) == ADDR_EXPR
2653 && tree_could_trap_p (TREE_OPERAND (TREE_OPERAND (expr, 0), 0)))
2654 return true;
2655 if (TREE_THIS_NOTRAP (expr))
182cf5a9 2656 return false;
cdf2f9e4 2657 /* We cannot prove that the access is in-bounds when we have
2658 variable-index TARGET_MEM_REFs. */
2659 if (code == TARGET_MEM_REF
2660 && (TMR_INDEX (expr) || TMR_INDEX2 (expr)))
2661 return true;
2662 if (TREE_CODE (TREE_OPERAND (expr, 0)) == ADDR_EXPR)
2663 {
2664 tree base = TREE_OPERAND (TREE_OPERAND (expr, 0), 0);
90ca1268 2665 poly_offset_int off = mem_ref_offset (expr);
2666 if (maybe_lt (off, 0))
cdf2f9e4 2667 return true;
2668 if (TREE_CODE (base) == STRING_CST)
90ca1268 2669 return maybe_le (TREE_STRING_LENGTH (base), off);
2670 tree size = DECL_SIZE_UNIT (base);
2671 if (size == NULL_TREE
2672 || !poly_int_tree_p (size)
2673 || maybe_le (wi::to_poly_offset (size), off))
cdf2f9e4 2674 return true;
2675 /* Now we are sure the first byte of the access is inside
2676 the object. */
2677 return false;
2678 }
2679 return true;
2680
4ee9c684 2681 case INDIRECT_REF:
35c15734 2682 return !TREE_THIS_NOTRAP (expr);
2683
2684 case ASM_EXPR:
2685 return TREE_THIS_VOLATILE (expr);
010d0641 2686
75a70cf9 2687 case CALL_EXPR:
2688 t = get_callee_fndecl (expr);
2689 /* Assume that calls to weak functions may trap. */
7e49f1a1 2690 if (!t || !DECL_P (t))
35c15734 2691 return true;
7e49f1a1 2692 if (DECL_WEAK (t))
2693 return tree_could_trap_p (t);
2694 return false;
2695
2696 case FUNCTION_DECL:
2697 /* Assume that accesses to weak functions may trap, unless we know
2698 they are certainly defined in current TU or in some other
2699 LTO partition. */
9427cf46 2700 if (DECL_WEAK (expr) && !DECL_COMDAT (expr) && DECL_EXTERNAL (expr))
7e49f1a1 2701 {
9427cf46 2702 cgraph_node *node = cgraph_node::get (expr);
2703 if (node)
2704 node = node->function_symbol ();
2705 return !(node && node->in_other_partition);
7e49f1a1 2706 }
2707 return false;
2708
2709 case VAR_DECL:
2710 /* Assume that accesses to weak vars may trap, unless we know
2711 they are certainly defined in current TU or in some other
2712 LTO partition. */
9427cf46 2713 if (DECL_WEAK (expr) && !DECL_COMDAT (expr) && DECL_EXTERNAL (expr))
7e49f1a1 2714 {
9427cf46 2715 varpool_node *node = varpool_node::get (expr);
2716 if (node)
2717 node = node->ultimate_alias_target ();
2718 return !(node && node->in_other_partition);
7e49f1a1 2719 }
35c15734 2720 return false;
2721
75a70cf9 2722 default:
2723 return false;
2724 }
2725}
35c15734 2726
04936b7c 2727/* Return non-NULL if there is an integer operation with trapping overflow
2728 we can rewrite into non-trapping. Called via walk_tree from
2729 rewrite_to_non_trapping_overflow. */
2730
2731static tree
2732find_trapping_overflow (tree *tp, int *walk_subtrees, void *data)
2733{
2734 if (EXPR_P (*tp)
c0ea47cb 2735 && ANY_INTEGRAL_TYPE_P (TREE_TYPE (*tp))
04936b7c 2736 && !operation_no_trapping_overflow (TREE_TYPE (*tp), TREE_CODE (*tp)))
2737 return *tp;
2738 if (IS_TYPE_OR_DECL_P (*tp)
2739 || (TREE_CODE (*tp) == SAVE_EXPR && data == NULL))
2740 *walk_subtrees = 0;
2741 return NULL_TREE;
2742}
2743
 2744/* Rewrite selected operations into unsigned arithmetic, so that they
2745 don't trap on overflow. */
2746
2747static tree
2748replace_trapping_overflow (tree *tp, int *walk_subtrees, void *data)
2749{
2750 if (find_trapping_overflow (tp, walk_subtrees, data))
2751 {
2752 tree type = TREE_TYPE (*tp);
2753 tree utype = unsigned_type_for (type);
2754 *walk_subtrees = 0;
2755 int len = TREE_OPERAND_LENGTH (*tp);
2756 for (int i = 0; i < len; ++i)
2757 walk_tree (&TREE_OPERAND (*tp, i), replace_trapping_overflow,
2758 data, (hash_set<tree> *) data);
2759
2760 if (TREE_CODE (*tp) == ABS_EXPR)
2761 {
461f2ec7 2762 TREE_SET_CODE (*tp, ABSU_EXPR);
2763 TREE_TYPE (*tp) = utype;
2764 *tp = fold_convert (type, *tp);
04936b7c 2765 }
2766 else
2767 {
2768 TREE_TYPE (*tp) = utype;
2769 len = TREE_OPERAND_LENGTH (*tp);
2770 for (int i = 0; i < len; ++i)
2771 TREE_OPERAND (*tp, i)
2772 = fold_convert (utype, TREE_OPERAND (*tp, i));
2773 *tp = fold_convert (type, *tp);
2774 }
2775 }
2776 return NULL_TREE;
2777}
2778
2779/* If any subexpression of EXPR can trap due to -ftrapv, rewrite it
 2780   using unsigned arithmetic to avoid traps in it.  */
2781
2782tree
2783rewrite_to_non_trapping_overflow (tree expr)
2784{
2785 if (!flag_trapv)
2786 return expr;
2787 hash_set<tree> pset;
2788 if (!walk_tree (&expr, find_trapping_overflow, &pset, &pset))
2789 return expr;
2790 expr = unshare_expr (expr);
2791 pset.empty ();
2792 walk_tree (&expr, replace_trapping_overflow, &pset, &pset);
2793 return expr;
2794}
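
Concretely, under -ftrapv a signed addition may trap on overflow; the walk above recomputes it in the corresponding unsigned type, where wraparound is well defined, and converts the result back.  A hand-written equivalent of the rewritten form (hypothetical example):

int
add_no_trap (int a, int b)
{
  /* What replace_trapping_overflow produces for "a + b" under -ftrapv:
     unsigned arithmetic never traps, and the sum converts back to int.  */
  return (int) ((unsigned int) a + (unsigned int) b);
}
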
35c15734 2795
75a70cf9 2796/* Helper for stmt_could_throw_p.  Return true if STMT (assumed to be
 2797   an assignment) may throw.  */
35c15734 2798
75a70cf9 2799static bool
46b24e8b 2800stmt_could_throw_1_p (gassign *stmt)
75a70cf9 2801{
46b24e8b 2802 enum tree_code code = gimple_assign_rhs_code (stmt);
75a70cf9 2803 bool honor_nans = false;
2804 bool honor_snans = false;
2805 bool fp_operation = false;
2806 bool honor_trapv = false;
2807 tree t;
2808 size_t i;
2809 bool handled, ret;
db97ad41 2810
75a70cf9 2811 if (TREE_CODE_CLASS (code) == tcc_comparison
2812 || TREE_CODE_CLASS (code) == tcc_unary
143c3c9a 2813 || TREE_CODE_CLASS (code) == tcc_binary)
75a70cf9 2814 {
46b24e8b 2815 if (TREE_CODE_CLASS (code) == tcc_comparison)
25f48be0 2816 t = TREE_TYPE (gimple_assign_rhs1 (stmt));
25f48be0 2817 else
2818 t = gimple_expr_type (stmt);
75a70cf9 2819 fp_operation = FLOAT_TYPE_P (t);
2820 if (fp_operation)
2821 {
2822 honor_nans = flag_trapping_math && !flag_finite_math_only;
2823 honor_snans = flag_signaling_nans != 0;
2824 }
2825 else if (INTEGRAL_TYPE_P (t) && TYPE_OVERFLOW_TRAPS (t))
2826 honor_trapv = true;
2827 }
2828
46b24e8b 2829 /* First check the LHS. */
2830 if (tree_could_trap_p (gimple_assign_lhs (stmt)))
2831 return true;
2832
75a70cf9 2833 /* Check if the main expression may trap. */
75a70cf9 2834 ret = operation_could_trap_helper_p (code, fp_operation, honor_trapv,
46b24e8b 2835 honor_nans, honor_snans,
2836 gimple_assign_rhs2 (stmt),
75a70cf9 2837 &handled);
2838 if (handled)
2839 return ret;
2840
2841 /* If the expression does not trap, see if any of the individual operands may
2842 trap. */
46b24e8b 2843 for (i = 1; i < gimple_num_ops (stmt); i++)
75a70cf9 2844 if (tree_could_trap_p (gimple_op (stmt, i)))
2845 return true;
2846
2847 return false;
2848}
2849
2850
aac19106 2851/* Return true if statement STMT within FUN could throw an exception. */
75a70cf9 2852
2853bool
aac19106 2854stmt_could_throw_p (function *fun, gimple *stmt)
75a70cf9 2855{
75a70cf9 2856 if (!flag_exceptions)
2857 return false;
2858
2859 /* The only statements that can throw an exception are assignments,
e38def9c 2860 conditionals, calls, resx, and asms. */
2861 switch (gimple_code (stmt))
2862 {
2863 case GIMPLE_RESX:
2864 return true;
75a70cf9 2865
e38def9c 2866 case GIMPLE_CALL:
1a91d914 2867 return !gimple_call_nothrow_p (as_a <gcall *> (stmt));
75a70cf9 2868
e38def9c 2869 case GIMPLE_COND:
46b24e8b 2870 {
aac19106 2871 if (fun && !fun->can_throw_non_call_exceptions)
46b24e8b 2872 return false;
2873 gcond *cond = as_a <gcond *> (stmt);
2874 tree lhs = gimple_cond_lhs (cond);
2875 return operation_could_trap_p (gimple_cond_code (cond),
2876 FLOAT_TYPE_P (TREE_TYPE (lhs)),
2877 false, NULL_TREE);
2878 }
2879
2880 case GIMPLE_ASSIGN:
aac19106 2881 if ((fun && !fun->can_throw_non_call_exceptions)
46b24e8b 2882 || gimple_clobber_p (stmt))
e38def9c 2883 return false;
46b24e8b 2884 return stmt_could_throw_1_p (as_a <gassign *> (stmt));
75a70cf9 2885
e38def9c 2886 case GIMPLE_ASM:
aac19106 2887 if (fun && !fun->can_throw_non_call_exceptions)
e38def9c 2888 return false;
1a91d914 2889 return gimple_asm_volatile_p (as_a <gasm *> (stmt));
e38def9c 2890
2891 default:
2892 return false;
2893 }
4ee9c684 2894}
2895
75a70cf9 2896
2897/* Return true if expression T could throw an exception. */
2898
4ee9c684 2899bool
2900tree_could_throw_p (tree t)
2901{
2902 if (!flag_exceptions)
2903 return false;
75a70cf9 2904 if (TREE_CODE (t) == MODIFY_EXPR)
4ee9c684 2905 {
cbeb677e 2906 if (cfun->can_throw_non_call_exceptions
e38def9c 2907 && tree_could_trap_p (TREE_OPERAND (t, 0)))
2908 return true;
75a70cf9 2909 t = TREE_OPERAND (t, 1);
4ee9c684 2910 }
2911
80f06481 2912 if (TREE_CODE (t) == WITH_SIZE_EXPR)
2913 t = TREE_OPERAND (t, 0);
4ee9c684 2914 if (TREE_CODE (t) == CALL_EXPR)
2915 return (call_expr_flags (t) & ECF_NOTHROW) == 0;
cbeb677e 2916 if (cfun->can_throw_non_call_exceptions)
3864ad30 2917 return tree_could_trap_p (t);
4ee9c684 2918 return false;
2919}
2920
aac19106 2921/* Return true if STMT can throw an exception that is not caught within its
 2922   function FUN.  FUN may be NULL, in which case the answer is extra
 2923   conservative.  */
b5cebd44 2924
2925bool
aac19106 2926stmt_can_throw_external (function *fun, gimple *stmt)
b5cebd44 2927{
e38def9c 2928 int lp_nr;
b5cebd44 2929
aac19106 2930 if (!stmt_could_throw_p (fun, stmt))
b5cebd44 2931 return false;
aac19106 2932 if (!fun)
2933 return true;
b5cebd44 2934
aac19106 2935 lp_nr = lookup_stmt_eh_lp_fn (fun, stmt);
e38def9c 2936 return lp_nr == 0;
b5cebd44 2937}
75a70cf9 2938
aac19106 2939/* Return true if STMT can throw an exception that is caught within its
2940 function FUN. */
75a70cf9 2941
4ee9c684 2942bool
aac19106 2943stmt_can_throw_internal (function *fun, gimple *stmt)
4ee9c684 2944{
e38def9c 2945 int lp_nr;
75a70cf9 2946
aac19106 2947 gcc_checking_assert (fun);
2948 if (!stmt_could_throw_p (fun, stmt))
4ee9c684 2949 return false;
75a70cf9 2950
aac19106 2951 lp_nr = lookup_stmt_eh_lp_fn (fun, stmt);
e38def9c 2952 return lp_nr > 0;
2953}
2954
2955/* Given a statement STMT in IFUN, if STMT can no longer throw, then
2956 remove any entry it might have from the EH table. Return true if
2957 any change was made. */
2958
2959bool
42acab1c 2960maybe_clean_eh_stmt_fn (struct function *ifun, gimple *stmt)
e38def9c 2961{
aac19106 2962 if (stmt_could_throw_p (ifun, stmt))
e38def9c 2963 return false;
2964 return remove_stmt_from_eh_lp_fn (ifun, stmt);
4ee9c684 2965}
2966
e38def9c 2967/* Likewise, but always use the current function. */
2968
2969bool
42acab1c 2970maybe_clean_eh_stmt (gimple *stmt)
e38def9c 2971{
2972 return maybe_clean_eh_stmt_fn (cfun, stmt);
2973}
4ee9c684 2974
4c27dd45 2975/* Given a statement OLD_STMT and a new statement NEW_STMT that has replaced
2976 OLD_STMT in the function, remove OLD_STMT from the EH table and put NEW_STMT
2977 in the table if it should be in there. Return TRUE if a replacement was
 2978   done that may require an EH edge purge.  */
2979
e38def9c 2980bool
42acab1c 2981maybe_clean_or_replace_eh_stmt (gimple *old_stmt, gimple *new_stmt)
35c15734 2982{
e38def9c 2983 int lp_nr = lookup_stmt_eh_lp (old_stmt);
4c27dd45 2984
e38def9c 2985 if (lp_nr != 0)
4c27dd45 2986 {
aac19106 2987 bool new_stmt_could_throw = stmt_could_throw_p (cfun, new_stmt);
4c27dd45 2988
2989 if (new_stmt == old_stmt && new_stmt_could_throw)
2990 return false;
2991
e38def9c 2992 remove_stmt_from_eh_lp (old_stmt);
4c27dd45 2993 if (new_stmt_could_throw)
2994 {
e38def9c 2995 add_stmt_to_eh_lp (new_stmt, lp_nr);
4c27dd45 2996 return false;
2997 }
2998 else
2999 return true;
3000 }
3001
35c15734 3002 return false;
3003}
e38def9c 3004
9d75589a 3005/* Given a statement OLD_STMT in OLD_FUN and a duplicate statement NEW_STMT
e38def9c 3006 in NEW_FUN, copy the EH table data from OLD_STMT to NEW_STMT. The MAP
3007 operand is the return value of duplicate_eh_regions. */
3008
3009bool
42acab1c 3010maybe_duplicate_eh_stmt_fn (struct function *new_fun, gimple *new_stmt,
3011 struct function *old_fun, gimple *old_stmt,
06ecf488 3012 hash_map<void *, void *> *map,
3013 int default_lp_nr)
e38def9c 3014{
3015 int old_lp_nr, new_lp_nr;
e38def9c 3016
aac19106 3017 if (!stmt_could_throw_p (new_fun, new_stmt))
e38def9c 3018 return false;
3019
3020 old_lp_nr = lookup_stmt_eh_lp_fn (old_fun, old_stmt);
3021 if (old_lp_nr == 0)
3022 {
3023 if (default_lp_nr == 0)
3024 return false;
3025 new_lp_nr = default_lp_nr;
3026 }
3027 else if (old_lp_nr > 0)
3028 {
3029 eh_landing_pad old_lp, new_lp;
3030
f1f41a6c 3031 old_lp = (*old_fun->eh->lp_array)[old_lp_nr];
06ecf488 3032 new_lp = static_cast<eh_landing_pad> (*map->get (old_lp));
e38def9c 3033 new_lp_nr = new_lp->index;
3034 }
3035 else
3036 {
3037 eh_region old_r, new_r;
3038
f1f41a6c 3039 old_r = (*old_fun->eh->region_array)[-old_lp_nr];
06ecf488 3040 new_r = static_cast<eh_region> (*map->get (old_r));
e38def9c 3041 new_lp_nr = -new_r->index;
3042 }
3043
3044 add_stmt_to_eh_lp_fn (new_fun, new_stmt, new_lp_nr);
3045 return true;
3046}
3047
3048/* Similar, but both OLD_STMT and NEW_STMT are within the current function,
3049 and thus no remapping is required. */
3050
3051bool
42acab1c 3052maybe_duplicate_eh_stmt (gimple *new_stmt, gimple *old_stmt)
e38def9c 3053{
3054 int lp_nr;
3055
aac19106 3056 if (!stmt_could_throw_p (cfun, new_stmt))
e38def9c 3057 return false;
3058
3059 lp_nr = lookup_stmt_eh_lp (old_stmt);
3060 if (lp_nr == 0)
3061 return false;
3062
3063 add_stmt_to_eh_lp (new_stmt, lp_nr);
3064 return true;
3065}
4888ab9a 3066\f
75a70cf9 3067/* Returns TRUE if oneh and twoh are exception handlers (gimple_try_cleanup of
3068 GIMPLE_TRY) that are similar enough to be considered the same. Currently
3069 this only handles handlers consisting of a single call, as that's the
3070 important case for C++: a destructor call for a particular object showing
3071 up in multiple handlers. */
4888ab9a 3072
3073static bool
75a70cf9 3074same_handler_p (gimple_seq oneh, gimple_seq twoh)
4888ab9a 3075{
75a70cf9 3076 gimple_stmt_iterator gsi;
42acab1c 3077 gimple *ones, *twos;
75a70cf9 3078 unsigned int ai;
4888ab9a 3079
75a70cf9 3080 gsi = gsi_start (oneh);
3081 if (!gsi_one_before_end_p (gsi))
4888ab9a 3082 return false;
75a70cf9 3083 ones = gsi_stmt (gsi);
4888ab9a 3084
75a70cf9 3085 gsi = gsi_start (twoh);
3086 if (!gsi_one_before_end_p (gsi))
4888ab9a 3087 return false;
75a70cf9 3088 twos = gsi_stmt (gsi);
3089
3090 if (!is_gimple_call (ones)
3091 || !is_gimple_call (twos)
3092 || gimple_call_lhs (ones)
3093 || gimple_call_lhs (twos)
3094 || gimple_call_chain (ones)
3095 || gimple_call_chain (twos)
fb049fba 3096 || !gimple_call_same_target_p (ones, twos)
75a70cf9 3097 || gimple_call_num_args (ones) != gimple_call_num_args (twos))
4888ab9a 3098 return false;
3099
75a70cf9 3100 for (ai = 0; ai < gimple_call_num_args (ones); ++ai)
3101 if (!operand_equal_p (gimple_call_arg (ones, ai),
e38def9c 3102 gimple_call_arg (twos, ai), 0))
4888ab9a 3103 return false;
3104
3105 return true;
3106}
3107
3108/* Optimize
3109 try { A() } finally { try { ~B() } catch { ~A() } }
3110 try { ... } finally { ~A() }
3111 into
3112 try { A() } catch { ~B() }
3113 try { ~B() ... } finally { ~A() }
3114
3115 This occurs frequently in C++, where A is a local variable and B is a
3116 temporary used in the initializer for A. */
3117
3118static void
1a91d914 3119optimize_double_finally (gtry *one, gtry *two)
4888ab9a 3120{
42acab1c 3121 gimple *oneh;
75a70cf9 3122 gimple_stmt_iterator gsi;
e3a19533 3123 gimple_seq cleanup;
4888ab9a 3124
e3a19533 3125 cleanup = gimple_try_cleanup (one);
3126 gsi = gsi_start (cleanup);
75a70cf9 3127 if (!gsi_one_before_end_p (gsi))
4888ab9a 3128 return;
3129
75a70cf9 3130 oneh = gsi_stmt (gsi);
3131 if (gimple_code (oneh) != GIMPLE_TRY
3132 || gimple_try_kind (oneh) != GIMPLE_TRY_CATCH)
4888ab9a 3133 return;
3134
75a70cf9 3135 if (same_handler_p (gimple_try_cleanup (oneh), gimple_try_cleanup (two)))
4888ab9a 3136 {
75a70cf9 3137 gimple_seq seq = gimple_try_eval (oneh);
4888ab9a 3138
75a70cf9 3139 gimple_try_set_cleanup (one, seq);
3140 gimple_try_set_kind (one, GIMPLE_TRY_CATCH);
3141 seq = copy_gimple_seq_and_replace_locals (seq);
3142 gimple_seq_add_seq (&seq, gimple_try_eval (two));
3143 gimple_try_set_eval (two, seq);
4888ab9a 3144 }
3145}
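
A hypothetical C++ source shape that gimplifies into the pattern above: B is a temporary used in A's initializer, so a call to ~A() appears both in the handler guarding ~B() and in the cleanup for the rest of the scope, and same_handler_p can match the two single-call handlers:

struct B { B (); ~B (); };
struct A { A (const B &); ~A (); };
extern void rest ();

void
f ()
{
  A a ((B ()));	/* try { A() } finally { try { ~B() } catch { ~A() } }  */
  rest ();	/* try { ... } finally { ~A() }  */
}
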
3146
3147/* Perform EH refactoring optimizations that are simpler to do when code
c7684b8e 3148 flow has been lowered but EH structures haven't. */
4888ab9a 3149
3150static void
75a70cf9 3151refactor_eh_r (gimple_seq seq)
4888ab9a 3152{
75a70cf9 3153 gimple_stmt_iterator gsi;
42acab1c 3154 gimple *one, *two;
4888ab9a 3155
75a70cf9 3156 one = NULL;
3157 two = NULL;
3158 gsi = gsi_start (seq);
3159 while (1)
3160 {
3161 one = two;
3162 if (gsi_end_p (gsi))
3163 two = NULL;
3164 else
3165 two = gsi_stmt (gsi);
1a91d914 3166 if (one && two)
3167 if (gtry *try_one = dyn_cast <gtry *> (one))
3168 if (gtry *try_two = dyn_cast <gtry *> (two))
3169 if (gimple_try_kind (try_one) == GIMPLE_TRY_FINALLY
3170 && gimple_try_kind (try_two) == GIMPLE_TRY_FINALLY)
3171 optimize_double_finally (try_one, try_two);
75a70cf9 3172 if (one)
3173 switch (gimple_code (one))
4888ab9a 3174 {
75a70cf9 3175 case GIMPLE_TRY:
3176 refactor_eh_r (gimple_try_eval (one));
3177 refactor_eh_r (gimple_try_cleanup (one));
3178 break;
3179 case GIMPLE_CATCH:
1a91d914 3180 refactor_eh_r (gimple_catch_handler (as_a <gcatch *> (one)));
75a70cf9 3181 break;
3182 case GIMPLE_EH_FILTER:
3183 refactor_eh_r (gimple_eh_filter_failure (one));
3184 break;
4c0315d0 3185 case GIMPLE_EH_ELSE:
1a91d914 3186 {
3187 geh_else *eh_else_stmt = as_a <geh_else *> (one);
3188 refactor_eh_r (gimple_eh_else_n_body (eh_else_stmt));
3189 refactor_eh_r (gimple_eh_else_e_body (eh_else_stmt));
3190 }
4c0315d0 3191 break;
75a70cf9 3192 default:
3193 break;
4888ab9a 3194 }
75a70cf9 3195 if (two)
3196 gsi_next (&gsi);
3197 else
3198 break;
4888ab9a 3199 }
3200}
3201
7620bc82 3202namespace {
3203
3204const pass_data pass_data_refactor_eh =
4888ab9a 3205{
cbe8bda8 3206 GIMPLE_PASS, /* type */
3207 "ehopt", /* name */
3208 OPTGROUP_NONE, /* optinfo_flags */
cbe8bda8 3209 TV_TREE_EH, /* tv_id */
3210 PROP_gimple_lcf, /* properties_required */
3211 0, /* properties_provided */
3212 0, /* properties_destroyed */
3213 0, /* todo_flags_start */
3214 0, /* todo_flags_finish */
4888ab9a 3215};
cbe8bda8 3216
7620bc82 3217class pass_refactor_eh : public gimple_opt_pass
cbe8bda8 3218{
3219public:
9af5ce0c 3220 pass_refactor_eh (gcc::context *ctxt)
3221 : gimple_opt_pass (pass_data_refactor_eh, ctxt)
cbe8bda8 3222 {}
3223
3224 /* opt_pass methods: */
31315c24 3225 virtual bool gate (function *) { return flag_exceptions != 0; }
65b0537f 3226 virtual unsigned int execute (function *)
3227 {
3228 refactor_eh_r (gimple_body (current_function_decl));
3229 return 0;
3230 }
cbe8bda8 3231
3232}; // class pass_refactor_eh
3233
7620bc82 3234} // anon namespace
3235
cbe8bda8 3236gimple_opt_pass *
3237make_pass_refactor_eh (gcc::context *ctxt)
3238{
3239 return new pass_refactor_eh (ctxt);
3240}
e38def9c 3241\f
3242/* At the end of gimple optimization, we can lower RESX. */
4c5fcca6 3243
e38def9c 3244static bool
1a91d914 3245lower_resx (basic_block bb, gresx *stmt,
3246 hash_map<eh_region, tree> *mnt_map)
4c5fcca6 3247{
e38def9c 3248 int lp_nr;
3249 eh_region src_r, dst_r;
3250 gimple_stmt_iterator gsi;
42acab1c 3251 gimple *x;
e38def9c 3252 tree fn, src_nr;
3253 bool ret = false;
4c5fcca6 3254
e38def9c 3255 lp_nr = lookup_stmt_eh_lp (stmt);
3256 if (lp_nr != 0)
3257 dst_r = get_eh_region_from_lp_number (lp_nr);
3258 else
3259 dst_r = NULL;
4c5fcca6 3260
e38def9c 3261 src_r = get_eh_region_from_number (gimple_resx_region (stmt));
e38def9c 3262 gsi = gsi_last_bb (bb);
4c5fcca6 3263
395fc2bb 3264 if (src_r == NULL)
3265 {
3266 /* We can wind up with no source region when pass_cleanup_eh shows
3267 that there are no entries into an eh region and deletes it, but
3268 then the block that contains the resx isn't removed. This can
3269 happen without optimization when the switch statement created by
3270 lower_try_finally_switch isn't simplified to remove the eh case.
3271
3272 Resolve this by expanding the resx node to an abort. */
3273
b9a16870 3274 fn = builtin_decl_implicit (BUILT_IN_TRAP);
395fc2bb 3275 x = gimple_build_call (fn, 0);
3276 gsi_insert_before (&gsi, x, GSI_SAME_STMT);
3277
3278 while (EDGE_COUNT (bb->succs) > 0)
3279 remove_edge (EDGE_SUCC (bb, 0));
3280 }
3281 else if (dst_r)
e38def9c 3282 {
3283 /* When we have a destination region, we resolve this by copying
3284 the excptr and filter values into place, and changing the edge
3285 to immediately after the landing pad. */
3286 edge e;
4c5fcca6 3287
e38def9c 3288 if (lp_nr < 0)
3289 {
3290 basic_block new_bb;
e38def9c 3291 tree lab;
3d1eacdb 3292
e38def9c 3293	  /* We are resuming into a MUST_NOT_THROW region.  Expand a call to
3294 the failure decl into a new block, if needed. */
3295 gcc_assert (dst_r->type == ERT_MUST_NOT_THROW);
4c5fcca6 3296
06ecf488 3297 tree *slot = mnt_map->get (dst_r);
e38def9c 3298 if (slot == NULL)
3299 {
3300 gimple_stmt_iterator gsi2;
4c5fcca6 3301
e38def9c 3302 new_bb = create_empty_bb (bb);
205ce1aa 3303 new_bb->count = bb->count;
b3083327 3304 add_bb_to_loop (new_bb, bb->loop_father);
e38def9c 3305 lab = gimple_block_label (new_bb);
3306 gsi2 = gsi_start_bb (new_bb);
4c5fcca6 3307
e38def9c 3308 fn = dst_r->u.must_not_throw.failure_decl;
3309 x = gimple_build_call (fn, 0);
3310 gimple_set_location (x, dst_r->u.must_not_throw.failure_loc);
3311 gsi_insert_after (&gsi2, x, GSI_CONTINUE_LINKING);
3bd82487 3312
06ecf488 3313 mnt_map->put (dst_r, lab);
e38def9c 3314 }
3315 else
3316 {
06ecf488 3317 lab = *slot;
0fb4f2ce 3318 new_bb = label_to_block (cfun, lab);
e38def9c 3319 }
4c5fcca6 3320
e38def9c 3321 gcc_assert (EDGE_COUNT (bb->succs) == 0);
720cfc43 3322 e = make_single_succ_edge (bb, new_bb, EDGE_FALLTHRU);
e38def9c 3323 }
3324 else
3325 {
3326 edge_iterator ei;
bad12c62 3327 tree dst_nr = build_int_cst (integer_type_node, dst_r->index);
4c5fcca6 3328
b9a16870 3329 fn = builtin_decl_implicit (BUILT_IN_EH_COPY_VALUES);
bad12c62 3330 src_nr = build_int_cst (integer_type_node, src_r->index);
e38def9c 3331 x = gimple_build_call (fn, 2, dst_nr, src_nr);
3332 gsi_insert_before (&gsi, x, GSI_SAME_STMT);
4c5fcca6 3333
e38def9c 3334 /* Update the flags for the outgoing edge. */
3335 e = single_succ_edge (bb);
3336 gcc_assert (e->flags & EDGE_EH);
3337 e->flags = (e->flags & ~EDGE_EH) | EDGE_FALLTHRU;
720cfc43 3338 e->probability = profile_probability::always ();
4c5fcca6 3339
e38def9c 3340 /* If there are no more EH users of the landing pad, delete it. */
3341 FOR_EACH_EDGE (e, ei, e->dest->preds)
3342 if (e->flags & EDGE_EH)
3343 break;
3344 if (e == NULL)
3345 {
3346 eh_landing_pad lp = get_eh_landing_pad_from_number (lp_nr);
3347 remove_eh_landing_pad (lp);
3348 }
3349 }
4c5fcca6 3350
e38def9c 3351 ret = true;
3352 }
3353 else
3354 {
3355 tree var;
4c5fcca6 3356
e38def9c 3357 /* When we don't have a destination region, this exception escapes
3358 up the call chain. We resolve this by generating a call to the
3359 _Unwind_Resume library function. */
4c5fcca6 3360
471eff36 3361 /* The ARM EABI redefines _Unwind_Resume as __cxa_end_cleanup
61b3ef70 3362 with no arguments for C++. Check for that. */
471eff36 3363 if (src_r->use_cxa_end_cleanup)
3364 {
b9a16870 3365 fn = builtin_decl_implicit (BUILT_IN_CXA_END_CLEANUP);
471eff36 3366 x = gimple_build_call (fn, 0);
3367 gsi_insert_before (&gsi, x, GSI_SAME_STMT);
3368 }
3369 else
3bd82487 3370 {
b9a16870 3371 fn = builtin_decl_implicit (BUILT_IN_EH_POINTER);
bad12c62 3372 src_nr = build_int_cst (integer_type_node, src_r->index);
e38def9c 3373 x = gimple_build_call (fn, 1, src_nr);
f9e245b2 3374 var = create_tmp_var (ptr_type_node);
e38def9c 3375 var = make_ssa_name (var, x);
3376 gimple_call_set_lhs (x, var);
3377 gsi_insert_before (&gsi, x, GSI_SAME_STMT);
3378
5b64e274 3379 /* When exception handling is delegated to a caller function, we
3380 have to guarantee that shadow memory variables living on stack
 3381 will be cleaned before control is given to a parent function. */
3382 if (sanitize_flags_p (SANITIZE_ADDRESS))
3383 {
3384 tree decl
3385 = builtin_decl_implicit (BUILT_IN_ASAN_HANDLE_NO_RETURN);
3386 gimple *g = gimple_build_call (decl, 0);
3387 gimple_set_location (g, gimple_location (stmt));
3388 gsi_insert_before (&gsi, g, GSI_SAME_STMT);
3389 }
3390
b9a16870 3391 fn = builtin_decl_implicit (BUILT_IN_UNWIND_RESUME);
e38def9c 3392 x = gimple_build_call (fn, 1, var);
3393 gsi_insert_before (&gsi, x, GSI_SAME_STMT);
3bd82487 3394 }
4c5fcca6 3395
e38def9c 3396 gcc_assert (EDGE_COUNT (bb->succs) == 0);
3bd82487 3397 }
3d1eacdb 3398
e38def9c 3399 gsi_remove (&gsi, true);
3400
3401 return ret;
3bd82487 3402}
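
/* For illustration (an editor's sketch; exact GIMPLE dump syntax may
   vary): the escaping-exception case of lower_resx above turns

       <bb 5>:
         resx 1;

   into the equivalent of

       <bb 5>:
         _2 = __builtin_eh_pointer (1);
         _Unwind_Resume (_2);

   while a resx whose landing pad has been optimized away is replaced
   by a call to __builtin_trap and its outgoing edges are removed.  */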
3403
7620bc82 3404namespace {
3405
3406const pass_data pass_data_lower_resx =
3bd82487 3407{
cbe8bda8 3408 GIMPLE_PASS, /* type */
3409 "resx", /* name */
3410 OPTGROUP_NONE, /* optinfo_flags */
cbe8bda8 3411 TV_TREE_EH, /* tv_id */
3412 PROP_gimple_lcf, /* properties_required */
3413 0, /* properties_provided */
3414 0, /* properties_destroyed */
3415 0, /* todo_flags_start */
8b88439e 3416 0, /* todo_flags_finish */
3bd82487 3417};
3418
7620bc82 3419class pass_lower_resx : public gimple_opt_pass
cbe8bda8 3420{
3421public:
9af5ce0c 3422 pass_lower_resx (gcc::context *ctxt)
3423 : gimple_opt_pass (pass_data_lower_resx, ctxt)
cbe8bda8 3424 {}
3425
3426 /* opt_pass methods: */
31315c24 3427 virtual bool gate (function *) { return flag_exceptions != 0; }
65b0537f 3428 virtual unsigned int execute (function *);
cbe8bda8 3429
3430}; // class pass_lower_resx
3431
65b0537f 3432unsigned
3433pass_lower_resx::execute (function *fun)
e38def9c 3434{
3435 basic_block bb;
e38def9c 3436 bool dominance_invalidated = false;
3437 bool any_rewritten = false;
3bd82487 3438
06ecf488 3439 hash_map<eh_region, tree> mnt_map;
3bd82487 3440
65b0537f 3441 FOR_EACH_BB_FN (bb, fun)
e38def9c 3442 {
42acab1c 3443 gimple *last = last_stmt (bb);
e38def9c 3444 if (last && is_gimple_resx (last))
3445 {
1a91d914 3446 dominance_invalidated |=
3447 lower_resx (bb, as_a <gresx *> (last), &mnt_map);
e38def9c 3448 any_rewritten = true;
3449 }
3450 }
3451
e38def9c 3452 if (dominance_invalidated)
3453 {
3454 free_dominance_info (CDI_DOMINATORS);
3455 free_dominance_info (CDI_POST_DOMINATORS);
3bd82487 3456 }
4c5fcca6 3457
e38def9c 3458 return any_rewritten ? TODO_update_ssa_only_virtuals : 0;
3459}
4c5fcca6 3460
7620bc82 3461} // anon namespace
3462
cbe8bda8 3463gimple_opt_pass *
3464make_pass_lower_resx (gcc::context *ctxt)
3465{
3466 return new pass_lower_resx (ctxt);
3467}
3468
1227a337 3469/* Try to optimize var = {v} {CLOBBER} stmts followed just by
3470 external throw. */
3471
3472static void
3473optimize_clobbers (basic_block bb)
3474{
3475 gimple_stmt_iterator gsi = gsi_last_bb (bb);
896a0c42 3476 bool any_clobbers = false;
3477 bool seen_stack_restore = false;
3478 edge_iterator ei;
3479 edge e;
3480
3481 /* Only optimize anything if the bb contains at least one clobber,
3482 ends with resx (checked by caller), optionally contains some
 3483 debug stmts or labels and at most one __builtin_stack_restore
3484 call, and has an incoming EH edge. */
d1d905ee 3485 for (gsi_prev (&gsi); !gsi_end_p (gsi); gsi_prev (&gsi))
1227a337 3486 {
42acab1c 3487 gimple *stmt = gsi_stmt (gsi);
1227a337 3488 if (is_gimple_debug (stmt))
d1d905ee 3489 continue;
896a0c42 3490 if (gimple_clobber_p (stmt))
3491 {
3492 any_clobbers = true;
3493 continue;
3494 }
3495 if (!seen_stack_restore
3496 && gimple_call_builtin_p (stmt, BUILT_IN_STACK_RESTORE))
3497 {
3498 seen_stack_restore = true;
3499 continue;
3500 }
3501 if (gimple_code (stmt) == GIMPLE_LABEL)
3502 break;
3503 return;
3504 }
3505 if (!any_clobbers)
3506 return;
3507 FOR_EACH_EDGE (e, ei, bb->preds)
3508 if (e->flags & EDGE_EH)
3509 break;
3510 if (e == NULL)
3511 return;
3512 gsi = gsi_last_bb (bb);
3513 for (gsi_prev (&gsi); !gsi_end_p (gsi); gsi_prev (&gsi))
3514 {
42acab1c 3515 gimple *stmt = gsi_stmt (gsi);
896a0c42 3516 if (!gimple_clobber_p (stmt))
3517 continue;
1227a337 3518 unlink_stmt_vdef (stmt);
3519 gsi_remove (&gsi, true);
3520 release_defs (stmt);
3521 }
3522}
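
/* For illustration (an editor's sketch): when a block such as

       <bb 7>:
         D.1234 ={v} {CLOBBER};
         D.1235 ={v} {CLOBBER};
         resx 2;

   rethrows to a caller (stmt_can_throw_external), the clobbers are
   useless because control leaves the function, so optimize_clobbers
   deletes them and keeps only the resx.  */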
e38def9c 3523
07428872 3524/* Try to sink var = {v} {CLOBBER} stmts followed just by
3525 internal throw to successor BB. */
3526
3527static int
3528sink_clobbers (basic_block bb)
3529{
3530 edge e;
3531 edge_iterator ei;
3532 gimple_stmt_iterator gsi, dgsi;
3533 basic_block succbb;
3534 bool any_clobbers = false;
8aacb2c5 3535 unsigned todo = 0;
07428872 3536
3537 /* Only optimize if BB has a single EH successor and
3538 all predecessor edges are EH too. */
3539 if (!single_succ_p (bb)
3540 || (single_succ_edge (bb)->flags & EDGE_EH) == 0)
3541 return 0;
3542
3543 FOR_EACH_EDGE (e, ei, bb->preds)
3544 {
3545 if ((e->flags & EDGE_EH) == 0)
3546 return 0;
3547 }
3548
3549 /* And BB contains only CLOBBER stmts before the final
3550 RESX. */
3551 gsi = gsi_last_bb (bb);
3552 for (gsi_prev (&gsi); !gsi_end_p (gsi); gsi_prev (&gsi))
3553 {
42acab1c 3554 gimple *stmt = gsi_stmt (gsi);
07428872 3555 if (is_gimple_debug (stmt))
3556 continue;
3557 if (gimple_code (stmt) == GIMPLE_LABEL)
3558 break;
896a0c42 3559 if (!gimple_clobber_p (stmt))
07428872 3560 return 0;
3561 any_clobbers = true;
3562 }
3563 if (!any_clobbers)
3564 return 0;
3565
0ba38440 3566 edge succe = single_succ_edge (bb);
3567 succbb = succe->dest;
3568
3569 /* See if there is a virtual PHI node to take an updated virtual
3570 operand from. */
1a91d914 3571 gphi *vphi = NULL;
0ba38440 3572 tree vuse = NULL_TREE;
1a91d914 3573 for (gphi_iterator gpi = gsi_start_phis (succbb);
3574 !gsi_end_p (gpi); gsi_next (&gpi))
0ba38440 3575 {
1a91d914 3576 tree res = gimple_phi_result (gpi.phi ());
0ba38440 3577 if (virtual_operand_p (res))
3578 {
1a91d914 3579 vphi = gpi.phi ();
0ba38440 3580 vuse = res;
3581 break;
3582 }
3583 }
3584
07428872 3585 dgsi = gsi_after_labels (succbb);
3586 gsi = gsi_last_bb (bb);
3587 for (gsi_prev (&gsi); !gsi_end_p (gsi); gsi_prev (&gsi))
3588 {
42acab1c 3589 gimple *stmt = gsi_stmt (gsi);
896a0c42 3590 tree lhs;
07428872 3591 if (is_gimple_debug (stmt))
3592 continue;
3593 if (gimple_code (stmt) == GIMPLE_LABEL)
3594 break;
896a0c42 3595 lhs = gimple_assign_lhs (stmt);
3596 /* Unfortunately we don't have dominance info updated at this
3597 point, so checking if
3598 dominated_by_p (CDI_DOMINATORS, succbb,
 3599 gimple_bb (SSA_NAME_DEF_STMT (TREE_OPERAND (lhs, 0))))
3600 would be too costly. Thus, avoid sinking any clobbers that
3601 refer to non-(D) SSA_NAMEs. */
3602 if (TREE_CODE (lhs) == MEM_REF
3603 && TREE_CODE (TREE_OPERAND (lhs, 0)) == SSA_NAME
3604 && !SSA_NAME_IS_DEFAULT_DEF (TREE_OPERAND (lhs, 0)))
3605 {
0ba38440 3606 unlink_stmt_vdef (stmt);
896a0c42 3607 gsi_remove (&gsi, true);
3608 release_defs (stmt);
3609 continue;
3610 }
0ba38440 3611
3612 /* As we do not change stmt order when sinking across a
3613 forwarder edge we can keep virtual operands in place. */
07428872 3614 gsi_remove (&gsi, false);
0ba38440 3615 gsi_insert_before (&dgsi, stmt, GSI_NEW_STMT);
3616
3617 /* But adjust virtual operands if we sunk across a PHI node. */
3618 if (vuse)
3619 {
42acab1c 3620 gimple *use_stmt;
0ba38440 3621 imm_use_iterator iter;
3622 use_operand_p use_p;
3623 FOR_EACH_IMM_USE_STMT (use_stmt, iter, vuse)
3624 FOR_EACH_IMM_USE_ON_STMT (use_p, iter)
3625 SET_USE (use_p, gimple_vdef (stmt));
587a19f1 3626 if (SSA_NAME_OCCURS_IN_ABNORMAL_PHI (vuse))
3627 {
3628 SSA_NAME_OCCURS_IN_ABNORMAL_PHI (gimple_vdef (stmt)) = 1;
3629 SSA_NAME_OCCURS_IN_ABNORMAL_PHI (vuse) = 0;
3630 }
0ba38440 3631 /* Adjust the incoming virtual operand. */
3632 SET_USE (PHI_ARG_DEF_PTR_FROM_EDGE (vphi, succe), gimple_vuse (stmt));
3633 SET_USE (gimple_vuse_op (stmt), vuse);
3634 }
8aacb2c5 3635 /* If the successor block has multiple predecessors but no virtual
 3636 PHI node, arrange for virtual operands to be renamed. */
3637 else if (gimple_vuse_op (stmt) != NULL_USE_OPERAND_P
3638 && !single_pred_p (succbb))
3639 {
3640 /* In this case there will be no use of the VDEF of this stmt.
3641 ??? Unless this is a secondary opportunity and we have not
3642 removed unreachable blocks yet, so we cannot assert this.
3643 Which also means we will end up renaming too many times. */
3644 SET_USE (gimple_vuse_op (stmt), gimple_vop (cfun));
3645 mark_virtual_operands_for_renaming (cfun);
3646 todo |= TODO_update_ssa_only_virtuals;
3647 }
07428872 3648 }
3649
8aacb2c5 3650 return todo;
07428872 3651}
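
/* For illustration (an editor's sketch): given an internal rethrow

       <bb 8>, all preds EH, single EH successor <bb 9>:
         D.1234 ={v} {CLOBBER};
         resx 3;

   sink_clobbers moves the clobber after the labels of <bb 9>, fixing
   up virtual operands if a virtual PHI in <bb 9> is crossed, which
   leaves <bb 8> with only the resx and so exposes the empty-handler
   cleanups below.  */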
3652
778f5bdd 3653/* At the end of inlining, we can lower EH_DISPATCH. Return true when
3654 we have found some duplicate labels and removed some edges. */
3bd82487 3655
778f5bdd 3656static bool
1a91d914 3657lower_eh_dispatch (basic_block src, geh_dispatch *stmt)
3bd82487 3658{
e38def9c 3659 gimple_stmt_iterator gsi;
3660 int region_nr;
3661 eh_region r;
3662 tree filter, fn;
42acab1c 3663 gimple *x;
778f5bdd 3664 bool redirected = false;
3bd82487 3665
e38def9c 3666 region_nr = gimple_eh_dispatch_region (stmt);
3667 r = get_eh_region_from_number (region_nr);
3bd82487 3668
e38def9c 3669 gsi = gsi_last_bb (src);
3bd82487 3670
e38def9c 3671 switch (r->type)
3bd82487 3672 {
e38def9c 3673 case ERT_TRY:
3674 {
c2078b80 3675 auto_vec<tree> labels;
e38def9c 3676 tree default_label = NULL;
3677 eh_catch c;
3678 edge_iterator ei;
3679 edge e;
431205b7 3680 hash_set<tree> seen_values;
e38def9c 3681
3682 /* Collect the labels for a switch. Zero the post_landing_pad
 3683 field because we'll no longer have anything keeping these labels
9d75589a 3684 in existence and the optimizer will be free to merge these
e38def9c 3685 blocks at will. */
3686 for (c = r->u.eh_try.first_catch; c ; c = c->next_catch)
3687 {
3688 tree tp_node, flt_node, lab = c->label;
778f5bdd 3689 bool have_label = false;
3bd82487 3690
e38def9c 3691 c->label = NULL;
3692 tp_node = c->type_list;
3693 flt_node = c->filter_list;
3694
3695 if (tp_node == NULL)
3696 {
3697 default_label = lab;
3698 break;
3699 }
3700 do
3701 {
778f5bdd 3702 /* Filter out duplicate labels that arise when this handler
3703 is shadowed by an earlier one. When no labels are
3704 attached to the handler anymore, we remove
3705 the corresponding edge and then we delete unreachable
3706 blocks at the end of this pass. */
431205b7 3707 if (! seen_values.contains (TREE_VALUE (flt_node)))
778f5bdd 3708 {
b6e3dd65 3709 tree t = build_case_label (TREE_VALUE (flt_node),
3710 NULL, lab);
f1f41a6c 3711 labels.safe_push (t);
431205b7 3712 seen_values.add (TREE_VALUE (flt_node));
778f5bdd 3713 have_label = true;
3714 }
e38def9c 3715
3716 tp_node = TREE_CHAIN (tp_node);
3717 flt_node = TREE_CHAIN (flt_node);
3718 }
3719 while (tp_node);
778f5bdd 3720 if (! have_label)
3721 {
0fb4f2ce 3722 remove_edge (find_edge (src, label_to_block (cfun, lab)));
778f5bdd 3723 redirected = true;
3724 }
e38def9c 3725 }
3726
3727 /* Clean up the edge flags. */
3728 FOR_EACH_EDGE (e, ei, src->succs)
3729 {
3730 if (e->flags & EDGE_FALLTHRU)
3731 {
3732 /* If there was no catch-all, use the fallthru edge. */
3733 if (default_label == NULL)
3734 default_label = gimple_block_label (e->dest);
3735 e->flags &= ~EDGE_FALLTHRU;
3736 }
3737 }
3738 gcc_assert (default_label != NULL);
3739
3740 /* Don't generate a switch if there's only a default case.
3741 This is common in the form of try { A; } catch (...) { B; }. */
f1f41a6c 3742 if (!labels.exists ())
e38def9c 3743 {
3744 e = single_succ_edge (src);
3745 e->flags |= EDGE_FALLTHRU;
3746 }
3747 else
3748 {
b9a16870 3749 fn = builtin_decl_implicit (BUILT_IN_EH_FILTER);
bad12c62 3750 x = gimple_build_call (fn, 1, build_int_cst (integer_type_node,
3751 region_nr));
f9e245b2 3752 filter = create_tmp_var (TREE_TYPE (TREE_TYPE (fn)));
e38def9c 3753 filter = make_ssa_name (filter, x);
3754 gimple_call_set_lhs (x, filter);
3755 gsi_insert_before (&gsi, x, GSI_SAME_STMT);
3756
3757 /* Turn the default label into a default case. */
b6e3dd65 3758 default_label = build_case_label (NULL, NULL, default_label);
e38def9c 3759 sort_case_labels (labels);
3760
49a70175 3761 x = gimple_build_switch (filter, default_label, labels);
e38def9c 3762 gsi_insert_before (&gsi, x, GSI_SAME_STMT);
e38def9c 3763 }
3764 }
3765 break;
3766
3767 case ERT_ALLOWED_EXCEPTIONS:
3768 {
3769 edge b_e = BRANCH_EDGE (src);
3770 edge f_e = FALLTHRU_EDGE (src);
3771
b9a16870 3772 fn = builtin_decl_implicit (BUILT_IN_EH_FILTER);
bad12c62 3773 x = gimple_build_call (fn, 1, build_int_cst (integer_type_node,
3774 region_nr));
f9e245b2 3775 filter = create_tmp_var (TREE_TYPE (TREE_TYPE (fn)));
e38def9c 3776 filter = make_ssa_name (filter, x);
3777 gimple_call_set_lhs (x, filter);
3778 gsi_insert_before (&gsi, x, GSI_SAME_STMT);
3779
3780 r->u.allowed.label = NULL;
3781 x = gimple_build_cond (EQ_EXPR, filter,
3782 build_int_cst (TREE_TYPE (filter),
3783 r->u.allowed.filter),
3784 NULL_TREE, NULL_TREE);
3785 gsi_insert_before (&gsi, x, GSI_SAME_STMT);
3786
3787 b_e->flags = b_e->flags | EDGE_TRUE_VALUE;
3788 f_e->flags = (f_e->flags & ~EDGE_FALLTHRU) | EDGE_FALSE_VALUE;
3789 }
3790 break;
3791
3792 default:
3793 gcc_unreachable ();
3bd82487 3794 }
e38def9c 3795
3796 /* Replace the EH_DISPATCH with the SWITCH or COND generated above. */
3797 gsi_remove (&gsi, true);
778f5bdd 3798 return redirected;
3bd82487 3799}
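
/* For illustration (an editor's sketch; exact dump syntax may vary):
   for an ERT_TRY region with an int handler and a catch-all,

       <bb 6>:
         eh_dispatch 2;

   is replaced by a switch on the filter value,

       <bb 6>:
         _5 = __builtin_eh_filter (2);
         switch (_5) <default: <L_catchall>, case 1: <L_int>>

   where filter values shadowed by earlier handlers are dropped; if
   only the default case remains, no switch is emitted and the
   fallthru edge is used.  For ERT_ALLOWED_EXCEPTIONS a conditional
   branch comparing the filter against the allowed value is emitted
   instead.  */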
3800
7620bc82 3801namespace {
3802
3803const pass_data pass_data_lower_eh_dispatch =
65b0537f 3804{
3805 GIMPLE_PASS, /* type */
3806 "ehdisp", /* name */
3807 OPTGROUP_NONE, /* optinfo_flags */
65b0537f 3808 TV_TREE_EH, /* tv_id */
3809 PROP_gimple_lcf, /* properties_required */
3810 0, /* properties_provided */
3811 0, /* properties_destroyed */
3812 0, /* todo_flags_start */
8b88439e 3813 0, /* todo_flags_finish */
65b0537f 3814};
3815
7620bc82 3816class pass_lower_eh_dispatch : public gimple_opt_pass
65b0537f 3817{
3818public:
3819 pass_lower_eh_dispatch (gcc::context *ctxt)
3820 : gimple_opt_pass (pass_data_lower_eh_dispatch, ctxt)
3821 {}
3822
3823 /* opt_pass methods: */
3824 virtual bool gate (function *fun) { return fun->eh->region_tree != NULL; }
3825 virtual unsigned int execute (function *);
3826
3827}; // class pass_lower_eh_dispatch
3828
3829unsigned
3830pass_lower_eh_dispatch::execute (function *fun)
e38def9c 3831{
3832 basic_block bb;
07428872 3833 int flags = 0;
778f5bdd 3834 bool redirected = false;
3bd82487 3835
e38def9c 3836 assign_filter_values ();
3d1eacdb 3837
65b0537f 3838 FOR_EACH_BB_FN (bb, fun)
e38def9c 3839 {
42acab1c 3840 gimple *last = last_stmt (bb);
1227a337 3841 if (last == NULL)
3842 continue;
3843 if (gimple_code (last) == GIMPLE_EH_DISPATCH)
e38def9c 3844 {
1a91d914 3845 redirected |= lower_eh_dispatch (bb,
3846 as_a <geh_dispatch *> (last));
07428872 3847 flags |= TODO_update_ssa_only_virtuals;
3848 }
3849 else if (gimple_code (last) == GIMPLE_RESX)
3850 {
aac19106 3851 if (stmt_can_throw_external (cfun, last))
07428872 3852 optimize_clobbers (bb);
3853 else
3854 flags |= sink_clobbers (bb);
e38def9c 3855 }
3856 }
3857
778f5bdd 3858 if (redirected)
3a2c20a1 3859 {
3860 free_dominance_info (CDI_DOMINATORS);
3861 delete_unreachable_blocks ();
3862 }
07428872 3863 return flags;
e38def9c 3864}
3865
7620bc82 3866} // anon namespace
3867
cbe8bda8 3868gimple_opt_pass *
3869make_pass_lower_eh_dispatch (gcc::context *ctxt)
3870{
3871 return new pass_lower_eh_dispatch (ctxt);
3872}
e38def9c 3873\f
390f4a4b 3874/* Walk statements, see what regions and, optionally, landing pads
3875 are really referenced.
3876
3877 Returns in R_REACHABLEP an sbitmap with bits set for reachable regions,
 3878 and in LP_REACHABLEP an sbitmap with bits set for reachable landing pads.
3879
 3880 Passing NULL for LP_REACHABLEP is valid; in this case only reachable
3881 regions are marked.
3882
3883 The caller is responsible for freeing the returned sbitmaps. */
e38def9c 3884
3885static void
390f4a4b 3886mark_reachable_handlers (sbitmap *r_reachablep, sbitmap *lp_reachablep)
e38def9c 3887{
3888 sbitmap r_reachable, lp_reachable;
e38def9c 3889 basic_block bb;
390f4a4b 3890 bool mark_landing_pads = (lp_reachablep != NULL);
3891 gcc_checking_assert (r_reachablep != NULL);
3bd82487 3892
f1f41a6c 3893 r_reachable = sbitmap_alloc (cfun->eh->region_array->length ());
53c5d9d4 3894 bitmap_clear (r_reachable);
390f4a4b 3895 *r_reachablep = r_reachable;
3896
3897 if (mark_landing_pads)
3898 {
3899 lp_reachable = sbitmap_alloc (cfun->eh->lp_array->length ());
3900 bitmap_clear (lp_reachable);
3901 *lp_reachablep = lp_reachable;
3902 }
3903 else
3904 lp_reachable = NULL;
3bd82487 3905
fc00614f 3906 FOR_EACH_BB_FN (bb, cfun)
3bd82487 3907 {
d0ac3b8a 3908 gimple_stmt_iterator gsi;
e38def9c 3909
3910 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
3911 {
42acab1c 3912 gimple *stmt = gsi_stmt (gsi);
e38def9c 3913
390f4a4b 3914 if (mark_landing_pads)
e38def9c 3915 {
390f4a4b 3916 int lp_nr = lookup_stmt_eh_lp (stmt);
3917
3918 /* Negative LP numbers are MUST_NOT_THROW regions which
3919 are not considered BB enders. */
3920 if (lp_nr < 0)
3921 bitmap_set_bit (r_reachable, -lp_nr);
3922
3923 /* Positive LP numbers are real landing pads, and BB enders. */
3924 else if (lp_nr > 0)
3925 {
3926 gcc_assert (gsi_one_before_end_p (gsi));
3927 eh_region region = get_eh_region_from_lp_number (lp_nr);
3928 bitmap_set_bit (r_reachable, region->index);
3929 bitmap_set_bit (lp_reachable, lp_nr);
3930 }
e38def9c 3931 }
4e392ca1 3932
3933 /* Avoid removing regions referenced from RESX/EH_DISPATCH. */
3934 switch (gimple_code (stmt))
3935 {
3936 case GIMPLE_RESX:
1a91d914 3937 bitmap_set_bit (r_reachable,
3938 gimple_resx_region (as_a <gresx *> (stmt)));
4e392ca1 3939 break;
3940 case GIMPLE_EH_DISPATCH:
1a91d914 3941 bitmap_set_bit (r_reachable,
3942 gimple_eh_dispatch_region (
3943 as_a <geh_dispatch *> (stmt)));
4e392ca1 3944 break;
9eb0aee3 3945 case GIMPLE_CALL:
3946 if (gimple_call_builtin_p (stmt, BUILT_IN_EH_COPY_VALUES))
3947 for (int i = 0; i < 2; ++i)
3948 {
3949 tree rt = gimple_call_arg (stmt, i);
3950 HOST_WIDE_INT ri = tree_to_shwi (rt);
3951
dd0b0596 3952 gcc_assert (ri == (int)ri);
9eb0aee3 3953 bitmap_set_bit (r_reachable, ri);
3954 }
3955 break;
4e392ca1 3956 default:
3957 break;
3958 }
e38def9c 3959 }
3bd82487 3960 }
390f4a4b 3961}
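
/* For illustration (an editor's sketch): given the statements

       resx 1;
       eh_dispatch 2;
       __builtin_eh_copy_values (3, 4);

   the walk above marks regions 1, 2, 3 and 4 as reachable; any stmt
   with a positive landing-pad number additionally marks that landing
   pad and its region, while negative numbers denote MUST_NOT_THROW
   regions and mark only the region.  */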
3962
3963/* Remove unreachable handlers and unreachable landing pads. */
3964
3965static void
3966remove_unreachable_handlers (void)
3967{
3968 sbitmap r_reachable, lp_reachable;
3969 eh_region region;
3970 eh_landing_pad lp;
3971 unsigned i;
3972
3973 mark_reachable_handlers (&r_reachable, &lp_reachable);
e38def9c 3974
3975 if (dump_file)
3bd82487 3976 {
e38def9c 3977 fprintf (dump_file, "Before removal of unreachable regions:\n");
3978 dump_eh_tree (dump_file, cfun);
3979 fprintf (dump_file, "Reachable regions: ");
53c5d9d4 3980 dump_bitmap_file (dump_file, r_reachable);
e38def9c 3981 fprintf (dump_file, "Reachable landing pads: ");
53c5d9d4 3982 dump_bitmap_file (dump_file, lp_reachable);
3bd82487 3983 }
3984
390f4a4b 3985 if (dump_file)
3986 {
3987 FOR_EACH_VEC_SAFE_ELT (cfun->eh->region_array, i, region)
3988 if (region && !bitmap_bit_p (r_reachable, region->index))
3989 fprintf (dump_file,
3990 "Removing unreachable region %d\n",
3991 region->index);
3992 }
3993
3994 remove_unreachable_eh_regions (r_reachable);
3bd82487 3995
390f4a4b 3996 FOR_EACH_VEC_SAFE_ELT (cfun->eh->lp_array, i, lp)
3997 if (lp && !bitmap_bit_p (lp_reachable, lp->index))
e38def9c 3998 {
3999 if (dump_file)
390f4a4b 4000 fprintf (dump_file,
4001 "Removing unreachable landing pad %d\n",
4002 lp->index);
e38def9c 4003 remove_eh_landing_pad (lp);
4004 }
48e1416a 4005
e38def9c 4006 if (dump_file)
3bd82487 4007 {
e38def9c 4008 fprintf (dump_file, "\n\nAfter removal of unreachable regions:\n");
4009 dump_eh_tree (dump_file, cfun);
4010 fprintf (dump_file, "\n\n");
3bd82487 4011 }
4012
e38def9c 4013 sbitmap_free (r_reachable);
4014 sbitmap_free (lp_reachable);
4015
382ecba7 4016 if (flag_checking)
4017 verify_eh_tree (cfun);
e38def9c 4018}
4019
b00b0dc4 4020/* Remove unreachable handlers if any landing pads have been removed after
 4021 the last ehcleanup pass (due to gimple_purge_dead_eh_edges). */
4022
4023void
4024maybe_remove_unreachable_handlers (void)
4025{
4026 eh_landing_pad lp;
390f4a4b 4027 unsigned i;
b00b0dc4 4028
4029 if (cfun->eh == NULL)
4030 return;
390f4a4b 4031
4032 FOR_EACH_VEC_SAFE_ELT (cfun->eh->lp_array, i, lp)
b00b0dc4 4033 if (lp && lp->post_landing_pad)
4034 {
0fb4f2ce 4035 if (label_to_block (cfun, lp->post_landing_pad) == NULL)
b00b0dc4 4036 {
4037 remove_unreachable_handlers ();
4038 return;
4039 }
4040 }
4041}
4042
e38def9c 4043/* Remove regions that do not have landing pads. This assumes
4044 that remove_unreachable_handlers has already been run, and
390f4a4b 4045 that we've just manipulated the landing pads since then.
4046
4047 Preserve regions with landing pads and regions that prevent
4048 exceptions from propagating further, even if these regions
4049 are not reachable. */
e38def9c 4050
4051static void
4052remove_unreachable_handlers_no_lp (void)
4053{
390f4a4b 4054 eh_region region;
4b393c71 4055 sbitmap r_reachable;
390f4a4b 4056 unsigned i;
4b393c71 4057
390f4a4b 4058 mark_reachable_handlers (&r_reachable, /*lp_reachablep=*/NULL);
4b393c71 4059
390f4a4b 4060 FOR_EACH_VEC_SAFE_ELT (cfun->eh->region_array, i, region)
4b393c71 4061 {
390f4a4b 4062 if (! region)
4063 continue;
4064
4065 if (region->landing_pads != NULL
4066 || region->type == ERT_MUST_NOT_THROW)
4067 bitmap_set_bit (r_reachable, region->index);
4068
4069 if (dump_file
4070 && !bitmap_bit_p (r_reachable, region->index))
4071 fprintf (dump_file,
4072 "Removing unreachable region %d\n",
4073 region->index);
4b393c71 4074 }
e38def9c 4075
390f4a4b 4076 remove_unreachable_eh_regions (r_reachable);
4b393c71 4077
4078 sbitmap_free (r_reachable);
3bd82487 4079}
4080
e38def9c 4081/* Undo critical edge splitting on an EH landing pad. Earlier, we
 4082 optimistically split all sorts of edges, including EH edges. The
4083 optimization passes in between may not have needed them; if not,
4084 we should undo the split.
4085
4086 Recognize this case by having one EH edge incoming to the BB and
4087 one normal edge outgoing; BB should be empty apart from the
4088 post_landing_pad label.
4089
4090 Note that this is slightly different from the empty handler case
4091 handled by cleanup_empty_eh, in that the actual handler may yet
4092 have actual code but the landing pad has been separated from the
4093 handler. As such, cleanup_empty_eh relies on this transformation
4094 having been done first. */
4c5fcca6 4095
4096static bool
e38def9c 4097unsplit_eh (eh_landing_pad lp)
4c5fcca6 4098{
0fb4f2ce 4099 basic_block bb = label_to_block (cfun, lp->post_landing_pad);
e38def9c 4100 gimple_stmt_iterator gsi;
4101 edge e_in, e_out;
4102
4103 /* Quickly check the edge counts on BB for singularity. */
896a0c42 4104 if (!single_pred_p (bb) || !single_succ_p (bb))
e38def9c 4105 return false;
896a0c42 4106 e_in = single_pred_edge (bb);
4107 e_out = single_succ_edge (bb);
4c5fcca6 4108
e38def9c 4109 /* Input edge must be EH and output edge must be normal. */
4110 if ((e_in->flags & EDGE_EH) == 0 || (e_out->flags & EDGE_EH) != 0)
4111 return false;
4112
0b76e49c 4113 /* The block must be empty except for the labels and debug insns. */
4114 gsi = gsi_after_labels (bb);
4115 if (!gsi_end_p (gsi) && is_gimple_debug (gsi_stmt (gsi)))
4116 gsi_next_nondebug (&gsi);
4117 if (!gsi_end_p (gsi))
e38def9c 4118 return false;
4119
4120 /* The destination block must not already have a landing pad
4121 for a different region. */
4122 for (gsi = gsi_start_bb (e_out->dest); !gsi_end_p (gsi); gsi_next (&gsi))
4c5fcca6 4123 {
1a91d914 4124 glabel *label_stmt = dyn_cast <glabel *> (gsi_stmt (gsi));
e38def9c 4125 tree lab;
4126 int lp_nr;
4c5fcca6 4127
1a91d914 4128 if (!label_stmt)
e38def9c 4129 break;
1a91d914 4130 lab = gimple_label_label (label_stmt);
e38def9c 4131 lp_nr = EH_LANDING_PAD_NR (lab);
4132 if (lp_nr && get_eh_region_from_lp_number (lp_nr) != lp->region)
4133 return false;
4134 }
4c5fcca6 4135
e9d5f86f 4136 /* The new destination block must not already be a destination of
4137 the source block, lest we merge fallthru and eh edges and get
 4138 all sorts of confusion. */
4139 if (find_edge (e_in->src, e_out->dest))
4140 return false;
4141
c57e3b9d 4142 /* ??? We can get degenerate phis due to cfg cleanups. I would have
4143 thought this should have been cleaned up by a phicprop pass, but
4144 that doesn't appear to handle virtuals. Propagate by hand. */
4145 if (!gimple_seq_empty_p (phi_nodes (bb)))
4146 {
1a91d914 4147 for (gphi_iterator gpi = gsi_start_phis (bb); !gsi_end_p (gpi); )
c57e3b9d 4148 {
42acab1c 4149 gimple *use_stmt;
1a91d914 4150 gphi *phi = gpi.phi ();
c57e3b9d 4151 tree lhs = gimple_phi_result (phi);
4152 tree rhs = gimple_phi_arg_def (phi, 0);
4153 use_operand_p use_p;
4154 imm_use_iterator iter;
4155
4156 FOR_EACH_IMM_USE_STMT (use_stmt, iter, lhs)
4157 {
4158 FOR_EACH_IMM_USE_ON_STMT (use_p, iter)
4159 SET_USE (use_p, rhs);
4160 }
4161
4162 if (SSA_NAME_OCCURS_IN_ABNORMAL_PHI (lhs))
4163 SSA_NAME_OCCURS_IN_ABNORMAL_PHI (rhs) = 1;
4164
1a91d914 4165 remove_phi_node (&gpi, true);
c57e3b9d 4166 }
4167 }
3d1eacdb 4168
e38def9c 4169 if (dump_file && (dump_flags & TDF_DETAILS))
4170 fprintf (dump_file, "Unsplit EH landing pad %d to block %i.\n",
4171 lp->index, e_out->dest->index);
4172
4173 /* Redirect the edge. Since redirect_eh_edge_1 expects to be moving
4174 a successor edge, humor it. But do the real CFG change with the
4175 predecessor of E_OUT in order to preserve the ordering of arguments
4176 to the PHI nodes in E_OUT->DEST. */
4177 redirect_eh_edge_1 (e_in, e_out->dest, false);
4178 redirect_edge_pred (e_out, e_in->src);
4179 e_out->flags = e_in->flags;
4180 e_out->probability = e_in->probability;
e38def9c 4181 remove_edge (e_in);
3d1eacdb 4182
e38def9c 4183 return true;
4184}
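
/* For illustration (an editor's sketch) of the unsplitting above: an
   empty landing-pad block left over from critical edge splitting,

       <bb 3> --EH--> <bb 4: post landing pad, empty> ------> <bb 5>

   is removed by redirecting the EH edge directly,

       <bb 3> --EH--> <bb 5>

   which is only valid because <bb 5> carries no landing pad of a
   different region and <bb 3> has no pre-existing edge to <bb 5>.  */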
3d1eacdb 4185
e38def9c 4186/* Examine each landing pad block and see if it matches unsplit_eh. */
3d1eacdb 4187
e38def9c 4188static bool
4189unsplit_all_eh (void)
4190{
4191 bool changed = false;
4192 eh_landing_pad lp;
4193 int i;
3d1eacdb 4194
f1f41a6c 4195 for (i = 1; vec_safe_iterate (cfun->eh->lp_array, i, &lp); ++i)
e38def9c 4196 if (lp)
4197 changed |= unsplit_eh (lp);
4198
4199 return changed;
4200}
4201
4202/* A subroutine of cleanup_empty_eh. Redirect all EH edges incoming
4203 to OLD_BB to NEW_BB; return true on success, false on failure.
4204
4205 OLD_BB_OUT is the edge into NEW_BB from OLD_BB, so if we miss any
4206 PHI variables from OLD_BB we can pick them up from OLD_BB_OUT.
4207 Virtual PHIs may be deleted and marked for renaming. */
4208
4209static bool
4210cleanup_empty_eh_merge_phis (basic_block new_bb, basic_block old_bb,
c57e3b9d 4211 edge old_bb_out, bool change_region)
e38def9c 4212{
1a91d914 4213 gphi_iterator ngsi, ogsi;
e38def9c 4214 edge_iterator ei;
4215 edge e;
e38def9c 4216 bitmap ophi_handled;
4217
19bcc424 4218 /* The destination block must not be a regular successor for any
4219 of the preds of the landing pad. Thus, avoid turning
4220 <..>
4221 | \ EH
4222 | <..>
4223 | /
4224 <..>
4225 into
4226 <..>
4227 | | EH
4228 <..>
4229 which CFG verification would choke on. See PR45172 and PR51089. */
4230 FOR_EACH_EDGE (e, ei, old_bb->preds)
4231 if (find_edge (e->src, new_bb))
4232 return false;
4233
e38def9c 4234 FOR_EACH_EDGE (e, ei, old_bb->preds)
4235 redirect_edge_var_map_clear (e);
4236
4237 ophi_handled = BITMAP_ALLOC (NULL);
e38def9c 4238
4239 /* First, iterate through the PHIs on NEW_BB and set up the edge_var_map
4240 for the edges we're going to move. */
4241 for (ngsi = gsi_start_phis (new_bb); !gsi_end_p (ngsi); gsi_next (&ngsi))
4242 {
1a91d914 4243 gphi *ophi, *nphi = ngsi.phi ();
e38def9c 4244 tree nresult, nop;
4245
4246 nresult = gimple_phi_result (nphi);
4247 nop = gimple_phi_arg_def (nphi, old_bb_out->dest_idx);
4248
4249 /* Find the corresponding PHI in OLD_BB so we can forward-propagate
4250 the source ssa_name. */
4251 ophi = NULL;
4252 for (ogsi = gsi_start_phis (old_bb); !gsi_end_p (ogsi); gsi_next (&ogsi))
4253 {
1a91d914 4254 ophi = ogsi.phi ();
e38def9c 4255 if (gimple_phi_result (ophi) == nop)
4256 break;
4257 ophi = NULL;
927a6b6b 4258 }
3d1eacdb 4259
e38def9c 4260 /* If we did find the corresponding PHI, copy those inputs. */
4261 if (ophi)
4c5fcca6 4262 {
6e21b2e0 4263 /* If NOP is used somewhere else beyond phis in new_bb, give up. */
4264 if (!has_single_use (nop))
4265 {
4266 imm_use_iterator imm_iter;
4267 use_operand_p use_p;
4268
4269 FOR_EACH_IMM_USE_FAST (use_p, imm_iter, nop)
4270 {
4271 if (!gimple_debug_bind_p (USE_STMT (use_p))
4272 && (gimple_code (USE_STMT (use_p)) != GIMPLE_PHI
4273 || gimple_bb (USE_STMT (use_p)) != new_bb))
4274 goto fail;
4275 }
4276 }
e38def9c 4277 bitmap_set_bit (ophi_handled, SSA_NAME_VERSION (nop));
4278 FOR_EACH_EDGE (e, ei, old_bb->preds)
3d1eacdb 4279 {
e38def9c 4280 location_t oloc;
4281 tree oop;
4282
4283 if ((e->flags & EDGE_EH) == 0)
4284 continue;
4285 oop = gimple_phi_arg_def (ophi, e->dest_idx);
4286 oloc = gimple_phi_arg_location (ophi, e->dest_idx);
60d535d2 4287 redirect_edge_var_map_add (e, nresult, oop, oloc);
3d1eacdb 4288 }
e38def9c 4289 }
077b5b8a 4290 /* If we didn't find the PHI, if it's a real variable or a VOP, we know
e38def9c 4291 from the fact that OLD_BB is tree_empty_eh_handler_p that the
4292 variable is unchanged from input to the block and we can simply
4293 re-use the input to NEW_BB from the OLD_BB_OUT edge. */
4294 else
4295 {
4296 location_t nloc
4297 = gimple_phi_arg_location (nphi, old_bb_out->dest_idx);
4298 FOR_EACH_EDGE (e, ei, old_bb->preds)
60d535d2 4299 redirect_edge_var_map_add (e, nresult, nop, nloc);
e38def9c 4300 }
4301 }
4302
4303 /* Second, verify that all PHIs from OLD_BB have been handled. If not,
4304 we don't know what values from the other edges into NEW_BB to use. */
4305 for (ogsi = gsi_start_phis (old_bb); !gsi_end_p (ogsi); gsi_next (&ogsi))
4306 {
1a91d914 4307 gphi *ophi = ogsi.phi ();
e38def9c 4308 tree oresult = gimple_phi_result (ophi);
4309 if (!bitmap_bit_p (ophi_handled, SSA_NAME_VERSION (oresult)))
4310 goto fail;
4311 }
4312
e38def9c 4313 /* Finally, move the edges and update the PHIs. */
4314 for (ei = ei_start (old_bb->preds); (e = ei_safe_edge (ei)); )
4315 if (e->flags & EDGE_EH)
4316 {
3a37f7bd 4317 /* ??? CFG manipulation routines do not try to update loop
4318 form on edge redirection. Do so manually here for now. */
 4319 /* Redirecting a loop entry or latch edge will either create
 4320 a multiple entry loop or rotate the loop. If the loops merge
4321 we may have created a loop with multiple latches.
4322 All of this isn't easily fixed thus cancel the affected loop
4323 and mark the other loop as possibly having multiple latches. */
b3083327 4324 if (e->dest == e->dest->loop_father->header)
3a37f7bd 4325 {
d25159cc 4326 mark_loop_for_removal (e->dest->loop_father);
3a37f7bd 4327 new_bb->loop_father->latch = NULL;
d25159cc 4328 loops_state_set (LOOPS_MAY_HAVE_MULTIPLE_LATCHES);
3a37f7bd 4329 }
c57e3b9d 4330 redirect_eh_edge_1 (e, new_bb, change_region);
e38def9c 4331 redirect_edge_succ (e, new_bb);
4332 flush_pending_stmts (e);
4333 }
4334 else
4335 ei_next (&ei);
3bd82487 4336
e38def9c 4337 BITMAP_FREE (ophi_handled);
e38def9c 4338 return true;
4339
4340 fail:
4341 FOR_EACH_EDGE (e, ei, old_bb->preds)
4342 redirect_edge_var_map_clear (e);
4343 BITMAP_FREE (ophi_handled);
e38def9c 4344 return false;
4345}
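
/* For illustration (an editor's sketch): if OLD_BB contains

       v_1 = PHI <v_2 (eh1), v_3 (eh2)>

   and NEW_BB uses v_1 on the OLD_BB_OUT edge,

       w_1 = PHI <v_1 (old_bb), w_2 (other)>

   then redirecting eh1 and eh2 to NEW_BB forward-propagates the
   arguments, yielding

       w_1 = PHI <v_2 (eh1), v_3 (eh2), w_2 (other)>

   whereas a PHI in OLD_BB whose result is not consumed this way makes
   the merge fail and leaves the CFG untouched.  */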
4346
4347/* A subroutine of cleanup_empty_eh. Move a landing pad LP from its
4348 old region to NEW_REGION at BB. */
4349
4350static void
4351cleanup_empty_eh_move_lp (basic_block bb, edge e_out,
4352 eh_landing_pad lp, eh_region new_region)
4353{
4354 gimple_stmt_iterator gsi;
4355 eh_landing_pad *pp;
4356
4357 for (pp = &lp->region->landing_pads; *pp != lp; pp = &(*pp)->next_lp)
4358 continue;
4359 *pp = lp->next_lp;
4360
4361 lp->region = new_region;
4362 lp->next_lp = new_region->landing_pads;
4363 new_region->landing_pads = lp;
4364
4365 /* Delete the RESX that was matched within the empty handler block. */
4366 gsi = gsi_last_bb (bb);
bc8a8451 4367 unlink_stmt_vdef (gsi_stmt (gsi));
e38def9c 4368 gsi_remove (&gsi, true);
4369
4370 /* Clean up E_OUT for the fallthru. */
4371 e_out->flags = (e_out->flags & ~EDGE_EH) | EDGE_FALLTHRU;
720cfc43 4372 e_out->probability = profile_probability::always ();
e38def9c 4373}
4374
4375/* A subroutine of cleanup_empty_eh. Handle more complex cases of
48e1416a 4376 unsplitting than unsplit_eh was prepared to handle, e.g. when
e38def9c 4377 multiple incoming edges and phis are involved. */
4378
4379static bool
c57e3b9d 4380cleanup_empty_eh_unsplit (basic_block bb, edge e_out, eh_landing_pad lp)
e38def9c 4381{
4382 gimple_stmt_iterator gsi;
e38def9c 4383 tree lab;
4384
4385 /* We really ought not have totally lost everything following
4386 a landing pad label. Given that BB is empty, there had better
4387 be a successor. */
4388 gcc_assert (e_out != NULL);
4389
c57e3b9d 4390 /* The destination block must not already have a landing pad
4391 for a different region. */
e38def9c 4392 lab = NULL;
4393 for (gsi = gsi_start_bb (e_out->dest); !gsi_end_p (gsi); gsi_next (&gsi))
4394 {
1a91d914 4395 glabel *stmt = dyn_cast <glabel *> (gsi_stmt (gsi));
c57e3b9d 4396 int lp_nr;
4397
1a91d914 4398 if (!stmt)
e38def9c 4399 break;
4400 lab = gimple_label_label (stmt);
c57e3b9d 4401 lp_nr = EH_LANDING_PAD_NR (lab);
4402 if (lp_nr && get_eh_region_from_lp_number (lp_nr) != lp->region)
4403 return false;
e38def9c 4404 }
e38def9c 4405
4406 /* Attempt to move the PHIs into the successor block. */
c57e3b9d 4407 if (cleanup_empty_eh_merge_phis (e_out->dest, bb, e_out, false))
e38def9c 4408 {
4409 if (dump_file && (dump_flags & TDF_DETAILS))
4410 fprintf (dump_file,
c57e3b9d 4411 "Unsplit EH landing pad %d to block %i "
4412 "(via cleanup_empty_eh).\n",
4413 lp->index, e_out->dest->index);
e38def9c 4414 return true;
4415 }
4416
4417 return false;
4418}
4419
a9309f85 4420/* Return true if edge E_FIRST is part of an empty infinite loop
4421 or leads to such a loop through a series of single successor
4422 empty bbs. */
4423
4424static bool
4425infinite_empty_loop_p (edge e_first)
4426{
4427 bool inf_loop = false;
4428 edge e;
4429
4430 if (e_first->dest == e_first->src)
4431 return true;
4432
4433 e_first->src->aux = (void *) 1;
4434 for (e = e_first; single_succ_p (e->dest); e = single_succ_edge (e->dest))
4435 {
4436 gimple_stmt_iterator gsi;
4437 if (e->dest->aux)
4438 {
4439 inf_loop = true;
4440 break;
4441 }
4442 e->dest->aux = (void *) 1;
4443 gsi = gsi_after_labels (e->dest);
4444 if (!gsi_end_p (gsi) && is_gimple_debug (gsi_stmt (gsi)))
4445 gsi_next_nondebug (&gsi);
4446 if (!gsi_end_p (gsi))
4447 break;
4448 }
4449 e_first->src->aux = NULL;
4450 for (e = e_first; e->dest->aux; e = single_succ_edge (e->dest))
4451 e->dest->aux = NULL;
4452
4453 return inf_loop;
4454}
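
/* For illustration (an editor's sketch): the walk above detects cycles
   such as

       <bb 10> -> <bb 11> -> <bb 12> -> <bb 10>

   in which every block is empty apart from labels and debug stmts and
   has a single successor; the ->aux fields serve as the visited set
   and are cleared again before returning.  */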
4455
e38def9c 4456/* Examine the block associated with LP to determine if it's an empty
4457 handler for its EH region. If so, attempt to redirect EH edges to
 4458 an outer region. Return true if the CFG was updated in any way. This
4459 is similar to jump forwarding, just across EH edges. */
4460
4461static bool
4462cleanup_empty_eh (eh_landing_pad lp)
4463{
0fb4f2ce 4464 basic_block bb = label_to_block (cfun, lp->post_landing_pad);
e38def9c 4465 gimple_stmt_iterator gsi;
42acab1c 4466 gimple *resx;
e38def9c 4467 eh_region new_region;
4468 edge_iterator ei;
4469 edge e, e_out;
4470 bool has_non_eh_pred;
b74338cf 4471 bool ret = false;
e38def9c 4472 int new_lp_nr;
4473
4474 /* There can be zero or one edges out of BB. This is the quickest test. */
4475 switch (EDGE_COUNT (bb->succs))
4476 {
4477 case 0:
4478 e_out = NULL;
4479 break;
4480 case 1:
896a0c42 4481 e_out = single_succ_edge (bb);
e38def9c 4482 break;
4483 default:
4484 return false;
4485 }
b74338cf 4486
3aad3afc 4487 gsi = gsi_last_nondebug_bb (bb);
4488 resx = gsi_stmt (gsi);
b74338cf 4489 if (resx && is_gimple_resx (resx))
4490 {
aac19106 4491 if (stmt_can_throw_external (cfun, resx))
b74338cf 4492 optimize_clobbers (bb);
4493 else if (sink_clobbers (bb))
4494 ret = true;
4495 }
4496
e38def9c 4497 gsi = gsi_after_labels (bb);
4498
4499 /* Make sure to skip debug statements. */
4500 if (!gsi_end_p (gsi) && is_gimple_debug (gsi_stmt (gsi)))
4501 gsi_next_nondebug (&gsi);
4502
4503 /* If the block is totally empty, look for more unsplitting cases. */
4504 if (gsi_end_p (gsi))
e54fce5c 4505 {
ae3a21c9 4506 /* For the degenerate case of an infinite loop bail out.
4507 If bb has no successors and is totally empty, which can happen e.g.
 4508 because of an incorrect noreturn attribute, bail out too. */
4509 if (e_out == NULL
4510 || infinite_empty_loop_p (e_out))
b74338cf 4511 return ret;
e54fce5c 4512
b74338cf 4513 return ret | cleanup_empty_eh_unsplit (bb, e_out, lp);
e54fce5c 4514 }
e38def9c 4515
367113ea 4516 /* The block should consist only of a single RESX statement, modulo a
4517 preceding call to __builtin_stack_restore if there is no outgoing
4518 edge, since the call can be eliminated in this case. */
e38def9c 4519 resx = gsi_stmt (gsi);
367113ea 4520 if (!e_out && gimple_call_builtin_p (resx, BUILT_IN_STACK_RESTORE))
4521 {
3aad3afc 4522 gsi_next_nondebug (&gsi);
367113ea 4523 resx = gsi_stmt (gsi);
4524 }
e38def9c 4525 if (!is_gimple_resx (resx))
b74338cf 4526 return ret;
3aad3afc 4527 gcc_assert (gsi_one_nondebug_before_end_p (gsi));
e38def9c 4528
4529 /* Determine if there are non-EH edges, or resx edges into the handler. */
4530 has_non_eh_pred = false;
4531 FOR_EACH_EDGE (e, ei, bb->preds)
4532 if (!(e->flags & EDGE_EH))
4533 has_non_eh_pred = true;
4534
4535 /* Find the handler that's outer of the empty handler by looking at
4536 where the RESX instruction was vectored. */
4537 new_lp_nr = lookup_stmt_eh_lp (resx);
4538 new_region = get_eh_region_from_lp_number (new_lp_nr);
4539
4540 /* If there's no destination region within the current function,
4541 redirection is trivial via removing the throwing statements from
4542 the EH region, removing the EH edges, and allowing the block
4543 to go unreachable. */
4544 if (new_region == NULL)
4545 {
4546 gcc_assert (e_out == NULL);
4547 for (ei = ei_start (bb->preds); (e = ei_safe_edge (ei)); )
4548 if (e->flags & EDGE_EH)
4549 {
42acab1c 4550 gimple *stmt = last_stmt (e->src);
e38def9c 4551 remove_stmt_from_eh_lp (stmt);
4552 remove_edge (e);
4553 }
4554 else
4555 ei_next (&ei);
4556 goto succeed;
4557 }
4558
4559 /* If the destination region is a MUST_NOT_THROW, allow the runtime
4560 to handle the abort and allow the blocks to go unreachable. */
4561 if (new_region->type == ERT_MUST_NOT_THROW)
4562 {
4563 for (ei = ei_start (bb->preds); (e = ei_safe_edge (ei)); )
4564 if (e->flags & EDGE_EH)
4565 {
42acab1c 4566 gimple *stmt = last_stmt (e->src);
e38def9c 4567 remove_stmt_from_eh_lp (stmt);
4568 add_stmt_to_eh_lp (stmt, new_lp_nr);
4569 remove_edge (e);
4570 }
4571 else
4572 ei_next (&ei);
4573 goto succeed;
4574 }
4575
4576 /* Try to redirect the EH edges and merge the PHIs into the destination
4577 landing pad block. If the merge succeeds, we'll already have redirected
4578 all the EH edges. The handler itself will go unreachable if there were
4579 no normal edges. */
c57e3b9d 4580 if (cleanup_empty_eh_merge_phis (e_out->dest, bb, e_out, true))
e38def9c 4581 goto succeed;
4582
4583 /* Finally, if all input edges are EH edges, then we can (potentially)
4584 reduce the number of transfers from the runtime by moving the landing
4585 pad from the original region to the new region. This is a win when
4586 we remove the last CLEANUP region along a particular exception
4587 propagation path. Since nothing changes except for the region with
4588 which the landing pad is associated, the PHI nodes do not need to be
4589 adjusted at all. */
4590 if (!has_non_eh_pred)
4591 {
4592 cleanup_empty_eh_move_lp (bb, e_out, lp, new_region);
4593 if (dump_file && (dump_flags & TDF_DETAILS))
4594 fprintf (dump_file, "Empty EH handler %i moved to EH region %i.\n",
4595 lp->index, new_region->index);
4596
4597 /* ??? The CFG didn't change, but we may have rendered the
4598 old EH region unreachable. Trigger a cleanup there. */
4c5fcca6 4599 return true;
4600 }
e38def9c 4601
b74338cf 4602 return ret;
e38def9c 4603
4604 succeed:
4605 if (dump_file && (dump_flags & TDF_DETAILS))
4606 fprintf (dump_file, "Empty EH handler %i removed.\n", lp->index);
4607 remove_eh_landing_pad (lp);
4608 return true;
4c5fcca6 4609}
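
/* For illustration (an editor's sketch): an empty cleanup handler

       <bb 9>, landing pad of region 2:
         resx 1;

   merely forwards the exception to outer region 1, so the code above
   redirects the incoming EH edges to region 1's landing pad, or, when
   region 1 is MUST_NOT_THROW or the exception leaves the function,
   rewires the throwing statements directly; <bb 9> then becomes
   unreachable and is deleted by a later CFG cleanup.  */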
4610
e38def9c 4611/* Do a post-order traversal of the EH region tree. Examine each
4612 post_landing_pad block and see if we can eliminate it as empty. */
4613
4614static bool
4615cleanup_all_empty_eh (void)
4616{
4617 bool changed = false;
4618 eh_landing_pad lp;
4619 int i;
4620
f1f41a6c 4621 for (i = 1; vec_safe_iterate (cfun->eh->lp_array, i, &lp); ++i)
e38def9c 4622 if (lp)
4623 changed |= cleanup_empty_eh (lp);
4624
4625 return changed;
4626}
4c5fcca6 4627
 4628/* Perform cleanups and lowering of exception handling:
 4629 1) cleanup regions with handlers doing nothing are optimized out
 4630 2) MUST_NOT_THROW regions that became dead because of 1) are optimized out
 4631 3) info about regions containing instructions, and regions
 4632 reachable via local EH edges, is collected
c31fb425 4633 4) the EH tree is pruned for regions no longer necessary.
e38def9c 4634
4635 TODO: Push MUST_NOT_THROW regions to the root of the EH tree.
4636 Unify those that have the same failure decl and locus.
4637*/
4c5fcca6 4638
4639static unsigned int
15100018 4640execute_cleanup_eh_1 (void)
4c5fcca6 4641{
e38def9c 4642 /* Do this first: unsplit_all_eh and cleanup_all_empty_eh can die
4643 looking up unreachable landing pads. */
4644 remove_unreachable_handlers ();
4c5fcca6 4645
e38def9c 4646 /* Watch out for the region tree vanishing due to all unreachable. */
6b3688f5 4647 if (cfun->eh->region_tree)
4c5fcca6 4648 {
e38def9c 4649 bool changed = false;
4c5fcca6 4650
6b3688f5 4651 if (optimize)
4652 changed |= unsplit_all_eh ();
e38def9c 4653 changed |= cleanup_all_empty_eh ();
4654
4655 if (changed)
48d5ef93 4656 {
4657 free_dominance_info (CDI_DOMINATORS);
4658 free_dominance_info (CDI_POST_DOMINATORS);
4c5fcca6 4659
e38def9c 4660 /* We delayed all basic block deletion, as we may have performed
4661 cleanups on EH edges while non-EH edges were still present. */
4662 delete_unreachable_blocks ();
4c5fcca6 4663
e38def9c 4664 /* We manipulated the landing pads. Remove any region that no
4665 longer has a landing pad. */
4666 remove_unreachable_handlers_no_lp ();
4667
4668 return TODO_cleanup_cfg | TODO_update_ssa_only_virtuals;
4669 }
4c5fcca6 4670 }
4671
e38def9c 4672 return 0;
4673}
4674
7620bc82 4675namespace {
4676
4677const pass_data pass_data_cleanup_eh =
cbe8bda8 4678{
4679 GIMPLE_PASS, /* type */
4680 "ehcleanup", /* name */
4681 OPTGROUP_NONE, /* optinfo_flags */
cbe8bda8 4682 TV_TREE_EH, /* tv_id */
4683 PROP_gimple_lcf, /* properties_required */
4684 0, /* properties_provided */
4685 0, /* properties_destroyed */
4686 0, /* todo_flags_start */
8b88439e 4687 0, /* todo_flags_finish */
4c5fcca6 4688};
cbe8bda8 4689
7620bc82 4690class pass_cleanup_eh : public gimple_opt_pass
cbe8bda8 4691{
4692public:
9af5ce0c 4693 pass_cleanup_eh (gcc::context *ctxt)
4694 : gimple_opt_pass (pass_data_cleanup_eh, ctxt)
cbe8bda8 4695 {}
4696
4697 /* opt_pass methods: */
ae84f584 4698 opt_pass * clone () { return new pass_cleanup_eh (m_ctxt); }
31315c24 4699 virtual bool gate (function *fun)
4700 {
4701 return fun->eh != NULL && fun->eh->region_tree != NULL;
4702 }
4703
65b0537f 4704 virtual unsigned int execute (function *);
cbe8bda8 4705
4706}; // class pass_cleanup_eh
4707
65b0537f 4708unsigned int
4709pass_cleanup_eh::execute (function *fun)
4710{
4711 int ret = execute_cleanup_eh_1 ();
4712
4713 /* If the function no longer needs an EH personality routine
4714 clear it. This exposes cross-language inlining opportunities
4715 and avoids references to a never defined personality routine. */
4716 if (DECL_FUNCTION_PERSONALITY (current_function_decl)
4717 && function_needs_eh_personality (fun) != eh_personality_lang)
4718 DECL_FUNCTION_PERSONALITY (current_function_decl) = NULL_TREE;
4719
4720 return ret;
4721}
4722
7620bc82 4723} // anon namespace
4724
cbe8bda8 4725gimple_opt_pass *
4726make_pass_cleanup_eh (gcc::context *ctxt)
4727{
4728 return new pass_cleanup_eh (ctxt);
4729}
e38def9c 4730\f
 4731 /* Verify that BB, which contains STMT as the last statement, has precisely the
4732 edge that make_eh_edges would create. */
4733
4b987fac 4734DEBUG_FUNCTION bool
42acab1c 4735verify_eh_edges (gimple *stmt)
e38def9c 4736{
4737 basic_block bb = gimple_bb (stmt);
4738 eh_landing_pad lp = NULL;
4739 int lp_nr;
4740 edge_iterator ei;
4741 edge e, eh_edge;
4742
4743 lp_nr = lookup_stmt_eh_lp (stmt);
4744 if (lp_nr > 0)
4745 lp = get_eh_landing_pad_from_number (lp_nr);
4746
4747 eh_edge = NULL;
4748 FOR_EACH_EDGE (e, ei, bb->succs)
4749 {
4750 if (e->flags & EDGE_EH)
4751 {
4752 if (eh_edge)
4753 {
4754 error ("BB %i has multiple EH edges", bb->index);
4755 return true;
4756 }
4757 else
4758 eh_edge = e;
4759 }
4760 }
4761
4762 if (lp == NULL)
4763 {
4764 if (eh_edge)
4765 {
f4d3c071 4766 error ("BB %i cannot throw but has an EH edge", bb->index);
e38def9c 4767 return true;
4768 }
4769 return false;
4770 }
4771
aac19106 4772 if (!stmt_could_throw_p (cfun, stmt))
e38def9c 4773 {
4774 error ("BB %i last statement has incorrectly set lp", bb->index);
4775 return true;
4776 }
4777
4778 if (eh_edge == NULL)
4779 {
4780 error ("BB %i is missing an EH edge", bb->index);
4781 return true;
4782 }
4783
0fb4f2ce 4784 if (eh_edge->dest != label_to_block (cfun, lp->post_landing_pad))
e38def9c 4785 {
4786 error ("Incorrect EH edge %i->%i", bb->index, eh_edge->dest->index);
4787 return true;
4788 }
4789
4790 return false;
4791}
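
/* For illustration (an editor's sketch): if the last stmt of <bb 2>
   has landing pad 1 and lp 1's post_landing_pad label starts <bb 8>,
   the checker above requires exactly one EH edge out of <bb 2>,
   leading to <bb 8>; an extra EH edge, a missing one, or an edge to
   any other block is diagnosed as an error.  */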
4792
4793/* Similarly, but handle GIMPLE_EH_DISPATCH specifically. */
4794
4b987fac 4795DEBUG_FUNCTION bool
1a91d914 4796verify_eh_dispatch_edge (geh_dispatch *stmt)
e38def9c 4797{
4798 eh_region r;
4799 eh_catch c;
4800 basic_block src, dst;
4801 bool want_fallthru = true;
4802 edge_iterator ei;
4803 edge e, fall_edge;
4804
4805 r = get_eh_region_from_number (gimple_eh_dispatch_region (stmt));
4806 src = gimple_bb (stmt);
4807
4808 FOR_EACH_EDGE (e, ei, src->succs)
4809 gcc_assert (e->aux == NULL);
4810
4811 switch (r->type)
4812 {
4813 case ERT_TRY:
4814 for (c = r->u.eh_try.first_catch; c ; c = c->next_catch)
4815 {
0fb4f2ce 4816 dst = label_to_block (cfun, c->label);
e38def9c 4817 e = find_edge (src, dst);
4818 if (e == NULL)
4819 {
4820 error ("BB %i is missing an edge", src->index);
4821 return true;
4822 }
4823 e->aux = (void *)e;
4824
4825 /* A catch-all handler doesn't have a fallthru. */
4826 if (c->type_list == NULL)
4827 {
4828 want_fallthru = false;
4829 break;
4830 }
4831 }
4832 break;
4833
4834 case ERT_ALLOWED_EXCEPTIONS:
0fb4f2ce 4835 dst = label_to_block (cfun, r->u.allowed.label);
e38def9c 4836 e = find_edge (src, dst);
4837 if (e == NULL)
4838 {
4839 error ("BB %i is missing an edge", src->index);
4840 return true;
4841 }
4842 e->aux = (void *)e;
4843 break;
4844
4845 default:
4846 gcc_unreachable ();
4847 }
4848
4849 fall_edge = NULL;
4850 FOR_EACH_EDGE (e, ei, src->succs)
4851 {
4852 if (e->flags & EDGE_FALLTHRU)
4853 {
4854 if (fall_edge != NULL)
4855 {
4856 error ("BB %i too many fallthru edges", src->index);
4857 return true;
4858 }
4859 fall_edge = e;
4860 }
4861 else if (e->aux)
4862 e->aux = NULL;
4863 else
4864 {
4865 error ("BB %i has incorrect edge", src->index);
4866 return true;
4867 }
4868 }
4869 if ((fall_edge != NULL) ^ want_fallthru)
4870 {
4871 error ("BB %i has incorrect fallthru edge", src->index);
4872 return true;
4873 }
4874
4875 return false;
4876}