gcc/tree-eh.c
4ee9c684 1/* Exception handling semantics and decomposition for trees.
d353bf18 2 Copyright (C) 2003-2015 Free Software Foundation, Inc.
4ee9c684 3
4This file is part of GCC.
5
6GCC is free software; you can redistribute it and/or modify
7it under the terms of the GNU General Public License as published by
8c4c00c1 8the Free Software Foundation; either version 3, or (at your option)
4ee9c684 9any later version.
10
11GCC is distributed in the hope that it will be useful,
12but WITHOUT ANY WARRANTY; without even the implied warranty of
13MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
14GNU General Public License for more details.
15
16You should have received a copy of the GNU General Public License
8c4c00c1 17along with GCC; see the file COPYING3. If not see
18<http://www.gnu.org/licenses/>. */
4ee9c684 19
20#include "config.h"
21#include "system.h"
22#include "coretypes.h"
23#include "tm.h"
b20a8bb4 24#include "alias.h"
25#include "symtab.h"
4ee9c684 26#include "tree.h"
b20a8bb4 27#include "fold-const.h"
a3020f2f 28#include "hard-reg-set.h"
4ee9c684 29#include "function.h"
d53441c8 30#include "rtl.h"
31#include "flags.h"
d53441c8 32#include "insn-config.h"
33#include "expmed.h"
34#include "dojump.h"
35#include "explow.h"
36#include "calls.h"
37#include "emit-rtl.h"
38#include "varasm.h"
39#include "stmt.h"
40#include "expr.h"
4ee9c684 41#include "except.h"
94ea8568 42#include "predict.h"
43#include "dominance.h"
44#include "cfg.h"
45#include "cfganal.h"
46#include "cfgcleanup.h"
bc61cadb 47#include "basic-block.h"
48#include "tree-ssa-alias.h"
49#include "internal-fn.h"
50#include "tree-eh.h"
51#include "gimple-expr.h"
073c1fd5 52#include "gimple.h"
dcf1a1ec 53#include "gimple-iterator.h"
073c1fd5 54#include "gimple-ssa.h"
55#include "cgraph.h"
56#include "tree-cfg.h"
57#include "tree-phinodes.h"
58#include "ssa-iterators.h"
9ed99284 59#include "stringpool.h"
073c1fd5 60#include "tree-ssanames.h"
61#include "tree-into-ssa.h"
69ee5dbb 62#include "tree-ssa.h"
4ee9c684 63#include "tree-inline.h"
4ee9c684 64#include "tree-pass.h"
4ee9c684 65#include "langhooks.h"
0b205f4c 66#include "diagnostic-core.h"
e38def9c 67#include "target.h"
79f958cb 68#include "cfgloop.h"
424a4a92 69#include "gimple-low.h"
75a70cf9 70
71/* In some instances a tree and a gimple need to be stored in a same table,
72 i.e. in hash tables. This is a structure to do this. */
73typedef union {tree *tp; tree t; gimple g;} treemple;
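/* For example, as used later in this file: the T member holds a LABEL_DECL
   when a label is recorded, the G member holds a GIMPLE_TRY or a
   GIMPLE_GOTO/GIMPLE_RETURN statement, and the TP member holds the address
   of a label operand inside a GIMPLE_COND.  */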
4ee9c684 74
4ee9c684 75/* Misc functions used in this file. */
76
e38def9c 77/* Remember and lookup EH landing pad data for arbitrary statements.
4ee9c684 78 Really this means any statement that could_throw_p. We could
79 stuff this information into the stmt_ann data structure, but:
80
81 (1) We absolutely rely on this information being kept until
82 we get to rtl. Once we're done with lowering here, if we lose
83 the information there's no way to recover it!
84
ac13e8d9 85 (2) There are many more statements that *cannot* throw as
4ee9c684 86 compared to those that can. We should be saving some amount
87 of space by only allocating memory for those that can throw. */
88
e38def9c 89/* Add statement T in function IFUN to landing pad NUM. */
75a70cf9 90
4e57e76d 91static void
e38def9c 92add_stmt_to_eh_lp_fn (struct function *ifun, gimple t, int num)
4ee9c684 93{
e38def9c 94 gcc_assert (num != 0);
4ee9c684 95
0de999f1 96 if (!get_eh_throw_stmt_table (ifun))
8f359205 97 set_eh_throw_stmt_table (ifun, hash_map<gimple, int>::create_ggc (31));
0de999f1 98
8f359205 99 gcc_assert (!get_eh_throw_stmt_table (ifun)->put (t, num));
4ee9c684 100}
35c15734 101
e38def9c 102/* Add statement T in the current function (cfun) to EH landing pad NUM. */
75a70cf9 103
b3f1469f 104void
e38def9c 105add_stmt_to_eh_lp (gimple t, int num)
b3f1469f 106{
e38def9c 107 add_stmt_to_eh_lp_fn (cfun, t, num);
108}
109
110/* Add statement T to the single EH landing pad in REGION. */
111
112static void
113record_stmt_eh_region (eh_region region, gimple t)
114{
115 if (region == NULL)
116 return;
117 if (region->type == ERT_MUST_NOT_THROW)
118 add_stmt_to_eh_lp_fn (cfun, t, -region->index);
119 else
120 {
121 eh_landing_pad lp = region->landing_pads;
122 if (lp == NULL)
123 lp = gen_eh_landing_pad (region);
124 else
125 gcc_assert (lp->next_lp == NULL);
126 add_stmt_to_eh_lp_fn (cfun, t, lp->index);
127 }
b3f1469f 128}
129
75a70cf9 130
e38def9c 131/* Remove statement T in function IFUN from its EH landing pad. */
75a70cf9 132
35c15734 133bool
e38def9c 134remove_stmt_from_eh_lp_fn (struct function *ifun, gimple t)
35c15734 135{
b3f1469f 136 if (!get_eh_throw_stmt_table (ifun))
35c15734 137 return false;
138
8f359205 139 if (!get_eh_throw_stmt_table (ifun)->get (t))
35c15734 140 return false;
8f359205 141
142 get_eh_throw_stmt_table (ifun)->remove (t);
143 return true;
35c15734 144}
145
75a70cf9 146
e38def9c 147/* Remove statement T in the current function (cfun) from its
148 EH landing pad. */
75a70cf9 149
b3f1469f 150bool
e38def9c 151remove_stmt_from_eh_lp (gimple t)
b3f1469f 152{
e38def9c 153 return remove_stmt_from_eh_lp_fn (cfun, t);
b3f1469f 154}
155
75a70cf9 156/* Determine if statement T is inside an EH region in function IFUN.
e38def9c 157 Positive numbers indicate a landing pad index; negative numbers
158 indicate a MUST_NOT_THROW region index; zero indicates that the
159 statement is not recorded in the region table. */
75a70cf9 160
4ee9c684 161int
e38def9c 162lookup_stmt_eh_lp_fn (struct function *ifun, gimple t)
4ee9c684 163{
e38def9c 164 if (ifun->eh->throw_stmt_table == NULL)
165 return 0;
4ee9c684 166
8f359205 167 int *lp_nr = ifun->eh->throw_stmt_table->get (t);
168 return lp_nr ? *lp_nr : 0;
4ee9c684 169}
170
e38def9c 171/* Likewise, but always use the current function. */
75a70cf9 172
b3f1469f 173int
e38def9c 174lookup_stmt_eh_lp (gimple t)
b3f1469f 175{
176 /* We can get called from initialized data when -fnon-call-exceptions
177 is on; prevent crash. */
178 if (!cfun)
e38def9c 179 return 0;
180 return lookup_stmt_eh_lp_fn (cfun, t);
b3f1469f 181}
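/* An illustrative sketch (not from the original sources) of how a caller
   interprets the convention described above:

     int lp_nr = lookup_stmt_eh_lp (stmt);
     if (lp_nr > 0)
       ... STMT may throw to the landing pad with index LP_NR ...
     else if (lp_nr < 0)
       ... STMT lies in the MUST_NOT_THROW region with index -LP_NR ...
     else
       ... STMT is not recorded in the region table at all ...  */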
4ee9c684 182
75a70cf9 183/* First pass of EH node decomposition. Build up a tree of GIMPLE_TRY_FINALLY
4ee9c684 184 nodes and LABEL_DECL nodes. We will use this during the second phase to
185 determine if a goto leaves the body of a TRY_FINALLY_EXPR node. */
186
187struct finally_tree_node
188{
75a70cf9 189 /* When storing a GIMPLE_TRY, we have to record a gimple. However
190 when deciding whether a GOTO to a certain LABEL_DECL (which is a
 191 tree) leaves the TRY block, it's necessary to record a tree in
192 this field. Thus a treemple is used. */
e38def9c 193 treemple child;
1a91d914 194 gtry *parent;
4ee9c684 195};
196
d9dd21a8 197/* Hashtable helpers. */
198
298e7f9a 199struct finally_tree_hasher : free_ptr_hash <finally_tree_node>
d9dd21a8 200{
9969c043 201 static inline hashval_t hash (const finally_tree_node *);
202 static inline bool equal (const finally_tree_node *,
203 const finally_tree_node *);
d9dd21a8 204};
205
206inline hashval_t
9969c043 207finally_tree_hasher::hash (const finally_tree_node *v)
d9dd21a8 208{
209 return (intptr_t)v->child.t >> 4;
210}
211
212inline bool
9969c043 213finally_tree_hasher::equal (const finally_tree_node *v,
214 const finally_tree_node *c)
d9dd21a8 215{
216 return v->child.t == c->child.t;
217}
218
4ee9c684 219/* Note that this table is *not* marked GTY. It is short-lived. */
c1f445d2 220static hash_table<finally_tree_hasher> *finally_tree;
4ee9c684 221
222static void
1a91d914 223record_in_finally_tree (treemple child, gtry *parent)
4ee9c684 224{
225 struct finally_tree_node *n;
d9dd21a8 226 finally_tree_node **slot;
4ee9c684 227
680a19b9 228 n = XNEW (struct finally_tree_node);
4ee9c684 229 n->child = child;
230 n->parent = parent;
231
c1f445d2 232 slot = finally_tree->find_slot (n, INSERT);
8c0963c4 233 gcc_assert (!*slot);
4ee9c684 234 *slot = n;
235}
236
237static void
1a91d914 238collect_finally_tree (gimple stmt, gtry *region);
75a70cf9 239
e38def9c 240/* Go through the gimple sequence. Works with collect_finally_tree to
75a70cf9 241 record all GIMPLE_LABEL and GIMPLE_TRY statements. */
242
243static void
1a91d914 244collect_finally_tree_1 (gimple_seq seq, gtry *region)
4ee9c684 245{
75a70cf9 246 gimple_stmt_iterator gsi;
4ee9c684 247
75a70cf9 248 for (gsi = gsi_start (seq); !gsi_end_p (gsi); gsi_next (&gsi))
249 collect_finally_tree (gsi_stmt (gsi), region);
250}
4ee9c684 251
75a70cf9 252static void
1a91d914 253collect_finally_tree (gimple stmt, gtry *region)
75a70cf9 254{
255 treemple temp;
256
257 switch (gimple_code (stmt))
258 {
259 case GIMPLE_LABEL:
1a91d914 260 temp.t = gimple_label_label (as_a <glabel *> (stmt));
75a70cf9 261 record_in_finally_tree (temp, region);
262 break;
4ee9c684 263
75a70cf9 264 case GIMPLE_TRY:
265 if (gimple_try_kind (stmt) == GIMPLE_TRY_FINALLY)
266 {
267 temp.g = stmt;
268 record_in_finally_tree (temp, region);
1a91d914 269 collect_finally_tree_1 (gimple_try_eval (stmt),
270 as_a <gtry *> (stmt));
75a70cf9 271 collect_finally_tree_1 (gimple_try_cleanup (stmt), region);
272 }
273 else if (gimple_try_kind (stmt) == GIMPLE_TRY_CATCH)
274 {
275 collect_finally_tree_1 (gimple_try_eval (stmt), region);
276 collect_finally_tree_1 (gimple_try_cleanup (stmt), region);
277 }
278 break;
4ee9c684 279
75a70cf9 280 case GIMPLE_CATCH:
1a91d914 281 collect_finally_tree_1 (gimple_catch_handler (
282 as_a <gcatch *> (stmt)),
283 region);
75a70cf9 284 break;
4ee9c684 285
75a70cf9 286 case GIMPLE_EH_FILTER:
287 collect_finally_tree_1 (gimple_eh_filter_failure (stmt), region);
4ee9c684 288 break;
289
4c0315d0 290 case GIMPLE_EH_ELSE:
1a91d914 291 {
292 geh_else *eh_else_stmt = as_a <geh_else *> (stmt);
293 collect_finally_tree_1 (gimple_eh_else_n_body (eh_else_stmt), region);
294 collect_finally_tree_1 (gimple_eh_else_e_body (eh_else_stmt), region);
295 }
4c0315d0 296 break;
297
4ee9c684 298 default:
299 /* A type, a decl, or some kind of statement that we're not
300 interested in. Don't walk them. */
301 break;
302 }
303}
304
75a70cf9 305
4ee9c684 306/* Use the finally tree to determine if a jump from START to TARGET
307 would leave the try_finally node that START lives in. */
308
309static bool
75a70cf9 310outside_finally_tree (treemple start, gimple target)
4ee9c684 311{
312 struct finally_tree_node n, *p;
313
314 do
315 {
316 n.child = start;
c1f445d2 317 p = finally_tree->find (&n);
4ee9c684 318 if (!p)
319 return true;
75a70cf9 320 start.g = p->parent;
4ee9c684 321 }
75a70cf9 322 while (start.g != target);
4ee9c684 323
324 return false;
325}
75a70cf9 326
327/* Second pass of EH node decomposition. Actually transform the GIMPLE_TRY
328 nodes into a set of gotos, magic labels, and eh regions.
4ee9c684 329 The eh region creation is straightforward, but frobbing all the gotos
330 and such into shape isn't. */
331
48e1416a 332/* The sequence into which we record all EH stuff. This will be
e38def9c 333 placed at the end of the function when we're all done. */
334static gimple_seq eh_seq;
335
336/* Record whether an EH region contains something that can throw,
337 indexed by EH region number. */
55d6d4e4 338static bitmap eh_region_may_contain_throw_map;
e38def9c 339
0b09525f 340/* The GOTO_QUEUE is an array of GIMPLE_GOTO and GIMPLE_RETURN
341 statements that are seen to escape this GIMPLE_TRY_FINALLY node.
342 The idea is to record a gimple statement for everything except for
343 the conditionals, which get their labels recorded. Since labels are
344 of type 'tree', we need this node to store both gimple and tree
345 objects. REPL_STMT is the sequence used to replace the goto/return
346 statement. CONT_STMT is used to store the statement that allows
347 the return/goto to jump to the original destination. */
348
349struct goto_queue_node
350{
351 treemple stmt;
d7ebacec 352 location_t location;
0b09525f 353 gimple_seq repl_stmt;
354 gimple cont_stmt;
355 int index;
356 /* This is used when index >= 0 to indicate that stmt is a label (as
357 opposed to a goto stmt). */
358 int is_label;
359};
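/* An illustrative sketch (not from the original sources): while lowering

     try { if (p) goto out; return; } finally { cleanup (); }
   out:;

   the "goto out" is queued with IS_LABEL set and INDEX equal to the
   position of "out" in dest_array, while the "return" is queued with
   INDEX -1.  For each node, REPL_STMT becomes a branch to the start of
   the lowered finally code and CONT_STMT is the goto/return that carries
   control on to the original destination afterwards.  */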
360
4ee9c684 361/* State of the world while lowering. */
362
363struct leh_state
364{
ac13e8d9 365 /* What's "current" while constructing the eh region tree. These
4ee9c684 366 correspond to variables of the same name in cfun->eh, which we
367 don't have easy access to. */
e38def9c 368 eh_region cur_region;
369
370 /* What's "current" for the purposes of __builtin_eh_pointer. For
371 a CATCH, this is the associated TRY. For an EH_FILTER, this is
372 the associated ALLOWED_EXCEPTIONS, etc. */
373 eh_region ehp_region;
4ee9c684 374
375 /* Processing of TRY_FINALLY requires a bit more state. This is
376 split out into a separate structure so that we don't have to
377 copy so much when processing other nodes. */
378 struct leh_tf_state *tf;
379};
380
381struct leh_tf_state
382{
75a70cf9 383 /* Pointer to the GIMPLE_TRY_FINALLY node under discussion. The
384 try_finally_expr is the original GIMPLE_TRY_FINALLY. We need to retain
385 this so that outside_finally_tree can reliably reference the tree used
386 in the collect_finally_tree data structures. */
1a91d914 387 gtry *try_finally_expr;
388 gtry *top_p;
e38def9c 389
75a70cf9 390 /* While lowering, top_p is usually expanded into multiple statements;
 391 the following field stores them. */
392 gimple_seq top_p_seq;
4ee9c684 393
394 /* The state outside this try_finally node. */
395 struct leh_state *outer;
396
397 /* The exception region created for it. */
e38def9c 398 eh_region region;
4ee9c684 399
0b09525f 400 /* The goto queue. */
401 struct goto_queue_node *goto_queue;
4ee9c684 402 size_t goto_queue_size;
403 size_t goto_queue_active;
404
f0b5f617 405 /* Pointer map to help in searching goto_queue when it is large. */
06ecf488 406 hash_map<gimple, goto_queue_node *> *goto_queue_map;
46699809 407
4ee9c684 408 /* The set of unique labels seen as entries in the goto queue. */
f1f41a6c 409 vec<tree> dest_array;
4ee9c684 410
411 /* A label to be added at the end of the completed transformed
412 sequence. It will be set if may_fallthru was true *at one time*,
413 though subsequent transformations may have cleared that flag. */
414 tree fallthru_label;
415
4ee9c684 416 /* True if it is possible to fall out the bottom of the try block.
417 Cleared if the fallthru is converted to a goto. */
418 bool may_fallthru;
419
75a70cf9 420 /* True if any entry in goto_queue is a GIMPLE_RETURN. */
4ee9c684 421 bool may_return;
422
423 /* True if the finally block can receive an exception edge.
424 Cleared if the exception case is handled by code duplication. */
425 bool may_throw;
426};
427
1a91d914 428static gimple_seq lower_eh_must_not_throw (struct leh_state *, gtry *);
4ee9c684 429
4ee9c684 430/* Search for STMT in the goto queue. Return the replacement,
431 or null if the statement isn't in the queue. */
432
46699809 433#define LARGE_GOTO_QUEUE 20
434
e3a19533 435static void lower_eh_constructs_1 (struct leh_state *state, gimple_seq *seq);
75a70cf9 436
437static gimple_seq
438find_goto_replacement (struct leh_tf_state *tf, treemple stmt)
4ee9c684 439{
46699809 440 unsigned int i;
46699809 441
442 if (tf->goto_queue_active < LARGE_GOTO_QUEUE)
443 {
444 for (i = 0; i < tf->goto_queue_active; i++)
75a70cf9 445 if ( tf->goto_queue[i].stmt.g == stmt.g)
46699809 446 return tf->goto_queue[i].repl_stmt;
447 return NULL;
448 }
449
450 /* If we have a large number of entries in the goto_queue, create a
451 pointer map and use that for searching. */
452
453 if (!tf->goto_queue_map)
454 {
06ecf488 455 tf->goto_queue_map = new hash_map<gimple, goto_queue_node *>;
46699809 456 for (i = 0; i < tf->goto_queue_active; i++)
457 {
06ecf488 458 bool existed = tf->goto_queue_map->put (tf->goto_queue[i].stmt.g,
459 &tf->goto_queue[i]);
460 gcc_assert (!existed);
46699809 461 }
462 }
463
06ecf488 464 goto_queue_node **slot = tf->goto_queue_map->get (stmt.g);
46699809 465 if (slot != NULL)
06ecf488 466 return ((*slot)->repl_stmt);
46699809 467
468 return NULL;
4ee9c684 469}
470
471/* A subroutine of replace_goto_queue_1. Handles the sub-clauses of a
75a70cf9 472 lowered GIMPLE_COND. If, by chance, the replacement is a simple goto,
4ee9c684 473 then we can just splat it in, otherwise we add the new stmts immediately
75a70cf9 474 after the GIMPLE_COND and redirect. */
4ee9c684 475
476static void
477replace_goto_queue_cond_clause (tree *tp, struct leh_tf_state *tf,
75a70cf9 478 gimple_stmt_iterator *gsi)
4ee9c684 479{
75a70cf9 480 tree label;
f4e36c33 481 gimple_seq new_seq;
75a70cf9 482 treemple temp;
e60a6f7b 483 location_t loc = gimple_location (gsi_stmt (*gsi));
4ee9c684 484
75a70cf9 485 temp.tp = tp;
f4e36c33 486 new_seq = find_goto_replacement (tf, temp);
487 if (!new_seq)
4ee9c684 488 return;
489
f4e36c33 490 if (gimple_seq_singleton_p (new_seq)
491 && gimple_code (gimple_seq_first_stmt (new_seq)) == GIMPLE_GOTO)
4ee9c684 492 {
f4e36c33 493 *tp = gimple_goto_dest (gimple_seq_first_stmt (new_seq));
4ee9c684 494 return;
495 }
496
e60a6f7b 497 label = create_artificial_label (loc);
75a70cf9 498 /* Set the new label for the GIMPLE_COND */
499 *tp = label;
4ee9c684 500
75a70cf9 501 gsi_insert_after (gsi, gimple_build_label (label), GSI_CONTINUE_LINKING);
f4e36c33 502 gsi_insert_seq_after (gsi, gimple_seq_copy (new_seq), GSI_CONTINUE_LINKING);
4ee9c684 503}
504
ac13e8d9 505/* The real work of replace_goto_queue. Returns with GSI updated to
4ee9c684 506 point to the next statement. */
507
e3a19533 508static void replace_goto_queue_stmt_list (gimple_seq *, struct leh_tf_state *);
4ee9c684 509
510static void
75a70cf9 511replace_goto_queue_1 (gimple stmt, struct leh_tf_state *tf,
512 gimple_stmt_iterator *gsi)
4ee9c684 513{
75a70cf9 514 gimple_seq seq;
515 treemple temp;
516 temp.g = NULL;
517
518 switch (gimple_code (stmt))
4ee9c684 519 {
75a70cf9 520 case GIMPLE_GOTO:
521 case GIMPLE_RETURN:
522 temp.g = stmt;
523 seq = find_goto_replacement (tf, temp);
524 if (seq)
4ee9c684 525 {
75a70cf9 526 gsi_insert_seq_before (gsi, gimple_seq_copy (seq), GSI_SAME_STMT);
527 gsi_remove (gsi, false);
4ee9c684 528 return;
529 }
530 break;
531
75a70cf9 532 case GIMPLE_COND:
533 replace_goto_queue_cond_clause (gimple_op_ptr (stmt, 2), tf, gsi);
534 replace_goto_queue_cond_clause (gimple_op_ptr (stmt, 3), tf, gsi);
4ee9c684 535 break;
536
75a70cf9 537 case GIMPLE_TRY:
e3a19533 538 replace_goto_queue_stmt_list (gimple_try_eval_ptr (stmt), tf);
539 replace_goto_queue_stmt_list (gimple_try_cleanup_ptr (stmt), tf);
4ee9c684 540 break;
75a70cf9 541 case GIMPLE_CATCH:
1a91d914 542 replace_goto_queue_stmt_list (gimple_catch_handler_ptr (
543 as_a <gcatch *> (stmt)),
544 tf);
4ee9c684 545 break;
75a70cf9 546 case GIMPLE_EH_FILTER:
e3a19533 547 replace_goto_queue_stmt_list (gimple_eh_filter_failure_ptr (stmt), tf);
4ee9c684 548 break;
4c0315d0 549 case GIMPLE_EH_ELSE:
1a91d914 550 {
551 geh_else *eh_else_stmt = as_a <geh_else *> (stmt);
552 replace_goto_queue_stmt_list (gimple_eh_else_n_body_ptr (eh_else_stmt),
553 tf);
554 replace_goto_queue_stmt_list (gimple_eh_else_e_body_ptr (eh_else_stmt),
555 tf);
556 }
4c0315d0 557 break;
4ee9c684 558
4ee9c684 559 default:
560 /* These won't have gotos in them. */
561 break;
562 }
563
75a70cf9 564 gsi_next (gsi);
4ee9c684 565}
566
75a70cf9 567/* A subroutine of replace_goto_queue. Handles GIMPLE_SEQ. */
4ee9c684 568
569static void
e3a19533 570replace_goto_queue_stmt_list (gimple_seq *seq, struct leh_tf_state *tf)
4ee9c684 571{
e3a19533 572 gimple_stmt_iterator gsi = gsi_start (*seq);
75a70cf9 573
574 while (!gsi_end_p (gsi))
575 replace_goto_queue_1 (gsi_stmt (gsi), tf, &gsi);
4ee9c684 576}
577
578/* Replace all goto queue members. */
579
580static void
581replace_goto_queue (struct leh_tf_state *tf)
582{
82a8c0dd 583 if (tf->goto_queue_active == 0)
584 return;
e3a19533 585 replace_goto_queue_stmt_list (&tf->top_p_seq, tf);
586 replace_goto_queue_stmt_list (&eh_seq, tf);
4ee9c684 587}
588
75a70cf9 589/* Add a new record to the goto queue contained in TF. NEW_STMT is the
590 data to be added, IS_LABEL indicates whether NEW_STMT is a label or
591 a gimple return. */
4ee9c684 592
593static void
75a70cf9 594record_in_goto_queue (struct leh_tf_state *tf,
595 treemple new_stmt,
596 int index,
d7ebacec 597 bool is_label,
598 location_t location)
4ee9c684 599{
4ee9c684 600 size_t active, size;
75a70cf9 601 struct goto_queue_node *q;
4ee9c684 602
46699809 603 gcc_assert (!tf->goto_queue_map);
604
4ee9c684 605 active = tf->goto_queue_active;
606 size = tf->goto_queue_size;
607 if (active >= size)
608 {
609 size = (size ? size * 2 : 32);
610 tf->goto_queue_size = size;
611 tf->goto_queue
680a19b9 612 = XRESIZEVEC (struct goto_queue_node, tf->goto_queue, size);
4ee9c684 613 }
614
615 q = &tf->goto_queue[active];
616 tf->goto_queue_active = active + 1;
ac13e8d9 617
4ee9c684 618 memset (q, 0, sizeof (*q));
75a70cf9 619 q->stmt = new_stmt;
4ee9c684 620 q->index = index;
d7ebacec 621 q->location = location;
75a70cf9 622 q->is_label = is_label;
623}
624
625/* Record the LABEL label in the goto queue contained in TF.
626 TF is not null. */
627
628static void
d7ebacec 629record_in_goto_queue_label (struct leh_tf_state *tf, treemple stmt, tree label,
630 location_t location)
75a70cf9 631{
632 int index;
633 treemple temp, new_stmt;
634
635 if (!label)
636 return;
637
638 /* Computed and non-local gotos do not get processed. Given
639 their nature we can neither tell whether we've escaped the
640 finally block nor redirect them if we knew. */
641 if (TREE_CODE (label) != LABEL_DECL)
642 return;
643
644 /* No need to record gotos that don't leave the try block. */
645 temp.t = label;
646 if (!outside_finally_tree (temp, tf->try_finally_expr))
647 return;
648
f1f41a6c 649 if (! tf->dest_array.exists ())
75a70cf9 650 {
f1f41a6c 651 tf->dest_array.create (10);
652 tf->dest_array.quick_push (label);
75a70cf9 653 index = 0;
654 }
655 else
656 {
f1f41a6c 657 int n = tf->dest_array.length ();
75a70cf9 658 for (index = 0; index < n; ++index)
f1f41a6c 659 if (tf->dest_array[index] == label)
75a70cf9 660 break;
661 if (index == n)
f1f41a6c 662 tf->dest_array.safe_push (label);
75a70cf9 663 }
664
665 /* In the case of a GOTO we want to record the destination label,
 666 since with a GIMPLE_COND we have easy access to the then/else
667 labels. */
668 new_stmt = stmt;
d7ebacec 669 record_in_goto_queue (tf, new_stmt, index, true, location);
75a70cf9 670}
671
672/* For any GIMPLE_GOTO or GIMPLE_RETURN, decide whether it leaves a try_finally
673 node, and if so record that fact in the goto queue associated with that
674 try_finally node. */
675
676static void
677maybe_record_in_goto_queue (struct leh_state *state, gimple stmt)
678{
679 struct leh_tf_state *tf = state->tf;
680 treemple new_stmt;
681
682 if (!tf)
683 return;
684
685 switch (gimple_code (stmt))
686 {
687 case GIMPLE_COND:
1a91d914 688 {
689 gcond *cond_stmt = as_a <gcond *> (stmt);
690 new_stmt.tp = gimple_op_ptr (cond_stmt, 2);
691 record_in_goto_queue_label (tf, new_stmt,
692 gimple_cond_true_label (cond_stmt),
693 EXPR_LOCATION (*new_stmt.tp));
694 new_stmt.tp = gimple_op_ptr (cond_stmt, 3);
695 record_in_goto_queue_label (tf, new_stmt,
696 gimple_cond_false_label (cond_stmt),
697 EXPR_LOCATION (*new_stmt.tp));
698 }
75a70cf9 699 break;
700 case GIMPLE_GOTO:
701 new_stmt.g = stmt;
d7ebacec 702 record_in_goto_queue_label (tf, new_stmt, gimple_goto_dest (stmt),
703 gimple_location (stmt));
75a70cf9 704 break;
705
706 case GIMPLE_RETURN:
707 tf->may_return = true;
708 new_stmt.g = stmt;
d7ebacec 709 record_in_goto_queue (tf, new_stmt, -1, false, gimple_location (stmt));
75a70cf9 710 break;
711
712 default:
713 gcc_unreachable ();
714 }
4ee9c684 715}
716
75a70cf9 717
4ee9c684 718#ifdef ENABLE_CHECKING
75a70cf9 719/* We do not process GIMPLE_SWITCHes for now. As long as the original source
4ee9c684 720 was in fact structured, and we've not yet done jump threading, then none
75a70cf9 721 of the labels will leave outer GIMPLE_TRY_FINALLY nodes. Verify this. */
4ee9c684 722
723static void
1a91d914 724verify_norecord_switch_expr (struct leh_state *state,
725 gswitch *switch_expr)
4ee9c684 726{
727 struct leh_tf_state *tf = state->tf;
728 size_t i, n;
4ee9c684 729
730 if (!tf)
731 return;
732
75a70cf9 733 n = gimple_switch_num_labels (switch_expr);
4ee9c684 734
735 for (i = 0; i < n; ++i)
736 {
75a70cf9 737 treemple temp;
738 tree lab = CASE_LABEL (gimple_switch_label (switch_expr, i));
739 temp.t = lab;
740 gcc_assert (!outside_finally_tree (temp, tf->try_finally_expr));
4ee9c684 741 }
742}
743#else
744#define verify_norecord_switch_expr(state, switch_expr)
745#endif
746
9a14ac4f 747/* Redirect a RETURN_EXPR pointed to by Q to FINLAB. If MOD is
748 non-null, insert it before the new branch. */
4ee9c684 749
750static void
9a14ac4f 751do_return_redirection (struct goto_queue_node *q, tree finlab, gimple_seq mod)
4ee9c684 752{
75a70cf9 753 gimple x;
754
9a14ac4f 755 /* In the case of a return, the queue node must be a gimple statement. */
75a70cf9 756 gcc_assert (!q->is_label);
757
9a14ac4f 758 /* Note that the return value may have already been computed, e.g.,
4ee9c684 759
9a14ac4f 760 int x;
761 int foo (void)
4ee9c684 762 {
9a14ac4f 763 x = 0;
764 try {
765 return x;
766 } finally {
767 x++;
768 }
4ee9c684 769 }
9a14ac4f 770
771 should return 0, not 1. We don't have to do anything to make
 772 this happen because the return value has been placed in the
773 RESULT_DECL already. */
774
775 q->cont_stmt = q->stmt.g;
75a70cf9 776
4ee9c684 777 if (mod)
75a70cf9 778 gimple_seq_add_seq (&q->repl_stmt, mod);
4ee9c684 779
75a70cf9 780 x = gimple_build_goto (finlab);
ed4d69dc 781 gimple_set_location (x, q->location);
75a70cf9 782 gimple_seq_add_stmt (&q->repl_stmt, x);
4ee9c684 783}
784
75a70cf9 785/* Similar, but easier, for GIMPLE_GOTO. */
4ee9c684 786
787static void
75a70cf9 788do_goto_redirection (struct goto_queue_node *q, tree finlab, gimple_seq mod,
789 struct leh_tf_state *tf)
4ee9c684 790{
1a91d914 791 ggoto *x;
75a70cf9 792
793 gcc_assert (q->is_label);
75a70cf9 794
f1f41a6c 795 q->cont_stmt = gimple_build_goto (tf->dest_array[q->index]);
4ee9c684 796
4ee9c684 797 if (mod)
75a70cf9 798 gimple_seq_add_seq (&q->repl_stmt, mod);
4ee9c684 799
75a70cf9 800 x = gimple_build_goto (finlab);
ed4d69dc 801 gimple_set_location (x, q->location);
75a70cf9 802 gimple_seq_add_stmt (&q->repl_stmt, x);
4ee9c684 803}
804
e38def9c 805/* Emit a standard landing pad sequence into SEQ for REGION. */
806
807static void
808emit_post_landing_pad (gimple_seq *seq, eh_region region)
809{
810 eh_landing_pad lp = region->landing_pads;
1a91d914 811 glabel *x;
e38def9c 812
813 if (lp == NULL)
814 lp = gen_eh_landing_pad (region);
815
816 lp->post_landing_pad = create_artificial_label (UNKNOWN_LOCATION);
817 EH_LANDING_PAD_NR (lp->post_landing_pad) = lp->index;
818
819 x = gimple_build_label (lp->post_landing_pad);
820 gimple_seq_add_stmt (seq, x);
821}
822
823/* Emit a RESX statement into SEQ for REGION. */
824
825static void
826emit_resx (gimple_seq *seq, eh_region region)
827{
1a91d914 828 gresx *x = gimple_build_resx (region->index);
e38def9c 829 gimple_seq_add_stmt (seq, x);
830 if (region->outer)
831 record_stmt_eh_region (region->outer, x);
832}
833
834/* Emit an EH_DISPATCH statement into SEQ for REGION. */
835
836static void
837emit_eh_dispatch (gimple_seq *seq, eh_region region)
838{
1a91d914 839 geh_dispatch *x = gimple_build_eh_dispatch (region->index);
e38def9c 840 gimple_seq_add_stmt (seq, x);
841}
842
843/* Note that the current EH region may contain a throw, or a
844 call to a function which itself may contain a throw. */
845
846static void
847note_eh_region_may_contain_throw (eh_region region)
848{
6ef9bbe0 849 while (bitmap_set_bit (eh_region_may_contain_throw_map, region->index))
e38def9c 850 {
39efead8 851 if (region->type == ERT_MUST_NOT_THROW)
852 break;
e38def9c 853 region = region->outer;
854 if (region == NULL)
855 break;
856 }
857}
858
55d6d4e4 859/* Check if REGION has been marked as containing a throw. If REGION is
860 NULL, this predicate is false. */
861
862static inline bool
863eh_region_may_contain_throw (eh_region r)
864{
865 return r && bitmap_bit_p (eh_region_may_contain_throw_map, r->index);
866}
867
4ee9c684 868/* We want to transform
869 try { body; } catch { stuff; }
870 to
4422041b 871 normal_sequence:
e38def9c 872 body;
873 over:
4422041b 874 eh_sequence:
e38def9c 875 landing_pad:
876 stuff;
877 goto over;
878
879 TP is a GIMPLE_TRY node. REGION is the region whose post_landing_pad
4ee9c684 880 should be placed before the second operand, or NULL. OVER is
881 an existing label that should be put at the exit, or NULL. */
882
75a70cf9 883static gimple_seq
1a91d914 884frob_into_branch_around (gtry *tp, eh_region region, tree over)
4ee9c684 885{
75a70cf9 886 gimple x;
887 gimple_seq cleanup, result;
e60a6f7b 888 location_t loc = gimple_location (tp);
4ee9c684 889
75a70cf9 890 cleanup = gimple_try_cleanup (tp);
891 result = gimple_try_eval (tp);
4ee9c684 892
e38def9c 893 if (region)
894 emit_post_landing_pad (&eh_seq, region);
895
896 if (gimple_seq_may_fallthru (cleanup))
4ee9c684 897 {
898 if (!over)
e60a6f7b 899 over = create_artificial_label (loc);
75a70cf9 900 x = gimple_build_goto (over);
ed4d69dc 901 gimple_set_location (x, loc);
e38def9c 902 gimple_seq_add_stmt (&cleanup, x);
4ee9c684 903 }
e38def9c 904 gimple_seq_add_seq (&eh_seq, cleanup);
4ee9c684 905
906 if (over)
907 {
75a70cf9 908 x = gimple_build_label (over);
909 gimple_seq_add_stmt (&result, x);
4ee9c684 910 }
75a70cf9 911 return result;
4ee9c684 912}
913
 914/* A subroutine of lower_try_finally. Duplicate the gimple sequence SEQ.
915 Make sure to record all new labels found. */
916
75a70cf9 917static gimple_seq
d7ebacec 918lower_try_finally_dup_block (gimple_seq seq, struct leh_state *outer_state,
919 location_t loc)
4ee9c684 920{
1a91d914 921 gtry *region = NULL;
75a70cf9 922 gimple_seq new_seq;
d7ebacec 923 gimple_stmt_iterator gsi;
4ee9c684 924
75a70cf9 925 new_seq = copy_gimple_seq_and_replace_locals (seq);
4ee9c684 926
d7ebacec 927 for (gsi = gsi_start (new_seq); !gsi_end_p (gsi); gsi_next (&gsi))
cc9f317f 928 {
929 gimple stmt = gsi_stmt (gsi);
8e7408e3 930 if (LOCATION_LOCUS (gimple_location (stmt)) == UNKNOWN_LOCATION)
cc9f317f 931 {
932 tree block = gimple_block (stmt);
933 gimple_set_location (stmt, loc);
934 gimple_set_block (stmt, block);
935 }
936 }
d7ebacec 937
4ee9c684 938 if (outer_state->tf)
939 region = outer_state->tf->try_finally_expr;
75a70cf9 940 collect_finally_tree_1 (new_seq, region);
4ee9c684 941
75a70cf9 942 return new_seq;
4ee9c684 943}
944
945/* A subroutine of lower_try_finally. Create a fallthru label for
946 the given try_finally state. The only tricky bit here is that
947 we have to make sure to record the label in our outer context. */
948
949static tree
950lower_try_finally_fallthru_label (struct leh_tf_state *tf)
951{
952 tree label = tf->fallthru_label;
75a70cf9 953 treemple temp;
954
4ee9c684 955 if (!label)
956 {
e60a6f7b 957 label = create_artificial_label (gimple_location (tf->try_finally_expr));
4ee9c684 958 tf->fallthru_label = label;
959 if (tf->outer->tf)
75a70cf9 960 {
961 temp.t = label;
962 record_in_finally_tree (temp, tf->outer->tf->try_finally_expr);
963 }
4ee9c684 964 }
965 return label;
966}
967
4c0315d0 968/* A subroutine of lower_try_finally. If FINALLY consists of a
969 GIMPLE_EH_ELSE node, return it. */
970
1a91d914 971static inline geh_else *
4c0315d0 972get_eh_else (gimple_seq finally)
973{
974 gimple x = gimple_seq_first_stmt (finally);
975 if (gimple_code (x) == GIMPLE_EH_ELSE)
976 {
977 gcc_assert (gimple_seq_singleton_p (finally));
1a91d914 978 return as_a <geh_else *> (x);
4c0315d0 979 }
980 return NULL;
981}
982
596981c8 983/* A subroutine of lower_try_finally. If the eh_protect_cleanup_actions
984 langhook returns non-null, then the language requires that the exception
985 path out of a try_finally be treated specially. To wit: the code within
986 the finally block may not itself throw an exception. We have two choices
987 here. First we can duplicate the finally block and wrap it in a
988 must_not_throw region. Second, we can generate code like
4ee9c684 989
990 try {
991 finally_block;
992 } catch {
993 if (fintmp == eh_edge)
994 protect_cleanup_actions;
995 }
996
997 where "fintmp" is the temporary used in the switch statement generation
998 alternative considered below. For the nonce, we always choose the first
ac13e8d9 999 option.
4ee9c684 1000
822e391f 1001 THIS_STATE may be null if this is a try-cleanup, not a try-finally. */
4ee9c684 1002
1003static void
1004honor_protect_cleanup_actions (struct leh_state *outer_state,
1005 struct leh_state *this_state,
1006 struct leh_tf_state *tf)
1007{
e38def9c 1008 tree protect_cleanup_actions;
75a70cf9 1009 gimple_stmt_iterator gsi;
4ee9c684 1010 bool finally_may_fallthru;
75a70cf9 1011 gimple_seq finally;
1a91d914 1012 gimple x;
1013 geh_mnt *eh_mnt;
1014 gtry *try_stmt;
1015 geh_else *eh_else;
4ee9c684 1016
1017 /* First check for nothing to do. */
596981c8 1018 if (lang_hooks.eh_protect_cleanup_actions == NULL)
e38def9c 1019 return;
596981c8 1020 protect_cleanup_actions = lang_hooks.eh_protect_cleanup_actions ();
e38def9c 1021 if (protect_cleanup_actions == NULL)
1022 return;
4ee9c684 1023
75a70cf9 1024 finally = gimple_try_cleanup (tf->top_p);
4c0315d0 1025 eh_else = get_eh_else (finally);
4ee9c684 1026
1027 /* Duplicate the FINALLY block. Only need to do this for try-finally,
4c0315d0 1028 and not for cleanups. If we've got an EH_ELSE, extract it now. */
1029 if (eh_else)
1030 {
1031 finally = gimple_eh_else_e_body (eh_else);
1032 gimple_try_set_cleanup (tf->top_p, gimple_eh_else_n_body (eh_else));
1033 }
1034 else if (this_state)
d7ebacec 1035 finally = lower_try_finally_dup_block (finally, outer_state,
5169661d 1036 gimple_location (tf->try_finally_expr));
4c0315d0 1037 finally_may_fallthru = gimple_seq_may_fallthru (finally);
4ee9c684 1038
0bc060a4 1039 /* If this cleanup consists of a TRY_CATCH_EXPR with TRY_CATCH_IS_CLEANUP
1040 set, the handler of the TRY_CATCH_EXPR is another cleanup which ought
1041 to be in an enclosing scope, but needs to be implemented at this level
1042 to avoid a nesting violation (see wrap_temporary_cleanups in
1043 cp/decl.c). Since it's logically at an outer level, we should call
1044 terminate before we get to it, so strip it away before adding the
1045 MUST_NOT_THROW filter. */
75a70cf9 1046 gsi = gsi_start (finally);
1047 x = gsi_stmt (gsi);
e38def9c 1048 if (gimple_code (x) == GIMPLE_TRY
75a70cf9 1049 && gimple_try_kind (x) == GIMPLE_TRY_CATCH
1050 && gimple_try_catch_is_cleanup (x))
0bc060a4 1051 {
75a70cf9 1052 gsi_insert_seq_before (&gsi, gimple_try_eval (x), GSI_SAME_STMT);
1053 gsi_remove (&gsi, false);
0bc060a4 1054 }
1055
4ee9c684 1056 /* Wrap the block with protect_cleanup_actions as the action. */
1a91d914 1057 eh_mnt = gimple_build_eh_must_not_throw (protect_cleanup_actions);
1058 try_stmt = gimple_build_try (finally, gimple_seq_alloc_with_stmt (eh_mnt),
1059 GIMPLE_TRY_CATCH);
1060 finally = lower_eh_must_not_throw (outer_state, try_stmt);
e38def9c 1061
1062 /* Drop all of this into the exception sequence. */
1063 emit_post_landing_pad (&eh_seq, tf->region);
1064 gimple_seq_add_seq (&eh_seq, finally);
1065 if (finally_may_fallthru)
1066 emit_resx (&eh_seq, tf->region);
4ee9c684 1067
1068 /* Having now been handled, EH isn't to be considered with
1069 the rest of the outgoing edges. */
1070 tf->may_throw = false;
1071}
1072
1073/* A subroutine of lower_try_finally. We have determined that there is
1074 no fallthru edge out of the finally block. This means that there is
1075 no outgoing edge corresponding to any incoming edge. Restructure the
1076 try_finally node for this special case. */
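/* An illustrative case (not from the original sources): the finally block
   never falls through, e.g. because it ends in a call to a noreturn
   function.  Every escaping goto/return, as well as the exception path,
   can then simply be redirected to a label placed in front of the single
   copy of the finally code; nothing needs to branch onward afterwards.  */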
1077
1078static void
75a70cf9 1079lower_try_finally_nofallthru (struct leh_state *state,
1080 struct leh_tf_state *tf)
4ee9c684 1081{
9a14ac4f 1082 tree lab;
1a91d914 1083 gimple x;
1084 geh_else *eh_else;
75a70cf9 1085 gimple_seq finally;
4ee9c684 1086 struct goto_queue_node *q, *qe;
1087
e38def9c 1088 lab = create_artificial_label (gimple_location (tf->try_finally_expr));
4ee9c684 1089
75a70cf9 1090 /* We expect that tf->top_p is a GIMPLE_TRY. */
1091 finally = gimple_try_cleanup (tf->top_p);
1092 tf->top_p_seq = gimple_try_eval (tf->top_p);
4ee9c684 1093
75a70cf9 1094 x = gimple_build_label (lab);
1095 gimple_seq_add_stmt (&tf->top_p_seq, x);
4ee9c684 1096
4ee9c684 1097 q = tf->goto_queue;
1098 qe = q + tf->goto_queue_active;
1099 for (; q < qe; ++q)
1100 if (q->index < 0)
9a14ac4f 1101 do_return_redirection (q, lab, NULL);
4ee9c684 1102 else
75a70cf9 1103 do_goto_redirection (q, lab, NULL, tf);
4ee9c684 1104
1105 replace_goto_queue (tf);
1106
4c0315d0 1107 /* Emit the finally block into the stream. Lower EH_ELSE at this time. */
1108 eh_else = get_eh_else (finally);
1109 if (eh_else)
1110 {
1111 finally = gimple_eh_else_n_body (eh_else);
e3a19533 1112 lower_eh_constructs_1 (state, &finally);
4c0315d0 1113 gimple_seq_add_seq (&tf->top_p_seq, finally);
e38def9c 1114
4c0315d0 1115 if (tf->may_throw)
1116 {
1117 finally = gimple_eh_else_e_body (eh_else);
e3a19533 1118 lower_eh_constructs_1 (state, &finally);
4c0315d0 1119
1120 emit_post_landing_pad (&eh_seq, tf->region);
1121 gimple_seq_add_seq (&eh_seq, finally);
1122 }
1123 }
1124 else
e38def9c 1125 {
e3a19533 1126 lower_eh_constructs_1 (state, &finally);
4c0315d0 1127 gimple_seq_add_seq (&tf->top_p_seq, finally);
e38def9c 1128
4c0315d0 1129 if (tf->may_throw)
1130 {
1131 emit_post_landing_pad (&eh_seq, tf->region);
1132
1133 x = gimple_build_goto (lab);
ed4d69dc 1134 gimple_set_location (x, gimple_location (tf->try_finally_expr));
4c0315d0 1135 gimple_seq_add_stmt (&eh_seq, x);
1136 }
e38def9c 1137 }
4ee9c684 1138}
1139
1140/* A subroutine of lower_try_finally. We have determined that there is
1141 exactly one destination of the finally block. Restructure the
1142 try_finally node for this special case. */
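/* An illustrative case (not from the original sources): if the only way
   out of

     try { ... } finally { B; }

   is falling through the bottom of the try body, the result is simply the
   try body followed by a single copy of B, with no switch, duplication, or
   redirection needed.  */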
1143
1144static void
1145lower_try_finally_onedest (struct leh_state *state, struct leh_tf_state *tf)
1146{
1147 struct goto_queue_node *q, *qe;
1a91d914 1148 geh_else *eh_else;
1149 glabel *label_stmt;
75a70cf9 1150 gimple x;
1151 gimple_seq finally;
ae117ec5 1152 gimple_stmt_iterator gsi;
75a70cf9 1153 tree finally_label;
e60a6f7b 1154 location_t loc = gimple_location (tf->try_finally_expr);
4ee9c684 1155
75a70cf9 1156 finally = gimple_try_cleanup (tf->top_p);
1157 tf->top_p_seq = gimple_try_eval (tf->top_p);
4ee9c684 1158
4c0315d0 1159 /* Since there's only one destination, and the destination edge can only
1160 either be EH or non-EH, that implies that all of our incoming edges
1161 are of the same type. Therefore we can lower EH_ELSE immediately. */
1a91d914 1162 eh_else = get_eh_else (finally);
1163 if (eh_else)
4c0315d0 1164 {
1165 if (tf->may_throw)
1a91d914 1166 finally = gimple_eh_else_e_body (eh_else);
4c0315d0 1167 else
1a91d914 1168 finally = gimple_eh_else_n_body (eh_else);
4c0315d0 1169 }
1170
e3a19533 1171 lower_eh_constructs_1 (state, &finally);
4ee9c684 1172
ae117ec5 1173 for (gsi = gsi_start (finally); !gsi_end_p (gsi); gsi_next (&gsi))
1174 {
1175 gimple stmt = gsi_stmt (gsi);
1176 if (LOCATION_LOCUS (gimple_location (stmt)) == UNKNOWN_LOCATION)
1177 {
1178 tree block = gimple_block (stmt);
1179 gimple_set_location (stmt, gimple_location (tf->try_finally_expr));
1180 gimple_set_block (stmt, block);
1181 }
1182 }
1183
4ee9c684 1184 if (tf->may_throw)
1185 {
1186 /* Only reachable via the exception edge. Add the given label to
1187 the head of the FINALLY block. Append a RESX at the end. */
e38def9c 1188 emit_post_landing_pad (&eh_seq, tf->region);
1189 gimple_seq_add_seq (&eh_seq, finally);
1190 emit_resx (&eh_seq, tf->region);
4ee9c684 1191 return;
1192 }
1193
1194 if (tf->may_fallthru)
1195 {
1196 /* Only reachable via the fallthru edge. Do nothing but let
1197 the two blocks run together; we'll fall out the bottom. */
75a70cf9 1198 gimple_seq_add_seq (&tf->top_p_seq, finally);
4ee9c684 1199 return;
1200 }
1201
e60a6f7b 1202 finally_label = create_artificial_label (loc);
1a91d914 1203 label_stmt = gimple_build_label (finally_label);
1204 gimple_seq_add_stmt (&tf->top_p_seq, label_stmt);
4ee9c684 1205
75a70cf9 1206 gimple_seq_add_seq (&tf->top_p_seq, finally);
4ee9c684 1207
1208 q = tf->goto_queue;
1209 qe = q + tf->goto_queue_active;
1210
1211 if (tf->may_return)
1212 {
1213 /* Reachable by return expressions only. Redirect them. */
4ee9c684 1214 for (; q < qe; ++q)
9a14ac4f 1215 do_return_redirection (q, finally_label, NULL);
4ee9c684 1216 replace_goto_queue (tf);
1217 }
1218 else
1219 {
1220 /* Reachable by goto expressions only. Redirect them. */
1221 for (; q < qe; ++q)
75a70cf9 1222 do_goto_redirection (q, finally_label, NULL, tf);
4ee9c684 1223 replace_goto_queue (tf);
ac13e8d9 1224
f1f41a6c 1225 if (tf->dest_array[0] == tf->fallthru_label)
4ee9c684 1226 {
1227 /* Reachable by goto to fallthru label only. Redirect it
1228 to the new label (already created, sadly), and do not
1229 emit the final branch out, or the fallthru label. */
1230 tf->fallthru_label = NULL;
1231 return;
1232 }
1233 }
1234
75a70cf9 1235 /* Place the original return/goto to the original destination
1236 immediately after the finally block. */
1237 x = tf->goto_queue[0].cont_stmt;
1238 gimple_seq_add_stmt (&tf->top_p_seq, x);
1239 maybe_record_in_goto_queue (state, x);
4ee9c684 1240}
1241
1242/* A subroutine of lower_try_finally. There are multiple edges incoming
1243 and outgoing from the finally block. Implement this by duplicating the
1244 finally block for every destination. */
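/* An illustrative sketch (not from the original sources): for

     try { if (p) return; A; } finally { B; }

   the copy strategy emits one copy of B on the fallthru path after A and a
   second copy in front of the redirected return, rather than funnelling
   both paths through a shared switch.  */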
1245
1246static void
1247lower_try_finally_copy (struct leh_state *state, struct leh_tf_state *tf)
1248{
75a70cf9 1249 gimple_seq finally;
1250 gimple_seq new_stmt;
1251 gimple_seq seq;
1a91d914 1252 gimple x;
1253 geh_else *eh_else;
75a70cf9 1254 tree tmp;
e60a6f7b 1255 location_t tf_loc = gimple_location (tf->try_finally_expr);
4ee9c684 1256
75a70cf9 1257 finally = gimple_try_cleanup (tf->top_p);
4c0315d0 1258
1259 /* Notice EH_ELSE, and simplify some of the remaining code
1260 by considering FINALLY to be the normal return path only. */
1261 eh_else = get_eh_else (finally);
1262 if (eh_else)
1263 finally = gimple_eh_else_n_body (eh_else);
1264
75a70cf9 1265 tf->top_p_seq = gimple_try_eval (tf->top_p);
1266 new_stmt = NULL;
4ee9c684 1267
1268 if (tf->may_fallthru)
1269 {
d7ebacec 1270 seq = lower_try_finally_dup_block (finally, state, tf_loc);
e3a19533 1271 lower_eh_constructs_1 (state, &seq);
75a70cf9 1272 gimple_seq_add_seq (&new_stmt, seq);
4ee9c684 1273
75a70cf9 1274 tmp = lower_try_finally_fallthru_label (tf);
1275 x = gimple_build_goto (tmp);
ed4d69dc 1276 gimple_set_location (x, tf_loc);
75a70cf9 1277 gimple_seq_add_stmt (&new_stmt, x);
4ee9c684 1278 }
1279
1280 if (tf->may_throw)
1281 {
4c0315d0 1282 /* We don't need to copy the EH path of EH_ELSE,
1283 since it is only emitted once. */
1284 if (eh_else)
1285 seq = gimple_eh_else_e_body (eh_else);
1286 else
d7ebacec 1287 seq = lower_try_finally_dup_block (finally, state, tf_loc);
e3a19533 1288 lower_eh_constructs_1 (state, &seq);
4ee9c684 1289
2fabdfdf 1290 emit_post_landing_pad (&eh_seq, tf->region);
1291 gimple_seq_add_seq (&eh_seq, seq);
e38def9c 1292 emit_resx (&eh_seq, tf->region);
4ee9c684 1293 }
1294
1295 if (tf->goto_queue)
1296 {
1297 struct goto_queue_node *q, *qe;
22347b24 1298 int return_index, index;
680a19b9 1299 struct labels_s
22347b24 1300 {
1301 struct goto_queue_node *q;
1302 tree label;
1303 } *labels;
4ee9c684 1304
f1f41a6c 1305 return_index = tf->dest_array.length ();
680a19b9 1306 labels = XCNEWVEC (struct labels_s, return_index + 1);
4ee9c684 1307
1308 q = tf->goto_queue;
1309 qe = q + tf->goto_queue_active;
1310 for (; q < qe; q++)
1311 {
22347b24 1312 index = q->index < 0 ? return_index : q->index;
1313
1314 if (!labels[index].q)
1315 labels[index].q = q;
1316 }
1317
1318 for (index = 0; index < return_index + 1; index++)
1319 {
1320 tree lab;
1321
1322 q = labels[index].q;
1323 if (! q)
1324 continue;
1325
e60a6f7b 1326 lab = labels[index].label
1327 = create_artificial_label (tf_loc);
4ee9c684 1328
1329 if (index == return_index)
9a14ac4f 1330 do_return_redirection (q, lab, NULL);
4ee9c684 1331 else
75a70cf9 1332 do_goto_redirection (q, lab, NULL, tf);
4ee9c684 1333
75a70cf9 1334 x = gimple_build_label (lab);
1335 gimple_seq_add_stmt (&new_stmt, x);
4ee9c684 1336
d7ebacec 1337 seq = lower_try_finally_dup_block (finally, state, q->location);
e3a19533 1338 lower_eh_constructs_1 (state, &seq);
75a70cf9 1339 gimple_seq_add_seq (&new_stmt, seq);
4ee9c684 1340
75a70cf9 1341 gimple_seq_add_stmt (&new_stmt, q->cont_stmt);
22347b24 1342 maybe_record_in_goto_queue (state, q->cont_stmt);
4ee9c684 1343 }
22347b24 1344
1345 for (q = tf->goto_queue; q < qe; q++)
1346 {
1347 tree lab;
1348
1349 index = q->index < 0 ? return_index : q->index;
1350
1351 if (labels[index].q == q)
1352 continue;
1353
1354 lab = labels[index].label;
1355
1356 if (index == return_index)
9a14ac4f 1357 do_return_redirection (q, lab, NULL);
22347b24 1358 else
75a70cf9 1359 do_goto_redirection (q, lab, NULL, tf);
22347b24 1360 }
e38def9c 1361
4ee9c684 1362 replace_goto_queue (tf);
1363 free (labels);
1364 }
1365
1366 /* Need to link new stmts after running replace_goto_queue due
1367 to not wanting to process the same goto stmts twice. */
75a70cf9 1368 gimple_seq_add_seq (&tf->top_p_seq, new_stmt);
4ee9c684 1369}
1370
1371/* A subroutine of lower_try_finally. There are multiple edges incoming
1372 and outgoing from the finally block. Implement this by instrumenting
1373 each incoming edge and creating a switch statement at the end of the
1374 finally block that branches to the appropriate destination. */
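/* An illustrative sketch (not from the original sources) of the shape this
   produces when a finally block is reached both by falling out of the try
   body and by a return inside it (the exact index values depend on how
   many destinations there are):

       <try body; each escaping return becomes:
          finally_tmp = 0; goto finally_label;>
       finally_tmp = 1;
     finally_label:
       <finally body>
       switch (finally_tmp)
         {
         case 0: return;
         default: goto fallthru_label;
         }
     fallthru_label:;  */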
1375
1376static void
1377lower_try_finally_switch (struct leh_state *state, struct leh_tf_state *tf)
1378{
1379 struct goto_queue_node *q, *qe;
75a70cf9 1380 tree finally_tmp, finally_label;
4ee9c684 1381 int return_index, eh_index, fallthru_index;
1382 int nlabels, ndests, j, last_case_index;
75a70cf9 1383 tree last_case;
f1f41a6c 1384 vec<tree> case_label_vec;
e3a19533 1385 gimple_seq switch_body = NULL;
1a91d914 1386 gimple x;
1387 geh_else *eh_else;
75a70cf9 1388 tree tmp;
1389 gimple switch_stmt;
1390 gimple_seq finally;
06ecf488 1391 hash_map<tree, gimple> *cont_map = NULL;
e60a6f7b 1392 /* The location of the TRY_FINALLY stmt. */
0b35068b 1393 location_t tf_loc = gimple_location (tf->try_finally_expr);
e60a6f7b 1394 /* The location of the finally block. */
1395 location_t finally_loc;
75a70cf9 1396
4c0315d0 1397 finally = gimple_try_cleanup (tf->top_p);
1398 eh_else = get_eh_else (finally);
4ee9c684 1399
1400 /* Mash the TRY block to the head of the chain. */
75a70cf9 1401 tf->top_p_seq = gimple_try_eval (tf->top_p);
4ee9c684 1402
e60a6f7b 1403 /* The location of the finally is either the last stmt in the finally
1404 block or the location of the TRY_FINALLY itself. */
a6217c59 1405 x = gimple_seq_last_stmt (finally);
1406 finally_loc = x ? gimple_location (x) : tf_loc;
e60a6f7b 1407
4ee9c684 1408 /* Prepare for switch statement generation. */
f1f41a6c 1409 nlabels = tf->dest_array.length ();
4ee9c684 1410 return_index = nlabels;
1411 eh_index = return_index + tf->may_return;
4c0315d0 1412 fallthru_index = eh_index + (tf->may_throw && !eh_else);
4ee9c684 1413 ndests = fallthru_index + tf->may_fallthru;
1414
1415 finally_tmp = create_tmp_var (integer_type_node, "finally_tmp");
e60a6f7b 1416 finally_label = create_artificial_label (finally_loc);
4ee9c684 1417
f1f41a6c 1418 /* We use vec::quick_push on case_label_vec throughout this function,
75a70cf9 1419 since we know the size in advance and allocate precisely as much
1420 space as needed. */
f1f41a6c 1421 case_label_vec.create (ndests);
4ee9c684 1422 last_case = NULL;
1423 last_case_index = 0;
1424
1425 /* Begin inserting code for getting to the finally block. Things
 1426 are done in this order to correspond to the sequence in which the code
9d75589a 1427 is laid out. */
4ee9c684 1428
1429 if (tf->may_fallthru)
1430 {
e38def9c 1431 x = gimple_build_assign (finally_tmp,
bad12c62 1432 build_int_cst (integer_type_node,
1433 fallthru_index));
75a70cf9 1434 gimple_seq_add_stmt (&tf->top_p_seq, x);
4ee9c684 1435
b6e3dd65 1436 tmp = build_int_cst (integer_type_node, fallthru_index);
1437 last_case = build_case_label (tmp, NULL,
1438 create_artificial_label (tf_loc));
f1f41a6c 1439 case_label_vec.quick_push (last_case);
4ee9c684 1440 last_case_index++;
1441
75a70cf9 1442 x = gimple_build_label (CASE_LABEL (last_case));
1443 gimple_seq_add_stmt (&switch_body, x);
4ee9c684 1444
75a70cf9 1445 tmp = lower_try_finally_fallthru_label (tf);
1446 x = gimple_build_goto (tmp);
ed4d69dc 1447 gimple_set_location (x, tf_loc);
75a70cf9 1448 gimple_seq_add_stmt (&switch_body, x);
4ee9c684 1449 }
1450
4c0315d0 1451 /* For EH_ELSE, emit the exception path (plus resx) now, then
 1452 subsequently we only need to consider the normal path. */
1453 if (eh_else)
1454 {
1455 if (tf->may_throw)
1456 {
1457 finally = gimple_eh_else_e_body (eh_else);
e3a19533 1458 lower_eh_constructs_1 (state, &finally);
4c0315d0 1459
1460 emit_post_landing_pad (&eh_seq, tf->region);
1461 gimple_seq_add_seq (&eh_seq, finally);
1462 emit_resx (&eh_seq, tf->region);
1463 }
1464
1465 finally = gimple_eh_else_n_body (eh_else);
1466 }
1467 else if (tf->may_throw)
4ee9c684 1468 {
e38def9c 1469 emit_post_landing_pad (&eh_seq, tf->region);
4ee9c684 1470
e38def9c 1471 x = gimple_build_assign (finally_tmp,
bad12c62 1472 build_int_cst (integer_type_node, eh_index));
e38def9c 1473 gimple_seq_add_stmt (&eh_seq, x);
1474
1475 x = gimple_build_goto (finally_label);
ed4d69dc 1476 gimple_set_location (x, tf_loc);
e38def9c 1477 gimple_seq_add_stmt (&eh_seq, x);
4ee9c684 1478
b6e3dd65 1479 tmp = build_int_cst (integer_type_node, eh_index);
1480 last_case = build_case_label (tmp, NULL,
1481 create_artificial_label (tf_loc));
f1f41a6c 1482 case_label_vec.quick_push (last_case);
4ee9c684 1483 last_case_index++;
1484
75a70cf9 1485 x = gimple_build_label (CASE_LABEL (last_case));
e38def9c 1486 gimple_seq_add_stmt (&eh_seq, x);
1487 emit_resx (&eh_seq, tf->region);
4ee9c684 1488 }
1489
75a70cf9 1490 x = gimple_build_label (finally_label);
1491 gimple_seq_add_stmt (&tf->top_p_seq, x);
4ee9c684 1492
9ae6e329 1493 lower_eh_constructs_1 (state, &finally);
75a70cf9 1494 gimple_seq_add_seq (&tf->top_p_seq, finally);
4ee9c684 1495
1496 /* Redirect each incoming goto edge. */
1497 q = tf->goto_queue;
1498 qe = q + tf->goto_queue_active;
1499 j = last_case_index + tf->may_return;
75a70cf9 1500 /* Prepare the assignments to finally_tmp that are executed upon the
1501 entrance through a particular edge. */
4ee9c684 1502 for (; q < qe; ++q)
1503 {
e3a19533 1504 gimple_seq mod = NULL;
75a70cf9 1505 int switch_id;
1506 unsigned int case_index;
1507
4ee9c684 1508 if (q->index < 0)
1509 {
75a70cf9 1510 x = gimple_build_assign (finally_tmp,
bad12c62 1511 build_int_cst (integer_type_node,
1512 return_index));
75a70cf9 1513 gimple_seq_add_stmt (&mod, x);
9a14ac4f 1514 do_return_redirection (q, finally_label, mod);
4ee9c684 1515 switch_id = return_index;
1516 }
1517 else
1518 {
75a70cf9 1519 x = gimple_build_assign (finally_tmp,
bad12c62 1520 build_int_cst (integer_type_node, q->index));
75a70cf9 1521 gimple_seq_add_stmt (&mod, x);
1522 do_goto_redirection (q, finally_label, mod, tf);
4ee9c684 1523 switch_id = q->index;
1524 }
1525
1526 case_index = j + q->index;
f1f41a6c 1527 if (case_label_vec.length () <= case_index || !case_label_vec[case_index])
75a70cf9 1528 {
1529 tree case_lab;
b6e3dd65 1530 tmp = build_int_cst (integer_type_node, switch_id);
1531 case_lab = build_case_label (tmp, NULL,
1532 create_artificial_label (tf_loc));
75a70cf9 1533 /* We store the cont_stmt in the pointer map, so that we can recover
75a2cdc8 1534 it in the loop below. */
75a70cf9 1535 if (!cont_map)
06ecf488 1536 cont_map = new hash_map<tree, gimple>;
1537 cont_map->put (case_lab, q->cont_stmt);
f1f41a6c 1538 case_label_vec.quick_push (case_lab);
75a70cf9 1539 }
22347b24 1540 }
1541 for (j = last_case_index; j < last_case_index + nlabels; j++)
1542 {
75a70cf9 1543 gimple cont_stmt;
22347b24 1544
f1f41a6c 1545 last_case = case_label_vec[j];
22347b24 1546
1547 gcc_assert (last_case);
75a70cf9 1548 gcc_assert (cont_map);
22347b24 1549
06ecf488 1550 cont_stmt = *cont_map->get (last_case);
22347b24 1551
75a2cdc8 1552 x = gimple_build_label (CASE_LABEL (last_case));
75a70cf9 1553 gimple_seq_add_stmt (&switch_body, x);
1554 gimple_seq_add_stmt (&switch_body, cont_stmt);
22347b24 1555 maybe_record_in_goto_queue (state, cont_stmt);
4ee9c684 1556 }
75a70cf9 1557 if (cont_map)
06ecf488 1558 delete cont_map;
75a70cf9 1559
4ee9c684 1560 replace_goto_queue (tf);
4ee9c684 1561
da41aa8e 1562 /* Make sure that the last case is the default label, as one is required.
1563 Then sort the labels, which is also required in GIMPLE. */
4ee9c684 1564 CASE_LOW (last_case) = NULL;
71b65939 1565 tree tem = case_label_vec.pop ();
1566 gcc_assert (tem == last_case);
da41aa8e 1567 sort_case_labels (case_label_vec);
4ee9c684 1568
75a70cf9 1569 /* Build the switch statement, setting last_case to be the default
1570 label. */
49a70175 1571 switch_stmt = gimple_build_switch (finally_tmp, last_case,
1572 case_label_vec);
e60a6f7b 1573 gimple_set_location (switch_stmt, finally_loc);
75a70cf9 1574
1575 /* Need to link SWITCH_STMT after running replace_goto_queue
1576 due to not wanting to process the same goto stmts twice. */
1577 gimple_seq_add_stmt (&tf->top_p_seq, switch_stmt);
1578 gimple_seq_add_seq (&tf->top_p_seq, switch_body);
4ee9c684 1579}
1580
1581/* Decide whether or not we are going to duplicate the finally block.
1582 There are several considerations.
1583
1584 First, if this is Java, then the finally block contains code
1585 written by the user. It has line numbers associated with it,
1586 so duplicating the block means it's difficult to set a breakpoint.
1587 Since controlling code generation via -g is verboten, we simply
1588 never duplicate code without optimization.
1589
1590 Second, we'd like to prevent egregious code growth. One way to
1591 do this is to estimate the size of the finally block, multiply
1592 that by the number of copies we'd need to make, and compare against
1593 the estimate of the size of the switch machinery we'd have to add. */
1594
1595static bool
4c0315d0 1596decide_copy_try_finally (int ndests, bool may_throw, gimple_seq finally)
4ee9c684 1597{
1598 int f_estimate, sw_estimate;
1a91d914 1599 geh_else *eh_else;
4c0315d0 1600
1601 /* If there's an EH_ELSE involved, the exception path is separate
1602 and really doesn't come into play for this computation. */
1603 eh_else = get_eh_else (finally);
1604 if (eh_else)
1605 {
1606 ndests -= may_throw;
1607 finally = gimple_eh_else_n_body (eh_else);
1608 }
4ee9c684 1609
1610 if (!optimize)
83480f35 1611 {
1612 gimple_stmt_iterator gsi;
1613
1614 if (ndests == 1)
1615 return true;
1616
1617 for (gsi = gsi_start (finally); !gsi_end_p (gsi); gsi_next (&gsi))
1618 {
1619 gimple stmt = gsi_stmt (gsi);
1620 if (!is_gimple_debug (stmt) && !gimple_clobber_p (stmt))
1621 return false;
1622 }
1623 return true;
1624 }
4ee9c684 1625
1626 /* Finally estimate N times, plus N gotos. */
75a70cf9 1627 f_estimate = count_insns_seq (finally, &eni_size_weights);
4ee9c684 1628 f_estimate = (f_estimate + 1) * ndests;
1629
1630 /* Switch statement (cost 10), N variable assignments, N gotos. */
1631 sw_estimate = 10 + 2 * ndests;
1632
1633 /* Optimize for size clearly wants our best guess. */
0bfd8d5c 1634 if (optimize_function_for_size_p (cfun))
4ee9c684 1635 return f_estimate < sw_estimate;
1636
1637 /* ??? These numbers are completely made up so far. */
1638 if (optimize > 1)
72c90b15 1639 return f_estimate < 100 || f_estimate < sw_estimate * 2;
4ee9c684 1640 else
72c90b15 1641 return f_estimate < 40 || f_estimate * 2 < sw_estimate * 3;
4ee9c684 1642}
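/* An illustrative calculation (numbers invented, not from the original
   sources): a finally block counted at roughly 15 instructions with 3
   destinations gives f_estimate = (15 + 1) * 3 = 48 and
   sw_estimate = 10 + 2 * 3 = 16.  Optimizing for size picks the switch
   (48 >= 16), while at -O2 the block is still copied, since 48 < 100.  */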
1643
f340b9ff 1644/* REG is the enclosing region for a possible cleanup region, or the region
1645 itself. Returns TRUE if such a region would be unreachable.
1646
1647 Cleanup regions within a must-not-throw region aren't actually reachable
1648 even if there are throwing stmts within them, because the personality
1649 routine will call terminate before unwinding. */
1650
1651static bool
1652cleanup_is_dead_in (eh_region reg)
1653{
1654 while (reg && reg->type == ERT_CLEANUP)
1655 reg = reg->outer;
1656 return (reg && reg->type == ERT_MUST_NOT_THROW);
1657}
75a70cf9 1658
 1659/* A subroutine of lower_eh_constructs_1. Lower a GIMPLE_TRY_FINALLY node
4ee9c684 1660 to a sequence of labels and blocks, plus the exception region trees
ac13e8d9 1661 that record all the magic. This is complicated by the need to
4ee9c684 1662 arrange for the FINALLY block to be executed on all exits. */
1663
75a70cf9 1664static gimple_seq
1a91d914 1665lower_try_finally (struct leh_state *state, gtry *tp)
4ee9c684 1666{
1667 struct leh_tf_state this_tf;
1668 struct leh_state this_state;
1669 int ndests;
fa5d8988 1670 gimple_seq old_eh_seq;
4ee9c684 1671
1672 /* Process the try block. */
1673
1674 memset (&this_tf, 0, sizeof (this_tf));
75a70cf9 1675 this_tf.try_finally_expr = tp;
4ee9c684 1676 this_tf.top_p = tp;
1677 this_tf.outer = state;
4e57e76d 1678 if (using_eh_for_cleanups_p () && !cleanup_is_dead_in (state->cur_region))
f340b9ff 1679 {
1680 this_tf.region = gen_eh_region_cleanup (state->cur_region);
1681 this_state.cur_region = this_tf.region;
1682 }
4ee9c684 1683 else
f340b9ff 1684 {
1685 this_tf.region = NULL;
1686 this_state.cur_region = state->cur_region;
1687 }
4ee9c684 1688
e38def9c 1689 this_state.ehp_region = state->ehp_region;
4ee9c684 1690 this_state.tf = &this_tf;
1691
fa5d8988 1692 old_eh_seq = eh_seq;
1693 eh_seq = NULL;
1694
e3a19533 1695 lower_eh_constructs_1 (&this_state, gimple_try_eval_ptr (tp));
4ee9c684 1696
1697 /* Determine if the try block is escaped through the bottom. */
75a70cf9 1698 this_tf.may_fallthru = gimple_seq_may_fallthru (gimple_try_eval (tp));
4ee9c684 1699
1700 /* Determine if any exceptions are possible within the try block. */
f340b9ff 1701 if (this_tf.region)
55d6d4e4 1702 this_tf.may_throw = eh_region_may_contain_throw (this_tf.region);
4ee9c684 1703 if (this_tf.may_throw)
e38def9c 1704 honor_protect_cleanup_actions (state, &this_state, &this_tf);
4ee9c684 1705
4ee9c684 1706 /* Determine how many edges (still) reach the finally block. Or rather,
1707 how many destinations are reached by the finally block. Use this to
1708 determine how we process the finally block itself. */
1709
f1f41a6c 1710 ndests = this_tf.dest_array.length ();
4ee9c684 1711 ndests += this_tf.may_fallthru;
1712 ndests += this_tf.may_return;
1713 ndests += this_tf.may_throw;
1714
1715 /* If the FINALLY block is not reachable, dike it out. */
1716 if (ndests == 0)
75a70cf9 1717 {
1718 gimple_seq_add_seq (&this_tf.top_p_seq, gimple_try_eval (tp));
1719 gimple_try_set_cleanup (tp, NULL);
1720 }
4ee9c684 1721 /* If the finally block doesn't fall through, then any destination
1722 we might try to impose there isn't reached either. There may be
1723 some minor amount of cleanup and redirection still needed. */
75a70cf9 1724 else if (!gimple_seq_may_fallthru (gimple_try_cleanup (tp)))
4ee9c684 1725 lower_try_finally_nofallthru (state, &this_tf);
1726
1727 /* We can easily special-case redirection to a single destination. */
1728 else if (ndests == 1)
1729 lower_try_finally_onedest (state, &this_tf);
4c0315d0 1730 else if (decide_copy_try_finally (ndests, this_tf.may_throw,
1731 gimple_try_cleanup (tp)))
4ee9c684 1732 lower_try_finally_copy (state, &this_tf);
1733 else
1734 lower_try_finally_switch (state, &this_tf);
1735
1736 /* If someone requested we add a label at the end of the transformed
1737 block, do so. */
1738 if (this_tf.fallthru_label)
1739 {
75a70cf9 1740 /* This must be reached only if ndests == 0. */
1741 gimple x = gimple_build_label (this_tf.fallthru_label);
1742 gimple_seq_add_stmt (&this_tf.top_p_seq, x);
4ee9c684 1743 }
1744
f1f41a6c 1745 this_tf.dest_array.release ();
dd045aee 1746 free (this_tf.goto_queue);
46699809 1747 if (this_tf.goto_queue_map)
06ecf488 1748 delete this_tf.goto_queue_map;
75a70cf9 1749
fa5d8988 1750 /* If there was an old (aka outer) eh_seq, append the current eh_seq.
1751 If there was no old eh_seq, then the append is trivially already done. */
1752 if (old_eh_seq)
1753 {
1754 if (eh_seq == NULL)
1755 eh_seq = old_eh_seq;
1756 else
1757 {
1758 gimple_seq new_eh_seq = eh_seq;
1759 eh_seq = old_eh_seq;
9af5ce0c 1760 gimple_seq_add_seq (&eh_seq, new_eh_seq);
fa5d8988 1761 }
1762 }
1763
75a70cf9 1764 return this_tf.top_p_seq;
4ee9c684 1765}
1766
75a70cf9 1767/* A subroutine of lower_eh_constructs_1. Lower a GIMPLE_TRY_CATCH with a
1768 list of GIMPLE_CATCH to a sequence of labels and blocks, plus the
 1769 exception region trees that record all the magic. */
4ee9c684 1770
75a70cf9 1771static gimple_seq
1a91d914 1772lower_catch (struct leh_state *state, gtry *tp)
4ee9c684 1773{
55d6d4e4 1774 eh_region try_region = NULL;
1775 struct leh_state this_state = *state;
75a70cf9 1776 gimple_stmt_iterator gsi;
4ee9c684 1777 tree out_label;
e3a19533 1778 gimple_seq new_seq, cleanup;
e38def9c 1779 gimple x;
e60a6f7b 1780 location_t try_catch_loc = gimple_location (tp);
4ee9c684 1781
55d6d4e4 1782 if (flag_exceptions)
1783 {
1784 try_region = gen_eh_region_try (state->cur_region);
1785 this_state.cur_region = try_region;
1786 }
4ee9c684 1787
e3a19533 1788 lower_eh_constructs_1 (&this_state, gimple_try_eval_ptr (tp));
4ee9c684 1789
55d6d4e4 1790 if (!eh_region_may_contain_throw (try_region))
e38def9c 1791 return gimple_try_eval (tp);
1792
1793 new_seq = NULL;
1794 emit_eh_dispatch (&new_seq, try_region);
1795 emit_resx (&new_seq, try_region);
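  /* The eh_dispatch will later be given edges to each handler label; if no
     handler type matches (and there is no catch-all), control falls through
     to the resx, which rethrows to the outer region.  */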
1796
1797 this_state.cur_region = state->cur_region;
1798 this_state.ehp_region = try_region;
4ee9c684 1799
4422041b 1800 /* Add eh_seq from lowering EH in the cleanup sequence after the cleanup
1801 itself, so that e.g. for coverage purposes the nested cleanups don't
1802 appear before the cleanup body. See PR64634 for details. */
1803 gimple_seq old_eh_seq = eh_seq;
1804 eh_seq = NULL;
1805
4ee9c684 1806 out_label = NULL;
e3a19533 1807 cleanup = gimple_try_cleanup (tp);
1808 for (gsi = gsi_start (cleanup);
e38def9c 1809 !gsi_end_p (gsi);
1810 gsi_next (&gsi))
4ee9c684 1811 {
e38def9c 1812 eh_catch c;
1a91d914 1813 gcatch *catch_stmt;
e38def9c 1814 gimple_seq handler;
4ee9c684 1815
1a91d914 1816 catch_stmt = as_a <gcatch *> (gsi_stmt (gsi));
1817 c = gen_eh_region_catch (try_region, gimple_catch_types (catch_stmt));
4ee9c684 1818
1a91d914 1819 handler = gimple_catch_handler (catch_stmt);
e3a19533 1820 lower_eh_constructs_1 (&this_state, &handler);
4ee9c684 1821
e38def9c 1822 c->label = create_artificial_label (UNKNOWN_LOCATION);
1823 x = gimple_build_label (c->label);
1824 gimple_seq_add_stmt (&new_seq, x);
4ee9c684 1825
e38def9c 1826 gimple_seq_add_seq (&new_seq, handler);
4ee9c684 1827
e38def9c 1828 if (gimple_seq_may_fallthru (new_seq))
4ee9c684 1829 {
1830 if (!out_label)
e60a6f7b 1831 out_label = create_artificial_label (try_catch_loc);
4ee9c684 1832
75a70cf9 1833 x = gimple_build_goto (out_label);
e38def9c 1834 gimple_seq_add_stmt (&new_seq, x);
4ee9c684 1835 }
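      /* A NULL type_list marks a catch-all handler; any handlers following
         it would be unreachable, so stop here.  */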
3ded67b5 1836 if (!c->type_list)
1837 break;
4ee9c684 1838 }
1839
e38def9c 1840 gimple_try_set_cleanup (tp, new_seq);
1841
4422041b 1842 gimple_seq new_eh_seq = eh_seq;
1843 eh_seq = old_eh_seq;
1844 gimple_seq ret_seq = frob_into_branch_around (tp, try_region, out_label);
1845 gimple_seq_add_seq (&eh_seq, new_eh_seq);
1846 return ret_seq;
4ee9c684 1847}
1848
75a70cf9 1849/* A subroutine of lower_eh_constructs_1. Lower a GIMPLE_TRY with a
1850 GIMPLE_EH_FILTER to a sequence of labels and blocks, plus the exception
4ee9c684 1851 region trees that record all the magic. */
1852
75a70cf9 1853static gimple_seq
1a91d914 1854lower_eh_filter (struct leh_state *state, gtry *tp)
4ee9c684 1855{
55d6d4e4 1856 struct leh_state this_state = *state;
1857 eh_region this_region = NULL;
e38def9c 1858 gimple inner, x;
1859 gimple_seq new_seq;
ac13e8d9 1860
75a70cf9 1861 inner = gimple_seq_first_stmt (gimple_try_cleanup (tp));
1862
55d6d4e4 1863 if (flag_exceptions)
1864 {
1865 this_region = gen_eh_region_allowed (state->cur_region,
1866 gimple_eh_filter_types (inner));
1867 this_state.cur_region = this_region;
1868 }
ac13e8d9 1869
e3a19533 1870 lower_eh_constructs_1 (&this_state, gimple_try_eval_ptr (tp));
4ee9c684 1871
55d6d4e4 1872 if (!eh_region_may_contain_throw (this_region))
e38def9c 1873 return gimple_try_eval (tp);
1874
1875 new_seq = NULL;
1876 this_state.cur_region = state->cur_region;
1877 this_state.ehp_region = this_region;
1878
1879 emit_eh_dispatch (&new_seq, this_region);
1880 emit_resx (&new_seq, this_region);
1881
1882 this_region->u.allowed.label = create_artificial_label (UNKNOWN_LOCATION);
1883 x = gimple_build_label (this_region->u.allowed.label);
1884 gimple_seq_add_stmt (&new_seq, x);
1885
e3a19533 1886 lower_eh_constructs_1 (&this_state, gimple_eh_filter_failure_ptr (inner));
e38def9c 1887 gimple_seq_add_seq (&new_seq, gimple_eh_filter_failure (inner));
1888
1889 gimple_try_set_cleanup (tp, new_seq);
4ee9c684 1890
e38def9c 1891 return frob_into_branch_around (tp, this_region, NULL);
1892}
1893
1894/* A subroutine of lower_eh_constructs_1. Lower a GIMPLE_TRY with
 1895 a GIMPLE_EH_MUST_NOT_THROW to a sequence of labels and blocks,
1896 plus the exception region trees that record all the magic. */
1897
1898static gimple_seq
1a91d914 1899lower_eh_must_not_throw (struct leh_state *state, gtry *tp)
e38def9c 1900{
55d6d4e4 1901 struct leh_state this_state = *state;
e38def9c 1902
55d6d4e4 1903 if (flag_exceptions)
1904 {
1905 gimple inner = gimple_seq_first_stmt (gimple_try_cleanup (tp));
1906 eh_region this_region;
e38def9c 1907
55d6d4e4 1908 this_region = gen_eh_region_must_not_throw (state->cur_region);
1909 this_region->u.must_not_throw.failure_decl
1a91d914 1910 = gimple_eh_must_not_throw_fndecl (
1911 as_a <geh_mnt *> (inner));
0565e0b5 1912 this_region->u.must_not_throw.failure_loc
1913 = LOCATION_LOCUS (gimple_location (tp));
e38def9c 1914
55d6d4e4 1915 /* In order to get mangling applied to this decl, we must mark it
1916 used now. Otherwise, pass_ipa_free_lang_data won't think it
1917 needs to happen. */
1918 TREE_USED (this_region->u.must_not_throw.failure_decl) = 1;
e38def9c 1919
55d6d4e4 1920 this_state.cur_region = this_region;
1921 }
4ee9c684 1922
e3a19533 1923 lower_eh_constructs_1 (&this_state, gimple_try_eval_ptr (tp));
4ee9c684 1924
e38def9c 1925 return gimple_try_eval (tp);
4ee9c684 1926}
1927
1928/* Implement a cleanup expression. This is similar to try-finally,
1929 except that we only execute the cleanup block for exception edges. */
1930
75a70cf9 1931static gimple_seq
1a91d914 1932lower_cleanup (struct leh_state *state, gtry *tp)
4ee9c684 1933{
55d6d4e4 1934 struct leh_state this_state = *state;
1935 eh_region this_region = NULL;
4ee9c684 1936 struct leh_tf_state fake_tf;
75a70cf9 1937 gimple_seq result;
f340b9ff 1938 bool cleanup_dead = cleanup_is_dead_in (state->cur_region);
4ee9c684 1939
f340b9ff 1940 if (flag_exceptions && !cleanup_dead)
4ee9c684 1941 {
55d6d4e4 1942 this_region = gen_eh_region_cleanup (state->cur_region);
1943 this_state.cur_region = this_region;
4ee9c684 1944 }
1945
e3a19533 1946 lower_eh_constructs_1 (&this_state, gimple_try_eval_ptr (tp));
4ee9c684 1947
f340b9ff 1948 if (cleanup_dead || !eh_region_may_contain_throw (this_region))
e38def9c 1949 return gimple_try_eval (tp);
4ee9c684 1950
1951 /* Build enough of a try-finally state so that we can reuse
1952 honor_protect_cleanup_actions. */
1953 memset (&fake_tf, 0, sizeof (fake_tf));
e60a6f7b 1954 fake_tf.top_p = fake_tf.try_finally_expr = tp;
4ee9c684 1955 fake_tf.outer = state;
1956 fake_tf.region = this_region;
75a70cf9 1957 fake_tf.may_fallthru = gimple_seq_may_fallthru (gimple_try_eval (tp));
4ee9c684 1958 fake_tf.may_throw = true;
1959
4ee9c684 1960 honor_protect_cleanup_actions (state, NULL, &fake_tf);
1961
1962 if (fake_tf.may_throw)
1963 {
1964 /* In this case honor_protect_cleanup_actions had nothing to do,
1965 and we should process this normally. */
e3a19533 1966 lower_eh_constructs_1 (state, gimple_try_cleanup_ptr (tp));
e38def9c 1967 result = frob_into_branch_around (tp, this_region,
1968 fake_tf.fallthru_label);
4ee9c684 1969 }
1970 else
1971 {
1972 /* In this case honor_protect_cleanup_actions did nearly all of
1973 the work. All we have left is to append the fallthru_label. */
1974
75a70cf9 1975 result = gimple_try_eval (tp);
4ee9c684 1976 if (fake_tf.fallthru_label)
1977 {
75a70cf9 1978 gimple x = gimple_build_label (fake_tf.fallthru_label);
1979 gimple_seq_add_stmt (&result, x);
4ee9c684 1980 }
1981 }
75a70cf9 1982 return result;
4ee9c684 1983}
1984
e38def9c 1985/* Main loop for lowering eh constructs. Also moves gsi to the next
75a70cf9 1986 statement. */
4ee9c684 1987
1988static void
75a70cf9 1989lower_eh_constructs_2 (struct leh_state *state, gimple_stmt_iterator *gsi)
4ee9c684 1990{
75a70cf9 1991 gimple_seq replace;
1992 gimple x;
1993 gimple stmt = gsi_stmt (*gsi);
4ee9c684 1994
75a70cf9 1995 switch (gimple_code (stmt))
4ee9c684 1996 {
75a70cf9 1997 case GIMPLE_CALL:
e38def9c 1998 {
1999 tree fndecl = gimple_call_fndecl (stmt);
2000 tree rhs, lhs;
2001
2002 if (fndecl && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
2003 switch (DECL_FUNCTION_CODE (fndecl))
2004 {
2005 case BUILT_IN_EH_POINTER:
2006 /* The front end may have generated a call to
2007 __builtin_eh_pointer (0) within a catch region. Replace
2008 this zero argument with the current catch region number. */
2009 if (state->ehp_region)
2010 {
bad12c62 2011 tree nr = build_int_cst (integer_type_node,
2012 state->ehp_region->index);
e38def9c 2013 gimple_call_set_arg (stmt, 0, nr);
2014 }
2015 else
2016 {
 2017 /* The user has done something silly. Remove it. */
2512209b 2018 rhs = null_pointer_node;
e38def9c 2019 goto do_replace;
2020 }
2021 break;
2022
2023 case BUILT_IN_EH_FILTER:
2024 /* ??? This should never appear, but since it's a builtin it
2025 is accessible to abuse by users. Just remove it and
2026 replace the use with the arbitrary value zero. */
2027 rhs = build_int_cst (TREE_TYPE (TREE_TYPE (fndecl)), 0);
2028 do_replace:
2029 lhs = gimple_call_lhs (stmt);
2030 x = gimple_build_assign (lhs, rhs);
2031 gsi_insert_before (gsi, x, GSI_SAME_STMT);
2032 /* FALLTHRU */
2033
2034 case BUILT_IN_EH_COPY_VALUES:
2035 /* Likewise this should not appear. Remove it. */
2036 gsi_remove (gsi, true);
2037 return;
2038
2039 default:
2040 break;
2041 }
2042 }
2043 /* FALLTHRU */
2044
75a70cf9 2045 case GIMPLE_ASSIGN:
47f11e84 2046 /* If the stmt can throw use a new temporary for the assignment
2047 to a LHS. This makes sure the old value of the LHS is
fa916956 2048 available on the EH edge. Only do so for statements that
9d75589a 2049 potentially fall through (no noreturn calls e.g.), otherwise
fa916956 2050 this new assignment might create fake fallthru regions. */
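      /* E.g. "lhs = foo ();" where foo may throw becomes
         "tmp = foo (); lhs = tmp;" so that the old value of lhs is still
         available on the EH edge (tmp being the temporary created below).  */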
47f11e84 2051 if (stmt_could_throw_p (stmt)
2052 && gimple_has_lhs (stmt)
fa916956 2053 && gimple_stmt_may_fallthru (stmt)
47f11e84 2054 && !tree_could_throw_p (gimple_get_lhs (stmt))
2055 && is_gimple_reg_type (TREE_TYPE (gimple_get_lhs (stmt))))
2056 {
2057 tree lhs = gimple_get_lhs (stmt);
f9e245b2 2058 tree tmp = create_tmp_var (TREE_TYPE (lhs));
47f11e84 2059 gimple s = gimple_build_assign (lhs, tmp);
2060 gimple_set_location (s, gimple_location (stmt));
2061 gimple_set_block (s, gimple_block (stmt));
2062 gimple_set_lhs (stmt, tmp);
2063 if (TREE_CODE (TREE_TYPE (tmp)) == COMPLEX_TYPE
2064 || TREE_CODE (TREE_TYPE (tmp)) == VECTOR_TYPE)
2065 DECL_GIMPLE_REG_P (tmp) = 1;
2066 gsi_insert_after (gsi, s, GSI_SAME_STMT);
2067 }
4ee9c684 2068 /* Look for things that can throw exceptions, and record them. */
75a70cf9 2069 if (state->cur_region && stmt_could_throw_p (stmt))
4ee9c684 2070 {
75a70cf9 2071 record_stmt_eh_region (state->cur_region, stmt);
4ee9c684 2072 note_eh_region_may_contain_throw (state->cur_region);
4ee9c684 2073 }
2074 break;
2075
75a70cf9 2076 case GIMPLE_COND:
2077 case GIMPLE_GOTO:
2078 case GIMPLE_RETURN:
2079 maybe_record_in_goto_queue (state, stmt);
4ee9c684 2080 break;
2081
75a70cf9 2082 case GIMPLE_SWITCH:
1a91d914 2083 verify_norecord_switch_expr (state, as_a <gswitch *> (stmt));
4ee9c684 2084 break;
2085
75a70cf9 2086 case GIMPLE_TRY:
1a91d914 2087 {
2088 gtry *try_stmt = as_a <gtry *> (stmt);
2089 if (gimple_try_kind (try_stmt) == GIMPLE_TRY_FINALLY)
2090 replace = lower_try_finally (state, try_stmt);
2091 else
2092 {
2093 x = gimple_seq_first_stmt (gimple_try_cleanup (try_stmt));
2094 if (!x)
c90b5d40 2095 {
1a91d914 2096 replace = gimple_try_eval (try_stmt);
2097 lower_eh_constructs_1 (state, &replace);
2098 }
2099 else
2100 switch (gimple_code (x))
2101 {
c90b5d40 2102 case GIMPLE_CATCH:
1a91d914 2103 replace = lower_catch (state, try_stmt);
2104 break;
c90b5d40 2105 case GIMPLE_EH_FILTER:
1a91d914 2106 replace = lower_eh_filter (state, try_stmt);
2107 break;
c90b5d40 2108 case GIMPLE_EH_MUST_NOT_THROW:
1a91d914 2109 replace = lower_eh_must_not_throw (state, try_stmt);
2110 break;
4c0315d0 2111 case GIMPLE_EH_ELSE:
1a91d914 2112 /* This code is only valid with GIMPLE_TRY_FINALLY. */
2113 gcc_unreachable ();
c90b5d40 2114 default:
1a91d914 2115 replace = lower_cleanup (state, try_stmt);
2116 break;
2117 }
2118 }
2119 }
75a70cf9 2120
2121 /* Remove the old stmt and insert the transformed sequence
2122 instead. */
2123 gsi_insert_seq_before (gsi, replace, GSI_SAME_STMT);
2124 gsi_remove (gsi, true);
2125
2126 /* Return since we don't want gsi_next () */
2127 return;
4ee9c684 2128
4c0315d0 2129 case GIMPLE_EH_ELSE:
2130 /* We should be eliminating this in lower_try_finally et al. */
2131 gcc_unreachable ();
2132
4ee9c684 2133 default:
2134 /* A type, a decl, or some kind of statement that we're not
2135 interested in. Don't walk them. */
2136 break;
2137 }
75a70cf9 2138
2139 gsi_next (gsi);
2140}
2141
2142/* A helper to unwrap a gimple_seq and feed stmts to lower_eh_constructs_2. */
2143
2144static void
e3a19533 2145lower_eh_constructs_1 (struct leh_state *state, gimple_seq *pseq)
75a70cf9 2146{
2147 gimple_stmt_iterator gsi;
e3a19533 2148 for (gsi = gsi_start (*pseq); !gsi_end_p (gsi);)
75a70cf9 2149 lower_eh_constructs_2 (state, &gsi);
4ee9c684 2150}
2151
65b0537f 2152namespace {
2153
2154const pass_data pass_data_lower_eh =
2155{
2156 GIMPLE_PASS, /* type */
2157 "eh", /* name */
2158 OPTGROUP_NONE, /* optinfo_flags */
65b0537f 2159 TV_TREE_EH, /* tv_id */
2160 PROP_gimple_lcf, /* properties_required */
2161 PROP_gimple_leh, /* properties_provided */
2162 0, /* properties_destroyed */
2163 0, /* todo_flags_start */
2164 0, /* todo_flags_finish */
2165};
2166
2167class pass_lower_eh : public gimple_opt_pass
2168{
2169public:
2170 pass_lower_eh (gcc::context *ctxt)
2171 : gimple_opt_pass (pass_data_lower_eh, ctxt)
2172 {}
2173
2174 /* opt_pass methods: */
2175 virtual unsigned int execute (function *);
2176
2177}; // class pass_lower_eh
2178
2179unsigned int
2180pass_lower_eh::execute (function *fun)
4ee9c684 2181{
2182 struct leh_state null_state;
e38def9c 2183 gimple_seq bodyp;
75a70cf9 2184
e38def9c 2185 bodyp = gimple_body (current_function_decl);
2186 if (bodyp == NULL)
2187 return 0;
4ee9c684 2188
c1f445d2 2189 finally_tree = new hash_table<finally_tree_hasher> (31);
55d6d4e4 2190 eh_region_may_contain_throw_map = BITMAP_ALLOC (NULL);
e38def9c 2191 memset (&null_state, 0, sizeof (null_state));
4ee9c684 2192
75a70cf9 2193 collect_finally_tree_1 (bodyp, NULL);
e3a19533 2194 lower_eh_constructs_1 (&null_state, &bodyp);
2195 gimple_set_body (current_function_decl, bodyp);
4ee9c684 2196
e38def9c 2197 /* We assume there's a return statement, or something, at the end of
 2198 the function, and thus plopping the EH sequence afterward won't
2199 change anything. */
2200 gcc_assert (!gimple_seq_may_fallthru (bodyp));
2201 gimple_seq_add_seq (&bodyp, eh_seq);
2202
2203 /* We assume that since BODYP already existed, adding EH_SEQ to it
2204 didn't change its value, and we don't have to re-set the function. */
2205 gcc_assert (bodyp == gimple_body (current_function_decl));
4ee9c684 2206
c1f445d2 2207 delete finally_tree;
2208 finally_tree = NULL;
55d6d4e4 2209 BITMAP_FREE (eh_region_may_contain_throw_map);
e38def9c 2210 eh_seq = NULL;
58d82cd0 2211
2212 /* If this function needs a language specific EH personality routine
2213 and the frontend didn't already set one do so now. */
65b0537f 2214 if (function_needs_eh_personality (fun) == eh_personality_lang
58d82cd0 2215 && !DECL_FUNCTION_PERSONALITY (current_function_decl))
2216 DECL_FUNCTION_PERSONALITY (current_function_decl)
2217 = lang_hooks.eh_personality ();
2218
2a1990e9 2219 return 0;
4ee9c684 2220}
2221
cbe8bda8 2222} // anon namespace
2223
2224gimple_opt_pass *
2225make_pass_lower_eh (gcc::context *ctxt)
2226{
2227 return new pass_lower_eh (ctxt);
2228}
4ee9c684 2229\f
e38def9c 2230/* Create the multiple edges from an EH_DISPATCH statement to all of
2231 the possible handlers for its EH region. Return true if there's
2232 no fallthru edge; false if there is. */
4ee9c684 2233
e38def9c 2234bool
1a91d914 2235make_eh_dispatch_edges (geh_dispatch *stmt)
4ee9c684 2236{
e38def9c 2237 eh_region r;
2238 eh_catch c;
4ee9c684 2239 basic_block src, dst;
2240
e38def9c 2241 r = get_eh_region_from_number (gimple_eh_dispatch_region (stmt));
75a70cf9 2242 src = gimple_bb (stmt);
4ee9c684 2243
e38def9c 2244 switch (r->type)
2245 {
2246 case ERT_TRY:
2247 for (c = r->u.eh_try.first_catch; c ; c = c->next_catch)
2248 {
2249 dst = label_to_block (c->label);
2250 make_edge (src, dst, 0);
ac13e8d9 2251
e38def9c 2252 /* A catch-all handler doesn't have a fallthru. */
2253 if (c->type_list == NULL)
2254 return false;
2255 }
2256 break;
a5bfef5b 2257
e38def9c 2258 case ERT_ALLOWED_EXCEPTIONS:
2259 dst = label_to_block (r->u.allowed.label);
2260 make_edge (src, dst, 0);
2261 break;
2262
2263 default:
2264 gcc_unreachable ();
2265 }
2266
2267 return true;
a5bfef5b 2268}
2269
e38def9c 2270/* Create the single EH edge from STMT to its nearest landing pad,
2271 if there is such a landing pad within the current function. */
2272
4ee9c684 2273void
75a70cf9 2274make_eh_edges (gimple stmt)
4ee9c684 2275{
e38def9c 2276 basic_block src, dst;
2277 eh_landing_pad lp;
2278 int lp_nr;
4ee9c684 2279
e38def9c 2280 lp_nr = lookup_stmt_eh_lp (stmt);
2281 if (lp_nr <= 0)
2282 return;
4ee9c684 2283
e38def9c 2284 lp = get_eh_landing_pad_from_number (lp_nr);
2285 gcc_assert (lp != NULL);
d6d5ab2d 2286
e38def9c 2287 src = gimple_bb (stmt);
2288 dst = label_to_block (lp->post_landing_pad);
2289 make_edge (src, dst, EDGE_EH);
4ee9c684 2290}
2291
e38def9c 2292/* Do the work in redirecting EDGE_IN to NEW_BB within the EH region tree;
2293 do not actually perform the final edge redirection.
927a6b6b 2294
e38def9c 2295 CHANGE_REGION is true when we're being called from cleanup_empty_eh and
2296 we intend to change the destination EH region as well; this means
2297 EH_LANDING_PAD_NR must already be set on the destination block label.
2298 If false, we're being called from generic cfg manipulation code and we
2299 should preserve our place within the region tree. */
2300
2301static void
2302redirect_eh_edge_1 (edge edge_in, basic_block new_bb, bool change_region)
927a6b6b 2303{
e38def9c 2304 eh_landing_pad old_lp, new_lp;
2305 basic_block old_bb;
2306 gimple throw_stmt;
2307 int old_lp_nr, new_lp_nr;
2308 tree old_label, new_label;
2309 edge_iterator ei;
2310 edge e;
2311
2312 old_bb = edge_in->dest;
2313 old_label = gimple_block_label (old_bb);
2314 old_lp_nr = EH_LANDING_PAD_NR (old_label);
2315 gcc_assert (old_lp_nr > 0);
2316 old_lp = get_eh_landing_pad_from_number (old_lp_nr);
2317
2318 throw_stmt = last_stmt (edge_in->src);
2319 gcc_assert (lookup_stmt_eh_lp (throw_stmt) == old_lp_nr);
2320
2321 new_label = gimple_block_label (new_bb);
927a6b6b 2322
e38def9c 2323 /* Look for an existing region that might be using NEW_BB already. */
2324 new_lp_nr = EH_LANDING_PAD_NR (new_label);
2325 if (new_lp_nr)
927a6b6b 2326 {
e38def9c 2327 new_lp = get_eh_landing_pad_from_number (new_lp_nr);
2328 gcc_assert (new_lp);
48e1416a 2329
e38def9c 2330 /* Unless CHANGE_REGION is true, the new and old landing pad
2331 had better be associated with the same EH region. */
2332 gcc_assert (change_region || new_lp->region == old_lp->region);
927a6b6b 2333 }
2334 else
2335 {
e38def9c 2336 new_lp = NULL;
2337 gcc_assert (!change_region);
927a6b6b 2338 }
2339
e38def9c 2340 /* Notice when we redirect the last EH edge away from OLD_BB. */
2341 FOR_EACH_EDGE (e, ei, old_bb->preds)
2342 if (e != edge_in && (e->flags & EDGE_EH))
2343 break;
b4ba5e9d 2344
e38def9c 2345 if (new_lp)
b4ba5e9d 2346 {
e38def9c 2347 /* NEW_LP already exists. If there are still edges into OLD_LP,
2348 there's nothing to do with the EH tree. If there are no more
2349 edges into OLD_LP, then we want to remove OLD_LP as it is unused.
2350 If CHANGE_REGION is true, then our caller is expecting to remove
2351 the landing pad. */
2352 if (e == NULL && !change_region)
2353 remove_eh_landing_pad (old_lp);
b4ba5e9d 2354 }
e38def9c 2355 else
b4ba5e9d 2356 {
e38def9c 2357 /* No correct landing pad exists. If there are no more edges
2358 into OLD_LP, then we can simply re-use the existing landing pad.
2359 Otherwise, we have to create a new landing pad. */
2360 if (e == NULL)
2361 {
2362 EH_LANDING_PAD_NR (old_lp->post_landing_pad) = 0;
2363 new_lp = old_lp;
2364 }
2365 else
2366 new_lp = gen_eh_landing_pad (old_lp->region);
2367 new_lp->post_landing_pad = new_label;
2368 EH_LANDING_PAD_NR (new_label) = new_lp->index;
b4ba5e9d 2369 }
e38def9c 2370
2371 /* Maybe move the throwing statement to the new region. */
2372 if (old_lp != new_lp)
b4ba5e9d 2373 {
e38def9c 2374 remove_stmt_from_eh_lp (throw_stmt);
2375 add_stmt_to_eh_lp (throw_stmt, new_lp->index);
b4ba5e9d 2376 }
b4ba5e9d 2377}
2378
e38def9c 2379/* Redirect EH edge E to NEW_BB. */
75a70cf9 2380
e38def9c 2381edge
2382redirect_eh_edge (edge edge_in, basic_block new_bb)
b4ba5e9d 2383{
e38def9c 2384 redirect_eh_edge_1 (edge_in, new_bb, false);
2385 return ssa_redirect_edge (edge_in, new_bb);
2386}
b4ba5e9d 2387
e38def9c 2388/* This is a subroutine of gimple_redirect_edge_and_branch. Update the
2389 labels for redirecting a non-fallthru EH_DISPATCH edge E to NEW_BB.
2390 The actual edge update will happen in the caller. */
b4ba5e9d 2391
e38def9c 2392void
1a91d914 2393redirect_eh_dispatch_edge (geh_dispatch *stmt, edge e, basic_block new_bb)
e38def9c 2394{
2395 tree new_lab = gimple_block_label (new_bb);
2396 bool any_changed = false;
2397 basic_block old_bb;
2398 eh_region r;
2399 eh_catch c;
2400
2401 r = get_eh_region_from_number (gimple_eh_dispatch_region (stmt));
2402 switch (r->type)
b4ba5e9d 2403 {
e38def9c 2404 case ERT_TRY:
2405 for (c = r->u.eh_try.first_catch; c ; c = c->next_catch)
b4ba5e9d 2406 {
e38def9c 2407 old_bb = label_to_block (c->label);
2408 if (old_bb == e->dest)
2409 {
2410 c->label = new_lab;
2411 any_changed = true;
2412 }
b4ba5e9d 2413 }
e38def9c 2414 break;
2415
2416 case ERT_ALLOWED_EXCEPTIONS:
2417 old_bb = label_to_block (r->u.allowed.label);
2418 gcc_assert (old_bb == e->dest);
2419 r->u.allowed.label = new_lab;
2420 any_changed = true;
2421 break;
2422
2423 default:
2424 gcc_unreachable ();
b4ba5e9d 2425 }
75a70cf9 2426
e38def9c 2427 gcc_assert (any_changed);
b4ba5e9d 2428}
4ee9c684 2429\f
75a70cf9 2430/* Helper function for operation_could_trap_p and stmt_could_throw_p. */
2431
2ac47fdf 2432bool
75a70cf9 2433operation_could_trap_helper_p (enum tree_code op,
2434 bool fp_operation,
2435 bool honor_trapv,
2436 bool honor_nans,
2437 bool honor_snans,
2438 tree divisor,
2439 bool *handled)
2440{
2441 *handled = true;
2442 switch (op)
2443 {
2444 case TRUNC_DIV_EXPR:
2445 case CEIL_DIV_EXPR:
2446 case FLOOR_DIV_EXPR:
2447 case ROUND_DIV_EXPR:
2448 case EXACT_DIV_EXPR:
2449 case CEIL_MOD_EXPR:
2450 case FLOOR_MOD_EXPR:
2451 case ROUND_MOD_EXPR:
2452 case TRUNC_MOD_EXPR:
2453 case RDIV_EXPR:
2454 if (honor_snans || honor_trapv)
2455 return true;
2456 if (fp_operation)
2457 return flag_trapping_math;
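      /* What remains are integer divisions: these trap only when the
         divisor may be zero, which we must assume for any divisor that is
         not a nonzero constant.  */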
2458 if (!TREE_CONSTANT (divisor) || integer_zerop (divisor))
2459 return true;
2460 return false;
2461
2462 case LT_EXPR:
2463 case LE_EXPR:
2464 case GT_EXPR:
2465 case GE_EXPR:
2466 case LTGT_EXPR:
2467 /* Some floating point comparisons may trap. */
2468 return honor_nans;
2469
2470 case EQ_EXPR:
2471 case NE_EXPR:
2472 case UNORDERED_EXPR:
2473 case ORDERED_EXPR:
2474 case UNLT_EXPR:
2475 case UNLE_EXPR:
2476 case UNGT_EXPR:
2477 case UNGE_EXPR:
2478 case UNEQ_EXPR:
2479 return honor_snans;
2480
75a70cf9 2481 case NEGATE_EXPR:
2482 case ABS_EXPR:
2483 case CONJ_EXPR:
2484 /* These operations don't trap with floating point. */
2485 if (honor_trapv)
2486 return true;
2487 return false;
2488
2489 case PLUS_EXPR:
2490 case MINUS_EXPR:
2491 case MULT_EXPR:
2492 /* Any floating arithmetic may trap. */
2493 if (fp_operation && flag_trapping_math)
2494 return true;
2495 if (honor_trapv)
2496 return true;
2497 return false;
2498
aa9d6f35 2499 case COMPLEX_EXPR:
2500 case CONSTRUCTOR:
2501 /* Constructing an object cannot trap. */
2502 return false;
2503
75a70cf9 2504 default:
2505 /* Any floating arithmetic may trap. */
2506 if (fp_operation && flag_trapping_math)
2507 return true;
2508
2509 *handled = false;
2510 return false;
2511 }
2512}
2513
2514/* Return true if operation OP may trap. FP_OPERATION is true if OP is applied
2515 on floating-point values. HONOR_TRAPV is true if OP is applied on integer
2516 type operands that may trap. If OP is a division operator, DIVISOR contains
2517 the value of the divisor. */
2518
2519bool
2520operation_could_trap_p (enum tree_code op, bool fp_operation, bool honor_trapv,
2521 tree divisor)
2522{
2523 bool honor_nans = (fp_operation && flag_trapping_math
2524 && !flag_finite_math_only);
2525 bool honor_snans = fp_operation && flag_signaling_nans != 0;
2526 bool handled;
2527
2528 if (TREE_CODE_CLASS (op) != tcc_comparison
2529 && TREE_CODE_CLASS (op) != tcc_unary
2530 && TREE_CODE_CLASS (op) != tcc_binary)
2531 return false;
2532
2533 return operation_could_trap_helper_p (op, fp_operation, honor_trapv,
2534 honor_nans, honor_snans, divisor,
2535 &handled);
2536}
2537
0e80b01d 2538
2539/* Returns true if it is possible to prove that the index of
2540 an array access REF (an ARRAY_REF expression) falls into the
2541 array bounds. */
2542
2543static bool
2544in_array_bounds_p (tree ref)
2545{
2546 tree idx = TREE_OPERAND (ref, 1);
2547 tree min, max;
2548
2549 if (TREE_CODE (idx) != INTEGER_CST)
2550 return false;
2551
2552 min = array_ref_low_bound (ref);
2553 max = array_ref_up_bound (ref);
2554 if (!min
2555 || !max
2556 || TREE_CODE (min) != INTEGER_CST
2557 || TREE_CODE (max) != INTEGER_CST)
2558 return false;
2559
2560 if (tree_int_cst_lt (idx, min)
2561 || tree_int_cst_lt (max, idx))
2562 return false;
2563
2564 return true;
2565}
2566
2567/* Returns true if it is possible to prove that the range of
2568 an array access REF (an ARRAY_RANGE_REF expression) falls
2569 into the array bounds. */
2570
2571static bool
2572range_in_array_bounds_p (tree ref)
2573{
2574 tree domain_type = TYPE_DOMAIN (TREE_TYPE (ref));
2575 tree range_min, range_max, min, max;
2576
2577 range_min = TYPE_MIN_VALUE (domain_type);
2578 range_max = TYPE_MAX_VALUE (domain_type);
2579 if (!range_min
2580 || !range_max
2581 || TREE_CODE (range_min) != INTEGER_CST
2582 || TREE_CODE (range_max) != INTEGER_CST)
2583 return false;
2584
2585 min = array_ref_low_bound (ref);
2586 max = array_ref_up_bound (ref);
2587 if (!min
2588 || !max
2589 || TREE_CODE (min) != INTEGER_CST
2590 || TREE_CODE (max) != INTEGER_CST)
2591 return false;
2592
2593 if (tree_int_cst_lt (range_min, min)
2594 || tree_int_cst_lt (max, range_max))
2595 return false;
2596
2597 return true;
2598}
2599
75a70cf9 2600/* Return true if EXPR can trap, as in dereferencing an invalid pointer
35c15734 2601 location or floating point arithmetic. C.f. the rtl version, may_trap_p.
2602 This routine expects only GIMPLE lhs or rhs input. */
4ee9c684 2603
2604bool
2605tree_could_trap_p (tree expr)
2606{
75a70cf9 2607 enum tree_code code;
35c15734 2608 bool fp_operation = false;
db97ad41 2609 bool honor_trapv = false;
75a70cf9 2610 tree t, base, div = NULL_TREE;
4ee9c684 2611
75a70cf9 2612 if (!expr)
2613 return false;
e38def9c 2614
75a70cf9 2615 code = TREE_CODE (expr);
2616 t = TREE_TYPE (expr);
2617
2618 if (t)
35c15734 2619 {
7076cb5d 2620 if (COMPARISON_CLASS_P (expr))
2621 fp_operation = FLOAT_TYPE_P (TREE_TYPE (TREE_OPERAND (expr, 0)));
2622 else
2623 fp_operation = FLOAT_TYPE_P (t);
75a70cf9 2624 honor_trapv = INTEGRAL_TYPE_P (t) && TYPE_OVERFLOW_TRAPS (t);
35c15734 2625 }
2626
75a70cf9 2627 if (TREE_CODE_CLASS (code) == tcc_binary)
2628 div = TREE_OPERAND (expr, 1);
2629 if (operation_could_trap_p (code, fp_operation, honor_trapv, div))
2630 return true;
2631
80f06481 2632 restart:
4ee9c684 2633 switch (code)
2634 {
4ee9c684 2635 case COMPONENT_REF:
2636 case REALPART_EXPR:
2637 case IMAGPART_EXPR:
2638 case BIT_FIELD_REF:
26d2ad79 2639 case VIEW_CONVERT_EXPR:
80f06481 2640 case WITH_SIZE_EXPR:
2641 expr = TREE_OPERAND (expr, 0);
2642 code = TREE_CODE (expr);
2643 goto restart;
7d23383d 2644
2645 case ARRAY_RANGE_REF:
2100c228 2646 base = TREE_OPERAND (expr, 0);
2647 if (tree_could_trap_p (base))
7d23383d 2648 return true;
2100c228 2649 if (TREE_THIS_NOTRAP (expr))
2650 return false;
2100c228 2651 return !range_in_array_bounds_p (expr);
7d23383d 2652
2653 case ARRAY_REF:
2654 base = TREE_OPERAND (expr, 0);
7d23383d 2655 if (tree_could_trap_p (base))
2656 return true;
7d23383d 2657 if (TREE_THIS_NOTRAP (expr))
2658 return false;
7d23383d 2659 return !in_array_bounds_p (expr);
4ee9c684 2660
cdf2f9e4 2661 case TARGET_MEM_REF:
182cf5a9 2662 case MEM_REF:
cdf2f9e4 2663 if (TREE_CODE (TREE_OPERAND (expr, 0)) == ADDR_EXPR
2664 && tree_could_trap_p (TREE_OPERAND (TREE_OPERAND (expr, 0), 0)))
2665 return true;
2666 if (TREE_THIS_NOTRAP (expr))
182cf5a9 2667 return false;
cdf2f9e4 2668 /* We cannot prove that the access is in-bounds when we have
2669 variable-index TARGET_MEM_REFs. */
2670 if (code == TARGET_MEM_REF
2671 && (TMR_INDEX (expr) || TMR_INDEX2 (expr)))
2672 return true;
2673 if (TREE_CODE (TREE_OPERAND (expr, 0)) == ADDR_EXPR)
2674 {
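          /* The base object is known directly (a decl or string constant);
             the access can only trap if its constant offset is negative or
             lies at or beyond the object's size.  */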
2675 tree base = TREE_OPERAND (TREE_OPERAND (expr, 0), 0);
7f646368 2676 offset_int off = mem_ref_offset (expr);
2677 if (wi::neg_p (off, SIGNED))
cdf2f9e4 2678 return true;
2679 if (TREE_CODE (base) == STRING_CST)
7f646368 2680 return wi::leu_p (TREE_STRING_LENGTH (base), off);
cdf2f9e4 2681 else if (DECL_SIZE_UNIT (base) == NULL_TREE
2682 || TREE_CODE (DECL_SIZE_UNIT (base)) != INTEGER_CST
7f646368 2683 || wi::leu_p (wi::to_offset (DECL_SIZE_UNIT (base)), off))
cdf2f9e4 2684 return true;
2685 /* Now we are sure the first byte of the access is inside
2686 the object. */
2687 return false;
2688 }
2689 return true;
2690
4ee9c684 2691 case INDIRECT_REF:
35c15734 2692 return !TREE_THIS_NOTRAP (expr);
2693
2694 case ASM_EXPR:
2695 return TREE_THIS_VOLATILE (expr);
010d0641 2696
75a70cf9 2697 case CALL_EXPR:
2698 t = get_callee_fndecl (expr);
2699 /* Assume that calls to weak functions may trap. */
7e49f1a1 2700 if (!t || !DECL_P (t))
35c15734 2701 return true;
7e49f1a1 2702 if (DECL_WEAK (t))
2703 return tree_could_trap_p (t);
2704 return false;
2705
2706 case FUNCTION_DECL:
2707 /* Assume that accesses to weak functions may trap, unless we know
2708 they are certainly defined in current TU or in some other
2709 LTO partition. */
9427cf46 2710 if (DECL_WEAK (expr) && !DECL_COMDAT (expr) && DECL_EXTERNAL (expr))
7e49f1a1 2711 {
9427cf46 2712 cgraph_node *node = cgraph_node::get (expr);
2713 if (node)
2714 node = node->function_symbol ();
2715 return !(node && node->in_other_partition);
7e49f1a1 2716 }
2717 return false;
2718
2719 case VAR_DECL:
2720 /* Assume that accesses to weak vars may trap, unless we know
2721 they are certainly defined in current TU or in some other
2722 LTO partition. */
9427cf46 2723 if (DECL_WEAK (expr) && !DECL_COMDAT (expr) && DECL_EXTERNAL (expr))
7e49f1a1 2724 {
9427cf46 2725 varpool_node *node = varpool_node::get (expr);
2726 if (node)
2727 node = node->ultimate_alias_target ();
2728 return !(node && node->in_other_partition);
7e49f1a1 2729 }
35c15734 2730 return false;
2731
75a70cf9 2732 default:
2733 return false;
2734 }
2735}
35c15734 2736
35c15734 2737
75a70cf9 2738/* Helper for stmt_could_throw_p. Return true if STMT (assumed to be
 2739 an assignment or a conditional) may throw. */
35c15734 2740
75a70cf9 2741static bool
2742stmt_could_throw_1_p (gimple stmt)
2743{
2744 enum tree_code code = gimple_expr_code (stmt);
2745 bool honor_nans = false;
2746 bool honor_snans = false;
2747 bool fp_operation = false;
2748 bool honor_trapv = false;
2749 tree t;
2750 size_t i;
2751 bool handled, ret;
db97ad41 2752
75a70cf9 2753 if (TREE_CODE_CLASS (code) == tcc_comparison
2754 || TREE_CODE_CLASS (code) == tcc_unary
2755 || TREE_CODE_CLASS (code) == tcc_binary)
2756 {
25f48be0 2757 if (is_gimple_assign (stmt)
2758 && TREE_CODE_CLASS (code) == tcc_comparison)
2759 t = TREE_TYPE (gimple_assign_rhs1 (stmt));
2760 else if (gimple_code (stmt) == GIMPLE_COND)
2761 t = TREE_TYPE (gimple_cond_lhs (stmt));
2762 else
2763 t = gimple_expr_type (stmt);
75a70cf9 2764 fp_operation = FLOAT_TYPE_P (t);
2765 if (fp_operation)
2766 {
2767 honor_nans = flag_trapping_math && !flag_finite_math_only;
2768 honor_snans = flag_signaling_nans != 0;
2769 }
2770 else if (INTEGRAL_TYPE_P (t) && TYPE_OVERFLOW_TRAPS (t))
2771 honor_trapv = true;
2772 }
2773
2774 /* Check if the main expression may trap. */
2775 t = is_gimple_assign (stmt) ? gimple_assign_rhs2 (stmt) : NULL;
2776 ret = operation_could_trap_helper_p (code, fp_operation, honor_trapv,
2777 honor_nans, honor_snans, t,
2778 &handled);
2779 if (handled)
2780 return ret;
2781
2782 /* If the expression does not trap, see if any of the individual operands may
2783 trap. */
2784 for (i = 0; i < gimple_num_ops (stmt); i++)
2785 if (tree_could_trap_p (gimple_op (stmt, i)))
2786 return true;
2787
2788 return false;
2789}
2790
2791
2792/* Return true if statement STMT could throw an exception. */
2793
2794bool
2795stmt_could_throw_p (gimple stmt)
2796{
75a70cf9 2797 if (!flag_exceptions)
2798 return false;
2799
2800 /* The only statements that can throw an exception are assignments,
e38def9c 2801 conditionals, calls, resx, and asms. */
2802 switch (gimple_code (stmt))
2803 {
2804 case GIMPLE_RESX:
2805 return true;
75a70cf9 2806
e38def9c 2807 case GIMPLE_CALL:
1a91d914 2808 return !gimple_call_nothrow_p (as_a <gcall *> (stmt));
75a70cf9 2809
e38def9c 2810 case GIMPLE_ASSIGN:
2811 case GIMPLE_COND:
cbeb677e 2812 if (!cfun->can_throw_non_call_exceptions)
e38def9c 2813 return false;
2814 return stmt_could_throw_1_p (stmt);
75a70cf9 2815
e38def9c 2816 case GIMPLE_ASM:
cbeb677e 2817 if (!cfun->can_throw_non_call_exceptions)
e38def9c 2818 return false;
1a91d914 2819 return gimple_asm_volatile_p (as_a <gasm *> (stmt));
e38def9c 2820
2821 default:
2822 return false;
2823 }
4ee9c684 2824}
2825
75a70cf9 2826
2827/* Return true if expression T could throw an exception. */
2828
4ee9c684 2829bool
2830tree_could_throw_p (tree t)
2831{
2832 if (!flag_exceptions)
2833 return false;
75a70cf9 2834 if (TREE_CODE (t) == MODIFY_EXPR)
4ee9c684 2835 {
cbeb677e 2836 if (cfun->can_throw_non_call_exceptions
e38def9c 2837 && tree_could_trap_p (TREE_OPERAND (t, 0)))
2838 return true;
75a70cf9 2839 t = TREE_OPERAND (t, 1);
4ee9c684 2840 }
2841
80f06481 2842 if (TREE_CODE (t) == WITH_SIZE_EXPR)
2843 t = TREE_OPERAND (t, 0);
4ee9c684 2844 if (TREE_CODE (t) == CALL_EXPR)
2845 return (call_expr_flags (t) & ECF_NOTHROW) == 0;
cbeb677e 2846 if (cfun->can_throw_non_call_exceptions)
3864ad30 2847 return tree_could_trap_p (t);
4ee9c684 2848 return false;
2849}
2850
b5cebd44 2851/* Return true if STMT can throw an exception that is not caught within
2852 the current function (CFUN). */
2853
2854bool
2855stmt_can_throw_external (gimple stmt)
2856{
e38def9c 2857 int lp_nr;
b5cebd44 2858
2859 if (!stmt_could_throw_p (stmt))
2860 return false;
2861
e38def9c 2862 lp_nr = lookup_stmt_eh_lp (stmt);
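  /* lookup_stmt_eh_lp returns 0 when the statement has no handler within
     this function, a positive landing pad index when it does, and a
     negative region number for MUST_NOT_THROW regions.  */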
2863 return lp_nr == 0;
b5cebd44 2864}
75a70cf9 2865
2866/* Return true if STMT can throw an exception that is caught within
2867 the current function (CFUN). */
2868
4ee9c684 2869bool
75a70cf9 2870stmt_can_throw_internal (gimple stmt)
4ee9c684 2871{
e38def9c 2872 int lp_nr;
75a70cf9 2873
e38def9c 2874 if (!stmt_could_throw_p (stmt))
4ee9c684 2875 return false;
75a70cf9 2876
e38def9c 2877 lp_nr = lookup_stmt_eh_lp (stmt);
2878 return lp_nr > 0;
2879}
2880
2881/* Given a statement STMT in IFUN, if STMT can no longer throw, then
2882 remove any entry it might have from the EH table. Return true if
2883 any change was made. */
2884
2885bool
2886maybe_clean_eh_stmt_fn (struct function *ifun, gimple stmt)
2887{
2888 if (stmt_could_throw_p (stmt))
2889 return false;
2890 return remove_stmt_from_eh_lp_fn (ifun, stmt);
4ee9c684 2891}
2892
e38def9c 2893/* Likewise, but always use the current function. */
2894
2895bool
2896maybe_clean_eh_stmt (gimple stmt)
2897{
2898 return maybe_clean_eh_stmt_fn (cfun, stmt);
2899}
4ee9c684 2900
4c27dd45 2901/* Given a statement OLD_STMT and a new statement NEW_STMT that has replaced
2902 OLD_STMT in the function, remove OLD_STMT from the EH table and put NEW_STMT
2903 in the table if it should be in there. Return TRUE if a replacement was
 2904 done that may require an EH edge purge. */
2905
e38def9c 2906bool
2907maybe_clean_or_replace_eh_stmt (gimple old_stmt, gimple new_stmt)
35c15734 2908{
e38def9c 2909 int lp_nr = lookup_stmt_eh_lp (old_stmt);
4c27dd45 2910
e38def9c 2911 if (lp_nr != 0)
4c27dd45 2912 {
75a70cf9 2913 bool new_stmt_could_throw = stmt_could_throw_p (new_stmt);
4c27dd45 2914
2915 if (new_stmt == old_stmt && new_stmt_could_throw)
2916 return false;
2917
e38def9c 2918 remove_stmt_from_eh_lp (old_stmt);
4c27dd45 2919 if (new_stmt_could_throw)
2920 {
e38def9c 2921 add_stmt_to_eh_lp (new_stmt, lp_nr);
4c27dd45 2922 return false;
2923 }
2924 else
2925 return true;
2926 }
2927
35c15734 2928 return false;
2929}
e38def9c 2930
9d75589a 2931/* Given a statement OLD_STMT in OLD_FUN and a duplicate statement NEW_STMT
e38def9c 2932 in NEW_FUN, copy the EH table data from OLD_STMT to NEW_STMT. The MAP
2933 operand is the return value of duplicate_eh_regions. */
2934
2935bool
2936maybe_duplicate_eh_stmt_fn (struct function *new_fun, gimple new_stmt,
2937 struct function *old_fun, gimple old_stmt,
06ecf488 2938 hash_map<void *, void *> *map,
2939 int default_lp_nr)
e38def9c 2940{
2941 int old_lp_nr, new_lp_nr;
e38def9c 2942
2943 if (!stmt_could_throw_p (new_stmt))
2944 return false;
2945
2946 old_lp_nr = lookup_stmt_eh_lp_fn (old_fun, old_stmt);
2947 if (old_lp_nr == 0)
2948 {
2949 if (default_lp_nr == 0)
2950 return false;
2951 new_lp_nr = default_lp_nr;
2952 }
2953 else if (old_lp_nr > 0)
2954 {
2955 eh_landing_pad old_lp, new_lp;
2956
f1f41a6c 2957 old_lp = (*old_fun->eh->lp_array)[old_lp_nr];
06ecf488 2958 new_lp = static_cast<eh_landing_pad> (*map->get (old_lp));
e38def9c 2959 new_lp_nr = new_lp->index;
2960 }
2961 else
2962 {
2963 eh_region old_r, new_r;
2964
f1f41a6c 2965 old_r = (*old_fun->eh->region_array)[-old_lp_nr];
06ecf488 2966 new_r = static_cast<eh_region> (*map->get (old_r));
e38def9c 2967 new_lp_nr = -new_r->index;
2968 }
2969
2970 add_stmt_to_eh_lp_fn (new_fun, new_stmt, new_lp_nr);
2971 return true;
2972}
2973
2974/* Similar, but both OLD_STMT and NEW_STMT are within the current function,
2975 and thus no remapping is required. */
2976
2977bool
2978maybe_duplicate_eh_stmt (gimple new_stmt, gimple old_stmt)
2979{
2980 int lp_nr;
2981
2982 if (!stmt_could_throw_p (new_stmt))
2983 return false;
2984
2985 lp_nr = lookup_stmt_eh_lp (old_stmt);
2986 if (lp_nr == 0)
2987 return false;
2988
2989 add_stmt_to_eh_lp (new_stmt, lp_nr);
2990 return true;
2991}
4888ab9a 2992\f
75a70cf9 2993/* Returns TRUE if oneh and twoh are exception handlers (gimple_try_cleanup of
2994 GIMPLE_TRY) that are similar enough to be considered the same. Currently
2995 this only handles handlers consisting of a single call, as that's the
2996 important case for C++: a destructor call for a particular object showing
2997 up in multiple handlers. */
4888ab9a 2998
2999static bool
75a70cf9 3000same_handler_p (gimple_seq oneh, gimple_seq twoh)
4888ab9a 3001{
75a70cf9 3002 gimple_stmt_iterator gsi;
3003 gimple ones, twos;
3004 unsigned int ai;
4888ab9a 3005
75a70cf9 3006 gsi = gsi_start (oneh);
3007 if (!gsi_one_before_end_p (gsi))
4888ab9a 3008 return false;
75a70cf9 3009 ones = gsi_stmt (gsi);
4888ab9a 3010
75a70cf9 3011 gsi = gsi_start (twoh);
3012 if (!gsi_one_before_end_p (gsi))
4888ab9a 3013 return false;
75a70cf9 3014 twos = gsi_stmt (gsi);
3015
3016 if (!is_gimple_call (ones)
3017 || !is_gimple_call (twos)
3018 || gimple_call_lhs (ones)
3019 || gimple_call_lhs (twos)
3020 || gimple_call_chain (ones)
3021 || gimple_call_chain (twos)
fb049fba 3022 || !gimple_call_same_target_p (ones, twos)
75a70cf9 3023 || gimple_call_num_args (ones) != gimple_call_num_args (twos))
4888ab9a 3024 return false;
3025
75a70cf9 3026 for (ai = 0; ai < gimple_call_num_args (ones); ++ai)
3027 if (!operand_equal_p (gimple_call_arg (ones, ai),
e38def9c 3028 gimple_call_arg (twos, ai), 0))
4888ab9a 3029 return false;
3030
3031 return true;
3032}
3033
3034/* Optimize
3035 try { A() } finally { try { ~B() } catch { ~A() } }
3036 try { ... } finally { ~A() }
3037 into
3038 try { A() } catch { ~B() }
3039 try { ~B() ... } finally { ~A() }
3040
3041 This occurs frequently in C++, where A is a local variable and B is a
3042 temporary used in the initializer for A. */
3043
3044static void
1a91d914 3045optimize_double_finally (gtry *one, gtry *two)
4888ab9a 3046{
75a70cf9 3047 gimple oneh;
3048 gimple_stmt_iterator gsi;
e3a19533 3049 gimple_seq cleanup;
4888ab9a 3050
e3a19533 3051 cleanup = gimple_try_cleanup (one);
3052 gsi = gsi_start (cleanup);
75a70cf9 3053 if (!gsi_one_before_end_p (gsi))
4888ab9a 3054 return;
3055
75a70cf9 3056 oneh = gsi_stmt (gsi);
3057 if (gimple_code (oneh) != GIMPLE_TRY
3058 || gimple_try_kind (oneh) != GIMPLE_TRY_CATCH)
4888ab9a 3059 return;
3060
75a70cf9 3061 if (same_handler_p (gimple_try_cleanup (oneh), gimple_try_cleanup (two)))
4888ab9a 3062 {
75a70cf9 3063 gimple_seq seq = gimple_try_eval (oneh);
4888ab9a 3064
75a70cf9 3065 gimple_try_set_cleanup (one, seq);
3066 gimple_try_set_kind (one, GIMPLE_TRY_CATCH);
3067 seq = copy_gimple_seq_and_replace_locals (seq);
3068 gimple_seq_add_seq (&seq, gimple_try_eval (two));
3069 gimple_try_set_eval (two, seq);
4888ab9a 3070 }
3071}
3072
3073/* Perform EH refactoring optimizations that are simpler to do when code
c7684b8e 3074 flow has been lowered but EH structures haven't. */
4888ab9a 3075
3076static void
75a70cf9 3077refactor_eh_r (gimple_seq seq)
4888ab9a 3078{
75a70cf9 3079 gimple_stmt_iterator gsi;
3080 gimple one, two;
4888ab9a 3081
75a70cf9 3082 one = NULL;
3083 two = NULL;
3084 gsi = gsi_start (seq);
3085 while (1)
3086 {
3087 one = two;
3088 if (gsi_end_p (gsi))
3089 two = NULL;
3090 else
3091 two = gsi_stmt (gsi);
1a91d914 3092 if (one && two)
3093 if (gtry *try_one = dyn_cast <gtry *> (one))
3094 if (gtry *try_two = dyn_cast <gtry *> (two))
3095 if (gimple_try_kind (try_one) == GIMPLE_TRY_FINALLY
3096 && gimple_try_kind (try_two) == GIMPLE_TRY_FINALLY)
3097 optimize_double_finally (try_one, try_two);
75a70cf9 3098 if (one)
3099 switch (gimple_code (one))
4888ab9a 3100 {
75a70cf9 3101 case GIMPLE_TRY:
3102 refactor_eh_r (gimple_try_eval (one));
3103 refactor_eh_r (gimple_try_cleanup (one));
3104 break;
3105 case GIMPLE_CATCH:
1a91d914 3106 refactor_eh_r (gimple_catch_handler (as_a <gcatch *> (one)));
75a70cf9 3107 break;
3108 case GIMPLE_EH_FILTER:
3109 refactor_eh_r (gimple_eh_filter_failure (one));
3110 break;
4c0315d0 3111 case GIMPLE_EH_ELSE:
1a91d914 3112 {
3113 geh_else *eh_else_stmt = as_a <geh_else *> (one);
3114 refactor_eh_r (gimple_eh_else_n_body (eh_else_stmt));
3115 refactor_eh_r (gimple_eh_else_e_body (eh_else_stmt));
3116 }
4c0315d0 3117 break;
75a70cf9 3118 default:
3119 break;
4888ab9a 3120 }
75a70cf9 3121 if (two)
3122 gsi_next (&gsi);
3123 else
3124 break;
4888ab9a 3125 }
3126}
3127
cbe8bda8 3128namespace {
3129
3130const pass_data pass_data_refactor_eh =
4888ab9a 3131{
cbe8bda8 3132 GIMPLE_PASS, /* type */
3133 "ehopt", /* name */
3134 OPTGROUP_NONE, /* optinfo_flags */
cbe8bda8 3135 TV_TREE_EH, /* tv_id */
3136 PROP_gimple_lcf, /* properties_required */
3137 0, /* properties_provided */
3138 0, /* properties_destroyed */
3139 0, /* todo_flags_start */
3140 0, /* todo_flags_finish */
4888ab9a 3141};
cbe8bda8 3142
3143class pass_refactor_eh : public gimple_opt_pass
3144{
3145public:
9af5ce0c 3146 pass_refactor_eh (gcc::context *ctxt)
3147 : gimple_opt_pass (pass_data_refactor_eh, ctxt)
cbe8bda8 3148 {}
3149
3150 /* opt_pass methods: */
31315c24 3151 virtual bool gate (function *) { return flag_exceptions != 0; }
65b0537f 3152 virtual unsigned int execute (function *)
3153 {
3154 refactor_eh_r (gimple_body (current_function_decl));
3155 return 0;
3156 }
cbe8bda8 3157
3158}; // class pass_refactor_eh
3159
3160} // anon namespace
3161
3162gimple_opt_pass *
3163make_pass_refactor_eh (gcc::context *ctxt)
3164{
3165 return new pass_refactor_eh (ctxt);
3166}
e38def9c 3167\f
3168/* At the end of gimple optimization, we can lower RESX. */
4c5fcca6 3169
e38def9c 3170static bool
1a91d914 3171lower_resx (basic_block bb, gresx *stmt,
3172 hash_map<eh_region, tree> *mnt_map)
4c5fcca6 3173{
e38def9c 3174 int lp_nr;
3175 eh_region src_r, dst_r;
3176 gimple_stmt_iterator gsi;
3177 gimple x;
3178 tree fn, src_nr;
3179 bool ret = false;
4c5fcca6 3180
e38def9c 3181 lp_nr = lookup_stmt_eh_lp (stmt);
3182 if (lp_nr != 0)
3183 dst_r = get_eh_region_from_lp_number (lp_nr);
3184 else
3185 dst_r = NULL;
4c5fcca6 3186
e38def9c 3187 src_r = get_eh_region_from_number (gimple_resx_region (stmt));
e38def9c 3188 gsi = gsi_last_bb (bb);
4c5fcca6 3189
395fc2bb 3190 if (src_r == NULL)
3191 {
3192 /* We can wind up with no source region when pass_cleanup_eh shows
3193 that there are no entries into an eh region and deletes it, but
3194 then the block that contains the resx isn't removed. This can
3195 happen without optimization when the switch statement created by
3196 lower_try_finally_switch isn't simplified to remove the eh case.
3197
3198 Resolve this by expanding the resx node to an abort. */
3199
b9a16870 3200 fn = builtin_decl_implicit (BUILT_IN_TRAP);
395fc2bb 3201 x = gimple_build_call (fn, 0);
3202 gsi_insert_before (&gsi, x, GSI_SAME_STMT);
3203
3204 while (EDGE_COUNT (bb->succs) > 0)
3205 remove_edge (EDGE_SUCC (bb, 0));
3206 }
3207 else if (dst_r)
e38def9c 3208 {
3209 /* When we have a destination region, we resolve this by copying
 3210 the exc_ptr and filter values into place, and changing the edge
3211 to immediately after the landing pad. */
3212 edge e;
4c5fcca6 3213
e38def9c 3214 if (lp_nr < 0)
3215 {
3216 basic_block new_bb;
e38def9c 3217 tree lab;
3d1eacdb 3218
e38def9c 3219 /* We are resuming into a MUST_NOT_THROW region. Expand a call to
3220 the failure decl into a new block, if needed. */
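          /* MNT_MAP caches the failure-call block per region, so every resx
             resuming into the same MUST_NOT_THROW region shares a single
             call to the failure decl.  */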
3221 gcc_assert (dst_r->type == ERT_MUST_NOT_THROW);
4c5fcca6 3222
06ecf488 3223 tree *slot = mnt_map->get (dst_r);
e38def9c 3224 if (slot == NULL)
3225 {
3226 gimple_stmt_iterator gsi2;
4c5fcca6 3227
e38def9c 3228 new_bb = create_empty_bb (bb);
b3083327 3229 add_bb_to_loop (new_bb, bb->loop_father);
e38def9c 3230 lab = gimple_block_label (new_bb);
3231 gsi2 = gsi_start_bb (new_bb);
4c5fcca6 3232
e38def9c 3233 fn = dst_r->u.must_not_throw.failure_decl;
3234 x = gimple_build_call (fn, 0);
3235 gimple_set_location (x, dst_r->u.must_not_throw.failure_loc);
3236 gsi_insert_after (&gsi2, x, GSI_CONTINUE_LINKING);
3bd82487 3237
06ecf488 3238 mnt_map->put (dst_r, lab);
e38def9c 3239 }
3240 else
3241 {
06ecf488 3242 lab = *slot;
e38def9c 3243 new_bb = label_to_block (lab);
3244 }
4c5fcca6 3245
e38def9c 3246 gcc_assert (EDGE_COUNT (bb->succs) == 0);
3247 e = make_edge (bb, new_bb, EDGE_FALLTHRU);
3248 e->count = bb->count;
3249 e->probability = REG_BR_PROB_BASE;
3250 }
3251 else
3252 {
3253 edge_iterator ei;
bad12c62 3254 tree dst_nr = build_int_cst (integer_type_node, dst_r->index);
4c5fcca6 3255
b9a16870 3256 fn = builtin_decl_implicit (BUILT_IN_EH_COPY_VALUES);
bad12c62 3257 src_nr = build_int_cst (integer_type_node, src_r->index);
e38def9c 3258 x = gimple_build_call (fn, 2, dst_nr, src_nr);
3259 gsi_insert_before (&gsi, x, GSI_SAME_STMT);
4c5fcca6 3260
e38def9c 3261 /* Update the flags for the outgoing edge. */
3262 e = single_succ_edge (bb);
3263 gcc_assert (e->flags & EDGE_EH);
3264 e->flags = (e->flags & ~EDGE_EH) | EDGE_FALLTHRU;
4c5fcca6 3265
e38def9c 3266 /* If there are no more EH users of the landing pad, delete it. */
3267 FOR_EACH_EDGE (e, ei, e->dest->preds)
3268 if (e->flags & EDGE_EH)
3269 break;
3270 if (e == NULL)
3271 {
3272 eh_landing_pad lp = get_eh_landing_pad_from_number (lp_nr);
3273 remove_eh_landing_pad (lp);
3274 }
3275 }
4c5fcca6 3276
e38def9c 3277 ret = true;
3278 }
3279 else
3280 {
3281 tree var;
4c5fcca6 3282
e38def9c 3283 /* When we don't have a destination region, this exception escapes
3284 up the call chain. We resolve this by generating a call to the
3285 _Unwind_Resume library function. */
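      /* Roughly: tmp = __builtin_eh_pointer (<src region index>);
         _Unwind_Resume (tmp);  -- or a plain __cxa_end_cleanup () call
         when the ARM EABI variant is in use (see below).  */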
4c5fcca6 3286
471eff36 3287 /* The ARM EABI redefines _Unwind_Resume as __cxa_end_cleanup
e38def9c 3288 with no arguments for C++ and Java. Check for that. */
471eff36 3289 if (src_r->use_cxa_end_cleanup)
3290 {
b9a16870 3291 fn = builtin_decl_implicit (BUILT_IN_CXA_END_CLEANUP);
471eff36 3292 x = gimple_build_call (fn, 0);
3293 gsi_insert_before (&gsi, x, GSI_SAME_STMT);
3294 }
3295 else
3bd82487 3296 {
b9a16870 3297 fn = builtin_decl_implicit (BUILT_IN_EH_POINTER);
bad12c62 3298 src_nr = build_int_cst (integer_type_node, src_r->index);
e38def9c 3299 x = gimple_build_call (fn, 1, src_nr);
f9e245b2 3300 var = create_tmp_var (ptr_type_node);
e38def9c 3301 var = make_ssa_name (var, x);
3302 gimple_call_set_lhs (x, var);
3303 gsi_insert_before (&gsi, x, GSI_SAME_STMT);
3304
b9a16870 3305 fn = builtin_decl_implicit (BUILT_IN_UNWIND_RESUME);
e38def9c 3306 x = gimple_build_call (fn, 1, var);
3307 gsi_insert_before (&gsi, x, GSI_SAME_STMT);
3bd82487 3308 }
4c5fcca6 3309
e38def9c 3310 gcc_assert (EDGE_COUNT (bb->succs) == 0);
3bd82487 3311 }
3d1eacdb 3312
e38def9c 3313 gsi_remove (&gsi, true);
3314
3315 return ret;
3bd82487 3316}
3317
cbe8bda8 3318namespace {
3319
3320const pass_data pass_data_lower_resx =
3bd82487 3321{
cbe8bda8 3322 GIMPLE_PASS, /* type */
3323 "resx", /* name */
3324 OPTGROUP_NONE, /* optinfo_flags */
cbe8bda8 3325 TV_TREE_EH, /* tv_id */
3326 PROP_gimple_lcf, /* properties_required */
3327 0, /* properties_provided */
3328 0, /* properties_destroyed */
3329 0, /* todo_flags_start */
8b88439e 3330 0, /* todo_flags_finish */
3bd82487 3331};
3332
cbe8bda8 3333class pass_lower_resx : public gimple_opt_pass
3334{
3335public:
9af5ce0c 3336 pass_lower_resx (gcc::context *ctxt)
3337 : gimple_opt_pass (pass_data_lower_resx, ctxt)
cbe8bda8 3338 {}
3339
3340 /* opt_pass methods: */
31315c24 3341 virtual bool gate (function *) { return flag_exceptions != 0; }
65b0537f 3342 virtual unsigned int execute (function *);
cbe8bda8 3343
3344}; // class pass_lower_resx
3345
65b0537f 3346unsigned
3347pass_lower_resx::execute (function *fun)
e38def9c 3348{
3349 basic_block bb;
e38def9c 3350 bool dominance_invalidated = false;
3351 bool any_rewritten = false;
3bd82487 3352
06ecf488 3353 hash_map<eh_region, tree> mnt_map;
3bd82487 3354
65b0537f 3355 FOR_EACH_BB_FN (bb, fun)
e38def9c 3356 {
3357 gimple last = last_stmt (bb);
3358 if (last && is_gimple_resx (last))
3359 {
1a91d914 3360 dominance_invalidated |=
3361 lower_resx (bb, as_a <gresx *> (last), &mnt_map);
e38def9c 3362 any_rewritten = true;
3363 }
3364 }
3365
e38def9c 3366 if (dominance_invalidated)
3367 {
3368 free_dominance_info (CDI_DOMINATORS);
3369 free_dominance_info (CDI_POST_DOMINATORS);
3bd82487 3370 }
4c5fcca6 3371
e38def9c 3372 return any_rewritten ? TODO_update_ssa_only_virtuals : 0;
3373}
4c5fcca6 3374
cbe8bda8 3375} // anon namespace
3376
3377gimple_opt_pass *
3378make_pass_lower_resx (gcc::context *ctxt)
3379{
3380 return new pass_lower_resx (ctxt);
3381}
3382
1227a337 3383/* Try to optimize var = {v} {CLOBBER} stmts followed just by
3384 external throw. */
3385
3386static void
3387optimize_clobbers (basic_block bb)
3388{
3389 gimple_stmt_iterator gsi = gsi_last_bb (bb);
896a0c42 3390 bool any_clobbers = false;
3391 bool seen_stack_restore = false;
3392 edge_iterator ei;
3393 edge e;
3394
3395 /* Only optimize anything if the bb contains at least one clobber,
3396 ends with resx (checked by caller), optionally contains some
 3397 debug stmts or labels, and at most one __builtin_stack_restore
3398 call, and has an incoming EH edge. */
d1d905ee 3399 for (gsi_prev (&gsi); !gsi_end_p (gsi); gsi_prev (&gsi))
1227a337 3400 {
3401 gimple stmt = gsi_stmt (gsi);
3402 if (is_gimple_debug (stmt))
d1d905ee 3403 continue;
896a0c42 3404 if (gimple_clobber_p (stmt))
3405 {
3406 any_clobbers = true;
3407 continue;
3408 }
3409 if (!seen_stack_restore
3410 && gimple_call_builtin_p (stmt, BUILT_IN_STACK_RESTORE))
3411 {
3412 seen_stack_restore = true;
3413 continue;
3414 }
3415 if (gimple_code (stmt) == GIMPLE_LABEL)
3416 break;
3417 return;
3418 }
3419 if (!any_clobbers)
3420 return;
3421 FOR_EACH_EDGE (e, ei, bb->preds)
3422 if (e->flags & EDGE_EH)
3423 break;
3424 if (e == NULL)
3425 return;
3426 gsi = gsi_last_bb (bb);
3427 for (gsi_prev (&gsi); !gsi_end_p (gsi); gsi_prev (&gsi))
3428 {
3429 gimple stmt = gsi_stmt (gsi);
3430 if (!gimple_clobber_p (stmt))
3431 continue;
1227a337 3432 unlink_stmt_vdef (stmt);
3433 gsi_remove (&gsi, true);
3434 release_defs (stmt);
3435 }
3436}
e38def9c 3437
07428872 3438/* Try to sink var = {v} {CLOBBER} stmts followed just by
3439 internal throw to successor BB. */
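/* For example, if BB ends in a resx that rethrows to an outer region in
   this function and otherwise contains only clobbers,

       x ={v} {CLOBBER};
       resx 1;

   the clobbers can be moved to the start of BB's single EH successor,
   keeping them live along the exception path.  (Illustrative sketch
   only; the exact conditions are checked in the code below.)  */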
3440
3441static int
3442sink_clobbers (basic_block bb)
3443{
3444 edge e;
3445 edge_iterator ei;
3446 gimple_stmt_iterator gsi, dgsi;
3447 basic_block succbb;
3448 bool any_clobbers = false;
8aacb2c5 3449 unsigned todo = 0;
07428872 3450
3451 /* Only optimize if BB has a single EH successor and
3452 all predecessor edges are EH too. */
3453 if (!single_succ_p (bb)
3454 || (single_succ_edge (bb)->flags & EDGE_EH) == 0)
3455 return 0;
3456
3457 FOR_EACH_EDGE (e, ei, bb->preds)
3458 {
3459 if ((e->flags & EDGE_EH) == 0)
3460 return 0;
3461 }
3462
3463 /* And BB contains only CLOBBER stmts before the final
3464 RESX. */
3465 gsi = gsi_last_bb (bb);
3466 for (gsi_prev (&gsi); !gsi_end_p (gsi); gsi_prev (&gsi))
3467 {
3468 gimple stmt = gsi_stmt (gsi);
3469 if (is_gimple_debug (stmt))
3470 continue;
3471 if (gimple_code (stmt) == GIMPLE_LABEL)
3472 break;
896a0c42 3473 if (!gimple_clobber_p (stmt))
07428872 3474 return 0;
3475 any_clobbers = true;
3476 }
3477 if (!any_clobbers)
3478 return 0;
3479
0ba38440 3480 edge succe = single_succ_edge (bb);
3481 succbb = succe->dest;
3482
3483 /* See if there is a virtual PHI node to take an updated virtual
3484 operand from. */
1a91d914 3485 gphi *vphi = NULL;
0ba38440 3486 tree vuse = NULL_TREE;
1a91d914 3487 for (gphi_iterator gpi = gsi_start_phis (succbb);
3488 !gsi_end_p (gpi); gsi_next (&gpi))
0ba38440 3489 {
1a91d914 3490 tree res = gimple_phi_result (gpi.phi ());
0ba38440 3491 if (virtual_operand_p (res))
3492 {
1a91d914 3493 vphi = gpi.phi ();
0ba38440 3494 vuse = res;
3495 break;
3496 }
3497 }
3498
07428872 3499 dgsi = gsi_after_labels (succbb);
3500 gsi = gsi_last_bb (bb);
3501 for (gsi_prev (&gsi); !gsi_end_p (gsi); gsi_prev (&gsi))
3502 {
3503 gimple stmt = gsi_stmt (gsi);
896a0c42 3504 tree lhs;
07428872 3505 if (is_gimple_debug (stmt))
3506 continue;
3507 if (gimple_code (stmt) == GIMPLE_LABEL)
3508 break;
896a0c42 3509 lhs = gimple_assign_lhs (stmt);
3510 /* Unfortunately we don't have dominance info updated at this
3511 point, so checking if
3512 dominated_by_p (CDI_DOMINATORS, succbb,
 3513 gimple_bb (SSA_NAME_DEF_STMT (TREE_OPERAND (lhs, 0))))
3514 would be too costly. Thus, avoid sinking any clobbers that
3515 refer to non-(D) SSA_NAMEs. */
3516 if (TREE_CODE (lhs) == MEM_REF
3517 && TREE_CODE (TREE_OPERAND (lhs, 0)) == SSA_NAME
3518 && !SSA_NAME_IS_DEFAULT_DEF (TREE_OPERAND (lhs, 0)))
3519 {
0ba38440 3520 unlink_stmt_vdef (stmt);
896a0c42 3521 gsi_remove (&gsi, true);
3522 release_defs (stmt);
3523 continue;
3524 }
0ba38440 3525
3526 /* As we do not change stmt order when sinking across a
3527 forwarder edge we can keep virtual operands in place. */
07428872 3528 gsi_remove (&gsi, false);
0ba38440 3529 gsi_insert_before (&dgsi, stmt, GSI_NEW_STMT);
3530
3531 /* But adjust virtual operands if we sunk across a PHI node. */
3532 if (vuse)
3533 {
3534 gimple use_stmt;
3535 imm_use_iterator iter;
3536 use_operand_p use_p;
3537 FOR_EACH_IMM_USE_STMT (use_stmt, iter, vuse)
3538 FOR_EACH_IMM_USE_ON_STMT (use_p, iter)
3539 SET_USE (use_p, gimple_vdef (stmt));
587a19f1 3540 if (SSA_NAME_OCCURS_IN_ABNORMAL_PHI (vuse))
3541 {
3542 SSA_NAME_OCCURS_IN_ABNORMAL_PHI (gimple_vdef (stmt)) = 1;
3543 SSA_NAME_OCCURS_IN_ABNORMAL_PHI (vuse) = 0;
3544 }
0ba38440 3545 /* Adjust the incoming virtual operand. */
3546 SET_USE (PHI_ARG_DEF_PTR_FROM_EDGE (vphi, succe), gimple_vuse (stmt));
3547 SET_USE (gimple_vuse_op (stmt), vuse);
3548 }
8aacb2c5 3549 /* If the successor has multiple predecessors but no virtual PHI node,
 3550 arrange for virtual operands to be renamed.
3551 else if (gimple_vuse_op (stmt) != NULL_USE_OPERAND_P
3552 && !single_pred_p (succbb))
3553 {
3554 /* In this case there will be no use of the VDEF of this stmt.
3555 ??? Unless this is a secondary opportunity and we have not
3556 removed unreachable blocks yet, so we cannot assert this.
3557 Which also means we will end up renaming too many times. */
3558 SET_USE (gimple_vuse_op (stmt), gimple_vop (cfun));
3559 mark_virtual_operands_for_renaming (cfun);
3560 todo |= TODO_update_ssa_only_virtuals;
3561 }
07428872 3562 }
3563
8aacb2c5 3564 return todo;
07428872 3565}
3566
778f5bdd 3567/* At the end of inlining, we can lower EH_DISPATCH. Return true when
3568 we have found some duplicate labels and removed some edges. */
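/* For an ERT_TRY region this roughly turns

       eh_dispatch 1;

   into

       filter.0 = __builtin_eh_filter (1);
       switch (filter.0) <default: <fallthru/catch-all>, case N: <handler>, ...>

   while ERT_ALLOWED_EXCEPTIONS becomes a comparison of the filter value
   against the allowed filter followed by a conditional branch.  (The
   dump syntax and the temporary name filter.0 above are only a sketch.)  */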
3bd82487 3569
778f5bdd 3570static bool
1a91d914 3571lower_eh_dispatch (basic_block src, geh_dispatch *stmt)
3bd82487 3572{
e38def9c 3573 gimple_stmt_iterator gsi;
3574 int region_nr;
3575 eh_region r;
3576 tree filter, fn;
3577 gimple x;
778f5bdd 3578 bool redirected = false;
3bd82487 3579
e38def9c 3580 region_nr = gimple_eh_dispatch_region (stmt);
3581 r = get_eh_region_from_number (region_nr);
3bd82487 3582
e38def9c 3583 gsi = gsi_last_bb (src);
3bd82487 3584
e38def9c 3585 switch (r->type)
3bd82487 3586 {
e38def9c 3587 case ERT_TRY:
3588 {
c2078b80 3589 auto_vec<tree> labels;
e38def9c 3590 tree default_label = NULL;
3591 eh_catch c;
3592 edge_iterator ei;
3593 edge e;
431205b7 3594 hash_set<tree> seen_values;
e38def9c 3595
3596 /* Collect the labels for a switch. Zero the post_landing_pad
 3597 field because we'll no longer have anything keeping these labels
9d75589a 3598 in existence and the optimizer will be free to merge these
e38def9c 3599 blocks at will. */
3600 for (c = r->u.eh_try.first_catch; c ; c = c->next_catch)
3601 {
3602 tree tp_node, flt_node, lab = c->label;
778f5bdd 3603 bool have_label = false;
3bd82487 3604
e38def9c 3605 c->label = NULL;
3606 tp_node = c->type_list;
3607 flt_node = c->filter_list;
3608
3609 if (tp_node == NULL)
3610 {
3611 default_label = lab;
3612 break;
3613 }
3614 do
3615 {
778f5bdd 3616 /* Filter out duplicate labels that arise when this handler
3617 is shadowed by an earlier one. When no labels are
3618 attached to the handler anymore, we remove
3619 the corresponding edge and then we delete unreachable
3620 blocks at the end of this pass. */
431205b7 3621 if (! seen_values.contains (TREE_VALUE (flt_node)))
778f5bdd 3622 {
b6e3dd65 3623 tree t = build_case_label (TREE_VALUE (flt_node),
3624 NULL, lab);
f1f41a6c 3625 labels.safe_push (t);
431205b7 3626 seen_values.add (TREE_VALUE (flt_node));
778f5bdd 3627 have_label = true;
3628 }
e38def9c 3629
3630 tp_node = TREE_CHAIN (tp_node);
3631 flt_node = TREE_CHAIN (flt_node);
3632 }
3633 while (tp_node);
778f5bdd 3634 if (! have_label)
3635 {
3636 remove_edge (find_edge (src, label_to_block (lab)));
3637 redirected = true;
3638 }
e38def9c 3639 }
3640
3641 /* Clean up the edge flags. */
3642 FOR_EACH_EDGE (e, ei, src->succs)
3643 {
3644 if (e->flags & EDGE_FALLTHRU)
3645 {
3646 /* If there was no catch-all, use the fallthru edge. */
3647 if (default_label == NULL)
3648 default_label = gimple_block_label (e->dest);
3649 e->flags &= ~EDGE_FALLTHRU;
3650 }
3651 }
3652 gcc_assert (default_label != NULL);
3653
3654 /* Don't generate a switch if there's only a default case.
3655 This is common in the form of try { A; } catch (...) { B; }. */
f1f41a6c 3656 if (!labels.exists ())
e38def9c 3657 {
3658 e = single_succ_edge (src);
3659 e->flags |= EDGE_FALLTHRU;
3660 }
3661 else
3662 {
b9a16870 3663 fn = builtin_decl_implicit (BUILT_IN_EH_FILTER);
bad12c62 3664 x = gimple_build_call (fn, 1, build_int_cst (integer_type_node,
3665 region_nr));
f9e245b2 3666 filter = create_tmp_var (TREE_TYPE (TREE_TYPE (fn)));
e38def9c 3667 filter = make_ssa_name (filter, x);
3668 gimple_call_set_lhs (x, filter);
3669 gsi_insert_before (&gsi, x, GSI_SAME_STMT);
3670
3671 /* Turn the default label into a default case. */
b6e3dd65 3672 default_label = build_case_label (NULL, NULL, default_label);
e38def9c 3673 sort_case_labels (labels);
3674
49a70175 3675 x = gimple_build_switch (filter, default_label, labels);
e38def9c 3676 gsi_insert_before (&gsi, x, GSI_SAME_STMT);
e38def9c 3677 }
3678 }
3679 break;
3680
3681 case ERT_ALLOWED_EXCEPTIONS:
3682 {
3683 edge b_e = BRANCH_EDGE (src);
3684 edge f_e = FALLTHRU_EDGE (src);
3685
b9a16870 3686 fn = builtin_decl_implicit (BUILT_IN_EH_FILTER);
bad12c62 3687 x = gimple_build_call (fn, 1, build_int_cst (integer_type_node,
3688 region_nr));
f9e245b2 3689 filter = create_tmp_var (TREE_TYPE (TREE_TYPE (fn)));
e38def9c 3690 filter = make_ssa_name (filter, x);
3691 gimple_call_set_lhs (x, filter);
3692 gsi_insert_before (&gsi, x, GSI_SAME_STMT);
3693
3694 r->u.allowed.label = NULL;
3695 x = gimple_build_cond (EQ_EXPR, filter,
3696 build_int_cst (TREE_TYPE (filter),
3697 r->u.allowed.filter),
3698 NULL_TREE, NULL_TREE);
3699 gsi_insert_before (&gsi, x, GSI_SAME_STMT);
3700
3701 b_e->flags = b_e->flags | EDGE_TRUE_VALUE;
3702 f_e->flags = (f_e->flags & ~EDGE_FALLTHRU) | EDGE_FALSE_VALUE;
3703 }
3704 break;
3705
3706 default:
3707 gcc_unreachable ();
3bd82487 3708 }
e38def9c 3709
3710 /* Replace the EH_DISPATCH with the SWITCH or COND generated above. */
3711 gsi_remove (&gsi, true);
778f5bdd 3712 return redirected;
3bd82487 3713}
3714
65b0537f 3715namespace {
3716
3717const pass_data pass_data_lower_eh_dispatch =
3718{
3719 GIMPLE_PASS, /* type */
3720 "ehdisp", /* name */
3721 OPTGROUP_NONE, /* optinfo_flags */
65b0537f 3722 TV_TREE_EH, /* tv_id */
3723 PROP_gimple_lcf, /* properties_required */
3724 0, /* properties_provided */
3725 0, /* properties_destroyed */
3726 0, /* todo_flags_start */
8b88439e 3727 0, /* todo_flags_finish */
65b0537f 3728};
3729
3730class pass_lower_eh_dispatch : public gimple_opt_pass
3731{
3732public:
3733 pass_lower_eh_dispatch (gcc::context *ctxt)
3734 : gimple_opt_pass (pass_data_lower_eh_dispatch, ctxt)
3735 {}
3736
3737 /* opt_pass methods: */
3738 virtual bool gate (function *fun) { return fun->eh->region_tree != NULL; }
3739 virtual unsigned int execute (function *);
3740
3741}; // class pass_lower_eh_dispatch
3742
3743unsigned
3744pass_lower_eh_dispatch::execute (function *fun)
e38def9c 3745{
3746 basic_block bb;
07428872 3747 int flags = 0;
778f5bdd 3748 bool redirected = false;
3bd82487 3749
e38def9c 3750 assign_filter_values ();
3d1eacdb 3751
65b0537f 3752 FOR_EACH_BB_FN (bb, fun)
e38def9c 3753 {
3754 gimple last = last_stmt (bb);
1227a337 3755 if (last == NULL)
3756 continue;
3757 if (gimple_code (last) == GIMPLE_EH_DISPATCH)
e38def9c 3758 {
1a91d914 3759 redirected |= lower_eh_dispatch (bb,
3760 as_a <geh_dispatch *> (last));
07428872 3761 flags |= TODO_update_ssa_only_virtuals;
3762 }
3763 else if (gimple_code (last) == GIMPLE_RESX)
3764 {
3765 if (stmt_can_throw_external (last))
3766 optimize_clobbers (bb);
3767 else
3768 flags |= sink_clobbers (bb);
e38def9c 3769 }
3770 }
3771
778f5bdd 3772 if (redirected)
3773 delete_unreachable_blocks ();
07428872 3774 return flags;
e38def9c 3775}
3776
cbe8bda8 3777} // anon namespace
3778
3779gimple_opt_pass *
3780make_pass_lower_eh_dispatch (gcc::context *ctxt)
3781{
3782 return new pass_lower_eh_dispatch (ctxt);
3783}
e38def9c 3784\f
390f4a4b 3785/* Walk statements, see what regions and, optionally, landing pads
3786 are really referenced.
3787
3788 Returns in R_REACHABLEP an sbitmap with bits set for reachable regions,
 3789 and in LP_REACHABLEP an sbitmap with bits set for reachable landing pads.
3790
 3791 Passing NULL for LP_REACHABLEP is valid; in this case only reachable
 3792 regions are marked.
3793
3794 The caller is responsible for freeing the returned sbitmaps. */
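/* Typical use, as in remove_unreachable_handlers below:

       sbitmap r_reachable, lp_reachable;
       mark_reachable_handlers (&r_reachable, &lp_reachable);
       ... query the bits with bitmap_bit_p ...
       sbitmap_free (r_reachable);
       sbitmap_free (lp_reachable);  */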
e38def9c 3795
3796static void
390f4a4b 3797mark_reachable_handlers (sbitmap *r_reachablep, sbitmap *lp_reachablep)
e38def9c 3798{
3799 sbitmap r_reachable, lp_reachable;
e38def9c 3800 basic_block bb;
390f4a4b 3801 bool mark_landing_pads = (lp_reachablep != NULL);
3802 gcc_checking_assert (r_reachablep != NULL);
3bd82487 3803
f1f41a6c 3804 r_reachable = sbitmap_alloc (cfun->eh->region_array->length ());
53c5d9d4 3805 bitmap_clear (r_reachable);
390f4a4b 3806 *r_reachablep = r_reachable;
3807
3808 if (mark_landing_pads)
3809 {
3810 lp_reachable = sbitmap_alloc (cfun->eh->lp_array->length ());
3811 bitmap_clear (lp_reachable);
3812 *lp_reachablep = lp_reachable;
3813 }
3814 else
3815 lp_reachable = NULL;
3bd82487 3816
fc00614f 3817 FOR_EACH_BB_FN (bb, cfun)
3bd82487 3818 {
d0ac3b8a 3819 gimple_stmt_iterator gsi;
e38def9c 3820
3821 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
3822 {
3823 gimple stmt = gsi_stmt (gsi);
e38def9c 3824
390f4a4b 3825 if (mark_landing_pads)
e38def9c 3826 {
390f4a4b 3827 int lp_nr = lookup_stmt_eh_lp (stmt);
3828
3829 /* Negative LP numbers are MUST_NOT_THROW regions which
3830 are not considered BB enders. */
3831 if (lp_nr < 0)
3832 bitmap_set_bit (r_reachable, -lp_nr);
3833
3834 /* Positive LP numbers are real landing pads, and BB enders. */
3835 else if (lp_nr > 0)
3836 {
3837 gcc_assert (gsi_one_before_end_p (gsi));
3838 eh_region region = get_eh_region_from_lp_number (lp_nr);
3839 bitmap_set_bit (r_reachable, region->index);
3840 bitmap_set_bit (lp_reachable, lp_nr);
3841 }
e38def9c 3842 }
4e392ca1 3843
3844 /* Avoid removing regions referenced from RESX/EH_DISPATCH. */
3845 switch (gimple_code (stmt))
3846 {
3847 case GIMPLE_RESX:
1a91d914 3848 bitmap_set_bit (r_reachable,
3849 gimple_resx_region (as_a <gresx *> (stmt)));
4e392ca1 3850 break;
3851 case GIMPLE_EH_DISPATCH:
1a91d914 3852 bitmap_set_bit (r_reachable,
3853 gimple_eh_dispatch_region (
3854 as_a <geh_dispatch *> (stmt)));
4e392ca1 3855 break;
9eb0aee3 3856 case GIMPLE_CALL:
3857 if (gimple_call_builtin_p (stmt, BUILT_IN_EH_COPY_VALUES))
3858 for (int i = 0; i < 2; ++i)
3859 {
3860 tree rt = gimple_call_arg (stmt, i);
3861 HOST_WIDE_INT ri = tree_to_shwi (rt);
3862
 3863 gcc_assert (ri == (int)ri);
3864 bitmap_set_bit (r_reachable, ri);
3865 }
3866 break;
4e392ca1 3867 default:
3868 break;
3869 }
e38def9c 3870 }
3bd82487 3871 }
390f4a4b 3872}
3873
3874/* Remove unreachable handlers and unreachable landing pads. */
3875
3876static void
3877remove_unreachable_handlers (void)
3878{
3879 sbitmap r_reachable, lp_reachable;
3880 eh_region region;
3881 eh_landing_pad lp;
3882 unsigned i;
3883
3884 mark_reachable_handlers (&r_reachable, &lp_reachable);
e38def9c 3885
3886 if (dump_file)
3bd82487 3887 {
e38def9c 3888 fprintf (dump_file, "Before removal of unreachable regions:\n");
3889 dump_eh_tree (dump_file, cfun);
3890 fprintf (dump_file, "Reachable regions: ");
53c5d9d4 3891 dump_bitmap_file (dump_file, r_reachable);
e38def9c 3892 fprintf (dump_file, "Reachable landing pads: ");
53c5d9d4 3893 dump_bitmap_file (dump_file, lp_reachable);
3bd82487 3894 }
3895
390f4a4b 3896 if (dump_file)
3897 {
3898 FOR_EACH_VEC_SAFE_ELT (cfun->eh->region_array, i, region)
3899 if (region && !bitmap_bit_p (r_reachable, region->index))
3900 fprintf (dump_file,
3901 "Removing unreachable region %d\n",
3902 region->index);
3903 }
3904
3905 remove_unreachable_eh_regions (r_reachable);
3bd82487 3906
390f4a4b 3907 FOR_EACH_VEC_SAFE_ELT (cfun->eh->lp_array, i, lp)
3908 if (lp && !bitmap_bit_p (lp_reachable, lp->index))
e38def9c 3909 {
3910 if (dump_file)
390f4a4b 3911 fprintf (dump_file,
3912 "Removing unreachable landing pad %d\n",
3913 lp->index);
e38def9c 3914 remove_eh_landing_pad (lp);
3915 }
48e1416a 3916
e38def9c 3917 if (dump_file)
3bd82487 3918 {
e38def9c 3919 fprintf (dump_file, "\n\nAfter removal of unreachable regions:\n");
3920 dump_eh_tree (dump_file, cfun);
3921 fprintf (dump_file, "\n\n");
3bd82487 3922 }
3923
e38def9c 3924 sbitmap_free (r_reachable);
3925 sbitmap_free (lp_reachable);
3926
3927#ifdef ENABLE_CHECKING
3928 verify_eh_tree (cfun);
3929#endif
3930}
3931
b00b0dc4 3932/* Remove unreachable handlers if any landing pads have been removed after
3933 last ehcleanup pass (due to gimple_purge_dead_eh_edges). */
3934
3935void
3936maybe_remove_unreachable_handlers (void)
3937{
3938 eh_landing_pad lp;
390f4a4b 3939 unsigned i;
b00b0dc4 3940
3941 if (cfun->eh == NULL)
3942 return;
390f4a4b 3943
3944 FOR_EACH_VEC_SAFE_ELT (cfun->eh->lp_array, i, lp)
b00b0dc4 3945 if (lp && lp->post_landing_pad)
3946 {
3947 if (label_to_block (lp->post_landing_pad) == NULL)
3948 {
3949 remove_unreachable_handlers ();
3950 return;
3951 }
3952 }
3953}
3954
e38def9c 3955/* Remove regions that do not have landing pads. This assumes
3956 that remove_unreachable_handlers has already been run, and
390f4a4b 3957 that we've just manipulated the landing pads since then.
3958
3959 Preserve regions with landing pads and regions that prevent
3960 exceptions from propagating further, even if these regions
3961 are not reachable. */
e38def9c 3962
3963static void
3964remove_unreachable_handlers_no_lp (void)
3965{
390f4a4b 3966 eh_region region;
4b393c71 3967 sbitmap r_reachable;
390f4a4b 3968 unsigned i;
4b393c71 3969
390f4a4b 3970 mark_reachable_handlers (&r_reachable, /*lp_reachablep=*/NULL);
4b393c71 3971
390f4a4b 3972 FOR_EACH_VEC_SAFE_ELT (cfun->eh->region_array, i, region)
4b393c71 3973 {
390f4a4b 3974 if (! region)
3975 continue;
3976
3977 if (region->landing_pads != NULL
3978 || region->type == ERT_MUST_NOT_THROW)
3979 bitmap_set_bit (r_reachable, region->index);
3980
3981 if (dump_file
3982 && !bitmap_bit_p (r_reachable, region->index))
3983 fprintf (dump_file,
3984 "Removing unreachable region %d\n",
3985 region->index);
4b393c71 3986 }
e38def9c 3987
390f4a4b 3988 remove_unreachable_eh_regions (r_reachable);
4b393c71 3989
3990 sbitmap_free (r_reachable);
3bd82487 3991}
3992
e38def9c 3993/* Undo critical edge splitting on an EH landing pad. Earlier, we
 3994 optimistically split all sorts of edges, including EH edges. The
3995 optimization passes in between may not have needed them; if not,
3996 we should undo the split.
3997
3998 Recognize this case by having one EH edge incoming to the BB and
3999 one normal edge outgoing; BB should be empty apart from the
4000 post_landing_pad label.
4001
4002 Note that this is slightly different from the empty handler case
4003 handled by cleanup_empty_eh, in that the actual handler may yet
4004 have actual code but the landing pad has been separated from the
4005 handler. As such, cleanup_empty_eh relies on this transformation
4006 having been done first. */
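/* In CFG terms this turns

       pred --EH--> <post_landing_pad label only> --normal--> handler

   back into

       pred --EH--> handler

   provided the intermediate block is empty apart from labels and debug
   stmts and the handler block has no landing pad of a different region.
   (Sketch of the transformation only.)  */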
4c5fcca6 4007
4008static bool
e38def9c 4009unsplit_eh (eh_landing_pad lp)
4c5fcca6 4010{
e38def9c 4011 basic_block bb = label_to_block (lp->post_landing_pad);
4012 gimple_stmt_iterator gsi;
4013 edge e_in, e_out;
4014
4015 /* Quickly check the edge counts on BB for singularity. */
896a0c42 4016 if (!single_pred_p (bb) || !single_succ_p (bb))
e38def9c 4017 return false;
896a0c42 4018 e_in = single_pred_edge (bb);
4019 e_out = single_succ_edge (bb);
4c5fcca6 4020
e38def9c 4021 /* Input edge must be EH and output edge must be normal. */
4022 if ((e_in->flags & EDGE_EH) == 0 || (e_out->flags & EDGE_EH) != 0)
4023 return false;
4024
0b76e49c 4025 /* The block must be empty except for the labels and debug insns. */
4026 gsi = gsi_after_labels (bb);
4027 if (!gsi_end_p (gsi) && is_gimple_debug (gsi_stmt (gsi)))
4028 gsi_next_nondebug (&gsi);
4029 if (!gsi_end_p (gsi))
e38def9c 4030 return false;
4031
4032 /* The destination block must not already have a landing pad
4033 for a different region. */
4034 for (gsi = gsi_start_bb (e_out->dest); !gsi_end_p (gsi); gsi_next (&gsi))
4c5fcca6 4035 {
1a91d914 4036 glabel *label_stmt = dyn_cast <glabel *> (gsi_stmt (gsi));
e38def9c 4037 tree lab;
4038 int lp_nr;
4c5fcca6 4039
1a91d914 4040 if (!label_stmt)
e38def9c 4041 break;
1a91d914 4042 lab = gimple_label_label (label_stmt);
e38def9c 4043 lp_nr = EH_LANDING_PAD_NR (lab);
4044 if (lp_nr && get_eh_region_from_lp_number (lp_nr) != lp->region)
4045 return false;
4046 }
4c5fcca6 4047
e9d5f86f 4048 /* The new destination block must not already be a destination of
4049 the source block, lest we merge fallthru and eh edges and get
 4050 all sorts of confusion. */
4051 if (find_edge (e_in->src, e_out->dest))
4052 return false;
4053
c57e3b9d 4054 /* ??? We can get degenerate phis due to cfg cleanups. I would have
4055 thought this should have been cleaned up by a phicprop pass, but
4056 that doesn't appear to handle virtuals. Propagate by hand. */
4057 if (!gimple_seq_empty_p (phi_nodes (bb)))
4058 {
1a91d914 4059 for (gphi_iterator gpi = gsi_start_phis (bb); !gsi_end_p (gpi); )
c57e3b9d 4060 {
1a91d914 4061 gimple use_stmt;
4062 gphi *phi = gpi.phi ();
c57e3b9d 4063 tree lhs = gimple_phi_result (phi);
4064 tree rhs = gimple_phi_arg_def (phi, 0);
4065 use_operand_p use_p;
4066 imm_use_iterator iter;
4067
4068 FOR_EACH_IMM_USE_STMT (use_stmt, iter, lhs)
4069 {
4070 FOR_EACH_IMM_USE_ON_STMT (use_p, iter)
4071 SET_USE (use_p, rhs);
4072 }
4073
4074 if (SSA_NAME_OCCURS_IN_ABNORMAL_PHI (lhs))
4075 SSA_NAME_OCCURS_IN_ABNORMAL_PHI (rhs) = 1;
4076
1a91d914 4077 remove_phi_node (&gpi, true);
c57e3b9d 4078 }
4079 }
3d1eacdb 4080
e38def9c 4081 if (dump_file && (dump_flags & TDF_DETAILS))
4082 fprintf (dump_file, "Unsplit EH landing pad %d to block %i.\n",
4083 lp->index, e_out->dest->index);
4084
4085 /* Redirect the edge. Since redirect_eh_edge_1 expects to be moving
4086 a successor edge, humor it. But do the real CFG change with the
4087 predecessor of E_OUT in order to preserve the ordering of arguments
4088 to the PHI nodes in E_OUT->DEST. */
4089 redirect_eh_edge_1 (e_in, e_out->dest, false);
4090 redirect_edge_pred (e_out, e_in->src);
4091 e_out->flags = e_in->flags;
4092 e_out->probability = e_in->probability;
4093 e_out->count = e_in->count;
4094 remove_edge (e_in);
3d1eacdb 4095
e38def9c 4096 return true;
4097}
3d1eacdb 4098
e38def9c 4099/* Examine each landing pad block and see if it matches unsplit_eh. */
3d1eacdb 4100
e38def9c 4101static bool
4102unsplit_all_eh (void)
4103{
4104 bool changed = false;
4105 eh_landing_pad lp;
4106 int i;
3d1eacdb 4107
f1f41a6c 4108 for (i = 1; vec_safe_iterate (cfun->eh->lp_array, i, &lp); ++i)
e38def9c 4109 if (lp)
4110 changed |= unsplit_eh (lp);
4111
4112 return changed;
4113}
4114
4115/* A subroutine of cleanup_empty_eh. Redirect all EH edges incoming
4116 to OLD_BB to NEW_BB; return true on success, false on failure.
4117
4118 OLD_BB_OUT is the edge into NEW_BB from OLD_BB, so if we miss any
4119 PHI variables from OLD_BB we can pick them up from OLD_BB_OUT.
4120 Virtual PHIs may be deleted and marked for renaming. */
4121
4122static bool
4123cleanup_empty_eh_merge_phis (basic_block new_bb, basic_block old_bb,
c57e3b9d 4124 edge old_bb_out, bool change_region)
e38def9c 4125{
1a91d914 4126 gphi_iterator ngsi, ogsi;
e38def9c 4127 edge_iterator ei;
4128 edge e;
e38def9c 4129 bitmap ophi_handled;
4130
19bcc424 4131 /* The destination block must not be a regular successor for any
4132 of the preds of the landing pad. Thus, avoid turning
4133 <..>
4134 | \ EH
4135 | <..>
4136 | /
4137 <..>
4138 into
4139 <..>
4140 | | EH
4141 <..>
4142 which CFG verification would choke on. See PR45172 and PR51089. */
4143 FOR_EACH_EDGE (e, ei, old_bb->preds)
4144 if (find_edge (e->src, new_bb))
4145 return false;
4146
e38def9c 4147 FOR_EACH_EDGE (e, ei, old_bb->preds)
4148 redirect_edge_var_map_clear (e);
4149
4150 ophi_handled = BITMAP_ALLOC (NULL);
e38def9c 4151
4152 /* First, iterate through the PHIs on NEW_BB and set up the edge_var_map
4153 for the edges we're going to move. */
4154 for (ngsi = gsi_start_phis (new_bb); !gsi_end_p (ngsi); gsi_next (&ngsi))
4155 {
1a91d914 4156 gphi *ophi, *nphi = ngsi.phi ();
e38def9c 4157 tree nresult, nop;
4158
4159 nresult = gimple_phi_result (nphi);
4160 nop = gimple_phi_arg_def (nphi, old_bb_out->dest_idx);
4161
4162 /* Find the corresponding PHI in OLD_BB so we can forward-propagate
4163 the source ssa_name. */
4164 ophi = NULL;
4165 for (ogsi = gsi_start_phis (old_bb); !gsi_end_p (ogsi); gsi_next (&ogsi))
4166 {
1a91d914 4167 ophi = ogsi.phi ();
e38def9c 4168 if (gimple_phi_result (ophi) == nop)
4169 break;
4170 ophi = NULL;
927a6b6b 4171 }
3d1eacdb 4172
e38def9c 4173 /* If we did find the corresponding PHI, copy those inputs. */
4174 if (ophi)
4c5fcca6 4175 {
6e21b2e0 4176 /* If NOP is used somewhere else beyond phis in new_bb, give up. */
4177 if (!has_single_use (nop))
4178 {
4179 imm_use_iterator imm_iter;
4180 use_operand_p use_p;
4181
4182 FOR_EACH_IMM_USE_FAST (use_p, imm_iter, nop)
4183 {
4184 if (!gimple_debug_bind_p (USE_STMT (use_p))
4185 && (gimple_code (USE_STMT (use_p)) != GIMPLE_PHI
4186 || gimple_bb (USE_STMT (use_p)) != new_bb))
4187 goto fail;
4188 }
4189 }
e38def9c 4190 bitmap_set_bit (ophi_handled, SSA_NAME_VERSION (nop));
4191 FOR_EACH_EDGE (e, ei, old_bb->preds)
3d1eacdb 4192 {
e38def9c 4193 location_t oloc;
4194 tree oop;
4195
4196 if ((e->flags & EDGE_EH) == 0)
4197 continue;
4198 oop = gimple_phi_arg_def (ophi, e->dest_idx);
4199 oloc = gimple_phi_arg_location (ophi, e->dest_idx);
60d535d2 4200 redirect_edge_var_map_add (e, nresult, oop, oloc);
3d1eacdb 4201 }
e38def9c 4202 }
077b5b8a 4203 /* If we didn't find the PHI, whether it's a real variable or a VOP, we know
e38def9c 4204 from the fact that OLD_BB is tree_empty_eh_handler_p that the
4205 variable is unchanged from input to the block and we can simply
4206 re-use the input to NEW_BB from the OLD_BB_OUT edge. */
4207 else
4208 {
4209 location_t nloc
4210 = gimple_phi_arg_location (nphi, old_bb_out->dest_idx);
4211 FOR_EACH_EDGE (e, ei, old_bb->preds)
60d535d2 4212 redirect_edge_var_map_add (e, nresult, nop, nloc);
e38def9c 4213 }
4214 }
4215
4216 /* Second, verify that all PHIs from OLD_BB have been handled. If not,
4217 we don't know what values from the other edges into NEW_BB to use. */
4218 for (ogsi = gsi_start_phis (old_bb); !gsi_end_p (ogsi); gsi_next (&ogsi))
4219 {
1a91d914 4220 gphi *ophi = ogsi.phi ();
e38def9c 4221 tree oresult = gimple_phi_result (ophi);
4222 if (!bitmap_bit_p (ophi_handled, SSA_NAME_VERSION (oresult)))
4223 goto fail;
4224 }
4225
e38def9c 4226 /* Finally, move the edges and update the PHIs. */
4227 for (ei = ei_start (old_bb->preds); (e = ei_safe_edge (ei)); )
4228 if (e->flags & EDGE_EH)
4229 {
3a37f7bd 4230 /* ??? CFG manipulation routines do not try to update loop
4231 form on edge redirection. Do so manually here for now. */
 4232 /* If we redirect a loop entry or latch edge, that will either create
 4233 a multiple-entry loop or rotate the loop. If the loops merge,
 4234 we may have created a loop with multiple latches.
4235 All of this isn't easily fixed thus cancel the affected loop
4236 and mark the other loop as possibly having multiple latches. */
b3083327 4237 if (e->dest == e->dest->loop_father->header)
3a37f7bd 4238 {
d25159cc 4239 mark_loop_for_removal (e->dest->loop_father);
3a37f7bd 4240 new_bb->loop_father->latch = NULL;
d25159cc 4241 loops_state_set (LOOPS_MAY_HAVE_MULTIPLE_LATCHES);
3a37f7bd 4242 }
c57e3b9d 4243 redirect_eh_edge_1 (e, new_bb, change_region);
e38def9c 4244 redirect_edge_succ (e, new_bb);
4245 flush_pending_stmts (e);
4246 }
4247 else
4248 ei_next (&ei);
3bd82487 4249
e38def9c 4250 BITMAP_FREE (ophi_handled);
e38def9c 4251 return true;
4252
4253 fail:
4254 FOR_EACH_EDGE (e, ei, old_bb->preds)
4255 redirect_edge_var_map_clear (e);
4256 BITMAP_FREE (ophi_handled);
e38def9c 4257 return false;
4258}
4259
4260/* A subroutine of cleanup_empty_eh. Move a landing pad LP from its
4261 old region to NEW_REGION at BB. */
4262
4263static void
4264cleanup_empty_eh_move_lp (basic_block bb, edge e_out,
4265 eh_landing_pad lp, eh_region new_region)
4266{
4267 gimple_stmt_iterator gsi;
4268 eh_landing_pad *pp;
4269
4270 for (pp = &lp->region->landing_pads; *pp != lp; pp = &(*pp)->next_lp)
4271 continue;
4272 *pp = lp->next_lp;
4273
4274 lp->region = new_region;
4275 lp->next_lp = new_region->landing_pads;
4276 new_region->landing_pads = lp;
4277
4278 /* Delete the RESX that was matched within the empty handler block. */
4279 gsi = gsi_last_bb (bb);
bc8a8451 4280 unlink_stmt_vdef (gsi_stmt (gsi));
e38def9c 4281 gsi_remove (&gsi, true);
4282
4283 /* Clean up E_OUT for the fallthru. */
4284 e_out->flags = (e_out->flags & ~EDGE_EH) | EDGE_FALLTHRU;
4285 e_out->probability = REG_BR_PROB_BASE;
4286}
4287
4288/* A subroutine of cleanup_empty_eh. Handle more complex cases of
48e1416a 4289 unsplitting than unsplit_eh was prepared to handle, e.g. when
e38def9c 4290 multiple incoming edges and phis are involved. */
4291
4292static bool
c57e3b9d 4293cleanup_empty_eh_unsplit (basic_block bb, edge e_out, eh_landing_pad lp)
e38def9c 4294{
4295 gimple_stmt_iterator gsi;
e38def9c 4296 tree lab;
4297
4298 /* We really ought not have totally lost everything following
4299 a landing pad label. Given that BB is empty, there had better
4300 be a successor. */
4301 gcc_assert (e_out != NULL);
4302
c57e3b9d 4303 /* The destination block must not already have a landing pad
4304 for a different region. */
e38def9c 4305 lab = NULL;
4306 for (gsi = gsi_start_bb (e_out->dest); !gsi_end_p (gsi); gsi_next (&gsi))
4307 {
1a91d914 4308 glabel *stmt = dyn_cast <glabel *> (gsi_stmt (gsi));
c57e3b9d 4309 int lp_nr;
4310
1a91d914 4311 if (!stmt)
e38def9c 4312 break;
4313 lab = gimple_label_label (stmt);
c57e3b9d 4314 lp_nr = EH_LANDING_PAD_NR (lab);
4315 if (lp_nr && get_eh_region_from_lp_number (lp_nr) != lp->region)
4316 return false;
e38def9c 4317 }
e38def9c 4318
4319 /* Attempt to move the PHIs into the successor block. */
c57e3b9d 4320 if (cleanup_empty_eh_merge_phis (e_out->dest, bb, e_out, false))
e38def9c 4321 {
4322 if (dump_file && (dump_flags & TDF_DETAILS))
4323 fprintf (dump_file,
c57e3b9d 4324 "Unsplit EH landing pad %d to block %i "
4325 "(via cleanup_empty_eh).\n",
4326 lp->index, e_out->dest->index);
e38def9c 4327 return true;
4328 }
4329
4330 return false;
4331}
4332
a9309f85 4333/* Return true if edge E_FIRST is part of an empty infinite loop
4334 or leads to such a loop through a series of single successor
4335 empty bbs. */
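/* E.g. a self-loop

       bb5 -> bb5

   or a cycle of empty single-successor blocks

       bb5 -> bb6 -> bb7 -> bb5

   counts as an empty infinite loop here; cleanup_empty_eh below bails
   out when the landing pad leads into such a loop.  (The block numbers
   are made up.)  */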
4336
4337static bool
4338infinite_empty_loop_p (edge e_first)
4339{
4340 bool inf_loop = false;
4341 edge e;
4342
4343 if (e_first->dest == e_first->src)
4344 return true;
4345
4346 e_first->src->aux = (void *) 1;
4347 for (e = e_first; single_succ_p (e->dest); e = single_succ_edge (e->dest))
4348 {
4349 gimple_stmt_iterator gsi;
4350 if (e->dest->aux)
4351 {
4352 inf_loop = true;
4353 break;
4354 }
4355 e->dest->aux = (void *) 1;
4356 gsi = gsi_after_labels (e->dest);
4357 if (!gsi_end_p (gsi) && is_gimple_debug (gsi_stmt (gsi)))
4358 gsi_next_nondebug (&gsi);
4359 if (!gsi_end_p (gsi))
4360 break;
4361 }
4362 e_first->src->aux = NULL;
4363 for (e = e_first; e->dest->aux; e = single_succ_edge (e->dest))
4364 e->dest->aux = NULL;
4365
4366 return inf_loop;
4367}
4368
e38def9c 4369/* Examine the block associated with LP to determine if it's an empty
4370 handler for its EH region. If so, attempt to redirect EH edges to
 4371 an outer region. Return true if the CFG was updated in any way. This
4372 is similar to jump forwarding, just across EH edges. */
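/* Roughly, a landing-pad block that contains nothing but

       resx 2;

   can be eliminated: its incoming EH edges are redirected to the outer
   region that the resx transfers to, or dropped entirely when that
   region is outside this function or is a MUST_NOT_THROW region.
   (Illustrative sketch; the region number is made up.)  */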
4373
4374static bool
4375cleanup_empty_eh (eh_landing_pad lp)
4376{
4377 basic_block bb = label_to_block (lp->post_landing_pad);
4378 gimple_stmt_iterator gsi;
4379 gimple resx;
4380 eh_region new_region;
4381 edge_iterator ei;
4382 edge e, e_out;
4383 bool has_non_eh_pred;
b74338cf 4384 bool ret = false;
e38def9c 4385 int new_lp_nr;
4386
4387 /* There can be zero or one edges out of BB. This is the quickest test. */
4388 switch (EDGE_COUNT (bb->succs))
4389 {
4390 case 0:
4391 e_out = NULL;
4392 break;
4393 case 1:
896a0c42 4394 e_out = single_succ_edge (bb);
e38def9c 4395 break;
4396 default:
4397 return false;
4398 }
b74338cf 4399
4400 resx = last_stmt (bb);
4401 if (resx && is_gimple_resx (resx))
4402 {
4403 if (stmt_can_throw_external (resx))
4404 optimize_clobbers (bb);
4405 else if (sink_clobbers (bb))
4406 ret = true;
4407 }
4408
e38def9c 4409 gsi = gsi_after_labels (bb);
4410
4411 /* Make sure to skip debug statements. */
4412 if (!gsi_end_p (gsi) && is_gimple_debug (gsi_stmt (gsi)))
4413 gsi_next_nondebug (&gsi);
4414
4415 /* If the block is totally empty, look for more unsplitting cases. */
4416 if (gsi_end_p (gsi))
e54fce5c 4417 {
ae3a21c9 4418 /* For the degenerate case of an infinite loop, bail out.
 4419 If bb has no successors and is totally empty, which can happen e.g.
 4420 because of an incorrect noreturn attribute, bail out too.
4421 if (e_out == NULL
4422 || infinite_empty_loop_p (e_out))
b74338cf 4423 return ret;
e54fce5c 4424
b74338cf 4425 return ret | cleanup_empty_eh_unsplit (bb, e_out, lp);
e54fce5c 4426 }
e38def9c 4427
367113ea 4428 /* The block should consist only of a single RESX statement, modulo a
4429 preceding call to __builtin_stack_restore if there is no outgoing
4430 edge, since the call can be eliminated in this case. */
e38def9c 4431 resx = gsi_stmt (gsi);
367113ea 4432 if (!e_out && gimple_call_builtin_p (resx, BUILT_IN_STACK_RESTORE))
4433 {
4434 gsi_next (&gsi);
4435 resx = gsi_stmt (gsi);
4436 }
e38def9c 4437 if (!is_gimple_resx (resx))
b74338cf 4438 return ret;
e38def9c 4439 gcc_assert (gsi_one_before_end_p (gsi));
4440
4441 /* Determine if there are non-EH edges, or resx edges into the handler. */
4442 has_non_eh_pred = false;
4443 FOR_EACH_EDGE (e, ei, bb->preds)
4444 if (!(e->flags & EDGE_EH))
4445 has_non_eh_pred = true;
4446
4447 /* Find the handler that's outer of the empty handler by looking at
4448 where the RESX instruction was vectored. */
4449 new_lp_nr = lookup_stmt_eh_lp (resx);
4450 new_region = get_eh_region_from_lp_number (new_lp_nr);
4451
4452 /* If there's no destination region within the current function,
4453 redirection is trivial via removing the throwing statements from
4454 the EH region, removing the EH edges, and allowing the block
4455 to go unreachable. */
4456 if (new_region == NULL)
4457 {
4458 gcc_assert (e_out == NULL);
4459 for (ei = ei_start (bb->preds); (e = ei_safe_edge (ei)); )
4460 if (e->flags & EDGE_EH)
4461 {
4462 gimple stmt = last_stmt (e->src);
4463 remove_stmt_from_eh_lp (stmt);
4464 remove_edge (e);
4465 }
4466 else
4467 ei_next (&ei);
4468 goto succeed;
4469 }
4470
4471 /* If the destination region is a MUST_NOT_THROW, allow the runtime
4472 to handle the abort and allow the blocks to go unreachable. */
4473 if (new_region->type == ERT_MUST_NOT_THROW)
4474 {
4475 for (ei = ei_start (bb->preds); (e = ei_safe_edge (ei)); )
4476 if (e->flags & EDGE_EH)
4477 {
4478 gimple stmt = last_stmt (e->src);
4479 remove_stmt_from_eh_lp (stmt);
4480 add_stmt_to_eh_lp (stmt, new_lp_nr);
4481 remove_edge (e);
4482 }
4483 else
4484 ei_next (&ei);
4485 goto succeed;
4486 }
4487
4488 /* Try to redirect the EH edges and merge the PHIs into the destination
4489 landing pad block. If the merge succeeds, we'll already have redirected
4490 all the EH edges. The handler itself will go unreachable if there were
4491 no normal edges. */
c57e3b9d 4492 if (cleanup_empty_eh_merge_phis (e_out->dest, bb, e_out, true))
e38def9c 4493 goto succeed;
4494
4495 /* Finally, if all input edges are EH edges, then we can (potentially)
4496 reduce the number of transfers from the runtime by moving the landing
4497 pad from the original region to the new region. This is a win when
4498 we remove the last CLEANUP region along a particular exception
4499 propagation path. Since nothing changes except for the region with
4500 which the landing pad is associated, the PHI nodes do not need to be
4501 adjusted at all. */
4502 if (!has_non_eh_pred)
4503 {
4504 cleanup_empty_eh_move_lp (bb, e_out, lp, new_region);
4505 if (dump_file && (dump_flags & TDF_DETAILS))
4506 fprintf (dump_file, "Empty EH handler %i moved to EH region %i.\n",
4507 lp->index, new_region->index);
4508
4509 /* ??? The CFG didn't change, but we may have rendered the
4510 old EH region unreachable. Trigger a cleanup there. */
4c5fcca6 4511 return true;
4512 }
e38def9c 4513
b74338cf 4514 return ret;
e38def9c 4515
4516 succeed:
4517 if (dump_file && (dump_flags & TDF_DETAILS))
4518 fprintf (dump_file, "Empty EH handler %i removed.\n", lp->index);
4519 remove_eh_landing_pad (lp);
4520 return true;
4c5fcca6 4521}
4522
e38def9c 4523/* Do a post-order traversal of the EH region tree. Examine each
4524 post_landing_pad block and see if we can eliminate it as empty. */
4525
4526static bool
4527cleanup_all_empty_eh (void)
4528{
4529 bool changed = false;
4530 eh_landing_pad lp;
4531 int i;
4532
f1f41a6c 4533 for (i = 1; vec_safe_iterate (cfun->eh->lp_array, i, &lp); ++i)
e38def9c 4534 if (lp)
4535 changed |= cleanup_empty_eh (lp);
4536
4537 return changed;
4538}
4c5fcca6 4539
4540/* Perform cleanups and lowering of exception handling
 4541 1) cleanup regions with handlers doing nothing are optimized out
 4542 2) MUST_NOT_THROW regions that became dead because of 1) are optimized out
 4543 3) Info about regions containing instructions, and regions
 4544 reachable via local EH edges, is collected
c31fb425 4545 4) EH tree is pruned for regions no longer necessary.
e38def9c 4546
4547 TODO: Push MUST_NOT_THROW regions to the root of the EH tree.
4548 Unify those that have the same failure decl and locus.
4549*/
4c5fcca6 4550
4551static unsigned int
15100018 4552execute_cleanup_eh_1 (void)
4c5fcca6 4553{
e38def9c 4554 /* Do this first: unsplit_all_eh and cleanup_all_empty_eh can die
4555 looking up unreachable landing pads. */
4556 remove_unreachable_handlers ();
4c5fcca6 4557
e38def9c 4558 /* Watch out for the region tree vanishing due to all unreachable. */
6b3688f5 4559 if (cfun->eh->region_tree)
4c5fcca6 4560 {
e38def9c 4561 bool changed = false;
4c5fcca6 4562
6b3688f5 4563 if (optimize)
4564 changed |= unsplit_all_eh ();
e38def9c 4565 changed |= cleanup_all_empty_eh ();
4566
4567 if (changed)
48d5ef93 4568 {
4569 free_dominance_info (CDI_DOMINATORS);
4570 free_dominance_info (CDI_POST_DOMINATORS);
4c5fcca6 4571
e38def9c 4572 /* We delayed all basic block deletion, as we may have performed
4573 cleanups on EH edges while non-EH edges were still present. */
4574 delete_unreachable_blocks ();
4c5fcca6 4575
e38def9c 4576 /* We manipulated the landing pads. Remove any region that no
4577 longer has a landing pad. */
4578 remove_unreachable_handlers_no_lp ();
4579
4580 return TODO_cleanup_cfg | TODO_update_ssa_only_virtuals;
4581 }
4c5fcca6 4582 }
4583
e38def9c 4584 return 0;
4585}
4586
cbe8bda8 4587namespace {
4588
4589const pass_data pass_data_cleanup_eh =
4590{
4591 GIMPLE_PASS, /* type */
4592 "ehcleanup", /* name */
4593 OPTGROUP_NONE, /* optinfo_flags */
cbe8bda8 4594 TV_TREE_EH, /* tv_id */
4595 PROP_gimple_lcf, /* properties_required */
4596 0, /* properties_provided */
4597 0, /* properties_destroyed */
4598 0, /* todo_flags_start */
8b88439e 4599 0, /* todo_flags_finish */
4c5fcca6 4600};
cbe8bda8 4601
4602class pass_cleanup_eh : public gimple_opt_pass
4603{
4604public:
9af5ce0c 4605 pass_cleanup_eh (gcc::context *ctxt)
4606 : gimple_opt_pass (pass_data_cleanup_eh, ctxt)
cbe8bda8 4607 {}
4608
4609 /* opt_pass methods: */
ae84f584 4610 opt_pass * clone () { return new pass_cleanup_eh (m_ctxt); }
31315c24 4611 virtual bool gate (function *fun)
4612 {
4613 return fun->eh != NULL && fun->eh->region_tree != NULL;
4614 }
4615
65b0537f 4616 virtual unsigned int execute (function *);
cbe8bda8 4617
4618}; // class pass_cleanup_eh
4619
65b0537f 4620unsigned int
4621pass_cleanup_eh::execute (function *fun)
4622{
4623 int ret = execute_cleanup_eh_1 ();
4624
4625 /* If the function no longer needs an EH personality routine
4626 clear it. This exposes cross-language inlining opportunities
4627 and avoids references to a never defined personality routine. */
4628 if (DECL_FUNCTION_PERSONALITY (current_function_decl)
4629 && function_needs_eh_personality (fun) != eh_personality_lang)
4630 DECL_FUNCTION_PERSONALITY (current_function_decl) = NULL_TREE;
4631
4632 return ret;
4633}
4634
cbe8bda8 4635} // anon namespace
4636
4637gimple_opt_pass *
4638make_pass_cleanup_eh (gcc::context *ctxt)
4639{
4640 return new pass_cleanup_eh (ctxt);
4641}
e38def9c 4642\f
 4643/* Verify that BB, which contains STMT as its last statement, has precisely the
4644 edge that make_eh_edges would create. */
4645
4b987fac 4646DEBUG_FUNCTION bool
e38def9c 4647verify_eh_edges (gimple stmt)
4648{
4649 basic_block bb = gimple_bb (stmt);
4650 eh_landing_pad lp = NULL;
4651 int lp_nr;
4652 edge_iterator ei;
4653 edge e, eh_edge;
4654
4655 lp_nr = lookup_stmt_eh_lp (stmt);
4656 if (lp_nr > 0)
4657 lp = get_eh_landing_pad_from_number (lp_nr);
4658
4659 eh_edge = NULL;
4660 FOR_EACH_EDGE (e, ei, bb->succs)
4661 {
4662 if (e->flags & EDGE_EH)
4663 {
4664 if (eh_edge)
4665 {
4666 error ("BB %i has multiple EH edges", bb->index);
4667 return true;
4668 }
4669 else
4670 eh_edge = e;
4671 }
4672 }
4673
4674 if (lp == NULL)
4675 {
4676 if (eh_edge)
4677 {
4678 error ("BB %i can not throw but has an EH edge", bb->index);
4679 return true;
4680 }
4681 return false;
4682 }
4683
4684 if (!stmt_could_throw_p (stmt))
4685 {
4686 error ("BB %i last statement has incorrectly set lp", bb->index);
4687 return true;
4688 }
4689
4690 if (eh_edge == NULL)
4691 {
4692 error ("BB %i is missing an EH edge", bb->index);
4693 return true;
4694 }
4695
4696 if (eh_edge->dest != label_to_block (lp->post_landing_pad))
4697 {
4698 error ("Incorrect EH edge %i->%i", bb->index, eh_edge->dest->index);
4699 return true;
4700 }
4701
4702 return false;
4703}
4704
4705/* Similarly, but handle GIMPLE_EH_DISPATCH specifically. */
4706
4b987fac 4707DEBUG_FUNCTION bool
1a91d914 4708verify_eh_dispatch_edge (geh_dispatch *stmt)
e38def9c 4709{
4710 eh_region r;
4711 eh_catch c;
4712 basic_block src, dst;
4713 bool want_fallthru = true;
4714 edge_iterator ei;
4715 edge e, fall_edge;
4716
4717 r = get_eh_region_from_number (gimple_eh_dispatch_region (stmt));
4718 src = gimple_bb (stmt);
4719
4720 FOR_EACH_EDGE (e, ei, src->succs)
4721 gcc_assert (e->aux == NULL);
4722
4723 switch (r->type)
4724 {
4725 case ERT_TRY:
4726 for (c = r->u.eh_try.first_catch; c ; c = c->next_catch)
4727 {
4728 dst = label_to_block (c->label);
4729 e = find_edge (src, dst);
4730 if (e == NULL)
4731 {
4732 error ("BB %i is missing an edge", src->index);
4733 return true;
4734 }
4735 e->aux = (void *)e;
4736
4737 /* A catch-all handler doesn't have a fallthru. */
4738 if (c->type_list == NULL)
4739 {
4740 want_fallthru = false;
4741 break;
4742 }
4743 }
4744 break;
4745
4746 case ERT_ALLOWED_EXCEPTIONS:
4747 dst = label_to_block (r->u.allowed.label);
4748 e = find_edge (src, dst);
4749 if (e == NULL)
4750 {
4751 error ("BB %i is missing an edge", src->index);
4752 return true;
4753 }
4754 e->aux = (void *)e;
4755 break;
4756
4757 default:
4758 gcc_unreachable ();
4759 }
4760
4761 fall_edge = NULL;
4762 FOR_EACH_EDGE (e, ei, src->succs)
4763 {
4764 if (e->flags & EDGE_FALLTHRU)
4765 {
4766 if (fall_edge != NULL)
4767 {
4768 error ("BB %i too many fallthru edges", src->index);
4769 return true;
4770 }
4771 fall_edge = e;
4772 }
4773 else if (e->aux)
4774 e->aux = NULL;
4775 else
4776 {
4777 error ("BB %i has incorrect edge", src->index);
4778 return true;
4779 }
4780 }
4781 if ((fall_edge != NULL) ^ want_fallthru)
4782 {
4783 error ("BB %i has incorrect fallthru edge", src->index);
4784 return true;
4785 }
4786
4787 return false;
4788}