]> git.ipfire.org Git - thirdparty/gcc.git/blame - gcc/tree-eh.c
PR other/16615 [1/5]
[thirdparty/gcc.git] / gcc / tree-eh.c
CommitLineData
6de9cd9a 1/* Exception handling semantics and decomposition for trees.
a5544970 2 Copyright (C) 2003-2019 Free Software Foundation, Inc.
6de9cd9a
DN
3
4This file is part of GCC.
5
6GCC is free software; you can redistribute it and/or modify
7it under the terms of the GNU General Public License as published by
9dcd6f09 8the Free Software Foundation; either version 3, or (at your option)
6de9cd9a
DN
9any later version.
10
11GCC is distributed in the hope that it will be useful,
12but WITHOUT ANY WARRANTY; without even the implied warranty of
13MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
14GNU General Public License for more details.
15
16You should have received a copy of the GNU General Public License
9dcd6f09
NC
17along with GCC; see the file COPYING3. If not see
18<http://www.gnu.org/licenses/>. */
6de9cd9a
DN
19
20#include "config.h"
21#include "system.h"
22#include "coretypes.h"
c7131fb2 23#include "backend.h"
957060b5 24#include "rtl.h"
6de9cd9a 25#include "tree.h"
c7131fb2 26#include "gimple.h"
957060b5
AM
27#include "cfghooks.h"
28#include "tree-pass.h"
c7131fb2 29#include "ssa.h"
957060b5
AM
30#include "cgraph.h"
31#include "diagnostic-core.h"
c7131fb2 32#include "fold-const.h"
36566b39 33#include "calls.h"
6de9cd9a 34#include "except.h"
60393bbc
AM
35#include "cfganal.h"
36#include "cfgcleanup.h"
2fb9a547 37#include "tree-eh.h"
5be5c238 38#include "gimple-iterator.h"
442b4905 39#include "tree-cfg.h"
442b4905 40#include "tree-into-ssa.h"
7a300452 41#include "tree-ssa.h"
6de9cd9a 42#include "tree-inline.h"
6de9cd9a 43#include "langhooks.h"
7d776ee2 44#include "cfgloop.h"
4484a35a 45#include "gimple-low.h"
314e6352
ML
46#include "stringpool.h"
47#include "attribs.h"
c497284c 48#include "asan.h"
31b6733b 49#include "gimplify.h"
726a989a
RB
50
51/* In some instances a tree and a gimple need to be stored in a same table,
52 i.e. in hash tables. This is a structure to do this. */
355fe088 53typedef union {tree *tp; tree t; gimple *g;} treemple;
6de9cd9a 54
6de9cd9a
DN
55/* Misc functions used in this file. */
56
1d65f45c 57/* Remember and lookup EH landing pad data for arbitrary statements.
6de9cd9a
DN
58 Really this means any statement that could_throw_p. We could
59 stuff this information into the stmt_ann data structure, but:
60
61 (1) We absolutely rely on this information being kept until
62 we get to rtl. Once we're done with lowering here, if we lose
63 the information there's no way to recover it!
64
19114537 65 (2) There are many more statements that *cannot* throw as
6de9cd9a
DN
66 compared to those that can. We should be saving some amount
67 of space by only allocating memory for those that can throw. */
68
1d65f45c 69/* Add statement T in function IFUN to landing pad NUM. */
726a989a 70
481d1b81 71static void
355fe088 72add_stmt_to_eh_lp_fn (struct function *ifun, gimple *t, int num)
6de9cd9a 73{
1d65f45c 74 gcc_assert (num != 0);
6de9cd9a 75
98f464e0 76 if (!get_eh_throw_stmt_table (ifun))
355fe088 77 set_eh_throw_stmt_table (ifun, hash_map<gimple *, int>::create_ggc (31));
98f464e0 78
b086d530 79 gcc_assert (!get_eh_throw_stmt_table (ifun)->put (t, num));
6de9cd9a 80}
1eaba2f2 81
1d65f45c 82/* Add statement T in the current function (cfun) to EH landing pad NUM. */
726a989a 83
b4660e5a 84void
355fe088 85add_stmt_to_eh_lp (gimple *t, int num)
b4660e5a 86{
1d65f45c
RH
87 add_stmt_to_eh_lp_fn (cfun, t, num);
88}
89
90/* Add statement T to the single EH landing pad in REGION. */
91
92static void
355fe088 93record_stmt_eh_region (eh_region region, gimple *t)
1d65f45c
RH
94{
95 if (region == NULL)
96 return;
97 if (region->type == ERT_MUST_NOT_THROW)
98 add_stmt_to_eh_lp_fn (cfun, t, -region->index);
99 else
100 {
101 eh_landing_pad lp = region->landing_pads;
102 if (lp == NULL)
103 lp = gen_eh_landing_pad (region);
104 else
105 gcc_assert (lp->next_lp == NULL);
106 add_stmt_to_eh_lp_fn (cfun, t, lp->index);
107 }
b4660e5a
JH
108}
109
726a989a 110
1d65f45c 111/* Remove statement T in function IFUN from its EH landing pad. */
726a989a 112
1eaba2f2 113bool
355fe088 114remove_stmt_from_eh_lp_fn (struct function *ifun, gimple *t)
1eaba2f2 115{
b4660e5a 116 if (!get_eh_throw_stmt_table (ifun))
1eaba2f2
RH
117 return false;
118
b086d530 119 if (!get_eh_throw_stmt_table (ifun)->get (t))
1eaba2f2 120 return false;
b086d530
TS
121
122 get_eh_throw_stmt_table (ifun)->remove (t);
123 return true;
1eaba2f2
RH
124}
125
726a989a 126
1d65f45c
RH
127/* Remove statement T in the current function (cfun) from its
128 EH landing pad. */
726a989a 129
b4660e5a 130bool
355fe088 131remove_stmt_from_eh_lp (gimple *t)
b4660e5a 132{
1d65f45c 133 return remove_stmt_from_eh_lp_fn (cfun, t);
b4660e5a
JH
134}
135
726a989a 136/* Determine if statement T is inside an EH region in function IFUN.
1d65f45c
RH
137 Positive numbers indicate a landing pad index; negative numbers
138 indicate a MUST_NOT_THROW region index; zero indicates that the
139 statement is not recorded in the region table. */
726a989a 140
6de9cd9a 141int
355fe088 142lookup_stmt_eh_lp_fn (struct function *ifun, gimple *t)
6de9cd9a 143{
1d65f45c
RH
144 if (ifun->eh->throw_stmt_table == NULL)
145 return 0;
6de9cd9a 146
b086d530
TS
147 int *lp_nr = ifun->eh->throw_stmt_table->get (t);
148 return lp_nr ? *lp_nr : 0;
6de9cd9a
DN
149}
150
1d65f45c 151/* Likewise, but always use the current function. */
726a989a 152
b4660e5a 153int
355fe088 154lookup_stmt_eh_lp (gimple *t)
b4660e5a
JH
155{
156 /* We can get called from initialized data when -fnon-call-exceptions
157 is on; prevent crash. */
158 if (!cfun)
1d65f45c
RH
159 return 0;
160 return lookup_stmt_eh_lp_fn (cfun, t);
b4660e5a 161}
6de9cd9a 162
726a989a 163/* First pass of EH node decomposition. Build up a tree of GIMPLE_TRY_FINALLY
6de9cd9a
DN
164 nodes and LABEL_DECL nodes. We will use this during the second phase to
165 determine if a goto leaves the body of a TRY_FINALLY_EXPR node. */
166
167struct finally_tree_node
168{
726a989a
RB
169 /* When storing a GIMPLE_TRY, we have to record a gimple. However
170 when deciding whether a GOTO to a certain LABEL_DECL (which is a
171 tree) leaves the TRY block, its necessary to record a tree in
172 this field. Thus a treemple is used. */
1d65f45c 173 treemple child;
538dd0b7 174 gtry *parent;
6de9cd9a
DN
175};
176
4a8fb1a1
LC
177/* Hashtable helpers. */
178
95fbe13e 179struct finally_tree_hasher : free_ptr_hash <finally_tree_node>
4a8fb1a1 180{
67f58944
TS
181 static inline hashval_t hash (const finally_tree_node *);
182 static inline bool equal (const finally_tree_node *,
183 const finally_tree_node *);
4a8fb1a1
LC
184};
185
186inline hashval_t
67f58944 187finally_tree_hasher::hash (const finally_tree_node *v)
4a8fb1a1
LC
188{
189 return (intptr_t)v->child.t >> 4;
190}
191
192inline bool
67f58944
TS
193finally_tree_hasher::equal (const finally_tree_node *v,
194 const finally_tree_node *c)
4a8fb1a1
LC
195{
196 return v->child.t == c->child.t;
197}
198
6de9cd9a 199/* Note that this table is *not* marked GTY. It is short-lived. */
c203e8a7 200static hash_table<finally_tree_hasher> *finally_tree;
6de9cd9a
DN
201
202static void
538dd0b7 203record_in_finally_tree (treemple child, gtry *parent)
6de9cd9a
DN
204{
205 struct finally_tree_node *n;
4a8fb1a1 206 finally_tree_node **slot;
6de9cd9a 207
858904db 208 n = XNEW (struct finally_tree_node);
6de9cd9a
DN
209 n->child = child;
210 n->parent = parent;
211
c203e8a7 212 slot = finally_tree->find_slot (n, INSERT);
1e128c5f 213 gcc_assert (!*slot);
6de9cd9a
DN
214 *slot = n;
215}
216
217static void
355fe088 218collect_finally_tree (gimple *stmt, gtry *region);
726a989a 219
1d65f45c 220/* Go through the gimple sequence. Works with collect_finally_tree to
726a989a
RB
221 record all GIMPLE_LABEL and GIMPLE_TRY statements. */
222
223static void
538dd0b7 224collect_finally_tree_1 (gimple_seq seq, gtry *region)
6de9cd9a 225{
726a989a 226 gimple_stmt_iterator gsi;
6de9cd9a 227
726a989a
RB
228 for (gsi = gsi_start (seq); !gsi_end_p (gsi); gsi_next (&gsi))
229 collect_finally_tree (gsi_stmt (gsi), region);
230}
6de9cd9a 231
726a989a 232static void
355fe088 233collect_finally_tree (gimple *stmt, gtry *region)
726a989a
RB
234{
235 treemple temp;
236
237 switch (gimple_code (stmt))
238 {
239 case GIMPLE_LABEL:
538dd0b7 240 temp.t = gimple_label_label (as_a <glabel *> (stmt));
726a989a
RB
241 record_in_finally_tree (temp, region);
242 break;
6de9cd9a 243
726a989a
RB
244 case GIMPLE_TRY:
245 if (gimple_try_kind (stmt) == GIMPLE_TRY_FINALLY)
246 {
247 temp.g = stmt;
248 record_in_finally_tree (temp, region);
538dd0b7
DM
249 collect_finally_tree_1 (gimple_try_eval (stmt),
250 as_a <gtry *> (stmt));
726a989a
RB
251 collect_finally_tree_1 (gimple_try_cleanup (stmt), region);
252 }
253 else if (gimple_try_kind (stmt) == GIMPLE_TRY_CATCH)
254 {
255 collect_finally_tree_1 (gimple_try_eval (stmt), region);
256 collect_finally_tree_1 (gimple_try_cleanup (stmt), region);
257 }
258 break;
6de9cd9a 259
726a989a 260 case GIMPLE_CATCH:
538dd0b7
DM
261 collect_finally_tree_1 (gimple_catch_handler (
262 as_a <gcatch *> (stmt)),
263 region);
726a989a 264 break;
6de9cd9a 265
726a989a
RB
266 case GIMPLE_EH_FILTER:
267 collect_finally_tree_1 (gimple_eh_filter_failure (stmt), region);
6de9cd9a
DN
268 break;
269
0a35513e 270 case GIMPLE_EH_ELSE:
538dd0b7
DM
271 {
272 geh_else *eh_else_stmt = as_a <geh_else *> (stmt);
273 collect_finally_tree_1 (gimple_eh_else_n_body (eh_else_stmt), region);
274 collect_finally_tree_1 (gimple_eh_else_e_body (eh_else_stmt), region);
275 }
0a35513e
AH
276 break;
277
6de9cd9a
DN
278 default:
279 /* A type, a decl, or some kind of statement that we're not
280 interested in. Don't walk them. */
281 break;
282 }
283}
284
726a989a 285
6de9cd9a
DN
286/* Use the finally tree to determine if a jump from START to TARGET
287 would leave the try_finally node that START lives in. */
288
289static bool
355fe088 290outside_finally_tree (treemple start, gimple *target)
6de9cd9a
DN
291{
292 struct finally_tree_node n, *p;
293
294 do
295 {
296 n.child = start;
c203e8a7 297 p = finally_tree->find (&n);
6de9cd9a
DN
298 if (!p)
299 return true;
726a989a 300 start.g = p->parent;
6de9cd9a 301 }
726a989a 302 while (start.g != target);
6de9cd9a
DN
303
304 return false;
305}
726a989a
RB
306
307/* Second pass of EH node decomposition. Actually transform the GIMPLE_TRY
308 nodes into a set of gotos, magic labels, and eh regions.
6de9cd9a
DN
309 The eh region creation is straight-forward, but frobbing all the gotos
310 and such into shape isn't. */
311
b8698a0f 312/* The sequence into which we record all EH stuff. This will be
1d65f45c
RH
313 placed at the end of the function when we're all done. */
314static gimple_seq eh_seq;
315
316/* Record whether an EH region contains something that can throw,
317 indexed by EH region number. */
b7da9fd4 318static bitmap eh_region_may_contain_throw_map;
1d65f45c 319
026c3cfd 320/* The GOTO_QUEUE is an array of GIMPLE_GOTO and GIMPLE_RETURN
24b97832
ILT
321 statements that are seen to escape this GIMPLE_TRY_FINALLY node.
322 The idea is to record a gimple statement for everything except for
323 the conditionals, which get their labels recorded. Since labels are
324 of type 'tree', we need this node to store both gimple and tree
325 objects. REPL_STMT is the sequence used to replace the goto/return
326 statement. CONT_STMT is used to store the statement that allows
327 the return/goto to jump to the original destination. */
328
329struct goto_queue_node
330{
331 treemple stmt;
820055a0 332 location_t location;
24b97832 333 gimple_seq repl_stmt;
355fe088 334 gimple *cont_stmt;
24b97832
ILT
335 int index;
336 /* This is used when index >= 0 to indicate that stmt is a label (as
337 opposed to a goto stmt). */
338 int is_label;
339};
340
6de9cd9a
DN
341/* State of the world while lowering. */
342
343struct leh_state
344{
19114537 345 /* What's "current" while constructing the eh region tree. These
6de9cd9a
DN
346 correspond to variables of the same name in cfun->eh, which we
347 don't have easy access to. */
1d65f45c
RH
348 eh_region cur_region;
349
350 /* What's "current" for the purposes of __builtin_eh_pointer. For
351 a CATCH, this is the associated TRY. For an EH_FILTER, this is
352 the associated ALLOWED_EXCEPTIONS, etc. */
353 eh_region ehp_region;
6de9cd9a
DN
354
355 /* Processing of TRY_FINALLY requires a bit more state. This is
356 split out into a separate structure so that we don't have to
357 copy so much when processing other nodes. */
358 struct leh_tf_state *tf;
359};
360
361struct leh_tf_state
362{
726a989a
RB
363 /* Pointer to the GIMPLE_TRY_FINALLY node under discussion. The
364 try_finally_expr is the original GIMPLE_TRY_FINALLY. We need to retain
365 this so that outside_finally_tree can reliably reference the tree used
366 in the collect_finally_tree data structures. */
538dd0b7
DM
367 gtry *try_finally_expr;
368 gtry *top_p;
1d65f45c 369
726a989a
RB
370 /* While lowering a top_p usually it is expanded into multiple statements,
371 thus we need the following field to store them. */
372 gimple_seq top_p_seq;
6de9cd9a
DN
373
374 /* The state outside this try_finally node. */
375 struct leh_state *outer;
376
377 /* The exception region created for it. */
1d65f45c 378 eh_region region;
6de9cd9a 379
24b97832
ILT
380 /* The goto queue. */
381 struct goto_queue_node *goto_queue;
6de9cd9a
DN
382 size_t goto_queue_size;
383 size_t goto_queue_active;
384
fa10beec 385 /* Pointer map to help in searching goto_queue when it is large. */
355fe088 386 hash_map<gimple *, goto_queue_node *> *goto_queue_map;
0f547d3d 387
6de9cd9a 388 /* The set of unique labels seen as entries in the goto queue. */
9771b263 389 vec<tree> dest_array;
6de9cd9a
DN
390
391 /* A label to be added at the end of the completed transformed
392 sequence. It will be set if may_fallthru was true *at one time*,
393 though subsequent transformations may have cleared that flag. */
394 tree fallthru_label;
395
6de9cd9a
DN
396 /* True if it is possible to fall out the bottom of the try block.
397 Cleared if the fallthru is converted to a goto. */
398 bool may_fallthru;
399
726a989a 400 /* True if any entry in goto_queue is a GIMPLE_RETURN. */
6de9cd9a
DN
401 bool may_return;
402
403 /* True if the finally block can receive an exception edge.
404 Cleared if the exception case is handled by code duplication. */
405 bool may_throw;
406};
407
538dd0b7 408static gimple_seq lower_eh_must_not_throw (struct leh_state *, gtry *);
6de9cd9a 409
6de9cd9a
DN
410/* Search for STMT in the goto queue. Return the replacement,
411 or null if the statement isn't in the queue. */
412
0f547d3d
SE
413#define LARGE_GOTO_QUEUE 20
414
355a7673 415static void lower_eh_constructs_1 (struct leh_state *state, gimple_seq *seq);
726a989a
RB
416
417static gimple_seq
418find_goto_replacement (struct leh_tf_state *tf, treemple stmt)
6de9cd9a 419{
0f547d3d 420 unsigned int i;
0f547d3d
SE
421
422 if (tf->goto_queue_active < LARGE_GOTO_QUEUE)
423 {
424 for (i = 0; i < tf->goto_queue_active; i++)
726a989a 425 if ( tf->goto_queue[i].stmt.g == stmt.g)
0f547d3d
SE
426 return tf->goto_queue[i].repl_stmt;
427 return NULL;
428 }
429
430 /* If we have a large number of entries in the goto_queue, create a
431 pointer map and use that for searching. */
432
433 if (!tf->goto_queue_map)
434 {
355fe088 435 tf->goto_queue_map = new hash_map<gimple *, goto_queue_node *>;
0f547d3d
SE
436 for (i = 0; i < tf->goto_queue_active; i++)
437 {
b787e7a2
TS
438 bool existed = tf->goto_queue_map->put (tf->goto_queue[i].stmt.g,
439 &tf->goto_queue[i]);
440 gcc_assert (!existed);
0f547d3d
SE
441 }
442 }
443
b787e7a2 444 goto_queue_node **slot = tf->goto_queue_map->get (stmt.g);
0f547d3d 445 if (slot != NULL)
b787e7a2 446 return ((*slot)->repl_stmt);
0f547d3d
SE
447
448 return NULL;
6de9cd9a
DN
449}
450
451/* A subroutine of replace_goto_queue_1. Handles the sub-clauses of a
726a989a 452 lowered GIMPLE_COND. If, by chance, the replacement is a simple goto,
6de9cd9a 453 then we can just splat it in, otherwise we add the new stmts immediately
726a989a 454 after the GIMPLE_COND and redirect. */
6de9cd9a
DN
455
456static void
457replace_goto_queue_cond_clause (tree *tp, struct leh_tf_state *tf,
726a989a 458 gimple_stmt_iterator *gsi)
6de9cd9a 459{
726a989a 460 tree label;
82d6e6fc 461 gimple_seq new_seq;
726a989a 462 treemple temp;
c2255bc4 463 location_t loc = gimple_location (gsi_stmt (*gsi));
6de9cd9a 464
726a989a 465 temp.tp = tp;
82d6e6fc
KG
466 new_seq = find_goto_replacement (tf, temp);
467 if (!new_seq)
6de9cd9a
DN
468 return;
469
82d6e6fc
KG
470 if (gimple_seq_singleton_p (new_seq)
471 && gimple_code (gimple_seq_first_stmt (new_seq)) == GIMPLE_GOTO)
6de9cd9a 472 {
82d6e6fc 473 *tp = gimple_goto_dest (gimple_seq_first_stmt (new_seq));
6de9cd9a
DN
474 return;
475 }
476
c2255bc4 477 label = create_artificial_label (loc);
726a989a
RB
478 /* Set the new label for the GIMPLE_COND */
479 *tp = label;
6de9cd9a 480
726a989a 481 gsi_insert_after (gsi, gimple_build_label (label), GSI_CONTINUE_LINKING);
82d6e6fc 482 gsi_insert_seq_after (gsi, gimple_seq_copy (new_seq), GSI_CONTINUE_LINKING);
6de9cd9a
DN
483}
484
19114537 485/* The real work of replace_goto_queue. Returns with TSI updated to
6de9cd9a
DN
486 point to the next statement. */
487
355a7673 488static void replace_goto_queue_stmt_list (gimple_seq *, struct leh_tf_state *);
6de9cd9a
DN
489
490static void
355fe088 491replace_goto_queue_1 (gimple *stmt, struct leh_tf_state *tf,
726a989a 492 gimple_stmt_iterator *gsi)
6de9cd9a 493{
726a989a
RB
494 gimple_seq seq;
495 treemple temp;
496 temp.g = NULL;
497
498 switch (gimple_code (stmt))
6de9cd9a 499 {
726a989a
RB
500 case GIMPLE_GOTO:
501 case GIMPLE_RETURN:
502 temp.g = stmt;
503 seq = find_goto_replacement (tf, temp);
504 if (seq)
6de9cd9a 505 {
726a989a
RB
506 gsi_insert_seq_before (gsi, gimple_seq_copy (seq), GSI_SAME_STMT);
507 gsi_remove (gsi, false);
6de9cd9a
DN
508 return;
509 }
510 break;
511
726a989a
RB
512 case GIMPLE_COND:
513 replace_goto_queue_cond_clause (gimple_op_ptr (stmt, 2), tf, gsi);
514 replace_goto_queue_cond_clause (gimple_op_ptr (stmt, 3), tf, gsi);
6de9cd9a
DN
515 break;
516
726a989a 517 case GIMPLE_TRY:
355a7673
MM
518 replace_goto_queue_stmt_list (gimple_try_eval_ptr (stmt), tf);
519 replace_goto_queue_stmt_list (gimple_try_cleanup_ptr (stmt), tf);
6de9cd9a 520 break;
726a989a 521 case GIMPLE_CATCH:
538dd0b7
DM
522 replace_goto_queue_stmt_list (gimple_catch_handler_ptr (
523 as_a <gcatch *> (stmt)),
524 tf);
6de9cd9a 525 break;
726a989a 526 case GIMPLE_EH_FILTER:
355a7673 527 replace_goto_queue_stmt_list (gimple_eh_filter_failure_ptr (stmt), tf);
6de9cd9a 528 break;
0a35513e 529 case GIMPLE_EH_ELSE:
538dd0b7
DM
530 {
531 geh_else *eh_else_stmt = as_a <geh_else *> (stmt);
532 replace_goto_queue_stmt_list (gimple_eh_else_n_body_ptr (eh_else_stmt),
533 tf);
534 replace_goto_queue_stmt_list (gimple_eh_else_e_body_ptr (eh_else_stmt),
535 tf);
536 }
0a35513e 537 break;
6de9cd9a 538
6de9cd9a
DN
539 default:
540 /* These won't have gotos in them. */
541 break;
542 }
543
726a989a 544 gsi_next (gsi);
6de9cd9a
DN
545}
546
726a989a 547/* A subroutine of replace_goto_queue. Handles GIMPLE_SEQ. */
6de9cd9a
DN
548
549static void
355a7673 550replace_goto_queue_stmt_list (gimple_seq *seq, struct leh_tf_state *tf)
6de9cd9a 551{
355a7673 552 gimple_stmt_iterator gsi = gsi_start (*seq);
726a989a
RB
553
554 while (!gsi_end_p (gsi))
555 replace_goto_queue_1 (gsi_stmt (gsi), tf, &gsi);
6de9cd9a
DN
556}
557
558/* Replace all goto queue members. */
559
560static void
561replace_goto_queue (struct leh_tf_state *tf)
562{
8287d24a
EB
563 if (tf->goto_queue_active == 0)
564 return;
355a7673
MM
565 replace_goto_queue_stmt_list (&tf->top_p_seq, tf);
566 replace_goto_queue_stmt_list (&eh_seq, tf);
6de9cd9a
DN
567}
568
726a989a
RB
569/* Add a new record to the goto queue contained in TF. NEW_STMT is the
570 data to be added, IS_LABEL indicates whether NEW_STMT is a label or
571 a gimple return. */
6de9cd9a
DN
572
573static void
726a989a
RB
574record_in_goto_queue (struct leh_tf_state *tf,
575 treemple new_stmt,
576 int index,
820055a0
DC
577 bool is_label,
578 location_t location)
6de9cd9a 579{
6de9cd9a 580 size_t active, size;
726a989a 581 struct goto_queue_node *q;
6de9cd9a 582
0f547d3d
SE
583 gcc_assert (!tf->goto_queue_map);
584
6de9cd9a
DN
585 active = tf->goto_queue_active;
586 size = tf->goto_queue_size;
587 if (active >= size)
588 {
589 size = (size ? size * 2 : 32);
590 tf->goto_queue_size = size;
591 tf->goto_queue
858904db 592 = XRESIZEVEC (struct goto_queue_node, tf->goto_queue, size);
6de9cd9a
DN
593 }
594
595 q = &tf->goto_queue[active];
596 tf->goto_queue_active = active + 1;
19114537 597
6de9cd9a 598 memset (q, 0, sizeof (*q));
726a989a 599 q->stmt = new_stmt;
6de9cd9a 600 q->index = index;
820055a0 601 q->location = location;
726a989a
RB
602 q->is_label = is_label;
603}
604
605/* Record the LABEL label in the goto queue contained in TF.
606 TF is not null. */
607
608static void
820055a0
DC
609record_in_goto_queue_label (struct leh_tf_state *tf, treemple stmt, tree label,
610 location_t location)
726a989a
RB
611{
612 int index;
613 treemple temp, new_stmt;
614
615 if (!label)
616 return;
617
618 /* Computed and non-local gotos do not get processed. Given
619 their nature we can neither tell whether we've escaped the
620 finally block nor redirect them if we knew. */
621 if (TREE_CODE (label) != LABEL_DECL)
622 return;
623
624 /* No need to record gotos that don't leave the try block. */
625 temp.t = label;
626 if (!outside_finally_tree (temp, tf->try_finally_expr))
627 return;
628
9771b263 629 if (! tf->dest_array.exists ())
726a989a 630 {
9771b263
DN
631 tf->dest_array.create (10);
632 tf->dest_array.quick_push (label);
726a989a
RB
633 index = 0;
634 }
635 else
636 {
9771b263 637 int n = tf->dest_array.length ();
726a989a 638 for (index = 0; index < n; ++index)
9771b263 639 if (tf->dest_array[index] == label)
726a989a
RB
640 break;
641 if (index == n)
9771b263 642 tf->dest_array.safe_push (label);
726a989a
RB
643 }
644
645 /* In the case of a GOTO we want to record the destination label,
646 since with a GIMPLE_COND we have an easy access to the then/else
647 labels. */
648 new_stmt = stmt;
820055a0 649 record_in_goto_queue (tf, new_stmt, index, true, location);
726a989a
RB
650}
651
652/* For any GIMPLE_GOTO or GIMPLE_RETURN, decide whether it leaves a try_finally
653 node, and if so record that fact in the goto queue associated with that
654 try_finally node. */
655
656static void
355fe088 657maybe_record_in_goto_queue (struct leh_state *state, gimple *stmt)
726a989a
RB
658{
659 struct leh_tf_state *tf = state->tf;
660 treemple new_stmt;
661
662 if (!tf)
663 return;
664
665 switch (gimple_code (stmt))
666 {
667 case GIMPLE_COND:
538dd0b7
DM
668 {
669 gcond *cond_stmt = as_a <gcond *> (stmt);
670 new_stmt.tp = gimple_op_ptr (cond_stmt, 2);
671 record_in_goto_queue_label (tf, new_stmt,
672 gimple_cond_true_label (cond_stmt),
673 EXPR_LOCATION (*new_stmt.tp));
674 new_stmt.tp = gimple_op_ptr (cond_stmt, 3);
675 record_in_goto_queue_label (tf, new_stmt,
676 gimple_cond_false_label (cond_stmt),
677 EXPR_LOCATION (*new_stmt.tp));
678 }
726a989a
RB
679 break;
680 case GIMPLE_GOTO:
681 new_stmt.g = stmt;
820055a0
DC
682 record_in_goto_queue_label (tf, new_stmt, gimple_goto_dest (stmt),
683 gimple_location (stmt));
726a989a
RB
684 break;
685
686 case GIMPLE_RETURN:
687 tf->may_return = true;
688 new_stmt.g = stmt;
820055a0 689 record_in_goto_queue (tf, new_stmt, -1, false, gimple_location (stmt));
726a989a
RB
690 break;
691
692 default:
693 gcc_unreachable ();
694 }
6de9cd9a
DN
695}
696
726a989a 697
b2b29377 698#if CHECKING_P
726a989a 699/* We do not process GIMPLE_SWITCHes for now. As long as the original source
6de9cd9a 700 was in fact structured, and we've not yet done jump threading, then none
726a989a 701 of the labels will leave outer GIMPLE_TRY_FINALLY nodes. Verify this. */
6de9cd9a
DN
702
703static void
538dd0b7
DM
704verify_norecord_switch_expr (struct leh_state *state,
705 gswitch *switch_expr)
6de9cd9a
DN
706{
707 struct leh_tf_state *tf = state->tf;
708 size_t i, n;
6de9cd9a
DN
709
710 if (!tf)
711 return;
712
726a989a 713 n = gimple_switch_num_labels (switch_expr);
6de9cd9a
DN
714
715 for (i = 0; i < n; ++i)
716 {
726a989a
RB
717 treemple temp;
718 tree lab = CASE_LABEL (gimple_switch_label (switch_expr, i));
719 temp.t = lab;
720 gcc_assert (!outside_finally_tree (temp, tf->try_finally_expr));
6de9cd9a
DN
721 }
722}
723#else
724#define verify_norecord_switch_expr(state, switch_expr)
725#endif
726
8d686507
ILT
727/* Redirect a RETURN_EXPR pointed to by Q to FINLAB. If MOD is
728 non-null, insert it before the new branch. */
6de9cd9a
DN
729
730static void
8d686507 731do_return_redirection (struct goto_queue_node *q, tree finlab, gimple_seq mod)
6de9cd9a 732{
355fe088 733 gimple *x;
726a989a 734
8d686507 735 /* In the case of a return, the queue node must be a gimple statement. */
726a989a
RB
736 gcc_assert (!q->is_label);
737
8d686507 738 /* Note that the return value may have already been computed, e.g.,
6de9cd9a 739
8d686507
ILT
740 int x;
741 int foo (void)
6de9cd9a 742 {
8d686507
ILT
743 x = 0;
744 try {
745 return x;
746 } finally {
747 x++;
748 }
6de9cd9a 749 }
8d686507
ILT
750
751 should return 0, not 1. We don't have to do anything to make
752 this happens because the return value has been placed in the
753 RESULT_DECL already. */
754
755 q->cont_stmt = q->stmt.g;
726a989a 756
6de9cd9a 757 if (mod)
726a989a 758 gimple_seq_add_seq (&q->repl_stmt, mod);
6de9cd9a 759
726a989a 760 x = gimple_build_goto (finlab);
29f5bccb 761 gimple_set_location (x, q->location);
726a989a 762 gimple_seq_add_stmt (&q->repl_stmt, x);
6de9cd9a
DN
763}
764
726a989a 765/* Similar, but easier, for GIMPLE_GOTO. */
6de9cd9a
DN
766
767static void
726a989a
RB
768do_goto_redirection (struct goto_queue_node *q, tree finlab, gimple_seq mod,
769 struct leh_tf_state *tf)
6de9cd9a 770{
538dd0b7 771 ggoto *x;
726a989a
RB
772
773 gcc_assert (q->is_label);
726a989a 774
9771b263 775 q->cont_stmt = gimple_build_goto (tf->dest_array[q->index]);
6de9cd9a 776
6de9cd9a 777 if (mod)
726a989a 778 gimple_seq_add_seq (&q->repl_stmt, mod);
6de9cd9a 779
726a989a 780 x = gimple_build_goto (finlab);
29f5bccb 781 gimple_set_location (x, q->location);
726a989a 782 gimple_seq_add_stmt (&q->repl_stmt, x);
6de9cd9a
DN
783}
784
1d65f45c
RH
785/* Emit a standard landing pad sequence into SEQ for REGION. */
786
787static void
788emit_post_landing_pad (gimple_seq *seq, eh_region region)
789{
790 eh_landing_pad lp = region->landing_pads;
538dd0b7 791 glabel *x;
1d65f45c
RH
792
793 if (lp == NULL)
794 lp = gen_eh_landing_pad (region);
795
796 lp->post_landing_pad = create_artificial_label (UNKNOWN_LOCATION);
797 EH_LANDING_PAD_NR (lp->post_landing_pad) = lp->index;
798
799 x = gimple_build_label (lp->post_landing_pad);
800 gimple_seq_add_stmt (seq, x);
801}
802
803/* Emit a RESX statement into SEQ for REGION. */
804
805static void
806emit_resx (gimple_seq *seq, eh_region region)
807{
538dd0b7 808 gresx *x = gimple_build_resx (region->index);
1d65f45c
RH
809 gimple_seq_add_stmt (seq, x);
810 if (region->outer)
811 record_stmt_eh_region (region->outer, x);
812}
813
814/* Emit an EH_DISPATCH statement into SEQ for REGION. */
815
816static void
817emit_eh_dispatch (gimple_seq *seq, eh_region region)
818{
538dd0b7 819 geh_dispatch *x = gimple_build_eh_dispatch (region->index);
1d65f45c
RH
820 gimple_seq_add_stmt (seq, x);
821}
822
823/* Note that the current EH region may contain a throw, or a
824 call to a function which itself may contain a throw. */
825
826static void
827note_eh_region_may_contain_throw (eh_region region)
828{
fcaa4ca4 829 while (bitmap_set_bit (eh_region_may_contain_throw_map, region->index))
1d65f45c 830 {
6788475a
JJ
831 if (region->type == ERT_MUST_NOT_THROW)
832 break;
1d65f45c
RH
833 region = region->outer;
834 if (region == NULL)
835 break;
836 }
837}
838
b7da9fd4
RH
839/* Check if REGION has been marked as containing a throw. If REGION is
840 NULL, this predicate is false. */
841
842static inline bool
843eh_region_may_contain_throw (eh_region r)
844{
845 return r && bitmap_bit_p (eh_region_may_contain_throw_map, r->index);
846}
847
6de9cd9a
DN
848/* We want to transform
849 try { body; } catch { stuff; }
850 to
3b445b24 851 normal_sequence:
1d65f45c
RH
852 body;
853 over:
3b445b24 854 eh_sequence:
1d65f45c
RH
855 landing_pad:
856 stuff;
857 goto over;
858
859 TP is a GIMPLE_TRY node. REGION is the region whose post_landing_pad
6de9cd9a
DN
860 should be placed before the second operand, or NULL. OVER is
861 an existing label that should be put at the exit, or NULL. */
862
726a989a 863static gimple_seq
538dd0b7 864frob_into_branch_around (gtry *tp, eh_region region, tree over)
6de9cd9a 865{
355fe088 866 gimple *x;
726a989a 867 gimple_seq cleanup, result;
c2255bc4 868 location_t loc = gimple_location (tp);
6de9cd9a 869
726a989a
RB
870 cleanup = gimple_try_cleanup (tp);
871 result = gimple_try_eval (tp);
6de9cd9a 872
1d65f45c
RH
873 if (region)
874 emit_post_landing_pad (&eh_seq, region);
875
876 if (gimple_seq_may_fallthru (cleanup))
6de9cd9a
DN
877 {
878 if (!over)
c2255bc4 879 over = create_artificial_label (loc);
726a989a 880 x = gimple_build_goto (over);
29f5bccb 881 gimple_set_location (x, loc);
1d65f45c 882 gimple_seq_add_stmt (&cleanup, x);
6de9cd9a 883 }
1d65f45c 884 gimple_seq_add_seq (&eh_seq, cleanup);
6de9cd9a
DN
885
886 if (over)
887 {
726a989a
RB
888 x = gimple_build_label (over);
889 gimple_seq_add_stmt (&result, x);
6de9cd9a 890 }
726a989a 891 return result;
6de9cd9a
DN
892}
893
894/* A subroutine of lower_try_finally. Duplicate the tree rooted at T.
895 Make sure to record all new labels found. */
896
726a989a 897static gimple_seq
820055a0
DC
898lower_try_finally_dup_block (gimple_seq seq, struct leh_state *outer_state,
899 location_t loc)
6de9cd9a 900{
538dd0b7 901 gtry *region = NULL;
726a989a 902 gimple_seq new_seq;
820055a0 903 gimple_stmt_iterator gsi;
6de9cd9a 904
726a989a 905 new_seq = copy_gimple_seq_and_replace_locals (seq);
6de9cd9a 906
820055a0 907 for (gsi = gsi_start (new_seq); !gsi_end_p (gsi); gsi_next (&gsi))
62d4d60c 908 {
355fe088 909 gimple *stmt = gsi_stmt (gsi);
cc6fbd80
EB
910 /* We duplicate __builtin_stack_restore at -O0 in the hope of eliminating
911 it on the EH paths. When it is not eliminated, make it transparent in
912 the debug info. */
913 if (gimple_call_builtin_p (stmt, BUILT_IN_STACK_RESTORE))
914 gimple_set_location (stmt, UNKNOWN_LOCATION);
915 else if (LOCATION_LOCUS (gimple_location (stmt)) == UNKNOWN_LOCATION)
62d4d60c
DC
916 {
917 tree block = gimple_block (stmt);
918 gimple_set_location (stmt, loc);
919 gimple_set_block (stmt, block);
920 }
921 }
820055a0 922
6de9cd9a
DN
923 if (outer_state->tf)
924 region = outer_state->tf->try_finally_expr;
726a989a 925 collect_finally_tree_1 (new_seq, region);
6de9cd9a 926
726a989a 927 return new_seq;
6de9cd9a
DN
928}
929
930/* A subroutine of lower_try_finally. Create a fallthru label for
931 the given try_finally state. The only tricky bit here is that
932 we have to make sure to record the label in our outer context. */
933
934static tree
935lower_try_finally_fallthru_label (struct leh_tf_state *tf)
936{
937 tree label = tf->fallthru_label;
726a989a
RB
938 treemple temp;
939
6de9cd9a
DN
940 if (!label)
941 {
c2255bc4 942 label = create_artificial_label (gimple_location (tf->try_finally_expr));
6de9cd9a
DN
943 tf->fallthru_label = label;
944 if (tf->outer->tf)
726a989a
RB
945 {
946 temp.t = label;
947 record_in_finally_tree (temp, tf->outer->tf->try_finally_expr);
948 }
6de9cd9a
DN
949 }
950 return label;
951}
952
0a35513e
AH
953/* A subroutine of lower_try_finally. If FINALLY consits of a
954 GIMPLE_EH_ELSE node, return it. */
955
538dd0b7 956static inline geh_else *
0a35513e
AH
957get_eh_else (gimple_seq finally)
958{
355fe088 959 gimple *x = gimple_seq_first_stmt (finally);
0a35513e
AH
960 if (gimple_code (x) == GIMPLE_EH_ELSE)
961 {
962 gcc_assert (gimple_seq_singleton_p (finally));
538dd0b7 963 return as_a <geh_else *> (x);
0a35513e
AH
964 }
965 return NULL;
966}
967
3b06d379
SB
968/* A subroutine of lower_try_finally. If the eh_protect_cleanup_actions
969 langhook returns non-null, then the language requires that the exception
970 path out of a try_finally be treated specially. To wit: the code within
971 the finally block may not itself throw an exception. We have two choices
972 here. First we can duplicate the finally block and wrap it in a
973 must_not_throw region. Second, we can generate code like
6de9cd9a
DN
974
975 try {
976 finally_block;
977 } catch {
978 if (fintmp == eh_edge)
979 protect_cleanup_actions;
980 }
981
982 where "fintmp" is the temporary used in the switch statement generation
983 alternative considered below. For the nonce, we always choose the first
19114537 984 option.
6de9cd9a 985
3f117656 986 THIS_STATE may be null if this is a try-cleanup, not a try-finally. */
6de9cd9a
DN
987
988static void
989honor_protect_cleanup_actions (struct leh_state *outer_state,
990 struct leh_state *this_state,
991 struct leh_tf_state *tf)
992{
b5c4bc31 993 gimple_seq finally = gimple_try_cleanup (tf->top_p);
6de9cd9a 994
b5c4bc31
RH
995 /* EH_ELSE doesn't come from user code; only compiler generated stuff.
996 It does need to be handled here, so as to separate the (different)
997 EH path from the normal path. But we should not attempt to wrap
998 it with a must-not-throw node (which indeed gets in the way). */
999 if (geh_else *eh_else = get_eh_else (finally))
0a35513e 1000 {
0a35513e 1001 gimple_try_set_cleanup (tf->top_p, gimple_eh_else_n_body (eh_else));
b5c4bc31
RH
1002 finally = gimple_eh_else_e_body (eh_else);
1003
1004 /* Let the ELSE see the exception that's being processed. */
1005 eh_region save_ehp = this_state->ehp_region;
1006 this_state->ehp_region = this_state->cur_region;
1007 lower_eh_constructs_1 (this_state, &finally);
1008 this_state->ehp_region = save_ehp;
0a35513e 1009 }
b5c4bc31 1010 else
33b45227 1011 {
b5c4bc31
RH
1012 /* First check for nothing to do. */
1013 if (lang_hooks.eh_protect_cleanup_actions == NULL)
1014 return;
1015 tree actions = lang_hooks.eh_protect_cleanup_actions ();
1016 if (actions == NULL)
1017 return;
1018
1019 if (this_state)
1020 finally = lower_try_finally_dup_block (finally, outer_state,
1021 gimple_location (tf->try_finally_expr));
1022
1023 /* If this cleanup consists of a TRY_CATCH_EXPR with TRY_CATCH_IS_CLEANUP
1024 set, the handler of the TRY_CATCH_EXPR is another cleanup which ought
1025 to be in an enclosing scope, but needs to be implemented at this level
1026 to avoid a nesting violation (see wrap_temporary_cleanups in
1027 cp/decl.c). Since it's logically at an outer level, we should call
1028 terminate before we get to it, so strip it away before adding the
1029 MUST_NOT_THROW filter. */
1030 gimple_stmt_iterator gsi = gsi_start (finally);
1031 gimple *x = gsi_stmt (gsi);
1032 if (gimple_code (x) == GIMPLE_TRY
1033 && gimple_try_kind (x) == GIMPLE_TRY_CATCH
1034 && gimple_try_catch_is_cleanup (x))
1035 {
1036 gsi_insert_seq_before (&gsi, gimple_try_eval (x), GSI_SAME_STMT);
1037 gsi_remove (&gsi, false);
1038 }
33b45227 1039
b5c4bc31
RH
1040 /* Wrap the block with protect_cleanup_actions as the action. */
1041 geh_mnt *eh_mnt = gimple_build_eh_must_not_throw (actions);
1042 gtry *try_stmt = gimple_build_try (finally,
1043 gimple_seq_alloc_with_stmt (eh_mnt),
1044 GIMPLE_TRY_CATCH);
1045 finally = lower_eh_must_not_throw (outer_state, try_stmt);
1046 }
1d65f45c
RH
1047
1048 /* Drop all of this into the exception sequence. */
1049 emit_post_landing_pad (&eh_seq, tf->region);
1050 gimple_seq_add_seq (&eh_seq, finally);
b5c4bc31 1051 if (gimple_seq_may_fallthru (finally))
1d65f45c 1052 emit_resx (&eh_seq, tf->region);
6de9cd9a
DN
1053
1054 /* Having now been handled, EH isn't to be considered with
1055 the rest of the outgoing edges. */
1056 tf->may_throw = false;
1057}
1058
1059/* A subroutine of lower_try_finally. We have determined that there is
1060 no fallthru edge out of the finally block. This means that there is
1061 no outgoing edge corresponding to any incoming edge. Restructure the
1062 try_finally node for this special case. */
1063
1064static void
726a989a
RB
1065lower_try_finally_nofallthru (struct leh_state *state,
1066 struct leh_tf_state *tf)
6de9cd9a 1067{
8d686507 1068 tree lab;
355fe088 1069 gimple *x;
538dd0b7 1070 geh_else *eh_else;
726a989a 1071 gimple_seq finally;
6de9cd9a
DN
1072 struct goto_queue_node *q, *qe;
1073
1d65f45c 1074 lab = create_artificial_label (gimple_location (tf->try_finally_expr));
6de9cd9a 1075
726a989a
RB
1076 /* We expect that tf->top_p is a GIMPLE_TRY. */
1077 finally = gimple_try_cleanup (tf->top_p);
1078 tf->top_p_seq = gimple_try_eval (tf->top_p);
6de9cd9a 1079
726a989a
RB
1080 x = gimple_build_label (lab);
1081 gimple_seq_add_stmt (&tf->top_p_seq, x);
6de9cd9a 1082
6de9cd9a
DN
1083 q = tf->goto_queue;
1084 qe = q + tf->goto_queue_active;
1085 for (; q < qe; ++q)
1086 if (q->index < 0)
8d686507 1087 do_return_redirection (q, lab, NULL);
6de9cd9a 1088 else
726a989a 1089 do_goto_redirection (q, lab, NULL, tf);
6de9cd9a
DN
1090
1091 replace_goto_queue (tf);
1092
0a35513e
AH
1093 /* Emit the finally block into the stream. Lower EH_ELSE at this time. */
1094 eh_else = get_eh_else (finally);
1095 if (eh_else)
1096 {
1097 finally = gimple_eh_else_n_body (eh_else);
355a7673 1098 lower_eh_constructs_1 (state, &finally);
0a35513e 1099 gimple_seq_add_seq (&tf->top_p_seq, finally);
1d65f45c 1100
0a35513e
AH
1101 if (tf->may_throw)
1102 {
1103 finally = gimple_eh_else_e_body (eh_else);
355a7673 1104 lower_eh_constructs_1 (state, &finally);
0a35513e
AH
1105
1106 emit_post_landing_pad (&eh_seq, tf->region);
1107 gimple_seq_add_seq (&eh_seq, finally);
1108 }
1109 }
1110 else
1d65f45c 1111 {
355a7673 1112 lower_eh_constructs_1 (state, &finally);
0a35513e 1113 gimple_seq_add_seq (&tf->top_p_seq, finally);
1d65f45c 1114
0a35513e
AH
1115 if (tf->may_throw)
1116 {
1117 emit_post_landing_pad (&eh_seq, tf->region);
1118
1119 x = gimple_build_goto (lab);
29f5bccb 1120 gimple_set_location (x, gimple_location (tf->try_finally_expr));
0a35513e
AH
1121 gimple_seq_add_stmt (&eh_seq, x);
1122 }
1d65f45c 1123 }
6de9cd9a
DN
1124}
1125
1126/* A subroutine of lower_try_finally. We have determined that there is
1127 exactly one destination of the finally block. Restructure the
1128 try_finally node for this special case. */
1129
1130static void
1131lower_try_finally_onedest (struct leh_state *state, struct leh_tf_state *tf)
1132{
1133 struct goto_queue_node *q, *qe;
538dd0b7
DM
1134 geh_else *eh_else;
1135 glabel *label_stmt;
355fe088 1136 gimple *x;
726a989a 1137 gimple_seq finally;
e368f44f 1138 gimple_stmt_iterator gsi;
726a989a 1139 tree finally_label;
c2255bc4 1140 location_t loc = gimple_location (tf->try_finally_expr);
6de9cd9a 1141
726a989a
RB
1142 finally = gimple_try_cleanup (tf->top_p);
1143 tf->top_p_seq = gimple_try_eval (tf->top_p);
6de9cd9a 1144
0a35513e
AH
1145 /* Since there's only one destination, and the destination edge can only
1146 either be EH or non-EH, that implies that all of our incoming edges
1147 are of the same type. Therefore we can lower EH_ELSE immediately. */
538dd0b7
DM
1148 eh_else = get_eh_else (finally);
1149 if (eh_else)
0a35513e
AH
1150 {
1151 if (tf->may_throw)
538dd0b7 1152 finally = gimple_eh_else_e_body (eh_else);
0a35513e 1153 else
538dd0b7 1154 finally = gimple_eh_else_n_body (eh_else);
0a35513e
AH
1155 }
1156
355a7673 1157 lower_eh_constructs_1 (state, &finally);
6de9cd9a 1158
e368f44f
DC
1159 for (gsi = gsi_start (finally); !gsi_end_p (gsi); gsi_next (&gsi))
1160 {
355fe088 1161 gimple *stmt = gsi_stmt (gsi);
e368f44f
DC
1162 if (LOCATION_LOCUS (gimple_location (stmt)) == UNKNOWN_LOCATION)
1163 {
1164 tree block = gimple_block (stmt);
1165 gimple_set_location (stmt, gimple_location (tf->try_finally_expr));
1166 gimple_set_block (stmt, block);
1167 }
1168 }
1169
6de9cd9a
DN
1170 if (tf->may_throw)
1171 {
1172 /* Only reachable via the exception edge. Add the given label to
1173 the head of the FINALLY block. Append a RESX at the end. */
1d65f45c
RH
1174 emit_post_landing_pad (&eh_seq, tf->region);
1175 gimple_seq_add_seq (&eh_seq, finally);
1176 emit_resx (&eh_seq, tf->region);
6de9cd9a
DN
1177 return;
1178 }
1179
1180 if (tf->may_fallthru)
1181 {
1182 /* Only reachable via the fallthru edge. Do nothing but let
1183 the two blocks run together; we'll fall out the bottom. */
726a989a 1184 gimple_seq_add_seq (&tf->top_p_seq, finally);
6de9cd9a
DN
1185 return;
1186 }
1187
c2255bc4 1188 finally_label = create_artificial_label (loc);
538dd0b7
DM
1189 label_stmt = gimple_build_label (finally_label);
1190 gimple_seq_add_stmt (&tf->top_p_seq, label_stmt);
6de9cd9a 1191
726a989a 1192 gimple_seq_add_seq (&tf->top_p_seq, finally);
6de9cd9a
DN
1193
1194 q = tf->goto_queue;
1195 qe = q + tf->goto_queue_active;
1196
1197 if (tf->may_return)
1198 {
1199 /* Reachable by return expressions only. Redirect them. */
6de9cd9a 1200 for (; q < qe; ++q)
8d686507 1201 do_return_redirection (q, finally_label, NULL);
6de9cd9a
DN
1202 replace_goto_queue (tf);
1203 }
1204 else
1205 {
1206 /* Reachable by goto expressions only. Redirect them. */
1207 for (; q < qe; ++q)
726a989a 1208 do_goto_redirection (q, finally_label, NULL, tf);
6de9cd9a 1209 replace_goto_queue (tf);
19114537 1210
9771b263 1211 if (tf->dest_array[0] == tf->fallthru_label)
6de9cd9a
DN
1212 {
1213 /* Reachable by goto to fallthru label only. Redirect it
1214 to the new label (already created, sadly), and do not
1215 emit the final branch out, or the fallthru label. */
1216 tf->fallthru_label = NULL;
1217 return;
1218 }
1219 }
1220
726a989a
RB
1221 /* Place the original return/goto to the original destination
1222 immediately after the finally block. */
1223 x = tf->goto_queue[0].cont_stmt;
1224 gimple_seq_add_stmt (&tf->top_p_seq, x);
1225 maybe_record_in_goto_queue (state, x);
6de9cd9a
DN
1226}
1227
1228/* A subroutine of lower_try_finally. There are multiple edges incoming
1229 and outgoing from the finally block. Implement this by duplicating the
1230 finally block for every destination. */
1231
1232static void
1233lower_try_finally_copy (struct leh_state *state, struct leh_tf_state *tf)
1234{
726a989a
RB
1235 gimple_seq finally;
1236 gimple_seq new_stmt;
1237 gimple_seq seq;
355fe088 1238 gimple *x;
538dd0b7 1239 geh_else *eh_else;
726a989a 1240 tree tmp;
c2255bc4 1241 location_t tf_loc = gimple_location (tf->try_finally_expr);
6de9cd9a 1242
726a989a 1243 finally = gimple_try_cleanup (tf->top_p);
0a35513e
AH
1244
1245 /* Notice EH_ELSE, and simplify some of the remaining code
1246 by considering FINALLY to be the normal return path only. */
1247 eh_else = get_eh_else (finally);
1248 if (eh_else)
1249 finally = gimple_eh_else_n_body (eh_else);
1250
726a989a
RB
1251 tf->top_p_seq = gimple_try_eval (tf->top_p);
1252 new_stmt = NULL;
6de9cd9a
DN
1253
1254 if (tf->may_fallthru)
1255 {
820055a0 1256 seq = lower_try_finally_dup_block (finally, state, tf_loc);
355a7673 1257 lower_eh_constructs_1 (state, &seq);
726a989a 1258 gimple_seq_add_seq (&new_stmt, seq);
6de9cd9a 1259
726a989a
RB
1260 tmp = lower_try_finally_fallthru_label (tf);
1261 x = gimple_build_goto (tmp);
29f5bccb 1262 gimple_set_location (x, tf_loc);
726a989a 1263 gimple_seq_add_stmt (&new_stmt, x);
6de9cd9a
DN
1264 }
1265
1266 if (tf->may_throw)
1267 {
0a35513e
AH
1268 /* We don't need to copy the EH path of EH_ELSE,
1269 since it is only emitted once. */
1270 if (eh_else)
1271 seq = gimple_eh_else_e_body (eh_else);
1272 else
820055a0 1273 seq = lower_try_finally_dup_block (finally, state, tf_loc);
355a7673 1274 lower_eh_constructs_1 (state, &seq);
6de9cd9a 1275
288f5b2e
RH
1276 emit_post_landing_pad (&eh_seq, tf->region);
1277 gimple_seq_add_seq (&eh_seq, seq);
1d65f45c 1278 emit_resx (&eh_seq, tf->region);
6de9cd9a
DN
1279 }
1280
1281 if (tf->goto_queue)
1282 {
1283 struct goto_queue_node *q, *qe;
dd58eb5a 1284 int return_index, index;
858904db 1285 struct labels_s
dd58eb5a
AO
1286 {
1287 struct goto_queue_node *q;
1288 tree label;
1289 } *labels;
6de9cd9a 1290
9771b263 1291 return_index = tf->dest_array.length ();
858904db 1292 labels = XCNEWVEC (struct labels_s, return_index + 1);
6de9cd9a
DN
1293
1294 q = tf->goto_queue;
1295 qe = q + tf->goto_queue_active;
1296 for (; q < qe; q++)
1297 {
dd58eb5a
AO
1298 index = q->index < 0 ? return_index : q->index;
1299
1300 if (!labels[index].q)
1301 labels[index].q = q;
1302 }
1303
1304 for (index = 0; index < return_index + 1; index++)
1305 {
1306 tree lab;
1307
1308 q = labels[index].q;
1309 if (! q)
1310 continue;
1311
c2255bc4
AH
1312 lab = labels[index].label
1313 = create_artificial_label (tf_loc);
6de9cd9a
DN
1314
1315 if (index == return_index)
8d686507 1316 do_return_redirection (q, lab, NULL);
6de9cd9a 1317 else
726a989a 1318 do_goto_redirection (q, lab, NULL, tf);
6de9cd9a 1319
726a989a
RB
1320 x = gimple_build_label (lab);
1321 gimple_seq_add_stmt (&new_stmt, x);
6de9cd9a 1322
820055a0 1323 seq = lower_try_finally_dup_block (finally, state, q->location);
355a7673 1324 lower_eh_constructs_1 (state, &seq);
726a989a 1325 gimple_seq_add_seq (&new_stmt, seq);
6de9cd9a 1326
726a989a 1327 gimple_seq_add_stmt (&new_stmt, q->cont_stmt);
dd58eb5a 1328 maybe_record_in_goto_queue (state, q->cont_stmt);
6de9cd9a 1329 }
dd58eb5a
AO
1330
1331 for (q = tf->goto_queue; q < qe; q++)
1332 {
1333 tree lab;
1334
1335 index = q->index < 0 ? return_index : q->index;
1336
1337 if (labels[index].q == q)
1338 continue;
1339
1340 lab = labels[index].label;
1341
1342 if (index == return_index)
8d686507 1343 do_return_redirection (q, lab, NULL);
dd58eb5a 1344 else
726a989a 1345 do_goto_redirection (q, lab, NULL, tf);
dd58eb5a 1346 }
1d65f45c 1347
6de9cd9a
DN
1348 replace_goto_queue (tf);
1349 free (labels);
1350 }
1351
1352 /* Need to link new stmts after running replace_goto_queue due
1353 to not wanting to process the same goto stmts twice. */
726a989a 1354 gimple_seq_add_seq (&tf->top_p_seq, new_stmt);
6de9cd9a
DN
1355}
1356
1357/* A subroutine of lower_try_finally. There are multiple edges incoming
1358 and outgoing from the finally block. Implement this by instrumenting
1359 each incoming edge and creating a switch statement at the end of the
1360 finally block that branches to the appropriate destination. */
1361
1362static void
1363lower_try_finally_switch (struct leh_state *state, struct leh_tf_state *tf)
1364{
1365 struct goto_queue_node *q, *qe;
726a989a 1366 tree finally_tmp, finally_label;
6de9cd9a
DN
1367 int return_index, eh_index, fallthru_index;
1368 int nlabels, ndests, j, last_case_index;
726a989a 1369 tree last_case;
af121e82 1370 auto_vec<tree> case_label_vec;
355a7673 1371 gimple_seq switch_body = NULL;
355fe088 1372 gimple *x;
538dd0b7 1373 geh_else *eh_else;
726a989a 1374 tree tmp;
355fe088 1375 gimple *switch_stmt;
726a989a 1376 gimple_seq finally;
355fe088 1377 hash_map<tree, gimple *> *cont_map = NULL;
c2255bc4 1378 /* The location of the TRY_FINALLY stmt. */
d40eb158 1379 location_t tf_loc = gimple_location (tf->try_finally_expr);
c2255bc4
AH
1380 /* The location of the finally block. */
1381 location_t finally_loc;
726a989a 1382
0a35513e
AH
1383 finally = gimple_try_cleanup (tf->top_p);
1384 eh_else = get_eh_else (finally);
6de9cd9a
DN
1385
1386 /* Mash the TRY block to the head of the chain. */
726a989a 1387 tf->top_p_seq = gimple_try_eval (tf->top_p);
6de9cd9a 1388
c2255bc4
AH
1389 /* The location of the finally is either the last stmt in the finally
1390 block or the location of the TRY_FINALLY itself. */
0118b919
EB
1391 x = gimple_seq_last_stmt (finally);
1392 finally_loc = x ? gimple_location (x) : tf_loc;
c2255bc4 1393
6de9cd9a 1394 /* Prepare for switch statement generation. */
9771b263 1395 nlabels = tf->dest_array.length ();
6de9cd9a
DN
1396 return_index = nlabels;
1397 eh_index = return_index + tf->may_return;
0a35513e 1398 fallthru_index = eh_index + (tf->may_throw && !eh_else);
6de9cd9a
DN
1399 ndests = fallthru_index + tf->may_fallthru;
1400
1401 finally_tmp = create_tmp_var (integer_type_node, "finally_tmp");
c2255bc4 1402 finally_label = create_artificial_label (finally_loc);
6de9cd9a 1403
9771b263 1404 /* We use vec::quick_push on case_label_vec throughout this function,
726a989a
RB
1405 since we know the size in advance and allocate precisely as muce
1406 space as needed. */
9771b263 1407 case_label_vec.create (ndests);
6de9cd9a
DN
1408 last_case = NULL;
1409 last_case_index = 0;
1410
1411 /* Begin inserting code for getting to the finally block. Things
1412 are done in this order to correspond to the sequence the code is
073a8998 1413 laid out. */
6de9cd9a
DN
1414
1415 if (tf->may_fallthru)
1416 {
1d65f45c 1417 x = gimple_build_assign (finally_tmp,
413581ba
RG
1418 build_int_cst (integer_type_node,
1419 fallthru_index));
ba0cecd6 1420 gimple_set_location (x, finally_loc);
726a989a 1421 gimple_seq_add_stmt (&tf->top_p_seq, x);
6de9cd9a 1422
3d528853
NF
1423 tmp = build_int_cst (integer_type_node, fallthru_index);
1424 last_case = build_case_label (tmp, NULL,
ba0cecd6 1425 create_artificial_label (finally_loc));
9771b263 1426 case_label_vec.quick_push (last_case);
6de9cd9a
DN
1427 last_case_index++;
1428
726a989a
RB
1429 x = gimple_build_label (CASE_LABEL (last_case));
1430 gimple_seq_add_stmt (&switch_body, x);
6de9cd9a 1431
726a989a
RB
1432 tmp = lower_try_finally_fallthru_label (tf);
1433 x = gimple_build_goto (tmp);
ba0cecd6 1434 gimple_set_location (x, finally_loc);
726a989a 1435 gimple_seq_add_stmt (&switch_body, x);
6de9cd9a
DN
1436 }
1437
0a35513e
AH
1438 /* For EH_ELSE, emit the exception path (plus resx) now, then
1439 subsequently we only need consider the normal path. */
1440 if (eh_else)
1441 {
1442 if (tf->may_throw)
1443 {
1444 finally = gimple_eh_else_e_body (eh_else);
355a7673 1445 lower_eh_constructs_1 (state, &finally);
0a35513e
AH
1446
1447 emit_post_landing_pad (&eh_seq, tf->region);
1448 gimple_seq_add_seq (&eh_seq, finally);
1449 emit_resx (&eh_seq, tf->region);
1450 }
1451
1452 finally = gimple_eh_else_n_body (eh_else);
1453 }
1454 else if (tf->may_throw)
6de9cd9a 1455 {
1d65f45c 1456 emit_post_landing_pad (&eh_seq, tf->region);
6de9cd9a 1457
1d65f45c 1458 x = gimple_build_assign (finally_tmp,
413581ba 1459 build_int_cst (integer_type_node, eh_index));
1d65f45c
RH
1460 gimple_seq_add_stmt (&eh_seq, x);
1461
1462 x = gimple_build_goto (finally_label);
29f5bccb 1463 gimple_set_location (x, tf_loc);
1d65f45c 1464 gimple_seq_add_stmt (&eh_seq, x);
6de9cd9a 1465
3d528853
NF
1466 tmp = build_int_cst (integer_type_node, eh_index);
1467 last_case = build_case_label (tmp, NULL,
1468 create_artificial_label (tf_loc));
9771b263 1469 case_label_vec.quick_push (last_case);
6de9cd9a
DN
1470 last_case_index++;
1471
726a989a 1472 x = gimple_build_label (CASE_LABEL (last_case));
1d65f45c
RH
1473 gimple_seq_add_stmt (&eh_seq, x);
1474 emit_resx (&eh_seq, tf->region);
6de9cd9a
DN
1475 }
1476
726a989a
RB
1477 x = gimple_build_label (finally_label);
1478 gimple_seq_add_stmt (&tf->top_p_seq, x);
6de9cd9a 1479
efa7882f 1480 lower_eh_constructs_1 (state, &finally);
726a989a 1481 gimple_seq_add_seq (&tf->top_p_seq, finally);
6de9cd9a
DN
1482
1483 /* Redirect each incoming goto edge. */
1484 q = tf->goto_queue;
1485 qe = q + tf->goto_queue_active;
1486 j = last_case_index + tf->may_return;
726a989a
RB
1487 /* Prepare the assignments to finally_tmp that are executed upon the
1488 entrance through a particular edge. */
6de9cd9a
DN
1489 for (; q < qe; ++q)
1490 {
355a7673 1491 gimple_seq mod = NULL;
726a989a
RB
1492 int switch_id;
1493 unsigned int case_index;
1494
6de9cd9a
DN
1495 if (q->index < 0)
1496 {
726a989a 1497 x = gimple_build_assign (finally_tmp,
413581ba
RG
1498 build_int_cst (integer_type_node,
1499 return_index));
726a989a 1500 gimple_seq_add_stmt (&mod, x);
8d686507 1501 do_return_redirection (q, finally_label, mod);
6de9cd9a
DN
1502 switch_id = return_index;
1503 }
1504 else
1505 {
726a989a 1506 x = gimple_build_assign (finally_tmp,
413581ba 1507 build_int_cst (integer_type_node, q->index));
726a989a
RB
1508 gimple_seq_add_stmt (&mod, x);
1509 do_goto_redirection (q, finally_label, mod, tf);
6de9cd9a
DN
1510 switch_id = q->index;
1511 }
1512
1513 case_index = j + q->index;
9771b263 1514 if (case_label_vec.length () <= case_index || !case_label_vec[case_index])
726a989a
RB
1515 {
1516 tree case_lab;
3d528853
NF
1517 tmp = build_int_cst (integer_type_node, switch_id);
1518 case_lab = build_case_label (tmp, NULL,
1519 create_artificial_label (tf_loc));
726a989a 1520 /* We store the cont_stmt in the pointer map, so that we can recover
ffa03772 1521 it in the loop below. */
726a989a 1522 if (!cont_map)
355fe088 1523 cont_map = new hash_map<tree, gimple *>;
b787e7a2 1524 cont_map->put (case_lab, q->cont_stmt);
9771b263 1525 case_label_vec.quick_push (case_lab);
726a989a 1526 }
dd58eb5a
AO
1527 }
1528 for (j = last_case_index; j < last_case_index + nlabels; j++)
1529 {
355fe088 1530 gimple *cont_stmt;
dd58eb5a 1531
9771b263 1532 last_case = case_label_vec[j];
dd58eb5a
AO
1533
1534 gcc_assert (last_case);
726a989a 1535 gcc_assert (cont_map);
dd58eb5a 1536
b787e7a2 1537 cont_stmt = *cont_map->get (last_case);
dd58eb5a 1538
ffa03772 1539 x = gimple_build_label (CASE_LABEL (last_case));
726a989a
RB
1540 gimple_seq_add_stmt (&switch_body, x);
1541 gimple_seq_add_stmt (&switch_body, cont_stmt);
dd58eb5a 1542 maybe_record_in_goto_queue (state, cont_stmt);
6de9cd9a 1543 }
726a989a 1544 if (cont_map)
b787e7a2 1545 delete cont_map;
726a989a 1546
6de9cd9a 1547 replace_goto_queue (tf);
6de9cd9a 1548
0f1f6967
SB
1549 /* Make sure that the last case is the default label, as one is required.
1550 Then sort the labels, which is also required in GIMPLE. */
6de9cd9a 1551 CASE_LOW (last_case) = NULL;
e9ff9caf
RB
1552 tree tem = case_label_vec.pop ();
1553 gcc_assert (tem == last_case);
0f1f6967 1554 sort_case_labels (case_label_vec);
6de9cd9a 1555
726a989a
RB
1556 /* Build the switch statement, setting last_case to be the default
1557 label. */
fd8d363e
SB
1558 switch_stmt = gimple_build_switch (finally_tmp, last_case,
1559 case_label_vec);
c2255bc4 1560 gimple_set_location (switch_stmt, finally_loc);
726a989a
RB
1561
1562 /* Need to link SWITCH_STMT after running replace_goto_queue
1563 due to not wanting to process the same goto stmts twice. */
1564 gimple_seq_add_stmt (&tf->top_p_seq, switch_stmt);
1565 gimple_seq_add_seq (&tf->top_p_seq, switch_body);
6de9cd9a
DN
1566}
1567
1568/* Decide whether or not we are going to duplicate the finally block.
1569 There are several considerations.
1570
6de9cd9a
DN
1571 Second, we'd like to prevent egregious code growth. One way to
1572 do this is to estimate the size of the finally block, multiply
1573 that by the number of copies we'd need to make, and compare against
1574 the estimate of the size of the switch machinery we'd have to add. */
1575
1576static bool
0a35513e 1577decide_copy_try_finally (int ndests, bool may_throw, gimple_seq finally)
6de9cd9a
DN
1578{
1579 int f_estimate, sw_estimate;
538dd0b7 1580 geh_else *eh_else;
0a35513e
AH
1581
1582 /* If there's an EH_ELSE involved, the exception path is separate
1583 and really doesn't come into play for this computation. */
1584 eh_else = get_eh_else (finally);
1585 if (eh_else)
1586 {
1587 ndests -= may_throw;
1588 finally = gimple_eh_else_n_body (eh_else);
1589 }
6de9cd9a
DN
1590
1591 if (!optimize)
bccc50d4
JJ
1592 {
1593 gimple_stmt_iterator gsi;
1594
1595 if (ndests == 1)
1596 return true;
1597
1598 for (gsi = gsi_start (finally); !gsi_end_p (gsi); gsi_next (&gsi))
1599 {
cc6fbd80
EB
1600 /* Duplicate __builtin_stack_restore in the hope of eliminating it
1601 on the EH paths and, consequently, useless cleanups. */
355fe088 1602 gimple *stmt = gsi_stmt (gsi);
cc6fbd80
EB
1603 if (!is_gimple_debug (stmt)
1604 && !gimple_clobber_p (stmt)
1605 && !gimple_call_builtin_p (stmt, BUILT_IN_STACK_RESTORE))
bccc50d4
JJ
1606 return false;
1607 }
1608 return true;
1609 }
6de9cd9a
DN
1610
1611 /* Finally estimate N times, plus N gotos. */
fcb901cd 1612 f_estimate = estimate_num_insns_seq (finally, &eni_size_weights);
6de9cd9a
DN
1613 f_estimate = (f_estimate + 1) * ndests;
1614
1615 /* Switch statement (cost 10), N variable assignments, N gotos. */
1616 sw_estimate = 10 + 2 * ndests;
1617
1618 /* Optimize for size clearly wants our best guess. */
efd8f750 1619 if (optimize_function_for_size_p (cfun))
6de9cd9a
DN
1620 return f_estimate < sw_estimate;
1621
1622 /* ??? These numbers are completely made up so far. */
1623 if (optimize > 1)
7465ed07 1624 return f_estimate < 100 || f_estimate < sw_estimate * 2;
6de9cd9a 1625 else
7465ed07 1626 return f_estimate < 40 || f_estimate * 2 < sw_estimate * 3;
6de9cd9a
DN
1627}
1628
d3f28910
JM
1629/* REG is the enclosing region for a possible cleanup region, or the region
1630 itself. Returns TRUE if such a region would be unreachable.
1631
1632 Cleanup regions within a must-not-throw region aren't actually reachable
1633 even if there are throwing stmts within them, because the personality
1634 routine will call terminate before unwinding. */
1635
1636static bool
1637cleanup_is_dead_in (eh_region reg)
1638{
1639 while (reg && reg->type == ERT_CLEANUP)
1640 reg = reg->outer;
1641 return (reg && reg->type == ERT_MUST_NOT_THROW);
1642}
726a989a
RB
1643
1644/* A subroutine of lower_eh_constructs_1. Lower a GIMPLE_TRY_FINALLY node
6de9cd9a 1645 to a sequence of labels and blocks, plus the exception region trees
19114537 1646 that record all the magic. This is complicated by the need to
6de9cd9a
DN
1647 arrange for the FINALLY block to be executed on all exits. */
1648
726a989a 1649static gimple_seq
538dd0b7 1650lower_try_finally (struct leh_state *state, gtry *tp)
6de9cd9a
DN
1651{
1652 struct leh_tf_state this_tf;
1653 struct leh_state this_state;
1654 int ndests;
e19d1f06 1655 gimple_seq old_eh_seq;
6de9cd9a
DN
1656
1657 /* Process the try block. */
1658
1659 memset (&this_tf, 0, sizeof (this_tf));
726a989a 1660 this_tf.try_finally_expr = tp;
6de9cd9a
DN
1661 this_tf.top_p = tp;
1662 this_tf.outer = state;
481d1b81 1663 if (using_eh_for_cleanups_p () && !cleanup_is_dead_in (state->cur_region))
d3f28910
JM
1664 {
1665 this_tf.region = gen_eh_region_cleanup (state->cur_region);
1666 this_state.cur_region = this_tf.region;
1667 }
6de9cd9a 1668 else
d3f28910
JM
1669 {
1670 this_tf.region = NULL;
1671 this_state.cur_region = state->cur_region;
1672 }
6de9cd9a 1673
1d65f45c 1674 this_state.ehp_region = state->ehp_region;
6de9cd9a
DN
1675 this_state.tf = &this_tf;
1676
e19d1f06
RH
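  /* Stash the enclosing EH sequence and start a fresh one, so that EH
     statements emitted while lowering this try-finally are collected
     separately; they are re-appended to the outer sequence below.  */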
1677 old_eh_seq = eh_seq;
1678 eh_seq = NULL;
1679
355a7673 1680 lower_eh_constructs_1 (&this_state, gimple_try_eval_ptr (tp));
6de9cd9a
DN
1681
1682 /* Determine if the try block is escaped through the bottom. */
726a989a 1683 this_tf.may_fallthru = gimple_seq_may_fallthru (gimple_try_eval (tp));
6de9cd9a
DN
1684
1685 /* Determine if any exceptions are possible within the try block. */
d3f28910 1686 if (this_tf.region)
b7da9fd4 1687 this_tf.may_throw = eh_region_may_contain_throw (this_tf.region);
6de9cd9a 1688 if (this_tf.may_throw)
1d65f45c 1689 honor_protect_cleanup_actions (state, &this_state, &this_tf);
6de9cd9a 1690
6de9cd9a
DN
1691 /* Determine how many edges (still) reach the finally block. Or rather,
1692 how many destinations are reached by the finally block. Use this to
1693 determine how we process the finally block itself. */
1694
9771b263 1695 ndests = this_tf.dest_array.length ();
6de9cd9a
DN
1696 ndests += this_tf.may_fallthru;
1697 ndests += this_tf.may_return;
1698 ndests += this_tf.may_throw;
1699
1700 /* If the FINALLY block is not reachable, dike it out. */
1701 if (ndests == 0)
726a989a
RB
1702 {
1703 gimple_seq_add_seq (&this_tf.top_p_seq, gimple_try_eval (tp));
1704 gimple_try_set_cleanup (tp, NULL);
1705 }
6de9cd9a
DN
1706 /* If the finally block doesn't fall through, then any destination
1707 we might try to impose there isn't reached either. There may be
1708 some minor amount of cleanup and redirection still needed. */
726a989a 1709 else if (!gimple_seq_may_fallthru (gimple_try_cleanup (tp)))
6de9cd9a
DN
1710 lower_try_finally_nofallthru (state, &this_tf);
1711
1712 /* We can easily special-case redirection to a single destination. */
1713 else if (ndests == 1)
1714 lower_try_finally_onedest (state, &this_tf);
0a35513e
AH
1715 else if (decide_copy_try_finally (ndests, this_tf.may_throw,
1716 gimple_try_cleanup (tp)))
6de9cd9a
DN
1717 lower_try_finally_copy (state, &this_tf);
1718 else
1719 lower_try_finally_switch (state, &this_tf);
1720
1721 /* If someone requested we add a label at the end of the transformed
1722 block, do so. */
1723 if (this_tf.fallthru_label)
1724 {
726a989a 1725 /* This must be reached only if ndests == 0. */
355fe088 1726 gimple *x = gimple_build_label (this_tf.fallthru_label);
726a989a 1727 gimple_seq_add_stmt (&this_tf.top_p_seq, x);
6de9cd9a
DN
1728 }
1729
9771b263 1730 this_tf.dest_array.release ();
04695783 1731 free (this_tf.goto_queue);
0f547d3d 1732 if (this_tf.goto_queue_map)
b787e7a2 1733 delete this_tf.goto_queue_map;
726a989a 1734
e19d1f06
RH
1735 /* If there was an old (aka outer) eh_seq, append the current eh_seq.
1736 If there was no old eh_seq, then the append is trivially already done. */
1737 if (old_eh_seq)
1738 {
1739 if (eh_seq == NULL)
1740 eh_seq = old_eh_seq;
1741 else
1742 {
1743 gimple_seq new_eh_seq = eh_seq;
1744 eh_seq = old_eh_seq;
c3284718 1745 gimple_seq_add_seq (&eh_seq, new_eh_seq);
e19d1f06
RH
1746 }
1747 }
1748
726a989a 1749 return this_tf.top_p_seq;
6de9cd9a
DN
1750}
1751
726a989a
RB
1752/* A subroutine of lower_eh_constructs_1. Lower a GIMPLE_TRY_CATCH with a
1753 list of GIMPLE_CATCH to a sequence of labels and blocks, plus the
1754 exception region trees that record all the magic. */
6de9cd9a 1755
726a989a 1756static gimple_seq
538dd0b7 1757lower_catch (struct leh_state *state, gtry *tp)
6de9cd9a 1758{
b7da9fd4
RH
1759 eh_region try_region = NULL;
1760 struct leh_state this_state = *state;
726a989a 1761 gimple_stmt_iterator gsi;
6de9cd9a 1762 tree out_label;
355a7673 1763 gimple_seq new_seq, cleanup;
355fe088 1764 gimple *x;
c2255bc4 1765 location_t try_catch_loc = gimple_location (tp);
6de9cd9a 1766
b7da9fd4
RH
1767 if (flag_exceptions)
1768 {
1769 try_region = gen_eh_region_try (state->cur_region);
1770 this_state.cur_region = try_region;
1771 }
6de9cd9a 1772
355a7673 1773 lower_eh_constructs_1 (&this_state, gimple_try_eval_ptr (tp));
6de9cd9a 1774
b7da9fd4 1775 if (!eh_region_may_contain_throw (try_region))
1d65f45c
RH
1776 return gimple_try_eval (tp);
1777
1778 new_seq = NULL;
1779 emit_eh_dispatch (&new_seq, try_region);
1780 emit_resx (&new_seq, try_region);
1781
1782 this_state.cur_region = state->cur_region;
1783 this_state.ehp_region = try_region;
6de9cd9a 1784
3b445b24
JJ
1785 /* Add eh_seq from lowering EH in the cleanup sequence after the cleanup
1786 itself, so that e.g. for coverage purposes the nested cleanups don't
1787 appear before the cleanup body. See PR64634 for details. */
1788 gimple_seq old_eh_seq = eh_seq;
1789 eh_seq = NULL;
1790
6de9cd9a 1791 out_label = NULL;
355a7673
MM
1792 cleanup = gimple_try_cleanup (tp);
1793 for (gsi = gsi_start (cleanup);
1d65f45c
RH
1794 !gsi_end_p (gsi);
1795 gsi_next (&gsi))
6de9cd9a 1796 {
1d65f45c 1797 eh_catch c;
538dd0b7 1798 gcatch *catch_stmt;
1d65f45c 1799 gimple_seq handler;
6de9cd9a 1800
538dd0b7
DM
1801 catch_stmt = as_a <gcatch *> (gsi_stmt (gsi));
1802 c = gen_eh_region_catch (try_region, gimple_catch_types (catch_stmt));
6de9cd9a 1803
538dd0b7 1804 handler = gimple_catch_handler (catch_stmt);
355a7673 1805 lower_eh_constructs_1 (&this_state, &handler);
6de9cd9a 1806
1d65f45c
RH
1807 c->label = create_artificial_label (UNKNOWN_LOCATION);
1808 x = gimple_build_label (c->label);
1809 gimple_seq_add_stmt (&new_seq, x);
6de9cd9a 1810
1d65f45c 1811 gimple_seq_add_seq (&new_seq, handler);
6de9cd9a 1812
1d65f45c 1813 if (gimple_seq_may_fallthru (new_seq))
6de9cd9a
DN
1814 {
1815 if (!out_label)
c2255bc4 1816 out_label = create_artificial_label (try_catch_loc);
6de9cd9a 1817
726a989a 1818 x = gimple_build_goto (out_label);
1d65f45c 1819 gimple_seq_add_stmt (&new_seq, x);
6de9cd9a 1820 }
d815d34e
MM
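      /* A catch-all handler (empty type list) catches everything, so any
	 handlers following it would be unreachable; stop emitting them.  */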
1821 if (!c->type_list)
1822 break;
6de9cd9a
DN
1823 }
1824
1d65f45c
RH
1825 gimple_try_set_cleanup (tp, new_seq);
1826
3b445b24
JJ
1827 gimple_seq new_eh_seq = eh_seq;
1828 eh_seq = old_eh_seq;
1829 gimple_seq ret_seq = frob_into_branch_around (tp, try_region, out_label);
1830 gimple_seq_add_seq (&eh_seq, new_eh_seq);
1831 return ret_seq;
6de9cd9a
DN
1832}
1833
726a989a
RB
1834/* A subroutine of lower_eh_constructs_1. Lower a GIMPLE_TRY with a
1835 GIMPLE_EH_FILTER to a sequence of labels and blocks, plus the exception
6de9cd9a
DN
1836 region trees that record all the magic. */
1837
726a989a 1838static gimple_seq
538dd0b7 1839lower_eh_filter (struct leh_state *state, gtry *tp)
6de9cd9a 1840{
b7da9fd4
RH
1841 struct leh_state this_state = *state;
1842 eh_region this_region = NULL;
355fe088 1843 gimple *inner, *x;
1d65f45c 1844 gimple_seq new_seq;
19114537 1845
726a989a
RB
1846 inner = gimple_seq_first_stmt (gimple_try_cleanup (tp));
1847
b7da9fd4
RH
1848 if (flag_exceptions)
1849 {
1850 this_region = gen_eh_region_allowed (state->cur_region,
1851 gimple_eh_filter_types (inner));
1852 this_state.cur_region = this_region;
1853 }
19114537 1854
355a7673 1855 lower_eh_constructs_1 (&this_state, gimple_try_eval_ptr (tp));
6de9cd9a 1856
b7da9fd4 1857 if (!eh_region_may_contain_throw (this_region))
1d65f45c
RH
1858 return gimple_try_eval (tp);
1859
1860 new_seq = NULL;
1861 this_state.cur_region = state->cur_region;
1862 this_state.ehp_region = this_region;
1863
1864 emit_eh_dispatch (&new_seq, this_region);
1865 emit_resx (&new_seq, this_region);
1866
1867 this_region->u.allowed.label = create_artificial_label (UNKNOWN_LOCATION);
1868 x = gimple_build_label (this_region->u.allowed.label);
1869 gimple_seq_add_stmt (&new_seq, x);
1870
355a7673 1871 lower_eh_constructs_1 (&this_state, gimple_eh_filter_failure_ptr (inner));
1d65f45c
RH
1872 gimple_seq_add_seq (&new_seq, gimple_eh_filter_failure (inner));
1873
1874 gimple_try_set_cleanup (tp, new_seq);
6de9cd9a 1875
1d65f45c
RH
1876 return frob_into_branch_around (tp, this_region, NULL);
1877}
1878
1879/* A subroutine of lower_eh_constructs_1. Lower a GIMPLE_TRY with
1880 a GIMPLE_EH_MUST_NOT_THROW to a sequence of labels and blocks,
1881 plus the exception region trees that record all the magic. */
1882
1883static gimple_seq
538dd0b7 1884lower_eh_must_not_throw (struct leh_state *state, gtry *tp)
1d65f45c 1885{
b7da9fd4 1886 struct leh_state this_state = *state;
1d65f45c 1887
b7da9fd4
RH
1888 if (flag_exceptions)
1889 {
355fe088 1890 gimple *inner = gimple_seq_first_stmt (gimple_try_cleanup (tp));
b7da9fd4 1891 eh_region this_region;
1d65f45c 1892
b7da9fd4
RH
1893 this_region = gen_eh_region_must_not_throw (state->cur_region);
1894 this_region->u.must_not_throw.failure_decl
538dd0b7
DM
1895 = gimple_eh_must_not_throw_fndecl (
1896 as_a <geh_mnt *> (inner));
c16fd676
RB
1897 this_region->u.must_not_throw.failure_loc
1898 = LOCATION_LOCUS (gimple_location (tp));
1d65f45c 1899
b7da9fd4
RH
1900 /* In order to get mangling applied to this decl, we must mark it
1901 used now. Otherwise, pass_ipa_free_lang_data won't think it
1902 needs to happen. */
1903 TREE_USED (this_region->u.must_not_throw.failure_decl) = 1;
1d65f45c 1904
b7da9fd4
RH
1905 this_state.cur_region = this_region;
1906 }
6de9cd9a 1907
355a7673 1908 lower_eh_constructs_1 (&this_state, gimple_try_eval_ptr (tp));
6de9cd9a 1909
1d65f45c 1910 return gimple_try_eval (tp);
6de9cd9a
DN
1911}
1912
1913/* Implement a cleanup expression. This is similar to try-finally,
1914 except that we only execute the cleanup block for exception edges. */
1915
726a989a 1916static gimple_seq
538dd0b7 1917lower_cleanup (struct leh_state *state, gtry *tp)
6de9cd9a 1918{
b7da9fd4
RH
1919 struct leh_state this_state = *state;
1920 eh_region this_region = NULL;
6de9cd9a 1921 struct leh_tf_state fake_tf;
726a989a 1922 gimple_seq result;
d3f28910 1923 bool cleanup_dead = cleanup_is_dead_in (state->cur_region);
6de9cd9a 1924
d3f28910 1925 if (flag_exceptions && !cleanup_dead)
6de9cd9a 1926 {
b7da9fd4
RH
1927 this_region = gen_eh_region_cleanup (state->cur_region);
1928 this_state.cur_region = this_region;
6de9cd9a
DN
1929 }
1930
355a7673 1931 lower_eh_constructs_1 (&this_state, gimple_try_eval_ptr (tp));
6de9cd9a 1932
d3f28910 1933 if (cleanup_dead || !eh_region_may_contain_throw (this_region))
1d65f45c 1934 return gimple_try_eval (tp);
6de9cd9a
DN
1935
1936 /* Build enough of a try-finally state so that we can reuse
1937 honor_protect_cleanup_actions. */
1938 memset (&fake_tf, 0, sizeof (fake_tf));
c2255bc4 1939 fake_tf.top_p = fake_tf.try_finally_expr = tp;
6de9cd9a
DN
1940 fake_tf.outer = state;
1941 fake_tf.region = this_region;
726a989a 1942 fake_tf.may_fallthru = gimple_seq_may_fallthru (gimple_try_eval (tp));
6de9cd9a
DN
1943 fake_tf.may_throw = true;
1944
6de9cd9a
DN
1945 honor_protect_cleanup_actions (state, NULL, &fake_tf);
1946
1947 if (fake_tf.may_throw)
1948 {
1949 /* In this case honor_protect_cleanup_actions had nothing to do,
1950 and we should process this normally. */
355a7673 1951 lower_eh_constructs_1 (state, gimple_try_cleanup_ptr (tp));
1d65f45c
RH
1952 result = frob_into_branch_around (tp, this_region,
1953 fake_tf.fallthru_label);
6de9cd9a
DN
1954 }
1955 else
1956 {
1957 /* In this case honor_protect_cleanup_actions did nearly all of
1958 the work. All we have left is to append the fallthru_label. */
1959
726a989a 1960 result = gimple_try_eval (tp);
6de9cd9a
DN
1961 if (fake_tf.fallthru_label)
1962 {
355fe088 1963 gimple *x = gimple_build_label (fake_tf.fallthru_label);
726a989a 1964 gimple_seq_add_stmt (&result, x);
6de9cd9a
DN
1965 }
1966 }
726a989a 1967 return result;
6de9cd9a
DN
1968}
1969
1d65f45c 1970/* Main loop for lowering eh constructs. Also moves gsi to the next
726a989a 1971 statement. */
6de9cd9a
DN
1972
1973static void
726a989a 1974lower_eh_constructs_2 (struct leh_state *state, gimple_stmt_iterator *gsi)
6de9cd9a 1975{
726a989a 1976 gimple_seq replace;
355fe088
TS
1977 gimple *x;
1978 gimple *stmt = gsi_stmt (*gsi);
6de9cd9a 1979
726a989a 1980 switch (gimple_code (stmt))
6de9cd9a 1981 {
726a989a 1982 case GIMPLE_CALL:
1d65f45c
RH
1983 {
1984 tree fndecl = gimple_call_fndecl (stmt);
1985 tree rhs, lhs;
1986
3d78e008 1987 if (fndecl && fndecl_built_in_p (fndecl, BUILT_IN_NORMAL))
1d65f45c
RH
1988 switch (DECL_FUNCTION_CODE (fndecl))
1989 {
1990 case BUILT_IN_EH_POINTER:
1991 /* The front end may have generated a call to
1992 __builtin_eh_pointer (0) within a catch region. Replace
1993 this zero argument with the current catch region number. */
1994 if (state->ehp_region)
1995 {
413581ba
RG
1996 tree nr = build_int_cst (integer_type_node,
1997 state->ehp_region->index);
1d65f45c
RH
1998 gimple_call_set_arg (stmt, 0, nr);
1999 }
2000 else
2001 {
2002 /* The user has done something silly. Remove it. */
9a9d280e 2003 rhs = null_pointer_node;
1d65f45c
RH
2004 goto do_replace;
2005 }
2006 break;
2007
2008 case BUILT_IN_EH_FILTER:
2009 /* ??? This should never appear, but since it's a builtin it
2010 is accessible to abuse by users. Just remove it and
2011 replace the use with the arbitrary value zero. */
2012 rhs = build_int_cst (TREE_TYPE (TREE_TYPE (fndecl)), 0);
2013 do_replace:
2014 lhs = gimple_call_lhs (stmt);
2015 x = gimple_build_assign (lhs, rhs);
2016 gsi_insert_before (gsi, x, GSI_SAME_STMT);
2017 /* FALLTHRU */
2018
2019 case BUILT_IN_EH_COPY_VALUES:
2020 /* Likewise this should not appear. Remove it. */
2021 gsi_remove (gsi, true);
2022 return;
2023
2024 default:
2025 break;
2026 }
2027 }
2028 /* FALLTHRU */
2029
726a989a 2030 case GIMPLE_ASSIGN:
c5ce6f30 2031 /* If the stmt can throw, use a new temporary for the assignment
ba4d8f9d 2032 to a LHS. This makes sure the old value of the LHS is
87cd4259 2033 available on the EH edge. Only do so for statements that
073a8998 2034 potentially fall through (no noreturn calls e.g.), otherwise
87cd4259 2035 this new assignment might create fake fallthru regions. */
36bbc05d 2036 if (stmt_could_throw_p (cfun, stmt)
ba4d8f9d 2037 && gimple_has_lhs (stmt)
87cd4259 2038 && gimple_stmt_may_fallthru (stmt)
ba4d8f9d
RG
2039 && !tree_could_throw_p (gimple_get_lhs (stmt))
2040 && is_gimple_reg_type (TREE_TYPE (gimple_get_lhs (stmt))))
2041 {
2042 tree lhs = gimple_get_lhs (stmt);
b731b390 2043 tree tmp = create_tmp_var (TREE_TYPE (lhs));
355fe088 2044 gimple *s = gimple_build_assign (lhs, tmp);
ba4d8f9d
RG
2045 gimple_set_location (s, gimple_location (stmt));
2046 gimple_set_block (s, gimple_block (stmt));
2047 gimple_set_lhs (stmt, tmp);
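	  /* For complex and vector temporaries, mark the new variable as a
	     GIMPLE register so it is treated as a single value rather than
	     as partially-modifiable memory.  */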
2048 if (TREE_CODE (TREE_TYPE (tmp)) == COMPLEX_TYPE
2049 || TREE_CODE (TREE_TYPE (tmp)) == VECTOR_TYPE)
2050 DECL_GIMPLE_REG_P (tmp) = 1;
2051 gsi_insert_after (gsi, s, GSI_SAME_STMT);
2052 }
6de9cd9a 2053 /* Look for things that can throw exceptions, and record them. */
36bbc05d 2054 if (state->cur_region && stmt_could_throw_p (cfun, stmt))
6de9cd9a 2055 {
726a989a 2056 record_stmt_eh_region (state->cur_region, stmt);
6de9cd9a 2057 note_eh_region_may_contain_throw (state->cur_region);
6de9cd9a
DN
2058 }
2059 break;
2060
726a989a
RB
2061 case GIMPLE_COND:
2062 case GIMPLE_GOTO:
2063 case GIMPLE_RETURN:
2064 maybe_record_in_goto_queue (state, stmt);
6de9cd9a
DN
2065 break;
2066
726a989a 2067 case GIMPLE_SWITCH:
538dd0b7 2068 verify_norecord_switch_expr (state, as_a <gswitch *> (stmt));
6de9cd9a
DN
2069 break;
2070
726a989a 2071 case GIMPLE_TRY:
538dd0b7
DM
2072 {
2073 gtry *try_stmt = as_a <gtry *> (stmt);
2074 if (gimple_try_kind (try_stmt) == GIMPLE_TRY_FINALLY)
2075 replace = lower_try_finally (state, try_stmt);
2076 else
2077 {
2078 x = gimple_seq_first_stmt (gimple_try_cleanup (try_stmt));
2079 if (!x)
6728ee79 2080 {
538dd0b7
DM
2081 replace = gimple_try_eval (try_stmt);
2082 lower_eh_constructs_1 (state, &replace);
2083 }
2084 else
2085 switch (gimple_code (x))
2086 {
6728ee79 2087 case GIMPLE_CATCH:
538dd0b7
DM
2088 replace = lower_catch (state, try_stmt);
2089 break;
6728ee79 2090 case GIMPLE_EH_FILTER:
538dd0b7
DM
2091 replace = lower_eh_filter (state, try_stmt);
2092 break;
6728ee79 2093 case GIMPLE_EH_MUST_NOT_THROW:
538dd0b7
DM
2094 replace = lower_eh_must_not_throw (state, try_stmt);
2095 break;
0a35513e 2096 case GIMPLE_EH_ELSE:
538dd0b7
DM
2097 /* This code is only valid with GIMPLE_TRY_FINALLY. */
2098 gcc_unreachable ();
6728ee79 2099 default:
538dd0b7
DM
2100 replace = lower_cleanup (state, try_stmt);
2101 break;
2102 }
2103 }
2104 }
726a989a
RB
2105
2106 /* Remove the old stmt and insert the transformed sequence
2107 instead. */
2108 gsi_insert_seq_before (gsi, replace, GSI_SAME_STMT);
2109 gsi_remove (gsi, true);
2110
2111 /* Return since we don't want gsi_next (). */
2112 return;
6de9cd9a 2113
0a35513e
AH
2114 case GIMPLE_EH_ELSE:
2115 /* We should be eliminating this in lower_try_finally et al. */
2116 gcc_unreachable ();
2117
6de9cd9a
DN
2118 default:
2119 /* A type, a decl, or some kind of statement that we're not
2120 interested in. Don't walk them. */
2121 break;
2122 }
726a989a
RB
2123
2124 gsi_next (gsi);
2125}
2126
2127/* A helper to unwrap a gimple_seq and feed stmts to lower_eh_constructs_2. */
2128
2129static void
355a7673 2130lower_eh_constructs_1 (struct leh_state *state, gimple_seq *pseq)
726a989a
RB
2131{
2132 gimple_stmt_iterator gsi;
355a7673 2133 for (gsi = gsi_start (*pseq); !gsi_end_p (gsi);)
726a989a 2134 lower_eh_constructs_2 (state, &gsi);
6de9cd9a
DN
2135}
2136
17795822
TS
2137namespace {
2138
2139const pass_data pass_data_lower_eh =
be55bfe6
TS
2140{
2141 GIMPLE_PASS, /* type */
2142 "eh", /* name */
2143 OPTGROUP_NONE, /* optinfo_flags */
be55bfe6
TS
2144 TV_TREE_EH, /* tv_id */
2145 PROP_gimple_lcf, /* properties_required */
2146 PROP_gimple_leh, /* properties_provided */
2147 0, /* properties_destroyed */
2148 0, /* todo_flags_start */
2149 0, /* todo_flags_finish */
2150};
2151
17795822 2152class pass_lower_eh : public gimple_opt_pass
be55bfe6
TS
2153{
2154public:
2155 pass_lower_eh (gcc::context *ctxt)
2156 : gimple_opt_pass (pass_data_lower_eh, ctxt)
2157 {}
2158
2159 /* opt_pass methods: */
2160 virtual unsigned int execute (function *);
2161
2162}; // class pass_lower_eh
2163
2164unsigned int
2165pass_lower_eh::execute (function *fun)
6de9cd9a
DN
2166{
2167 struct leh_state null_state;
1d65f45c 2168 gimple_seq bodyp;
726a989a 2169
1d65f45c
RH
2170 bodyp = gimple_body (current_function_decl);
2171 if (bodyp == NULL)
2172 return 0;
6de9cd9a 2173
c203e8a7 2174 finally_tree = new hash_table<finally_tree_hasher> (31);
b7da9fd4 2175 eh_region_may_contain_throw_map = BITMAP_ALLOC (NULL);
1d65f45c 2176 memset (&null_state, 0, sizeof (null_state));
6de9cd9a 2177
726a989a 2178 collect_finally_tree_1 (bodyp, NULL);
355a7673
MM
2179 lower_eh_constructs_1 (&null_state, &bodyp);
2180 gimple_set_body (current_function_decl, bodyp);
6de9cd9a 2181
1d65f45c
RH
2182 /* We assume there's a return statement, or something, at the end of
2183 the function, and thus plopping the EH sequence afterward won't
2184 change anything. */
2185 gcc_assert (!gimple_seq_may_fallthru (bodyp));
2186 gimple_seq_add_seq (&bodyp, eh_seq);
2187
2188 /* We assume that since BODYP already existed, adding EH_SEQ to it
2189 didn't change its value, and we don't have to re-set the function. */
2190 gcc_assert (bodyp == gimple_body (current_function_decl));
6de9cd9a 2191
c203e8a7
TS
2192 delete finally_tree;
2193 finally_tree = NULL;
b7da9fd4 2194 BITMAP_FREE (eh_region_may_contain_throw_map);
1d65f45c 2195 eh_seq = NULL;
f9417da1
RG
2196
2197 /* If this function needs a language specific EH personality routine
2198 and the frontend didn't already set one do so now. */
be55bfe6 2199 if (function_needs_eh_personality (fun) == eh_personality_lang
f9417da1
RG
2200 && !DECL_FUNCTION_PERSONALITY (current_function_decl))
2201 DECL_FUNCTION_PERSONALITY (current_function_decl)
2202 = lang_hooks.eh_personality ();
2203
c2924966 2204 return 0;
6de9cd9a
DN
2205}
2206
17795822
TS
2207} // anon namespace
2208
27a4cd48
DM
2209gimple_opt_pass *
2210make_pass_lower_eh (gcc::context *ctxt)
2211{
2212 return new pass_lower_eh (ctxt);
2213}
6de9cd9a 2214\f
1d65f45c
RH
2215/* Create the multiple edges from an EH_DISPATCH statement to all of
2216 the possible handlers for its EH region. Return true if there's
2217 no fallthru edge; false if there is. */
6de9cd9a 2218
1d65f45c 2219bool
538dd0b7 2220make_eh_dispatch_edges (geh_dispatch *stmt)
6de9cd9a 2221{
1d65f45c
RH
2222 eh_region r;
2223 eh_catch c;
6de9cd9a
DN
2224 basic_block src, dst;
2225
1d65f45c 2226 r = get_eh_region_from_number (gimple_eh_dispatch_region (stmt));
726a989a 2227 src = gimple_bb (stmt);
6de9cd9a 2228
1d65f45c
RH
2229 switch (r->type)
2230 {
2231 case ERT_TRY:
2232 for (c = r->u.eh_try.first_catch; c ; c = c->next_catch)
2233 {
61ff5d6f 2234 dst = label_to_block (cfun, c->label);
1d65f45c 2235 make_edge (src, dst, 0);
19114537 2236
1d65f45c
RH
2237 /* A catch-all handler doesn't have a fallthru. */
2238 if (c->type_list == NULL)
2239 return false;
2240 }
2241 break;
a8ee227c 2242
1d65f45c 2243 case ERT_ALLOWED_EXCEPTIONS:
61ff5d6f 2244 dst = label_to_block (cfun, r->u.allowed.label);
1d65f45c
RH
2245 make_edge (src, dst, 0);
2246 break;
2247
2248 default:
2249 gcc_unreachable ();
2250 }
2251
2252 return true;
a8ee227c
JH
2253}
2254
1d65f45c
RH
2255/* Create the single EH edge from STMT to its nearest landing pad,
2256 if there is such a landing pad within the current function. */
2257
6de9cd9a 2258void
355fe088 2259make_eh_edges (gimple *stmt)
6de9cd9a 2260{
1d65f45c
RH
2261 basic_block src, dst;
2262 eh_landing_pad lp;
2263 int lp_nr;
6de9cd9a 2264
1d65f45c
RH
2265 lp_nr = lookup_stmt_eh_lp (stmt);
2266 if (lp_nr <= 0)
2267 return;
6de9cd9a 2268
1d65f45c
RH
2269 lp = get_eh_landing_pad_from_number (lp_nr);
2270 gcc_assert (lp != NULL);
a203a221 2271
1d65f45c 2272 src = gimple_bb (stmt);
61ff5d6f 2273 dst = label_to_block (cfun, lp->post_landing_pad);
1d65f45c 2274 make_edge (src, dst, EDGE_EH);
6de9cd9a
DN
2275}
2276
1d65f45c
RH
2277/* Do the work in redirecting EDGE_IN to NEW_BB within the EH region tree;
2278 do not actually perform the final edge redirection.
a3710436 2279
1d65f45c
RH
2280 CHANGE_REGION is true when we're being called from cleanup_empty_eh and
2281 we intend to change the destination EH region as well; this means
2282 EH_LANDING_PAD_NR must already be set on the destination block label.
2283 If false, we're being called from generic cfg manipulation code and we
2284 should preserve our place within the region tree. */
2285
2286static void
2287redirect_eh_edge_1 (edge edge_in, basic_block new_bb, bool change_region)
a3710436 2288{
1d65f45c
RH
2289 eh_landing_pad old_lp, new_lp;
2290 basic_block old_bb;
355fe088 2291 gimple *throw_stmt;
1d65f45c
RH
2292 int old_lp_nr, new_lp_nr;
2293 tree old_label, new_label;
2294 edge_iterator ei;
2295 edge e;
2296
2297 old_bb = edge_in->dest;
2298 old_label = gimple_block_label (old_bb);
2299 old_lp_nr = EH_LANDING_PAD_NR (old_label);
2300 gcc_assert (old_lp_nr > 0);
2301 old_lp = get_eh_landing_pad_from_number (old_lp_nr);
2302
2303 throw_stmt = last_stmt (edge_in->src);
2304 gcc_assert (lookup_stmt_eh_lp (throw_stmt) == old_lp_nr);
2305
2306 new_label = gimple_block_label (new_bb);
a3710436 2307
1d65f45c
RH
2308 /* Look for an existing region that might be using NEW_BB already. */
2309 new_lp_nr = EH_LANDING_PAD_NR (new_label);
2310 if (new_lp_nr)
a3710436 2311 {
1d65f45c
RH
2312 new_lp = get_eh_landing_pad_from_number (new_lp_nr);
2313 gcc_assert (new_lp);
b8698a0f 2314
1d65f45c
RH
2315 /* Unless CHANGE_REGION is true, the new and old landing pad
2316 had better be associated with the same EH region. */
2317 gcc_assert (change_region || new_lp->region == old_lp->region);
a3710436
JH
2318 }
2319 else
2320 {
1d65f45c
RH
2321 new_lp = NULL;
2322 gcc_assert (!change_region);
a3710436
JH
2323 }
2324
1d65f45c
RH
2325 /* Notice when we redirect the last EH edge away from OLD_BB. */
2326 FOR_EACH_EDGE (e, ei, old_bb->preds)
2327 if (e != edge_in && (e->flags & EDGE_EH))
2328 break;
cc7220fd 2329
1d65f45c 2330 if (new_lp)
cc7220fd 2331 {
1d65f45c
RH
2332 /* NEW_LP already exists. If there are still edges into OLD_LP,
2333 there's nothing to do with the EH tree. If there are no more
2334 edges into OLD_LP, then we want to remove OLD_LP as it is unused.
2335 If CHANGE_REGION is true, then our caller is expecting to remove
2336 the landing pad. */
2337 if (e == NULL && !change_region)
2338 remove_eh_landing_pad (old_lp);
cc7220fd 2339 }
1d65f45c 2340 else
cc7220fd 2341 {
1d65f45c
RH
2342 /* No correct landing pad exists. If there are no more edges
2343 into OLD_LP, then we can simply re-use the existing landing pad.
2344 Otherwise, we have to create a new landing pad. */
2345 if (e == NULL)
2346 {
2347 EH_LANDING_PAD_NR (old_lp->post_landing_pad) = 0;
2348 new_lp = old_lp;
2349 }
2350 else
2351 new_lp = gen_eh_landing_pad (old_lp->region);
2352 new_lp->post_landing_pad = new_label;
2353 EH_LANDING_PAD_NR (new_label) = new_lp->index;
cc7220fd 2354 }
1d65f45c
RH
2355
2356 /* Maybe move the throwing statement to the new region. */
2357 if (old_lp != new_lp)
cc7220fd 2358 {
1d65f45c
RH
2359 remove_stmt_from_eh_lp (throw_stmt);
2360 add_stmt_to_eh_lp (throw_stmt, new_lp->index);
cc7220fd 2361 }
cc7220fd
JH
2362}
2363
1d65f45c 2364/* Redirect EH edge E to NEW_BB. */
726a989a 2365
1d65f45c
RH
2366edge
2367redirect_eh_edge (edge edge_in, basic_block new_bb)
cc7220fd 2368{
1d65f45c
RH
2369 redirect_eh_edge_1 (edge_in, new_bb, false);
2370 return ssa_redirect_edge (edge_in, new_bb);
2371}
cc7220fd 2372
1d65f45c
RH
2373/* This is a subroutine of gimple_redirect_edge_and_branch. Update the
2374 labels for redirecting a non-fallthru EH_DISPATCH edge E to NEW_BB.
2375 The actual edge update will happen in the caller. */
cc7220fd 2376
1d65f45c 2377void
538dd0b7 2378redirect_eh_dispatch_edge (geh_dispatch *stmt, edge e, basic_block new_bb)
1d65f45c
RH
2379{
2380 tree new_lab = gimple_block_label (new_bb);
2381 bool any_changed = false;
2382 basic_block old_bb;
2383 eh_region r;
2384 eh_catch c;
2385
2386 r = get_eh_region_from_number (gimple_eh_dispatch_region (stmt));
2387 switch (r->type)
cc7220fd 2388 {
1d65f45c
RH
2389 case ERT_TRY:
2390 for (c = r->u.eh_try.first_catch; c ; c = c->next_catch)
cc7220fd 2391 {
61ff5d6f 2392 old_bb = label_to_block (cfun, c->label);
1d65f45c
RH
2393 if (old_bb == e->dest)
2394 {
2395 c->label = new_lab;
2396 any_changed = true;
2397 }
cc7220fd 2398 }
1d65f45c
RH
2399 break;
2400
2401 case ERT_ALLOWED_EXCEPTIONS:
61ff5d6f 2402 old_bb = label_to_block (cfun, r->u.allowed.label);
1d65f45c
RH
2403 gcc_assert (old_bb == e->dest);
2404 r->u.allowed.label = new_lab;
2405 any_changed = true;
2406 break;
2407
2408 default:
2409 gcc_unreachable ();
cc7220fd 2410 }
726a989a 2411
1d65f45c 2412 gcc_assert (any_changed);
cc7220fd 2413}
6de9cd9a 2414\f
726a989a
RB
2415/* Helper function for operation_could_trap_p and stmt_could_throw_p. */
2416
890065bf 2417bool
726a989a
RB
2418operation_could_trap_helper_p (enum tree_code op,
2419 bool fp_operation,
2420 bool honor_trapv,
2421 bool honor_nans,
2422 bool honor_snans,
2423 tree divisor,
2424 bool *handled)
2425{
2426 *handled = true;
2427 switch (op)
2428 {
2429 case TRUNC_DIV_EXPR:
2430 case CEIL_DIV_EXPR:
2431 case FLOOR_DIV_EXPR:
2432 case ROUND_DIV_EXPR:
2433 case EXACT_DIV_EXPR:
2434 case CEIL_MOD_EXPR:
2435 case FLOOR_MOD_EXPR:
2436 case ROUND_MOD_EXPR:
2437 case TRUNC_MOD_EXPR:
2438 case RDIV_EXPR:
228868f5 2439 if (honor_snans)
726a989a
RB
2440 return true;
2441 if (fp_operation)
2442 return flag_trapping_math;
2443 if (!TREE_CONSTANT (divisor) || integer_zerop (divisor))
2444 return true;
2445 return false;
2446
2447 case LT_EXPR:
2448 case LE_EXPR:
2449 case GT_EXPR:
2450 case GE_EXPR:
2451 case LTGT_EXPR:
2452 /* Some floating point comparisons may trap. */
2453 return honor_nans;
2454
2455 case EQ_EXPR:
2456 case NE_EXPR:
2457 case UNORDERED_EXPR:
2458 case ORDERED_EXPR:
2459 case UNLT_EXPR:
2460 case UNLE_EXPR:
2461 case UNGT_EXPR:
2462 case UNGE_EXPR:
2463 case UNEQ_EXPR:
2464 return honor_snans;
2465
726a989a
RB
2466 case NEGATE_EXPR:
2467 case ABS_EXPR:
2468 case CONJ_EXPR:
2469 /* These operations don't trap with floating point. */
2470 if (honor_trapv)
2471 return true;
2472 return false;
2473
e197e64e
KV
2474 case ABSU_EXPR:
2475 /* ABSU_EXPR never traps. */
2476 return false;
2477
726a989a
RB
2478 case PLUS_EXPR:
2479 case MINUS_EXPR:
2480 case MULT_EXPR:
2481 /* Any floating arithmetic may trap. */
2482 if (fp_operation && flag_trapping_math)
2483 return true;
2484 if (honor_trapv)
2485 return true;
2486 return false;
2487
f5e5b46c
RG
2488 case COMPLEX_EXPR:
2489 case CONSTRUCTOR:
2490 /* Constructing an object cannot trap. */
2491 return false;
2492
726a989a
RB
2493 default:
2494 /* Any floating arithmetic may trap. */
2495 if (fp_operation && flag_trapping_math)
2496 return true;
2497
2498 *handled = false;
2499 return false;
2500 }
2501}
2502
2503/* Return true if operation OP may trap. FP_OPERATION is true if OP is applied
2504 on floating-point values. HONOR_TRAPV is true if OP is applied on integer
2505 type operands that may trap. If OP is a division operator, DIVISOR contains
2506 the value of the divisor. */
2507
2508bool
2509operation_could_trap_p (enum tree_code op, bool fp_operation, bool honor_trapv,
2510 tree divisor)
2511{
2512 bool honor_nans = (fp_operation && flag_trapping_math
2513 && !flag_finite_math_only);
2514 bool honor_snans = fp_operation && flag_signaling_nans != 0;
2515 bool handled;
2516
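  /* Codes outside the comparison, unary and binary classes are never
     considered trapping operations here.  */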
2517 if (TREE_CODE_CLASS (op) != tcc_comparison
2518 && TREE_CODE_CLASS (op) != tcc_unary
c566cc9f 2519 && TREE_CODE_CLASS (op) != tcc_binary)
726a989a
RB
2520 return false;
2521
2522 return operation_could_trap_helper_p (op, fp_operation, honor_trapv,
2523 honor_nans, honor_snans, divisor,
2524 &handled);
2525}
2526
862d0b35
DN
2527
2528/* Returns true if it is possible to prove that the index of
2529 an array access REF (an ARRAY_REF expression) falls into the
2530 array bounds. */
2531
2532static bool
2533in_array_bounds_p (tree ref)
2534{
2535 tree idx = TREE_OPERAND (ref, 1);
2536 tree min, max;
2537
2538 if (TREE_CODE (idx) != INTEGER_CST)
2539 return false;
2540
2541 min = array_ref_low_bound (ref);
2542 max = array_ref_up_bound (ref);
2543 if (!min
2544 || !max
2545 || TREE_CODE (min) != INTEGER_CST
2546 || TREE_CODE (max) != INTEGER_CST)
2547 return false;
2548
2549 if (tree_int_cst_lt (idx, min)
2550 || tree_int_cst_lt (max, idx))
2551 return false;
2552
2553 return true;
2554}
2555
2556/* Returns true if it is possible to prove that the range of
2557 an array access REF (an ARRAY_RANGE_REF expression) falls
2558 into the array bounds. */
2559
2560static bool
2561range_in_array_bounds_p (tree ref)
2562{
2563 tree domain_type = TYPE_DOMAIN (TREE_TYPE (ref));
2564 tree range_min, range_max, min, max;
2565
2566 range_min = TYPE_MIN_VALUE (domain_type);
2567 range_max = TYPE_MAX_VALUE (domain_type);
2568 if (!range_min
2569 || !range_max
2570 || TREE_CODE (range_min) != INTEGER_CST
2571 || TREE_CODE (range_max) != INTEGER_CST)
2572 return false;
2573
2574 min = array_ref_low_bound (ref);
2575 max = array_ref_up_bound (ref);
2576 if (!min
2577 || !max
2578 || TREE_CODE (min) != INTEGER_CST
2579 || TREE_CODE (max) != INTEGER_CST)
2580 return false;
2581
2582 if (tree_int_cst_lt (range_min, min)
2583 || tree_int_cst_lt (max, range_max))
2584 return false;
2585
2586 return true;
2587}
2588
726a989a 2589/* Return true if EXPR can trap, as in dereferencing an invalid pointer
1eaba2f2
RH
2590 location or floating point arithmetic. C.f. the rtl version, may_trap_p.
2591 This routine expects only GIMPLE lhs or rhs input. */
6de9cd9a
DN
2592
2593bool
2594tree_could_trap_p (tree expr)
2595{
726a989a 2596 enum tree_code code;
1eaba2f2 2597 bool fp_operation = false;
9675412f 2598 bool honor_trapv = false;
726a989a 2599 tree t, base, div = NULL_TREE;
6de9cd9a 2600
726a989a
RB
2601 if (!expr)
2602 return false;
1d65f45c 2603
726a989a
RB
2604 code = TREE_CODE (expr);
2605 t = TREE_TYPE (expr);
2606
2607 if (t)
1eaba2f2 2608 {
04b03edb
RAE
2609 if (COMPARISON_CLASS_P (expr))
2610 fp_operation = FLOAT_TYPE_P (TREE_TYPE (TREE_OPERAND (expr, 0)));
2611 else
2612 fp_operation = FLOAT_TYPE_P (t);
726a989a 2613 honor_trapv = INTEGRAL_TYPE_P (t) && TYPE_OVERFLOW_TRAPS (t);
1eaba2f2
RH
2614 }
2615
726a989a
RB
2616 if (TREE_CODE_CLASS (code) == tcc_binary)
2617 div = TREE_OPERAND (expr, 1);
2618 if (operation_could_trap_p (code, fp_operation, honor_trapv, div))
2619 return true;
2620
d25cee4d 2621 restart:
6de9cd9a
DN
2622 switch (code)
2623 {
6de9cd9a
DN
2624 case COMPONENT_REF:
2625 case REALPART_EXPR:
2626 case IMAGPART_EXPR:
2627 case BIT_FIELD_REF:
483edb92 2628 case VIEW_CONVERT_EXPR:
d25cee4d
RH
2629 case WITH_SIZE_EXPR:
2630 expr = TREE_OPERAND (expr, 0);
2631 code = TREE_CODE (expr);
2632 goto restart;
a7e5372d
ZD
2633
2634 case ARRAY_RANGE_REF:
11fc4275
EB
2635 base = TREE_OPERAND (expr, 0);
2636 if (tree_could_trap_p (base))
a7e5372d 2637 return true;
11fc4275
EB
2638 if (TREE_THIS_NOTRAP (expr))
2639 return false;
11fc4275 2640 return !range_in_array_bounds_p (expr);
a7e5372d
ZD
2641
2642 case ARRAY_REF:
2643 base = TREE_OPERAND (expr, 0);
a7e5372d
ZD
2644 if (tree_could_trap_p (base))
2645 return true;
a7e5372d
ZD
2646 if (TREE_THIS_NOTRAP (expr))
2647 return false;
a7e5372d 2648 return !in_array_bounds_p (expr);
6de9cd9a 2649
4e1f39e4 2650 case TARGET_MEM_REF:
70f34814 2651 case MEM_REF:
4e1f39e4
RB
2652 if (TREE_CODE (TREE_OPERAND (expr, 0)) == ADDR_EXPR
2653 && tree_could_trap_p (TREE_OPERAND (TREE_OPERAND (expr, 0), 0)))
2654 return true;
2655 if (TREE_THIS_NOTRAP (expr))
70f34814 2656 return false;
4e1f39e4
RB
2657 /* We cannot prove that the access is in-bounds when we have
2658 variable-index TARGET_MEM_REFs. */
2659 if (code == TARGET_MEM_REF
2660 && (TMR_INDEX (expr) || TMR_INDEX2 (expr)))
2661 return true;
2662 if (TREE_CODE (TREE_OPERAND (expr, 0)) == ADDR_EXPR)
2663 {
2664 tree base = TREE_OPERAND (TREE_OPERAND (expr, 0), 0);
aca52e6f
RS
2665 poly_offset_int off = mem_ref_offset (expr);
2666 if (maybe_lt (off, 0))
4e1f39e4
RB
2667 return true;
2668 if (TREE_CODE (base) == STRING_CST)
aca52e6f
RS
2669 return maybe_le (TREE_STRING_LENGTH (base), off);
2670 tree size = DECL_SIZE_UNIT (base);
2671 if (size == NULL_TREE
2672 || !poly_int_tree_p (size)
2673 || maybe_le (wi::to_poly_offset (size), off))
4e1f39e4
RB
2674 return true;
2675 /* Now we are sure the first byte of the access is inside
2676 the object. */
2677 return false;
2678 }
2679 return true;
2680
6de9cd9a 2681 case INDIRECT_REF:
1eaba2f2
RH
2682 return !TREE_THIS_NOTRAP (expr);
2683
2684 case ASM_EXPR:
2685 return TREE_THIS_VOLATILE (expr);
5cb2183e 2686
726a989a
RB
2687 case CALL_EXPR:
2688 t = get_callee_fndecl (expr);
2689 /* Assume that calls to weak functions may trap. */
f2c3a8ce 2690 if (!t || !DECL_P (t))
1eaba2f2 2691 return true;
f2c3a8ce
JJ
2692 if (DECL_WEAK (t))
2693 return tree_could_trap_p (t);
2694 return false;
2695
2696 case FUNCTION_DECL:
2697 /* Assume that accesses to weak functions may trap, unless we know
2698 they are certainly defined in current TU or in some other
2699 LTO partition. */
5940e204 2700 if (DECL_WEAK (expr) && !DECL_COMDAT (expr) && DECL_EXTERNAL (expr))
f2c3a8ce 2701 {
5940e204
EB
2702 cgraph_node *node = cgraph_node::get (expr);
2703 if (node)
2704 node = node->function_symbol ();
2705 return !(node && node->in_other_partition);
f2c3a8ce
JJ
2706 }
2707 return false;
2708
2709 case VAR_DECL:
2710 /* Assume that accesses to weak vars may trap, unless we know
2711 they are certainly defined in current TU or in some other
2712 LTO partition. */
5940e204 2713 if (DECL_WEAK (expr) && !DECL_COMDAT (expr) && DECL_EXTERNAL (expr))
f2c3a8ce 2714 {
5940e204
EB
2715 varpool_node *node = varpool_node::get (expr);
2716 if (node)
2717 node = node->ultimate_alias_target ();
2718 return !(node && node->in_other_partition);
f2c3a8ce 2719 }
1eaba2f2
RH
2720 return false;
2721
726a989a
RB
2722 default:
2723 return false;
2724 }
2725}
1eaba2f2 2726
31b6733b
JJ
2727/* Return non-NULL if there is an integer operation with trapping overflow
2728 we can rewrite into non-trapping. Called via walk_tree from
2729 rewrite_to_non_trapping_overflow. */
2730
2731static tree
2732find_trapping_overflow (tree *tp, int *walk_subtrees, void *data)
2733{
2734 if (EXPR_P (*tp)
c068b2a5 2735 && ANY_INTEGRAL_TYPE_P (TREE_TYPE (*tp))
31b6733b
JJ
2736 && !operation_no_trapping_overflow (TREE_TYPE (*tp), TREE_CODE (*tp)))
2737 return *tp;
2738 if (IS_TYPE_OR_DECL_P (*tp)
2739 || (TREE_CODE (*tp) == SAVE_EXPR && data == NULL))
2740 *walk_subtrees = 0;
2741 return NULL_TREE;
2742}
2743
2744/* Rewrite selected operations into unsigned arithmetics, so that they
2745 don't trap on overflow. */
2746
2747static tree
2748replace_trapping_overflow (tree *tp, int *walk_subtrees, void *data)
2749{
2750 if (find_trapping_overflow (tp, walk_subtrees, data))
2751 {
2752 tree type = TREE_TYPE (*tp);
2753 tree utype = unsigned_type_for (type);
2754 *walk_subtrees = 0;
2755 int len = TREE_OPERAND_LENGTH (*tp);
2756 for (int i = 0; i < len; ++i)
2757 walk_tree (&TREE_OPERAND (*tp, i), replace_trapping_overflow,
2758 data, (hash_set<tree> *) data);
2759
2760 if (TREE_CODE (*tp) == ABS_EXPR)
2761 {
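	  /* ABS_EXPR is rewritten to ABSU_EXPR, which takes the original
	     signed operand but produces an unsigned result, so only the
	     result needs converting back to the original type.  */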
1a71493d
RB
2762 TREE_SET_CODE (*tp, ABSU_EXPR);
2763 TREE_TYPE (*tp) = utype;
2764 *tp = fold_convert (type, *tp);
31b6733b
JJ
2765 }
2766 else
2767 {
2768 TREE_TYPE (*tp) = utype;
2769 len = TREE_OPERAND_LENGTH (*tp);
2770 for (int i = 0; i < len; ++i)
2771 TREE_OPERAND (*tp, i)
2772 = fold_convert (utype, TREE_OPERAND (*tp, i));
2773 *tp = fold_convert (type, *tp);
2774 }
2775 }
2776 return NULL_TREE;
2777}
2778
2779/* If any subexpression of EXPR can trap due to -ftrapv, rewrite it
2780 using unsigned arithmetics to avoid traps in it. */
2781
2782tree
2783rewrite_to_non_trapping_overflow (tree expr)
2784{
2785 if (!flag_trapv)
2786 return expr;
2787 hash_set<tree> pset;
2788 if (!walk_tree (&expr, find_trapping_overflow, &pset, &pset))
2789 return expr;
2790 expr = unshare_expr (expr);
2791 pset.empty ();
2792 walk_tree (&expr, replace_trapping_overflow, &pset, &pset);
2793 return expr;
2794}
1eaba2f2 2795
726a989a
RB
2796/* Helper for stmt_could_throw_p. Return true if STMT (assumed to be
2797 an assignment or a conditional) may throw. */
1eaba2f2 2798
726a989a 2799static bool
12c4f7dc 2800stmt_could_throw_1_p (gassign *stmt)
726a989a 2801{
12c4f7dc 2802 enum tree_code code = gimple_assign_rhs_code (stmt);
726a989a
RB
2803 bool honor_nans = false;
2804 bool honor_snans = false;
2805 bool fp_operation = false;
2806 bool honor_trapv = false;
2807 tree t;
2808 size_t i;
2809 bool handled, ret;
9675412f 2810
726a989a
RB
2811 if (TREE_CODE_CLASS (code) == tcc_comparison
2812 || TREE_CODE_CLASS (code) == tcc_unary
c566cc9f 2813 || TREE_CODE_CLASS (code) == tcc_binary)
726a989a 2814 {
12c4f7dc 2815 if (TREE_CODE_CLASS (code) == tcc_comparison)
dd46054a 2816 t = TREE_TYPE (gimple_assign_rhs1 (stmt));
dd46054a
RG
2817 else
2818 t = gimple_expr_type (stmt);
726a989a
RB
2819 fp_operation = FLOAT_TYPE_P (t);
2820 if (fp_operation)
2821 {
2822 honor_nans = flag_trapping_math && !flag_finite_math_only;
2823 honor_snans = flag_signaling_nans != 0;
2824 }
2825 else if (INTEGRAL_TYPE_P (t) && TYPE_OVERFLOW_TRAPS (t))
2826 honor_trapv = true;
2827 }
2828
12c4f7dc
RB
2829 /* First check the LHS. */
2830 if (tree_could_trap_p (gimple_assign_lhs (stmt)))
2831 return true;
2832
726a989a 2833 /* Check if the main expression may trap. */
726a989a 2834 ret = operation_could_trap_helper_p (code, fp_operation, honor_trapv,
12c4f7dc
RB
2835 honor_nans, honor_snans,
2836 gimple_assign_rhs2 (stmt),
726a989a
RB
2837 &handled);
2838 if (handled)
2839 return ret;
2840
2841 /* If the expression does not trap, see if any of the individual operands may
2842 trap. */
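  /* Operand 0 is the LHS, which was already checked above, so start at 1.  */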
12c4f7dc 2843 for (i = 1; i < gimple_num_ops (stmt); i++)
726a989a
RB
2844 if (tree_could_trap_p (gimple_op (stmt, i)))
2845 return true;
2846
2847 return false;
2848}
2849
2850
36bbc05d 2851/* Return true if statement STMT within FUN could throw an exception. */
726a989a
RB
2852
2853bool
36bbc05d 2854stmt_could_throw_p (function *fun, gimple *stmt)
726a989a 2855{
726a989a
RB
2856 if (!flag_exceptions)
2857 return false;
2858
2859 /* The only statements that can throw an exception are assignments,
1d65f45c
RH
2860 conditionals, calls, resx, and asms. */
2861 switch (gimple_code (stmt))
2862 {
2863 case GIMPLE_RESX:
2864 return true;
726a989a 2865
1d65f45c 2866 case GIMPLE_CALL:
538dd0b7 2867 return !gimple_call_nothrow_p (as_a <gcall *> (stmt));
726a989a 2868
1d65f45c 2869 case GIMPLE_COND:
12c4f7dc 2870 {
36bbc05d 2871 if (fun && !fun->can_throw_non_call_exceptions)
12c4f7dc
RB
2872 return false;
2873 gcond *cond = as_a <gcond *> (stmt);
2874 tree lhs = gimple_cond_lhs (cond);
2875 return operation_could_trap_p (gimple_cond_code (cond),
2876 FLOAT_TYPE_P (TREE_TYPE (lhs)),
2877 false, NULL_TREE);
2878 }
2879
2880 case GIMPLE_ASSIGN:
36bbc05d 2881 if ((fun && !fun->can_throw_non_call_exceptions)
12c4f7dc 2882 || gimple_clobber_p (stmt))
1d65f45c 2883 return false;
12c4f7dc 2884 return stmt_could_throw_1_p (as_a <gassign *> (stmt));
726a989a 2885
1d65f45c 2886 case GIMPLE_ASM:
36bbc05d 2887 if (fun && !fun->can_throw_non_call_exceptions)
1d65f45c 2888 return false;
538dd0b7 2889 return gimple_asm_volatile_p (as_a <gasm *> (stmt));
1d65f45c
RH
2890
2891 default:
2892 return false;
2893 }
6de9cd9a
DN
2894}
2895
726a989a
RB
2896
2897/* Return true if expression T could throw an exception. */
2898
6de9cd9a
DN
2899bool
2900tree_could_throw_p (tree t)
2901{
2902 if (!flag_exceptions)
2903 return false;
726a989a 2904 if (TREE_CODE (t) == MODIFY_EXPR)
6de9cd9a 2905 {
8f4f502f 2906 if (cfun->can_throw_non_call_exceptions
1d65f45c
RH
2907 && tree_could_trap_p (TREE_OPERAND (t, 0)))
2908 return true;
726a989a 2909 t = TREE_OPERAND (t, 1);
6de9cd9a
DN
2910 }
2911
d25cee4d
RH
2912 if (TREE_CODE (t) == WITH_SIZE_EXPR)
2913 t = TREE_OPERAND (t, 0);
6de9cd9a
DN
2914 if (TREE_CODE (t) == CALL_EXPR)
2915 return (call_expr_flags (t) & ECF_NOTHROW) == 0;
8f4f502f 2916 if (cfun->can_throw_non_call_exceptions)
67c605a5 2917 return tree_could_trap_p (t);
6de9cd9a
DN
2918 return false;
2919}
2920
36bbc05d
MJ
2921/* Return true if STMT can throw an exception that is not caught within its
2922 function FUN. FUN may be NULL, in which case the answer is extra
2923 conservative. */
33977f81
JH
2924
2925bool
36bbc05d 2926stmt_can_throw_external (function *fun, gimple *stmt)
33977f81 2927{
1d65f45c 2928 int lp_nr;
33977f81 2929
36bbc05d 2930 if (!stmt_could_throw_p (fun, stmt))
33977f81 2931 return false;
36bbc05d
MJ
2932 if (!fun)
2933 return true;
33977f81 2934
36bbc05d 2935 lp_nr = lookup_stmt_eh_lp_fn (fun, stmt);
1d65f45c 2936 return lp_nr == 0;
33977f81 2937}
726a989a 2938
36bbc05d
MJ
2939/* Return true if STMT can throw an exception that is caught within its
2940 function FUN. */
726a989a 2941
6de9cd9a 2942bool
36bbc05d 2943stmt_can_throw_internal (function *fun, gimple *stmt)
6de9cd9a 2944{
1d65f45c 2945 int lp_nr;
726a989a 2946
36bbc05d
MJ
2947 gcc_checking_assert (fun);
2948 if (!stmt_could_throw_p (fun, stmt))
6de9cd9a 2949 return false;
726a989a 2950
36bbc05d 2951 lp_nr = lookup_stmt_eh_lp_fn (fun, stmt);
1d65f45c
RH
2952 return lp_nr > 0;
2953}
2954
2955/* Given a statement STMT in IFUN, if STMT can no longer throw, then
2956 remove any entry it might have from the EH table. Return true if
2957 any change was made. */
2958
2959bool
355fe088 2960maybe_clean_eh_stmt_fn (struct function *ifun, gimple *stmt)
1d65f45c 2961{
36bbc05d 2962 if (stmt_could_throw_p (ifun, stmt))
1d65f45c
RH
2963 return false;
2964 return remove_stmt_from_eh_lp_fn (ifun, stmt);
6de9cd9a
DN
2965}
2966
1d65f45c
RH
2967/* Likewise, but always use the current function. */
2968
2969bool
355fe088 2970maybe_clean_eh_stmt (gimple *stmt)
1d65f45c
RH
2971{
2972 return maybe_clean_eh_stmt_fn (cfun, stmt);
2973}
6de9cd9a 2974
af47810a
RH
2975/* Given a statement OLD_STMT and a new statement NEW_STMT that has replaced
2976 OLD_STMT in the function, remove OLD_STMT from the EH table and put NEW_STMT
2977 in the table if it should be in there. Return TRUE if a replacement was
2978 done that may require an EH edge purge. */
2979
1d65f45c 2980bool
355fe088 2981maybe_clean_or_replace_eh_stmt (gimple *old_stmt, gimple *new_stmt)
1eaba2f2 2982{
1d65f45c 2983 int lp_nr = lookup_stmt_eh_lp (old_stmt);
af47810a 2984
1d65f45c 2985 if (lp_nr != 0)
af47810a 2986 {
36bbc05d 2987 bool new_stmt_could_throw = stmt_could_throw_p (cfun, new_stmt);
af47810a
RH
2988
2989 if (new_stmt == old_stmt && new_stmt_could_throw)
2990 return false;
2991
1d65f45c 2992 remove_stmt_from_eh_lp (old_stmt);
af47810a
RH
2993 if (new_stmt_could_throw)
2994 {
1d65f45c 2995 add_stmt_to_eh_lp (new_stmt, lp_nr);
af47810a
RH
2996 return false;
2997 }
2998 else
2999 return true;
3000 }
3001
1eaba2f2
RH
3002 return false;
3003}
1d65f45c 3004
073a8998 3005/* Given a statement OLD_STMT in OLD_FUN and a duplicate statement NEW_STMT
1d65f45c
RH
3006 in NEW_FUN, copy the EH table data from OLD_STMT to NEW_STMT. The MAP
3007 operand is the return value of duplicate_eh_regions. */
3008
3009bool
355fe088
TS
3010maybe_duplicate_eh_stmt_fn (struct function *new_fun, gimple *new_stmt,
3011 struct function *old_fun, gimple *old_stmt,
b787e7a2
TS
3012 hash_map<void *, void *> *map,
3013 int default_lp_nr)
1d65f45c
RH
3014{
3015 int old_lp_nr, new_lp_nr;
1d65f45c 3016
36bbc05d 3017 if (!stmt_could_throw_p (new_fun, new_stmt))
1d65f45c
RH
3018 return false;
3019
3020 old_lp_nr = lookup_stmt_eh_lp_fn (old_fun, old_stmt);
3021 if (old_lp_nr == 0)
3022 {
3023 if (default_lp_nr == 0)
3024 return false;
3025 new_lp_nr = default_lp_nr;
3026 }
3027 else if (old_lp_nr > 0)
3028 {
3029 eh_landing_pad old_lp, new_lp;
3030
9771b263 3031 old_lp = (*old_fun->eh->lp_array)[old_lp_nr];
b787e7a2 3032 new_lp = static_cast<eh_landing_pad> (*map->get (old_lp));
1d65f45c
RH
3033 new_lp_nr = new_lp->index;
3034 }
3035 else
3036 {
3037 eh_region old_r, new_r;
3038
9771b263 3039 old_r = (*old_fun->eh->region_array)[-old_lp_nr];
b787e7a2 3040 new_r = static_cast<eh_region> (*map->get (old_r));
1d65f45c
RH
3041 new_lp_nr = -new_r->index;
3042 }
3043
3044 add_stmt_to_eh_lp_fn (new_fun, new_stmt, new_lp_nr);
3045 return true;
3046}
3047
3048/* Similar, but both OLD_STMT and NEW_STMT are within the current function,
3049 and thus no remapping is required. */
3050
3051bool
355fe088 3052maybe_duplicate_eh_stmt (gimple *new_stmt, gimple *old_stmt)
1d65f45c
RH
3053{
3054 int lp_nr;
3055
36bbc05d 3056 if (!stmt_could_throw_p (cfun, new_stmt))
1d65f45c
RH
3057 return false;
3058
3059 lp_nr = lookup_stmt_eh_lp (old_stmt);
3060 if (lp_nr == 0)
3061 return false;
3062
3063 add_stmt_to_eh_lp (new_stmt, lp_nr);
3064 return true;
3065}
a24549d4 3066\f
726a989a
RB
3067/* Returns TRUE if oneh and twoh are exception handlers (gimple_try_cleanup of
3068 GIMPLE_TRY) that are similar enough to be considered the same. Currently
3069 this only handles handlers consisting of a single call, as that's the
3070 important case for C++: a destructor call for a particular object showing
3071 up in multiple handlers. */
a24549d4
JM
3072
3073static bool
726a989a 3074same_handler_p (gimple_seq oneh, gimple_seq twoh)
a24549d4 3075{
726a989a 3076 gimple_stmt_iterator gsi;
355fe088 3077 gimple *ones, *twos;
726a989a 3078 unsigned int ai;
a24549d4 3079
726a989a
RB
3080 gsi = gsi_start (oneh);
3081 if (!gsi_one_before_end_p (gsi))
a24549d4 3082 return false;
726a989a 3083 ones = gsi_stmt (gsi);
a24549d4 3084
726a989a
RB
3085 gsi = gsi_start (twoh);
3086 if (!gsi_one_before_end_p (gsi))
a24549d4 3087 return false;
726a989a
RB
3088 twos = gsi_stmt (gsi);
3089
3090 if (!is_gimple_call (ones)
3091 || !is_gimple_call (twos)
3092 || gimple_call_lhs (ones)
3093 || gimple_call_lhs (twos)
3094 || gimple_call_chain (ones)
3095 || gimple_call_chain (twos)
25583c4f 3096 || !gimple_call_same_target_p (ones, twos)
726a989a 3097 || gimple_call_num_args (ones) != gimple_call_num_args (twos))
a24549d4
JM
3098 return false;
3099
726a989a
RB
3100 for (ai = 0; ai < gimple_call_num_args (ones); ++ai)
3101 if (!operand_equal_p (gimple_call_arg (ones, ai),
1d65f45c 3102 gimple_call_arg (twos, ai), 0))
a24549d4
JM
3103 return false;
3104
3105 return true;
3106}
3107
3108/* Optimize
3109 try { A() } finally { try { ~B() } catch { ~A() } }
3110 try { ... } finally { ~A() }
3111 into
3112 try { A() } catch { ~B() }
3113 try { ~B() ... } finally { ~A() }
3114
3115 This occurs frequently in C++, where A is a local variable and B is a
3116 temporary used in the initializer for A. */
3117
3118static void
538dd0b7 3119optimize_double_finally (gtry *one, gtry *two)
a24549d4 3120{
355fe088 3121 gimple *oneh;
726a989a 3122 gimple_stmt_iterator gsi;
355a7673 3123 gimple_seq cleanup;
a24549d4 3124
355a7673
MM
3125 cleanup = gimple_try_cleanup (one);
3126 gsi = gsi_start (cleanup);
726a989a 3127 if (!gsi_one_before_end_p (gsi))
a24549d4
JM
3128 return;
3129
726a989a
RB
3130 oneh = gsi_stmt (gsi);
3131 if (gimple_code (oneh) != GIMPLE_TRY
3132 || gimple_try_kind (oneh) != GIMPLE_TRY_CATCH)
a24549d4
JM
3133 return;
3134
726a989a 3135 if (same_handler_p (gimple_try_cleanup (oneh), gimple_try_cleanup (two)))
a24549d4 3136 {
726a989a 3137 gimple_seq seq = gimple_try_eval (oneh);
a24549d4 3138
726a989a
RB
3139 gimple_try_set_cleanup (one, seq);
3140 gimple_try_set_kind (one, GIMPLE_TRY_CATCH);
3141 seq = copy_gimple_seq_and_replace_locals (seq);
3142 gimple_seq_add_seq (&seq, gimple_try_eval (two));
3143 gimple_try_set_eval (two, seq);
a24549d4
JM
3144 }
3145}
3146
3147/* Perform EH refactoring optimizations that are simpler to do when code
84fbffb2 3148 flow has been lowered but EH structures haven't. */
a24549d4
JM
3149
3150static void
726a989a 3151refactor_eh_r (gimple_seq seq)
a24549d4 3152{
726a989a 3153 gimple_stmt_iterator gsi;
355fe088 3154 gimple *one, *two;
a24549d4 3155
726a989a
RB
3156 one = NULL;
3157 two = NULL;
3158 gsi = gsi_start (seq);
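  /* Walk the sequence keeping a window of two consecutive statements, ONE
     and TWO, so that adjacent GIMPLE_TRY_FINALLY pairs can be handed to
     optimize_double_finally; then recurse into the sub-sequences of ONE.  */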
3159 while (1)
3160 {
3161 one = two;
3162 if (gsi_end_p (gsi))
3163 two = NULL;
3164 else
3165 two = gsi_stmt (gsi);
538dd0b7
DM
3166 if (one && two)
3167 if (gtry *try_one = dyn_cast <gtry *> (one))
3168 if (gtry *try_two = dyn_cast <gtry *> (two))
3169 if (gimple_try_kind (try_one) == GIMPLE_TRY_FINALLY
3170 && gimple_try_kind (try_two) == GIMPLE_TRY_FINALLY)
3171 optimize_double_finally (try_one, try_two);
726a989a
RB
3172 if (one)
3173 switch (gimple_code (one))
a24549d4 3174 {
726a989a
RB
3175 case GIMPLE_TRY:
3176 refactor_eh_r (gimple_try_eval (one));
3177 refactor_eh_r (gimple_try_cleanup (one));
3178 break;
3179 case GIMPLE_CATCH:
538dd0b7 3180 refactor_eh_r (gimple_catch_handler (as_a <gcatch *> (one)));
726a989a
RB
3181 break;
3182 case GIMPLE_EH_FILTER:
3183 refactor_eh_r (gimple_eh_filter_failure (one));
3184 break;
0a35513e 3185 case GIMPLE_EH_ELSE:
538dd0b7
DM
3186 {
3187 geh_else *eh_else_stmt = as_a <geh_else *> (one);
3188 refactor_eh_r (gimple_eh_else_n_body (eh_else_stmt));
3189 refactor_eh_r (gimple_eh_else_e_body (eh_else_stmt));
3190 }
0a35513e 3191 break;
726a989a
RB
3192 default:
3193 break;
a24549d4 3194 }
726a989a
RB
3195 if (two)
3196 gsi_next (&gsi);
3197 else
3198 break;
a24549d4
JM
3199 }
3200}
3201
17795822
TS
3202namespace {
3203
3204const pass_data pass_data_refactor_eh =
a24549d4 3205{
27a4cd48
DM
3206 GIMPLE_PASS, /* type */
3207 "ehopt", /* name */
3208 OPTGROUP_NONE, /* optinfo_flags */
27a4cd48
DM
3209 TV_TREE_EH, /* tv_id */
3210 PROP_gimple_lcf, /* properties_required */
3211 0, /* properties_provided */
3212 0, /* properties_destroyed */
3213 0, /* todo_flags_start */
3214 0, /* todo_flags_finish */
a24549d4 3215};
27a4cd48 3216
17795822 3217class pass_refactor_eh : public gimple_opt_pass
27a4cd48
DM
3218{
3219public:
c3284718
RS
3220 pass_refactor_eh (gcc::context *ctxt)
3221 : gimple_opt_pass (pass_data_refactor_eh, ctxt)
27a4cd48
DM
3222 {}
3223
3224 /* opt_pass methods: */
1a3d085c 3225 virtual bool gate (function *) { return flag_exceptions != 0; }
be55bfe6
TS
3226 virtual unsigned int execute (function *)
3227 {
3228 refactor_eh_r (gimple_body (current_function_decl));
3229 return 0;
3230 }
27a4cd48
DM
3231
3232}; // class pass_refactor_eh
3233
17795822
TS
3234} // anon namespace
3235
27a4cd48
DM
3236gimple_opt_pass *
3237make_pass_refactor_eh (gcc::context *ctxt)
3238{
3239 return new pass_refactor_eh (ctxt);
3240}
1d65f45c
RH
3241\f
3242/* At the end of gimple optimization, we can lower RESX. */
a8da523f 3243
1d65f45c 3244static bool
538dd0b7
DM
3245lower_resx (basic_block bb, gresx *stmt,
3246 hash_map<eh_region, tree> *mnt_map)
a8da523f 3247{
1d65f45c
RH
3248 int lp_nr;
3249 eh_region src_r, dst_r;
3250 gimple_stmt_iterator gsi;
355fe088 3251 gimple *x;
1d65f45c
RH
3252 tree fn, src_nr;
3253 bool ret = false;
a8da523f 3254
1d65f45c
RH
3255 lp_nr = lookup_stmt_eh_lp (stmt);
3256 if (lp_nr != 0)
3257 dst_r = get_eh_region_from_lp_number (lp_nr);
3258 else
3259 dst_r = NULL;
a8da523f 3260
1d65f45c 3261 src_r = get_eh_region_from_number (gimple_resx_region (stmt));
1d65f45c 3262 gsi = gsi_last_bb (bb);
a8da523f 3263
072c87d1
RH
3264 if (src_r == NULL)
3265 {
3266 /* We can wind up with no source region when pass_cleanup_eh shows
3267 that there are no entries into an eh region and deletes it, but
3268 then the block that contains the resx isn't removed. This can
3269 happen without optimization when the switch statement created by
3270 lower_try_finally_switch isn't simplified to remove the eh case.
3271
3272 Resolve this by expanding the resx node to an abort. */
3273
e79983f4 3274 fn = builtin_decl_implicit (BUILT_IN_TRAP);
072c87d1
RH
3275 x = gimple_build_call (fn, 0);
3276 gsi_insert_before (&gsi, x, GSI_SAME_STMT);
3277
3278 while (EDGE_COUNT (bb->succs) > 0)
3279 remove_edge (EDGE_SUCC (bb, 0));
3280 }
3281 else if (dst_r)
1d65f45c
RH
3282 {
3283 /* When we have a destination region, we resolve this by copying
3284 the excptr and filter values into place, and changing the edge
3285 to immediately after the landing pad. */
3286 edge e;
a8da523f 3287
1d65f45c
RH
3288 if (lp_nr < 0)
3289 {
3290 basic_block new_bb;
1d65f45c 3291 tree lab;
496a4ef5 3292
1d65f45c
RH
3293 /* We are resuming into a MUST_NOT_CALL region. Expand a call to
3294 the failure decl into a new block, if needed. */
3295 gcc_assert (dst_r->type == ERT_MUST_NOT_THROW);
a8da523f 3296
b787e7a2 3297 tree *slot = mnt_map->get (dst_r);
1d65f45c
RH
3298 if (slot == NULL)
3299 {
3300 gimple_stmt_iterator gsi2;
a8da523f 3301
1d65f45c 3302 new_bb = create_empty_bb (bb);
e7a74006 3303 new_bb->count = bb->count;
726338f4 3304 add_bb_to_loop (new_bb, bb->loop_father);
1d65f45c
RH
3305 lab = gimple_block_label (new_bb);
3306 gsi2 = gsi_start_bb (new_bb);
a8da523f 3307
1d65f45c
RH
3308 fn = dst_r->u.must_not_throw.failure_decl;
3309 x = gimple_build_call (fn, 0);
3310 gimple_set_location (x, dst_r->u.must_not_throw.failure_loc);
3311 gsi_insert_after (&gsi2, x, GSI_CONTINUE_LINKING);
4e6d1743 3312
b787e7a2 3313 mnt_map->put (dst_r, lab);
1d65f45c
RH
3314 }
3315 else
3316 {
b787e7a2 3317 lab = *slot;
61ff5d6f 3318 new_bb = label_to_block (cfun, lab);
1d65f45c 3319 }
a8da523f 3320
1d65f45c 3321 gcc_assert (EDGE_COUNT (bb->succs) == 0);
357067f2 3322 e = make_single_succ_edge (bb, new_bb, EDGE_FALLTHRU);
1d65f45c
RH
3323 }
3324 else
3325 {
3326 edge_iterator ei;
413581ba 3327 tree dst_nr = build_int_cst (integer_type_node, dst_r->index);
a8da523f 3328
e79983f4 3329 fn = builtin_decl_implicit (BUILT_IN_EH_COPY_VALUES);
413581ba 3330 src_nr = build_int_cst (integer_type_node, src_r->index);
1d65f45c
RH
3331 x = gimple_build_call (fn, 2, dst_nr, src_nr);
3332 gsi_insert_before (&gsi, x, GSI_SAME_STMT);
a8da523f 3333
1d65f45c
RH
3334 /* Update the flags for the outgoing edge. */
3335 e = single_succ_edge (bb);
3336 gcc_assert (e->flags & EDGE_EH);
3337 e->flags = (e->flags & ~EDGE_EH) | EDGE_FALLTHRU;
357067f2 3338 e->probability = profile_probability::always ();
a8da523f 3339
1d65f45c
RH
3340 /* If there are no more EH users of the landing pad, delete it. */
3341 FOR_EACH_EDGE (e, ei, e->dest->preds)
3342 if (e->flags & EDGE_EH)
3343 break;
3344 if (e == NULL)
3345 {
3346 eh_landing_pad lp = get_eh_landing_pad_from_number (lp_nr);
3347 remove_eh_landing_pad (lp);
3348 }
3349 }
a8da523f 3350
1d65f45c
RH
3351 ret = true;
3352 }
3353 else
3354 {
3355 tree var;
a8da523f 3356
1d65f45c
RH
3357 /* When we don't have a destination region, this exception escapes
3358 up the call chain. We resolve this by generating a call to the
3359 _Unwind_Resume library function. */
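      /* Schematically, for a resx from region 1 with no destination in
	 this function:

	     resx 1;
	 becomes
	     ptr_1 = __builtin_eh_pointer (1);
	     __builtin_unwind_resume (ptr_1);  */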
a8da523f 3360
384c400a 3361 /* The ARM EABI redefines _Unwind_Resume as __cxa_end_cleanup
ed5cd5bc 3362 with no arguments for C++. Check for that. */
384c400a
RH
3363 if (src_r->use_cxa_end_cleanup)
3364 {
e79983f4 3365 fn = builtin_decl_implicit (BUILT_IN_CXA_END_CLEANUP);
384c400a
RH
3366 x = gimple_build_call (fn, 0);
3367 gsi_insert_before (&gsi, x, GSI_SAME_STMT);
3368 }
3369 else
4e6d1743 3370 {
e79983f4 3371 fn = builtin_decl_implicit (BUILT_IN_EH_POINTER);
413581ba 3372 src_nr = build_int_cst (integer_type_node, src_r->index);
1d65f45c 3373 x = gimple_build_call (fn, 1, src_nr);
b731b390 3374 var = create_tmp_var (ptr_type_node);
1d65f45c
RH
3375 var = make_ssa_name (var, x);
3376 gimple_call_set_lhs (x, var);
3377 gsi_insert_before (&gsi, x, GSI_SAME_STMT);
3378
c497284c
ML
3379 /* When exception handling is delegated to a caller function, we
3380 have to guarantee that shadow memory variables living on the stack
3381 will be cleaned up before control is given to a parent function. */
3382 if (sanitize_flags_p (SANITIZE_ADDRESS))
3383 {
3384 tree decl
3385 = builtin_decl_implicit (BUILT_IN_ASAN_HANDLE_NO_RETURN);
3386 gimple *g = gimple_build_call (decl, 0);
3387 gimple_set_location (g, gimple_location (stmt));
3388 gsi_insert_before (&gsi, g, GSI_SAME_STMT);
3389 }
3390
e79983f4 3391 fn = builtin_decl_implicit (BUILT_IN_UNWIND_RESUME);
1d65f45c
RH
3392 x = gimple_build_call (fn, 1, var);
3393 gsi_insert_before (&gsi, x, GSI_SAME_STMT);
4e6d1743 3394 }
a8da523f 3395
1d65f45c 3396 gcc_assert (EDGE_COUNT (bb->succs) == 0);
4e6d1743 3397 }
496a4ef5 3398
1d65f45c
RH
3399 gsi_remove (&gsi, true);
3400
3401 return ret;
4e6d1743
JH
3402}
3403
17795822
TS
3404namespace {
3405
3406const pass_data pass_data_lower_resx =
4e6d1743 3407{
27a4cd48
DM
3408 GIMPLE_PASS, /* type */
3409 "resx", /* name */
3410 OPTGROUP_NONE, /* optinfo_flags */
27a4cd48
DM
3411 TV_TREE_EH, /* tv_id */
3412 PROP_gimple_lcf, /* properties_required */
3413 0, /* properties_provided */
3414 0, /* properties_destroyed */
3415 0, /* todo_flags_start */
3bea341f 3416 0, /* todo_flags_finish */
4e6d1743
JH
3417};
3418
17795822 3419class pass_lower_resx : public gimple_opt_pass
27a4cd48
DM
3420{
3421public:
c3284718
RS
3422 pass_lower_resx (gcc::context *ctxt)
3423 : gimple_opt_pass (pass_data_lower_resx, ctxt)
27a4cd48
DM
3424 {}
3425
3426 /* opt_pass methods: */
1a3d085c 3427 virtual bool gate (function *) { return flag_exceptions != 0; }
be55bfe6 3428 virtual unsigned int execute (function *);
27a4cd48
DM
3429
3430}; // class pass_lower_resx
3431
be55bfe6
TS
3432unsigned
3433pass_lower_resx::execute (function *fun)
3434{
3435 basic_block bb;
be55bfe6
TS
3436 bool dominance_invalidated = false;
3437 bool any_rewritten = false;
3438
b787e7a2 3439 hash_map<eh_region, tree> mnt_map;
be55bfe6
TS
3440
3441 FOR_EACH_BB_FN (bb, fun)
3442 {
355fe088 3443 gimple *last = last_stmt (bb);
be55bfe6
TS
3444 if (last && is_gimple_resx (last))
3445 {
538dd0b7
DM
3446 dominance_invalidated |=
3447 lower_resx (bb, as_a <gresx *> (last), &mnt_map);
be55bfe6
TS
3448 any_rewritten = true;
3449 }
3450 }
3451
be55bfe6
TS
3452 if (dominance_invalidated)
3453 {
3454 free_dominance_info (CDI_DOMINATORS);
3455 free_dominance_info (CDI_POST_DOMINATORS);
3456 }
3457
3458 return any_rewritten ? TODO_update_ssa_only_virtuals : 0;
3459}
3460
17795822
TS
3461} // anon namespace
3462
27a4cd48
DM
3463gimple_opt_pass *
3464make_pass_lower_resx (gcc::context *ctxt)
3465{
3466 return new pass_lower_resx (ctxt);
3467}
3468
960f0c9d
JJ
3469/* Try to optimize var = {v} {CLOBBER} stmts followed just by
3470 external throw. */
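/* Such clobbers are dead: once the exception leaves the function the
   whole frame is gone anyway, so the clobbers and their virtual
   definitions can simply be deleted.  */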
3471
3472static void
3473optimize_clobbers (basic_block bb)
3474{
3475 gimple_stmt_iterator gsi = gsi_last_bb (bb);
f223bb13
JJ
3476 bool any_clobbers = false;
3477 bool seen_stack_restore = false;
3478 edge_iterator ei;
3479 edge e;
3480
3481 /* Only optimize anything if the bb contains at least one clobber,
3482 ends with resx (checked by caller), optionally contains some
3483 debug stmts or labels and at most one __builtin_stack_restore
3484 call, and has an incoming EH edge. */
6d1c2bd3 3485 for (gsi_prev (&gsi); !gsi_end_p (gsi); gsi_prev (&gsi))
960f0c9d 3486 {
355fe088 3487 gimple *stmt = gsi_stmt (gsi);
960f0c9d 3488 if (is_gimple_debug (stmt))
6d1c2bd3 3489 continue;
f223bb13
JJ
3490 if (gimple_clobber_p (stmt))
3491 {
3492 any_clobbers = true;
3493 continue;
3494 }
3495 if (!seen_stack_restore
3496 && gimple_call_builtin_p (stmt, BUILT_IN_STACK_RESTORE))
3497 {
3498 seen_stack_restore = true;
3499 continue;
3500 }
3501 if (gimple_code (stmt) == GIMPLE_LABEL)
3502 break;
3503 return;
3504 }
3505 if (!any_clobbers)
3506 return;
3507 FOR_EACH_EDGE (e, ei, bb->preds)
3508 if (e->flags & EDGE_EH)
3509 break;
3510 if (e == NULL)
3511 return;
3512 gsi = gsi_last_bb (bb);
3513 for (gsi_prev (&gsi); !gsi_end_p (gsi); gsi_prev (&gsi))
3514 {
355fe088 3515 gimple *stmt = gsi_stmt (gsi);
f223bb13
JJ
3516 if (!gimple_clobber_p (stmt))
3517 continue;
960f0c9d
JJ
3518 unlink_stmt_vdef (stmt);
3519 gsi_remove (&gsi, true);
3520 release_defs (stmt);
3521 }
3522}
1d65f45c 3523
ea85edfe
JJ
3524/* Try to sink var = {v} {CLOBBER} stmts followed just by
3525 internal throw to successor BB. */
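/* Unlike optimize_clobbers, the exception is handled within this
   function, so the clobbers still matter; they are merely moved across
   the single EH (forwarder) edge into the successor block, adjusting
   virtual operands as needed, so that BB itself becomes empty.  */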
3526
3527static int
3528sink_clobbers (basic_block bb)
3529{
3530 edge e;
3531 edge_iterator ei;
3532 gimple_stmt_iterator gsi, dgsi;
3533 basic_block succbb;
3534 bool any_clobbers = false;
df35498a 3535 unsigned todo = 0;
ea85edfe
JJ
3536
3537 /* Only optimize if BB has a single EH successor and
3538 all predecessor edges are EH too. */
3539 if (!single_succ_p (bb)
3540 || (single_succ_edge (bb)->flags & EDGE_EH) == 0)
3541 return 0;
3542
3543 FOR_EACH_EDGE (e, ei, bb->preds)
3544 {
3545 if ((e->flags & EDGE_EH) == 0)
3546 return 0;
3547 }
3548
3549 /* And BB contains only CLOBBER stmts before the final
3550 RESX. */
3551 gsi = gsi_last_bb (bb);
3552 for (gsi_prev (&gsi); !gsi_end_p (gsi); gsi_prev (&gsi))
3553 {
355fe088 3554 gimple *stmt = gsi_stmt (gsi);
ea85edfe
JJ
3555 if (is_gimple_debug (stmt))
3556 continue;
3557 if (gimple_code (stmt) == GIMPLE_LABEL)
3558 break;
f223bb13 3559 if (!gimple_clobber_p (stmt))
ea85edfe
JJ
3560 return 0;
3561 any_clobbers = true;
3562 }
3563 if (!any_clobbers)
3564 return 0;
3565
4c1aff1c
RB
3566 edge succe = single_succ_edge (bb);
3567 succbb = succe->dest;
3568
3569 /* See if there is a virtual PHI node to take an updated virtual
3570 operand from. */
538dd0b7 3571 gphi *vphi = NULL;
4c1aff1c 3572 tree vuse = NULL_TREE;
538dd0b7
DM
3573 for (gphi_iterator gpi = gsi_start_phis (succbb);
3574 !gsi_end_p (gpi); gsi_next (&gpi))
4c1aff1c 3575 {
538dd0b7 3576 tree res = gimple_phi_result (gpi.phi ());
4c1aff1c
RB
3577 if (virtual_operand_p (res))
3578 {
538dd0b7 3579 vphi = gpi.phi ();
4c1aff1c
RB
3580 vuse = res;
3581 break;
3582 }
3583 }
3584
ea85edfe
JJ
3585 dgsi = gsi_after_labels (succbb);
3586 gsi = gsi_last_bb (bb);
3587 for (gsi_prev (&gsi); !gsi_end_p (gsi); gsi_prev (&gsi))
3588 {
355fe088 3589 gimple *stmt = gsi_stmt (gsi);
f223bb13 3590 tree lhs;
ea85edfe
JJ
3591 if (is_gimple_debug (stmt))
3592 continue;
3593 if (gimple_code (stmt) == GIMPLE_LABEL)
3594 break;
f223bb13
JJ
3595 lhs = gimple_assign_lhs (stmt);
3596 /* Unfortunately we don't have dominance info updated at this
3597 point, so checking if
3598 dominated_by_p (CDI_DOMINATORS, succbb,
3599 gimple_bb (SSA_NAME_DEF_STMT (TREE_OPERAND (lhs, 0)))
3600 would be too costly. Thus, avoid sinking any clobbers that
3601 refer to non-(D) SSA_NAMEs. */
3602 if (TREE_CODE (lhs) == MEM_REF
3603 && TREE_CODE (TREE_OPERAND (lhs, 0)) == SSA_NAME
3604 && !SSA_NAME_IS_DEFAULT_DEF (TREE_OPERAND (lhs, 0)))
3605 {
4c1aff1c 3606 unlink_stmt_vdef (stmt);
f223bb13
JJ
3607 gsi_remove (&gsi, true);
3608 release_defs (stmt);
3609 continue;
3610 }
4c1aff1c
RB
3611
3612 /* As we do not change stmt order when sinking across a
3613 forwarder edge we can keep virtual operands in place. */
ea85edfe 3614 gsi_remove (&gsi, false);
4c1aff1c
RB
3615 gsi_insert_before (&dgsi, stmt, GSI_NEW_STMT);
3616
3617 /* But adjust virtual operands if we sunk across a PHI node. */
3618 if (vuse)
3619 {
355fe088 3620 gimple *use_stmt;
4c1aff1c
RB
3621 imm_use_iterator iter;
3622 use_operand_p use_p;
3623 FOR_EACH_IMM_USE_STMT (use_stmt, iter, vuse)
3624 FOR_EACH_IMM_USE_ON_STMT (use_p, iter)
3625 SET_USE (use_p, gimple_vdef (stmt));
0a1a83cb
RB
3626 if (SSA_NAME_OCCURS_IN_ABNORMAL_PHI (vuse))
3627 {
3628 SSA_NAME_OCCURS_IN_ABNORMAL_PHI (gimple_vdef (stmt)) = 1;
3629 SSA_NAME_OCCURS_IN_ABNORMAL_PHI (vuse) = 0;
3630 }
4c1aff1c
RB
3631 /* Adjust the incoming virtual operand. */
3632 SET_USE (PHI_ARG_DEF_PTR_FROM_EDGE (vphi, succe), gimple_vuse (stmt));
3633 SET_USE (gimple_vuse_op (stmt), vuse);
3634 }
df35498a
RB
3635 /* If there is more than one predecessor but no virtual PHI node,
3636 arrange for virtual operands to be renamed. */
3637 else if (gimple_vuse_op (stmt) != NULL_USE_OPERAND_P
3638 && !single_pred_p (succbb))
3639 {
3640 /* In this case there will be no use of the VDEF of this stmt.
3641 ??? Unless this is a secondary opportunity and we have not
3642 removed unreachable blocks yet, so we cannot assert this.
3643 Which also means we will end up renaming too many times. */
3644 SET_USE (gimple_vuse_op (stmt), gimple_vop (cfun));
3645 mark_virtual_operands_for_renaming (cfun);
3646 todo |= TODO_update_ssa_only_virtuals;
3647 }
ea85edfe
JJ
3648 }
3649
df35498a 3650 return todo;
ea85edfe
JJ
3651}
3652
9f698956
AB
3653/* At the end of inlining, we can lower EH_DISPATCH. Return true when
3654 we have found some duplicate labels and removed some edges. */
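/* An ERT_TRY dispatch is lowered to a switch on __builtin_eh_filter,
   with one case label per catch handler; an ERT_ALLOWED_EXCEPTIONS
   dispatch becomes a conditional branch comparing the filter value
   against the region's allowed filter.  */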
4e6d1743 3655
9f698956 3656static bool
538dd0b7 3657lower_eh_dispatch (basic_block src, geh_dispatch *stmt)
4e6d1743 3658{
1d65f45c
RH
3659 gimple_stmt_iterator gsi;
3660 int region_nr;
3661 eh_region r;
3662 tree filter, fn;
355fe088 3663 gimple *x;
9f698956 3664 bool redirected = false;
4e6d1743 3665
1d65f45c
RH
3666 region_nr = gimple_eh_dispatch_region (stmt);
3667 r = get_eh_region_from_number (region_nr);
4e6d1743 3668
1d65f45c 3669 gsi = gsi_last_bb (src);
4e6d1743 3670
1d65f45c 3671 switch (r->type)
4e6d1743 3672 {
1d65f45c
RH
3673 case ERT_TRY:
3674 {
ef062b13 3675 auto_vec<tree> labels;
1d65f45c
RH
3676 tree default_label = NULL;
3677 eh_catch c;
3678 edge_iterator ei;
3679 edge e;
6e2830c3 3680 hash_set<tree> seen_values;
1d65f45c
RH
3681
3682 /* Collect the labels for a switch. Zero the post_landing_pad
3683 field because we'll no longer have anything keeping these labels
073a8998 3684 in existence and the optimizer will be free to merge these
1d65f45c
RH
3685 blocks at will. */
3686 for (c = r->u.eh_try.first_catch; c ; c = c->next_catch)
3687 {
3688 tree tp_node, flt_node, lab = c->label;
9f698956 3689 bool have_label = false;
4e6d1743 3690
1d65f45c
RH
3691 c->label = NULL;
3692 tp_node = c->type_list;
3693 flt_node = c->filter_list;
3694
3695 if (tp_node == NULL)
3696 {
3697 default_label = lab;
3698 break;
3699 }
3700 do
3701 {
9f698956
AB
3702 /* Filter out duplicate labels that arise when this handler
3703 is shadowed by an earlier one. When no labels are
3704 attached to the handler anymore, we remove
3705 the corresponding edge and then we delete unreachable
3706 blocks at the end of this pass. */
6e2830c3 3707 if (! seen_values.contains (TREE_VALUE (flt_node)))
9f698956 3708 {
3d528853
NF
3709 tree t = build_case_label (TREE_VALUE (flt_node),
3710 NULL, lab);
9771b263 3711 labels.safe_push (t);
6e2830c3 3712 seen_values.add (TREE_VALUE (flt_node));
9f698956
AB
3713 have_label = true;
3714 }
1d65f45c
RH
3715
3716 tp_node = TREE_CHAIN (tp_node);
3717 flt_node = TREE_CHAIN (flt_node);
3718 }
3719 while (tp_node);
9f698956
AB
3720 if (! have_label)
3721 {
61ff5d6f 3722 remove_edge (find_edge (src, label_to_block (cfun, lab)));
9f698956
AB
3723 redirected = true;
3724 }
1d65f45c
RH
3725 }
3726
3727 /* Clean up the edge flags. */
3728 FOR_EACH_EDGE (e, ei, src->succs)
3729 {
3730 if (e->flags & EDGE_FALLTHRU)
3731 {
3732 /* If there was no catch-all, use the fallthru edge. */
3733 if (default_label == NULL)
3734 default_label = gimple_block_label (e->dest);
3735 e->flags &= ~EDGE_FALLTHRU;
3736 }
3737 }
3738 gcc_assert (default_label != NULL);
3739
3740 /* Don't generate a switch if there's only a default case.
3741 This is common in the form of try { A; } catch (...) { B; }. */
9771b263 3742 if (!labels.exists ())
1d65f45c
RH
3743 {
3744 e = single_succ_edge (src);
3745 e->flags |= EDGE_FALLTHRU;
3746 }
3747 else
3748 {
e79983f4 3749 fn = builtin_decl_implicit (BUILT_IN_EH_FILTER);
413581ba
RG
3750 x = gimple_build_call (fn, 1, build_int_cst (integer_type_node,
3751 region_nr));
b731b390 3752 filter = create_tmp_var (TREE_TYPE (TREE_TYPE (fn)));
1d65f45c
RH
3753 filter = make_ssa_name (filter, x);
3754 gimple_call_set_lhs (x, filter);
3755 gsi_insert_before (&gsi, x, GSI_SAME_STMT);
3756
3757 /* Turn the default label into a default case. */
3d528853 3758 default_label = build_case_label (NULL, NULL, default_label);
1d65f45c
RH
3759 sort_case_labels (labels);
3760
fd8d363e 3761 x = gimple_build_switch (filter, default_label, labels);
1d65f45c 3762 gsi_insert_before (&gsi, x, GSI_SAME_STMT);
1d65f45c
RH
3763 }
3764 }
3765 break;
3766
3767 case ERT_ALLOWED_EXCEPTIONS:
3768 {
3769 edge b_e = BRANCH_EDGE (src);
3770 edge f_e = FALLTHRU_EDGE (src);
3771
e79983f4 3772 fn = builtin_decl_implicit (BUILT_IN_EH_FILTER);
413581ba
RG
3773 x = gimple_build_call (fn, 1, build_int_cst (integer_type_node,
3774 region_nr));
b731b390 3775 filter = create_tmp_var (TREE_TYPE (TREE_TYPE (fn)));
1d65f45c
RH
3776 filter = make_ssa_name (filter, x);
3777 gimple_call_set_lhs (x, filter);
3778 gsi_insert_before (&gsi, x, GSI_SAME_STMT);
3779
3780 r->u.allowed.label = NULL;
3781 x = gimple_build_cond (EQ_EXPR, filter,
3782 build_int_cst (TREE_TYPE (filter),
3783 r->u.allowed.filter),
3784 NULL_TREE, NULL_TREE);
3785 gsi_insert_before (&gsi, x, GSI_SAME_STMT);
3786
3787 b_e->flags = b_e->flags | EDGE_TRUE_VALUE;
3788 f_e->flags = (f_e->flags & ~EDGE_FALLTHRU) | EDGE_FALSE_VALUE;
3789 }
3790 break;
3791
3792 default:
3793 gcc_unreachable ();
4e6d1743 3794 }
1d65f45c
RH
3795
3796 /* Replace the EH_DISPATCH with the SWITCH or COND generated above. */
3797 gsi_remove (&gsi, true);
9f698956 3798 return redirected;
4e6d1743
JH
3799}
3800
17795822
TS
3801namespace {
3802
3803const pass_data pass_data_lower_eh_dispatch =
be55bfe6
TS
3804{
3805 GIMPLE_PASS, /* type */
3806 "ehdisp", /* name */
3807 OPTGROUP_NONE, /* optinfo_flags */
be55bfe6
TS
3808 TV_TREE_EH, /* tv_id */
3809 PROP_gimple_lcf, /* properties_required */
3810 0, /* properties_provided */
3811 0, /* properties_destroyed */
3812 0, /* todo_flags_start */
3bea341f 3813 0, /* todo_flags_finish */
be55bfe6
TS
3814};
3815
17795822 3816class pass_lower_eh_dispatch : public gimple_opt_pass
be55bfe6
TS
3817{
3818public:
3819 pass_lower_eh_dispatch (gcc::context *ctxt)
3820 : gimple_opt_pass (pass_data_lower_eh_dispatch, ctxt)
3821 {}
3822
3823 /* opt_pass methods: */
3824 virtual bool gate (function *fun) { return fun->eh->region_tree != NULL; }
3825 virtual unsigned int execute (function *);
3826
3827}; // class pass_lower_eh_dispatch
3828
3829unsigned
3830pass_lower_eh_dispatch::execute (function *fun)
1d65f45c
RH
3831{
3832 basic_block bb;
ea85edfe 3833 int flags = 0;
9f698956 3834 bool redirected = false;
4e6d1743 3835
1d65f45c 3836 assign_filter_values ();
496a4ef5 3837
be55bfe6 3838 FOR_EACH_BB_FN (bb, fun)
1d65f45c 3839 {
355fe088 3840 gimple *last = last_stmt (bb);
960f0c9d
JJ
3841 if (last == NULL)
3842 continue;
3843 if (gimple_code (last) == GIMPLE_EH_DISPATCH)
1d65f45c 3844 {
538dd0b7
DM
3845 redirected |= lower_eh_dispatch (bb,
3846 as_a <geh_dispatch *> (last));
ea85edfe
JJ
3847 flags |= TODO_update_ssa_only_virtuals;
3848 }
3849 else if (gimple_code (last) == GIMPLE_RESX)
3850 {
36bbc05d 3851 if (stmt_can_throw_external (cfun, last))
ea85edfe
JJ
3852 optimize_clobbers (bb);
3853 else
3854 flags |= sink_clobbers (bb);
1d65f45c
RH
3855 }
3856 }
3857
9f698956 3858 if (redirected)
830717d0
RB
3859 {
3860 free_dominance_info (CDI_DOMINATORS);
3861 delete_unreachable_blocks ();
3862 }
ea85edfe 3863 return flags;
1d65f45c
RH
3864}
3865
17795822
TS
3866} // anon namespace
3867
27a4cd48
DM
3868gimple_opt_pass *
3869make_pass_lower_eh_dispatch (gcc::context *ctxt)
3870{
3871 return new pass_lower_eh_dispatch (ctxt);
3872}
1d65f45c 3873\f
d273b176
SB
3874/* Walk statements, see what regions and, optionally, landing pads
3875 are really referenced.
3876
3877 Returns in R_REACHABLEP an sbitmap with bits set for reachable regions,
3878 and in LP_REACHABLE an sbitmap with bits set for reachable landing pads.
3879
3880 Passing NULL for LP_REACHABLE is valid; in this case only reachable
3881 regions are marked.
3882
3883 The caller is responsible for freeing the returned sbitmaps. */
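/* Both remove_unreachable_handlers and remove_unreachable_handlers_no_lp
   below are built on this walk; the former prunes regions and landing
   pads, the latter only regions.  */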
1d65f45c
RH
3884
3885static void
d273b176 3886mark_reachable_handlers (sbitmap *r_reachablep, sbitmap *lp_reachablep)
1d65f45c
RH
3887{
3888 sbitmap r_reachable, lp_reachable;
1d65f45c 3889 basic_block bb;
d273b176
SB
3890 bool mark_landing_pads = (lp_reachablep != NULL);
3891 gcc_checking_assert (r_reachablep != NULL);
4e6d1743 3892
9771b263 3893 r_reachable = sbitmap_alloc (cfun->eh->region_array->length ());
f61e445a 3894 bitmap_clear (r_reachable);
d273b176
SB
3895 *r_reachablep = r_reachable;
3896
3897 if (mark_landing_pads)
3898 {
3899 lp_reachable = sbitmap_alloc (cfun->eh->lp_array->length ());
3900 bitmap_clear (lp_reachable);
3901 *lp_reachablep = lp_reachable;
3902 }
3903 else
3904 lp_reachable = NULL;
4e6d1743 3905
11cd3bed 3906 FOR_EACH_BB_FN (bb, cfun)
4e6d1743 3907 {
57f93411 3908 gimple_stmt_iterator gsi;
1d65f45c
RH
3909
3910 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
3911 {
355fe088 3912 gimple *stmt = gsi_stmt (gsi);
1d65f45c 3913
d273b176 3914 if (mark_landing_pads)
1d65f45c 3915 {
d273b176
SB
3916 int lp_nr = lookup_stmt_eh_lp (stmt);
3917
3918 /* Negative LP numbers are MUST_NOT_THROW regions which
3919 are not considered BB enders. */
3920 if (lp_nr < 0)
3921 bitmap_set_bit (r_reachable, -lp_nr);
3922
3923 /* Positive LP numbers are real landing pads, and BB enders. */
3924 else if (lp_nr > 0)
3925 {
3926 gcc_assert (gsi_one_before_end_p (gsi));
3927 eh_region region = get_eh_region_from_lp_number (lp_nr);
3928 bitmap_set_bit (r_reachable, region->index);
3929 bitmap_set_bit (lp_reachable, lp_nr);
3930 }
1d65f45c 3931 }
6ae70ea2
JJ
3932
3933 /* Avoid removing regions referenced from RESX/EH_DISPATCH. */
3934 switch (gimple_code (stmt))
3935 {
3936 case GIMPLE_RESX:
538dd0b7
DM
3937 bitmap_set_bit (r_reachable,
3938 gimple_resx_region (as_a <gresx *> (stmt)));
6ae70ea2
JJ
3939 break;
3940 case GIMPLE_EH_DISPATCH:
538dd0b7
DM
3941 bitmap_set_bit (r_reachable,
3942 gimple_eh_dispatch_region (
3943 as_a <geh_dispatch *> (stmt)));
6ae70ea2 3944 break;
5b1441be
RH
3945 case GIMPLE_CALL:
3946 if (gimple_call_builtin_p (stmt, BUILT_IN_EH_COPY_VALUES))
3947 for (int i = 0; i < 2; ++i)
3948 {
3949 tree rt = gimple_call_arg (stmt, i);
3950 HOST_WIDE_INT ri = tree_to_shwi (rt);
3951
3754c8fe 3952 gcc_assert (ri == (int)ri);
5b1441be
RH
3953 bitmap_set_bit (r_reachable, ri);
3954 }
3955 break;
6ae70ea2
JJ
3956 default:
3957 break;
3958 }
1d65f45c 3959 }
4e6d1743 3960 }
d273b176
SB
3961}
3962
3963/* Remove unreachable handlers and unreachable landing pads. */
3964
3965static void
3966remove_unreachable_handlers (void)
3967{
3968 sbitmap r_reachable, lp_reachable;
3969 eh_region region;
3970 eh_landing_pad lp;
3971 unsigned i;
3972
3973 mark_reachable_handlers (&r_reachable, &lp_reachable);
1d65f45c
RH
3974
3975 if (dump_file)
4e6d1743 3976 {
1d65f45c
RH
3977 fprintf (dump_file, "Before removal of unreachable regions:\n");
3978 dump_eh_tree (dump_file, cfun);
3979 fprintf (dump_file, "Reachable regions: ");
f61e445a 3980 dump_bitmap_file (dump_file, r_reachable);
1d65f45c 3981 fprintf (dump_file, "Reachable landing pads: ");
f61e445a 3982 dump_bitmap_file (dump_file, lp_reachable);
4e6d1743
JH
3983 }
3984
d273b176
SB
3985 if (dump_file)
3986 {
3987 FOR_EACH_VEC_SAFE_ELT (cfun->eh->region_array, i, region)
3988 if (region && !bitmap_bit_p (r_reachable, region->index))
3989 fprintf (dump_file,
3990 "Removing unreachable region %d\n",
3991 region->index);
3992 }
3993
3994 remove_unreachable_eh_regions (r_reachable);
4e6d1743 3995
d273b176
SB
3996 FOR_EACH_VEC_SAFE_ELT (cfun->eh->lp_array, i, lp)
3997 if (lp && !bitmap_bit_p (lp_reachable, lp->index))
1d65f45c
RH
3998 {
3999 if (dump_file)
d273b176
SB
4000 fprintf (dump_file,
4001 "Removing unreachable landing pad %d\n",
4002 lp->index);
1d65f45c
RH
4003 remove_eh_landing_pad (lp);
4004 }
b8698a0f 4005
1d65f45c 4006 if (dump_file)
4e6d1743 4007 {
1d65f45c
RH
4008 fprintf (dump_file, "\n\nAfter removal of unreachable regions:\n");
4009 dump_eh_tree (dump_file, cfun);
4010 fprintf (dump_file, "\n\n");
4e6d1743
JH
4011 }
4012
1d65f45c
RH
4013 sbitmap_free (r_reachable);
4014 sbitmap_free (lp_reachable);
4015
b2b29377
MM
4016 if (flag_checking)
4017 verify_eh_tree (cfun);
1d65f45c
RH
4018}
4019
99d8763e
JJ
4020/* Remove unreachable handlers if any landing pads have been removed after
4021 last ehcleanup pass (due to gimple_purge_dead_eh_edges). */
4022
4023void
4024maybe_remove_unreachable_handlers (void)
4025{
4026 eh_landing_pad lp;
d273b176 4027 unsigned i;
99d8763e
JJ
4028
4029 if (cfun->eh == NULL)
4030 return;
d273b176
SB
4031
4032 FOR_EACH_VEC_SAFE_ELT (cfun->eh->lp_array, i, lp)
99d8763e
JJ
4033 if (lp && lp->post_landing_pad)
4034 {
61ff5d6f 4035 if (label_to_block (cfun, lp->post_landing_pad) == NULL)
99d8763e
JJ
4036 {
4037 remove_unreachable_handlers ();
4038 return;
4039 }
4040 }
4041}
4042
1d65f45c
RH
4043/* Remove regions that do not have landing pads. This assumes
4044 that remove_unreachable_handlers has already been run, and
d273b176
SB
4045 that we've just manipulated the landing pads since then.
4046
4047 Preserve regions with landing pads and regions that prevent
4048 exceptions from propagating further, even if these regions
4049 are not reachable. */
1d65f45c
RH
4050
4051static void
4052remove_unreachable_handlers_no_lp (void)
4053{
d273b176 4054 eh_region region;
1a47f99c 4055 sbitmap r_reachable;
d273b176 4056 unsigned i;
1a47f99c 4057
d273b176 4058 mark_reachable_handlers (&r_reachable, /*lp_reachablep=*/NULL);
1a47f99c 4059
d273b176 4060 FOR_EACH_VEC_SAFE_ELT (cfun->eh->region_array, i, region)
1a47f99c 4061 {
d273b176
SB
4062 if (! region)
4063 continue;
4064
4065 if (region->landing_pads != NULL
4066 || region->type == ERT_MUST_NOT_THROW)
4067 bitmap_set_bit (r_reachable, region->index);
4068
4069 if (dump_file
4070 && !bitmap_bit_p (r_reachable, region->index))
4071 fprintf (dump_file,
4072 "Removing unreachable region %d\n",
4073 region->index);
1a47f99c 4074 }
1d65f45c 4075
d273b176 4076 remove_unreachable_eh_regions (r_reachable);
1a47f99c
MM
4077
4078 sbitmap_free (r_reachable);
4e6d1743
JH
4079}
4080
1d65f45c
RH
4081/* Undo critical edge splitting on an EH landing pad. Earlier, we
4082 optimistically split all sorts of edges, including EH edges. The
4083 optimization passes in between may not have needed them; if not,
4084 we should undo the split.
4085
4086 Recognize this case by having one EH edge incoming to the BB and
4087 one normal edge outgoing; BB should be empty apart from the
4088 post_landing_pad label.
4089
4090 Note that this is slightly different from the empty handler case
4091 handled by cleanup_empty_eh, in that the actual handler may yet
4092 have actual code but the landing pad has been separated from the
4093 handler. As such, cleanup_empty_eh relies on this transformation
4094 having been done first. */
a8da523f
JH
4095
4096static bool
1d65f45c 4097unsplit_eh (eh_landing_pad lp)
a8da523f 4098{
61ff5d6f 4099 basic_block bb = label_to_block (cfun, lp->post_landing_pad);
1d65f45c
RH
4100 gimple_stmt_iterator gsi;
4101 edge e_in, e_out;
4102
4103 /* Quickly check the edge counts on BB for singularity. */
f223bb13 4104 if (!single_pred_p (bb) || !single_succ_p (bb))
1d65f45c 4105 return false;
f223bb13
JJ
4106 e_in = single_pred_edge (bb);
4107 e_out = single_succ_edge (bb);
a8da523f 4108
1d65f45c
RH
4109 /* Input edge must be EH and output edge must be normal. */
4110 if ((e_in->flags & EDGE_EH) == 0 || (e_out->flags & EDGE_EH) != 0)
4111 return false;
4112
3333cd50
RG
4113 /* The block must be empty except for the labels and debug insns. */
4114 gsi = gsi_after_labels (bb);
4115 if (!gsi_end_p (gsi) && is_gimple_debug (gsi_stmt (gsi)))
4116 gsi_next_nondebug (&gsi);
4117 if (!gsi_end_p (gsi))
1d65f45c
RH
4118 return false;
4119
4120 /* The destination block must not already have a landing pad
4121 for a different region. */
4122 for (gsi = gsi_start_bb (e_out->dest); !gsi_end_p (gsi); gsi_next (&gsi))
a8da523f 4123 {
538dd0b7 4124 glabel *label_stmt = dyn_cast <glabel *> (gsi_stmt (gsi));
1d65f45c
RH
4125 tree lab;
4126 int lp_nr;
a8da523f 4127
538dd0b7 4128 if (!label_stmt)
1d65f45c 4129 break;
538dd0b7 4130 lab = gimple_label_label (label_stmt);
1d65f45c
RH
4131 lp_nr = EH_LANDING_PAD_NR (lab);
4132 if (lp_nr && get_eh_region_from_lp_number (lp_nr) != lp->region)
4133 return false;
4134 }
a8da523f 4135
f8fd49b5
RH
4136 /* The new destination block must not already be a destination of
4137 the source block, lest we merge fallthru and eh edges and get
4138 all sorts of confusion. */
4139 if (find_edge (e_in->src, e_out->dest))
4140 return false;
4141
d6063d7f
RH
4142 /* ??? We can get degenerate phis due to cfg cleanups. I would have
4143 thought this should have been cleaned up by a phicprop pass, but
4144 that doesn't appear to handle virtuals. Propagate by hand. */
4145 if (!gimple_seq_empty_p (phi_nodes (bb)))
4146 {
538dd0b7 4147 for (gphi_iterator gpi = gsi_start_phis (bb); !gsi_end_p (gpi); )
d6063d7f 4148 {
355fe088 4149 gimple *use_stmt;
538dd0b7 4150 gphi *phi = gpi.phi ();
d6063d7f
RH
4151 tree lhs = gimple_phi_result (phi);
4152 tree rhs = gimple_phi_arg_def (phi, 0);
4153 use_operand_p use_p;
4154 imm_use_iterator iter;
4155
4156 FOR_EACH_IMM_USE_STMT (use_stmt, iter, lhs)
4157 {
4158 FOR_EACH_IMM_USE_ON_STMT (use_p, iter)
4159 SET_USE (use_p, rhs);
4160 }
4161
4162 if (SSA_NAME_OCCURS_IN_ABNORMAL_PHI (lhs))
4163 SSA_NAME_OCCURS_IN_ABNORMAL_PHI (rhs) = 1;
4164
538dd0b7 4165 remove_phi_node (&gpi, true);
d6063d7f
RH
4166 }
4167 }
496a4ef5 4168
1d65f45c
RH
4169 if (dump_file && (dump_flags & TDF_DETAILS))
4170 fprintf (dump_file, "Unsplit EH landing pad %d to block %i.\n",
4171 lp->index, e_out->dest->index);
4172
4173 /* Redirect the edge. Since redirect_eh_edge_1 expects to be moving
4174 a successor edge, humor it. But do the real CFG change with the
4175 predecessor of E_OUT in order to preserve the ordering of arguments
4176 to the PHI nodes in E_OUT->DEST. */
4177 redirect_eh_edge_1 (e_in, e_out->dest, false);
4178 redirect_edge_pred (e_out, e_in->src);
4179 e_out->flags = e_in->flags;
4180 e_out->probability = e_in->probability;
1d65f45c 4181 remove_edge (e_in);
496a4ef5 4182
1d65f45c
RH
4183 return true;
4184}
496a4ef5 4185
1d65f45c 4186/* Examine each landing pad block and see if it matches unsplit_eh. */
496a4ef5 4187
1d65f45c
RH
4188static bool
4189unsplit_all_eh (void)
4190{
4191 bool changed = false;
4192 eh_landing_pad lp;
4193 int i;
496a4ef5 4194
9771b263 4195 for (i = 1; vec_safe_iterate (cfun->eh->lp_array, i, &lp); ++i)
1d65f45c
RH
4196 if (lp)
4197 changed |= unsplit_eh (lp);
4198
4199 return changed;
4200}
4201
4202/* A subroutine of cleanup_empty_eh. Redirect all EH edges incoming
4203 to OLD_BB to NEW_BB; return true on success, false on failure.
4204
4205 OLD_BB_OUT is the edge into NEW_BB from OLD_BB, so if we miss any
4206 PHI variables from OLD_BB we can pick them up from OLD_BB_OUT.
4207 Virtual PHIs may be deleted and marked for renaming. */
4208
4209static bool
4210cleanup_empty_eh_merge_phis (basic_block new_bb, basic_block old_bb,
d6063d7f 4211 edge old_bb_out, bool change_region)
1d65f45c 4212{
538dd0b7 4213 gphi_iterator ngsi, ogsi;
1d65f45c
RH
4214 edge_iterator ei;
4215 edge e;
1d65f45c
RH
4216 bitmap ophi_handled;
4217
336ead04
JJ
4218 /* The destination block must not be a regular successor for any
4219 of the preds of the landing pad. Thus, avoid turning
4220 <..>
4221 | \ EH
4222 | <..>
4223 | /
4224 <..>
4225 into
4226 <..>
4227 | | EH
4228 <..>
4229 which CFG verification would choke on. See PR45172 and PR51089. */
4230 FOR_EACH_EDGE (e, ei, old_bb->preds)
4231 if (find_edge (e->src, new_bb))
4232 return false;
4233
1d65f45c
RH
4234 FOR_EACH_EDGE (e, ei, old_bb->preds)
4235 redirect_edge_var_map_clear (e);
4236
4237 ophi_handled = BITMAP_ALLOC (NULL);
1d65f45c
RH
4238
4239 /* First, iterate through the PHIs on NEW_BB and set up the edge_var_map
4240 for the edges we're going to move. */
4241 for (ngsi = gsi_start_phis (new_bb); !gsi_end_p (ngsi); gsi_next (&ngsi))
4242 {
538dd0b7 4243 gphi *ophi, *nphi = ngsi.phi ();
1d65f45c
RH
4244 tree nresult, nop;
4245
4246 nresult = gimple_phi_result (nphi);
4247 nop = gimple_phi_arg_def (nphi, old_bb_out->dest_idx);
4248
4249 /* Find the corresponding PHI in OLD_BB so we can forward-propagate
4250 the source ssa_name. */
4251 ophi = NULL;
4252 for (ogsi = gsi_start_phis (old_bb); !gsi_end_p (ogsi); gsi_next (&ogsi))
4253 {
538dd0b7 4254 ophi = ogsi.phi ();
1d65f45c
RH
4255 if (gimple_phi_result (ophi) == nop)
4256 break;
4257 ophi = NULL;
a3710436 4258 }
496a4ef5 4259
1d65f45c
RH
4260 /* If we did find the corresponding PHI, copy those inputs. */
4261 if (ophi)
a8da523f 4262 {
3ffe07e1
JJ
4263 /* If NOP is used somewhere else beyond phis in new_bb, give up. */
4264 if (!has_single_use (nop))
4265 {
4266 imm_use_iterator imm_iter;
4267 use_operand_p use_p;
4268
4269 FOR_EACH_IMM_USE_FAST (use_p, imm_iter, nop)
4270 {
4271 if (!gimple_debug_bind_p (USE_STMT (use_p))
4272 && (gimple_code (USE_STMT (use_p)) != GIMPLE_PHI
4273 || gimple_bb (USE_STMT (use_p)) != new_bb))
4274 goto fail;
4275 }
4276 }
1d65f45c
RH
4277 bitmap_set_bit (ophi_handled, SSA_NAME_VERSION (nop));
4278 FOR_EACH_EDGE (e, ei, old_bb->preds)
496a4ef5 4279 {
1d65f45c
RH
4280 location_t oloc;
4281 tree oop;
4282
4283 if ((e->flags & EDGE_EH) == 0)
4284 continue;
4285 oop = gimple_phi_arg_def (ophi, e->dest_idx);
4286 oloc = gimple_phi_arg_location (ophi, e->dest_idx);
9e227d60 4287 redirect_edge_var_map_add (e, nresult, oop, oloc);
496a4ef5 4288 }
1d65f45c 4289 }
d90e76d4 4290 /* If we didn't find the PHI, if it's a real variable or a VOP, we know
1d65f45c
RH
4291 from the fact that OLD_BB is tree_empty_eh_handler_p that the
4292 variable is unchanged from input to the block and we can simply
4293 re-use the input to NEW_BB from the OLD_BB_OUT edge. */
4294 else
4295 {
4296 location_t nloc
4297 = gimple_phi_arg_location (nphi, old_bb_out->dest_idx);
4298 FOR_EACH_EDGE (e, ei, old_bb->preds)
9e227d60 4299 redirect_edge_var_map_add (e, nresult, nop, nloc);
1d65f45c
RH
4300 }
4301 }
4302
4303 /* Second, verify that all PHIs from OLD_BB have been handled. If not,
4304 we don't know what values from the other edges into NEW_BB to use. */
4305 for (ogsi = gsi_start_phis (old_bb); !gsi_end_p (ogsi); gsi_next (&ogsi))
4306 {
538dd0b7 4307 gphi *ophi = ogsi.phi ();
1d65f45c
RH
4308 tree oresult = gimple_phi_result (ophi);
4309 if (!bitmap_bit_p (ophi_handled, SSA_NAME_VERSION (oresult)))
4310 goto fail;
4311 }
4312
1d65f45c
RH
4313 /* Finally, move the edges and update the PHIs. */
4314 for (ei = ei_start (old_bb->preds); (e = ei_safe_edge (ei)); )
4315 if (e->flags & EDGE_EH)
4316 {
efa26eaa
RG
4317 /* ??? CFG manipulation routines do not try to update loop
4318 form on edge redirection. Do so manually here for now. */
4319 /* If we redirect a loop entry or latch edge that will either create
4320 a multiple entry loop or rotate the loop. If the loops merge
4321 we may have created a loop with multiple latches.
4322 All of this isn't easily fixed thus cancel the affected loop
4323 and mark the other loop as possibly having multiple latches. */
726338f4 4324 if (e->dest == e->dest->loop_father->header)
efa26eaa 4325 {
08c13199 4326 mark_loop_for_removal (e->dest->loop_father);
efa26eaa 4327 new_bb->loop_father->latch = NULL;
08c13199 4328 loops_state_set (LOOPS_MAY_HAVE_MULTIPLE_LATCHES);
efa26eaa 4329 }
d6063d7f 4330 redirect_eh_edge_1 (e, new_bb, change_region);
1d65f45c
RH
4331 redirect_edge_succ (e, new_bb);
4332 flush_pending_stmts (e);
4333 }
4334 else
4335 ei_next (&ei);
4e6d1743 4336
1d65f45c 4337 BITMAP_FREE (ophi_handled);
1d65f45c
RH
4338 return true;
4339
4340 fail:
4341 FOR_EACH_EDGE (e, ei, old_bb->preds)
4342 redirect_edge_var_map_clear (e);
4343 BITMAP_FREE (ophi_handled);
1d65f45c
RH
4344 return false;
4345}
4346
4347/* A subroutine of cleanup_empty_eh. Move a landing pad LP from its
4348 old region to NEW_REGION at BB. */
4349
4350static void
4351cleanup_empty_eh_move_lp (basic_block bb, edge e_out,
4352 eh_landing_pad lp, eh_region new_region)
4353{
4354 gimple_stmt_iterator gsi;
4355 eh_landing_pad *pp;
4356
4357 for (pp = &lp->region->landing_pads; *pp != lp; pp = &(*pp)->next_lp)
4358 continue;
4359 *pp = lp->next_lp;
4360
4361 lp->region = new_region;
4362 lp->next_lp = new_region->landing_pads;
4363 new_region->landing_pads = lp;
4364
4365 /* Delete the RESX that was matched within the empty handler block. */
4366 gsi = gsi_last_bb (bb);
3d3f2249 4367 unlink_stmt_vdef (gsi_stmt (gsi));
1d65f45c
RH
4368 gsi_remove (&gsi, true);
4369
4370 /* Clean up E_OUT for the fallthru. */
4371 e_out->flags = (e_out->flags & ~EDGE_EH) | EDGE_FALLTHRU;
357067f2 4372 e_out->probability = profile_probability::always ();
1d65f45c
RH
4373}
4374
4375/* A subroutine of cleanup_empty_eh. Handle more complex cases of
b8698a0f 4376 unsplitting than unsplit_eh was prepared to handle, e.g. when
1d65f45c
RH
4377 multiple incoming edges and phis are involved. */
4378
4379static bool
d6063d7f 4380cleanup_empty_eh_unsplit (basic_block bb, edge e_out, eh_landing_pad lp)
1d65f45c
RH
4381{
4382 gimple_stmt_iterator gsi;
1d65f45c
RH
4383 tree lab;
4384
4385 /* We really ought not have totally lost everything following
4386 a landing pad label. Given that BB is empty, there had better
4387 be a successor. */
4388 gcc_assert (e_out != NULL);
4389
d6063d7f
RH
4390 /* The destination block must not already have a landing pad
4391 for a different region. */
1d65f45c
RH
4392 lab = NULL;
4393 for (gsi = gsi_start_bb (e_out->dest); !gsi_end_p (gsi); gsi_next (&gsi))
4394 {
538dd0b7 4395 glabel *stmt = dyn_cast <glabel *> (gsi_stmt (gsi));
d6063d7f
RH
4396 int lp_nr;
4397
538dd0b7 4398 if (!stmt)
1d65f45c
RH
4399 break;
4400 lab = gimple_label_label (stmt);
d6063d7f
RH
4401 lp_nr = EH_LANDING_PAD_NR (lab);
4402 if (lp_nr && get_eh_region_from_lp_number (lp_nr) != lp->region)
4403 return false;
1d65f45c 4404 }
1d65f45c
RH
4405
4406 /* Attempt to move the PHIs into the successor block. */
d6063d7f 4407 if (cleanup_empty_eh_merge_phis (e_out->dest, bb, e_out, false))
1d65f45c
RH
4408 {
4409 if (dump_file && (dump_flags & TDF_DETAILS))
4410 fprintf (dump_file,
d6063d7f
RH
4411 "Unsplit EH landing pad %d to block %i "
4412 "(via cleanup_empty_eh).\n",
4413 lp->index, e_out->dest->index);
1d65f45c
RH
4414 return true;
4415 }
4416
4417 return false;
4418}
4419
afaaa67d
JJ
4420/* Return true if edge E_FIRST is part of an empty infinite loop
4421 or leads to such a loop through a series of single successor
4422 empty bbs. */
4423
4424static bool
4425infinite_empty_loop_p (edge e_first)
4426{
4427 bool inf_loop = false;
4428 edge e;
4429
4430 if (e_first->dest == e_first->src)
4431 return true;
4432
4433 e_first->src->aux = (void *) 1;
4434 for (e = e_first; single_succ_p (e->dest); e = single_succ_edge (e->dest))
4435 {
4436 gimple_stmt_iterator gsi;
4437 if (e->dest->aux)
4438 {
4439 inf_loop = true;
4440 break;
4441 }
4442 e->dest->aux = (void *) 1;
4443 gsi = gsi_after_labels (e->dest);
4444 if (!gsi_end_p (gsi) && is_gimple_debug (gsi_stmt (gsi)))
4445 gsi_next_nondebug (&gsi);
4446 if (!gsi_end_p (gsi))
4447 break;
4448 }
4449 e_first->src->aux = NULL;
4450 for (e = e_first; e->dest->aux; e = single_succ_edge (e->dest))
4451 e->dest->aux = NULL;
4452
4453 return inf_loop;
4454}
4455
1d65f45c
RH
4456/* Examine the block associated with LP to determine if it's an empty
4457 handler for its EH region. If so, attempt to redirect EH edges to
4458 an outer region. Return true if the CFG was updated in any way. This
4459 is similar to jump forwarding, just across EH edges. */
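/* The cases handled below: no destination region in this function (the
   EH edges are simply removed), a MUST_NOT_THROW destination (the
   runtime handles the abort), merging the PHIs into the destination
   landing-pad block, and, when every predecessor edge is an EH edge,
   moving the landing pad itself to the outer region.  */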
4460
4461static bool
4462cleanup_empty_eh (eh_landing_pad lp)
4463{
61ff5d6f 4464 basic_block bb = label_to_block (cfun, lp->post_landing_pad);
1d65f45c 4465 gimple_stmt_iterator gsi;
355fe088 4466 gimple *resx;
1d65f45c
RH
4467 eh_region new_region;
4468 edge_iterator ei;
4469 edge e, e_out;
4470 bool has_non_eh_pred;
81bfd197 4471 bool ret = false;
1d65f45c
RH
4472 int new_lp_nr;
4473
4474 /* There can be zero or one edges out of BB. This is the quickest test. */
4475 switch (EDGE_COUNT (bb->succs))
4476 {
4477 case 0:
4478 e_out = NULL;
4479 break;
4480 case 1:
f223bb13 4481 e_out = single_succ_edge (bb);
1d65f45c
RH
4482 break;
4483 default:
4484 return false;
4485 }
81bfd197 4486
55665504
AO
4487 gsi = gsi_last_nondebug_bb (bb);
4488 resx = gsi_stmt (gsi);
81bfd197
MM
4489 if (resx && is_gimple_resx (resx))
4490 {
36bbc05d 4491 if (stmt_can_throw_external (cfun, resx))
81bfd197
MM
4492 optimize_clobbers (bb);
4493 else if (sink_clobbers (bb))
4494 ret = true;
4495 }
4496
1d65f45c
RH
4497 gsi = gsi_after_labels (bb);
4498
4499 /* Make sure to skip debug statements. */
4500 if (!gsi_end_p (gsi) && is_gimple_debug (gsi_stmt (gsi)))
4501 gsi_next_nondebug (&gsi);
4502
4503 /* If the block is totally empty, look for more unsplitting cases. */
4504 if (gsi_end_p (gsi))
0d228a52 4505 {
609524d2
JJ
4506 /* For the degenerate case of an infinite loop bail out.
4507 If bb has no successors and is totally empty, which can happen e.g.
4508 because of incorrect noreturn attribute, bail out too. */
4509 if (e_out == NULL
4510 || infinite_empty_loop_p (e_out))
81bfd197 4511 return ret;
0d228a52 4512
81bfd197 4513 return ret | cleanup_empty_eh_unsplit (bb, e_out, lp);
0d228a52 4514 }
1d65f45c 4515
1ee0d660
EB
4516 /* The block should consist only of a single RESX statement, modulo a
4517 preceding call to __builtin_stack_restore if there is no outgoing
4518 edge, since the call can be eliminated in this case. */
1d65f45c 4519 resx = gsi_stmt (gsi);
1ee0d660
EB
4520 if (!e_out && gimple_call_builtin_p (resx, BUILT_IN_STACK_RESTORE))
4521 {
55665504 4522 gsi_next_nondebug (&gsi);
1ee0d660
EB
4523 resx = gsi_stmt (gsi);
4524 }
1d65f45c 4525 if (!is_gimple_resx (resx))
81bfd197 4526 return ret;
55665504 4527 gcc_assert (gsi_one_nondebug_before_end_p (gsi));
1d65f45c
RH
4528
4529 /* Determine if there are non-EH edges, or resx edges into the handler. */
4530 has_non_eh_pred = false;
4531 FOR_EACH_EDGE (e, ei, bb->preds)
4532 if (!(e->flags & EDGE_EH))
4533 has_non_eh_pred = true;
4534
4535 /* Find the handler that's outer of the empty handler by looking at
4536 where the RESX instruction was vectored. */
4537 new_lp_nr = lookup_stmt_eh_lp (resx);
4538 new_region = get_eh_region_from_lp_number (new_lp_nr);
4539
4540 /* If there's no destination region within the current function,
4541 redirection is trivial via removing the throwing statements from
4542 the EH region, removing the EH edges, and allowing the block
4543 to go unreachable. */
4544 if (new_region == NULL)
4545 {
4546 gcc_assert (e_out == NULL);
4547 for (ei = ei_start (bb->preds); (e = ei_safe_edge (ei)); )
4548 if (e->flags & EDGE_EH)
4549 {
355fe088 4550 gimple *stmt = last_stmt (e->src);
1d65f45c
RH
4551 remove_stmt_from_eh_lp (stmt);
4552 remove_edge (e);
4553 }
4554 else
4555 ei_next (&ei);
4556 goto succeed;
4557 }
4558
4559 /* If the destination region is a MUST_NOT_THROW, allow the runtime
4560 to handle the abort and allow the blocks to go unreachable. */
4561 if (new_region->type == ERT_MUST_NOT_THROW)
4562 {
4563 for (ei = ei_start (bb->preds); (e = ei_safe_edge (ei)); )
4564 if (e->flags & EDGE_EH)
4565 {
355fe088 4566 gimple *stmt = last_stmt (e->src);
1d65f45c
RH
4567 remove_stmt_from_eh_lp (stmt);
4568 add_stmt_to_eh_lp (stmt, new_lp_nr);
4569 remove_edge (e);
4570 }
4571 else
4572 ei_next (&ei);
4573 goto succeed;
4574 }
4575
4576 /* Try to redirect the EH edges and merge the PHIs into the destination
4577 landing pad block. If the merge succeeds, we'll already have redirected
4578 all the EH edges. The handler itself will go unreachable if there were
4579 no normal edges. */
d6063d7f 4580 if (cleanup_empty_eh_merge_phis (e_out->dest, bb, e_out, true))
1d65f45c
RH
4581 goto succeed;
4582
4583 /* Finally, if all input edges are EH edges, then we can (potentially)
4584 reduce the number of transfers from the runtime by moving the landing
4585 pad from the original region to the new region. This is a win when
4586 we remove the last CLEANUP region along a particular exception
4587 propagation path. Since nothing changes except for the region with
4588 which the landing pad is associated, the PHI nodes do not need to be
4589 adjusted at all. */
4590 if (!has_non_eh_pred)
4591 {
4592 cleanup_empty_eh_move_lp (bb, e_out, lp, new_region);
4593 if (dump_file && (dump_flags & TDF_DETAILS))
4594 fprintf (dump_file, "Empty EH handler %i moved to EH region %i.\n",
4595 lp->index, new_region->index);
4596
4597 /* ??? The CFG didn't change, but we may have rendered the
4598 old EH region unreachable. Trigger a cleanup there. */
a8da523f
JH
4599 return true;
4600 }
1d65f45c 4601
81bfd197 4602 return ret;
1d65f45c
RH
4603
4604 succeed:
4605 if (dump_file && (dump_flags & TDF_DETAILS))
4606 fprintf (dump_file, "Empty EH handler %i removed.\n", lp->index);
4607 remove_eh_landing_pad (lp);
4608 return true;
a8da523f
JH
4609}
4610
1d65f45c
RH
4611/* Do a post-order traversal of the EH region tree. Examine each
4612 post_landing_pad block and see if we can eliminate it as empty. */
4613
4614static bool
4615cleanup_all_empty_eh (void)
4616{
4617 bool changed = false;
4618 eh_landing_pad lp;
4619 int i;
4620
9771b263 4621 for (i = 1; vec_safe_iterate (cfun->eh->lp_array, i, &lp); ++i)
1d65f45c
RH
4622 if (lp)
4623 changed |= cleanup_empty_eh (lp);
4624
4625 return changed;
4626}
a8da523f
JH
4627
4628/* Perform cleanups and lowering of exception handling
4629 1) cleanup regions with handlers doing nothing are optimized out
4630 2) MUST_NOT_THROW regions that became dead because of 1) are optimized out
4631 3) Info about regions that are containing instructions, and regions
4632 reachable via local EH edges is collected
c0d18c6c 4633 4) EH tree is pruned for regions no longer necessary.
1d65f45c
RH
4634
4635 TODO: Push MUST_NOT_THROW regions to the root of the EH tree.
4636 Unify those that have the same failure decl and locus.
4637*/
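/* The driver below first removes unreachable handlers; if a region tree
   remains it runs unsplit_all_eh (when optimizing) and
   cleanup_all_empty_eh, and when anything changed it deletes the blocks
   and EH regions that became unreachable as a result.  */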
a8da523f
JH
4638
4639static unsigned int
66a3e339 4640execute_cleanup_eh_1 (void)
a8da523f 4641{
1d65f45c
RH
4642 /* Do this first: unsplit_all_eh and cleanup_all_empty_eh can die
4643 looking up unreachable landing pads. */
4644 remove_unreachable_handlers ();
a8da523f 4645
1d65f45c 4646 /* Watch out for the region tree vanishing due to all unreachable. */
25fe40b0 4647 if (cfun->eh->region_tree)
a8da523f 4648 {
1d65f45c 4649 bool changed = false;
a8da523f 4650
25fe40b0
RB
4651 if (optimize)
4652 changed |= unsplit_all_eh ();
1d65f45c
RH
4653 changed |= cleanup_all_empty_eh ();
4654
4655 if (changed)
6d07ad98
JH
4656 {
4657 free_dominance_info (CDI_DOMINATORS);
4658 free_dominance_info (CDI_POST_DOMINATORS);
a8da523f 4659
1d65f45c
RH
4660 /* We delayed all basic block deletion, as we may have performed
4661 cleanups on EH edges while non-EH edges were still present. */
4662 delete_unreachable_blocks ();
a8da523f 4663
1d65f45c
RH
4664 /* We manipulated the landing pads. Remove any region that no
4665 longer has a landing pad. */
4666 remove_unreachable_handlers_no_lp ();
4667
4668 return TODO_cleanup_cfg | TODO_update_ssa_only_virtuals;
4669 }
a8da523f
JH
4670 }
4671
1d65f45c
RH
4672 return 0;
4673}
4674
17795822
TS
4675namespace {
4676
4677const pass_data pass_data_cleanup_eh =
27a4cd48
DM
4678{
4679 GIMPLE_PASS, /* type */
4680 "ehcleanup", /* name */
4681 OPTGROUP_NONE, /* optinfo_flags */
27a4cd48
DM
4682 TV_TREE_EH, /* tv_id */
4683 PROP_gimple_lcf, /* properties_required */
4684 0, /* properties_provided */
4685 0, /* properties_destroyed */
4686 0, /* todo_flags_start */
3bea341f 4687 0, /* todo_flags_finish */
a8da523f 4688};
27a4cd48 4689
17795822 4690class pass_cleanup_eh : public gimple_opt_pass
27a4cd48
DM
4691{
4692public:
c3284718
RS
4693 pass_cleanup_eh (gcc::context *ctxt)
4694 : gimple_opt_pass (pass_data_cleanup_eh, ctxt)
27a4cd48
DM
4695 {}
4696
4697 /* opt_pass methods: */
65d3284b 4698 opt_pass * clone () { return new pass_cleanup_eh (m_ctxt); }
1a3d085c
TS
4699 virtual bool gate (function *fun)
4700 {
4701 return fun->eh != NULL && fun->eh->region_tree != NULL;
4702 }
4703
be55bfe6 4704 virtual unsigned int execute (function *);
27a4cd48
DM
4705
4706}; // class pass_cleanup_eh
4707
be55bfe6
TS
4708unsigned int
4709pass_cleanup_eh::execute (function *fun)
4710{
4711 int ret = execute_cleanup_eh_1 ();
4712
4713 /* If the function no longer needs an EH personality routine
4714 clear it. This exposes cross-language inlining opportunities
4715 and avoids references to a never defined personality routine. */
4716 if (DECL_FUNCTION_PERSONALITY (current_function_decl)
4717 && function_needs_eh_personality (fun) != eh_personality_lang)
4718 DECL_FUNCTION_PERSONALITY (current_function_decl) = NULL_TREE;
4719
4720 return ret;
4721}
4722
17795822
TS
4723} // anon namespace
4724
27a4cd48
DM
4725gimple_opt_pass *
4726make_pass_cleanup_eh (gcc::context *ctxt)
4727{
4728 return new pass_cleanup_eh (ctxt);
4729}
1d65f45c
RH
4730\f
4731/* Verify that BB, whose last statement is STMT, has precisely the
4732 edge that make_eh_edges would create. */
4733
24e47c76 4734DEBUG_FUNCTION bool
355fe088 4735verify_eh_edges (gimple *stmt)
1d65f45c
RH
4736{
4737 basic_block bb = gimple_bb (stmt);
4738 eh_landing_pad lp = NULL;
4739 int lp_nr;
4740 edge_iterator ei;
4741 edge e, eh_edge;
4742
4743 lp_nr = lookup_stmt_eh_lp (stmt);
4744 if (lp_nr > 0)
4745 lp = get_eh_landing_pad_from_number (lp_nr);
4746
4747 eh_edge = NULL;
4748 FOR_EACH_EDGE (e, ei, bb->succs)
4749 {
4750 if (e->flags & EDGE_EH)
4751 {
4752 if (eh_edge)
4753 {
4754 error ("BB %i has multiple EH edges", bb->index);
4755 return true;
4756 }
4757 else
4758 eh_edge = e;
4759 }
4760 }
4761
4762 if (lp == NULL)
4763 {
4764 if (eh_edge)
4765 {
67914693 4766 error ("BB %i cannot throw but has an EH edge", bb->index);
1d65f45c
RH
4767 return true;
4768 }
4769 return false;
4770 }
4771
36bbc05d 4772 if (!stmt_could_throw_p (cfun, stmt))
1d65f45c
RH
4773 {
4774 error ("BB %i last statement has incorrectly set lp", bb->index);
4775 return true;
4776 }
4777
4778 if (eh_edge == NULL)
4779 {
4780 error ("BB %i is missing an EH edge", bb->index);
4781 return true;
4782 }
4783
61ff5d6f 4784 if (eh_edge->dest != label_to_block (cfun, lp->post_landing_pad))
1d65f45c
RH
4785 {
4786 error ("Incorrect EH edge %i->%i", bb->index, eh_edge->dest->index);
4787 return true;
4788 }
4789
4790 return false;
4791}
4792
4793/* Similarly, but handle GIMPLE_EH_DISPATCH specifically. */
4794
24e47c76 4795DEBUG_FUNCTION bool
538dd0b7 4796verify_eh_dispatch_edge (geh_dispatch *stmt)
1d65f45c
RH
4797{
4798 eh_region r;
4799 eh_catch c;
4800 basic_block src, dst;
4801 bool want_fallthru = true;
4802 edge_iterator ei;
4803 edge e, fall_edge;
4804
4805 r = get_eh_region_from_number (gimple_eh_dispatch_region (stmt));
4806 src = gimple_bb (stmt);
4807
4808 FOR_EACH_EDGE (e, ei, src->succs)
4809 gcc_assert (e->aux == NULL);
4810
4811 switch (r->type)
4812 {
4813 case ERT_TRY:
4814 for (c = r->u.eh_try.first_catch; c ; c = c->next_catch)
4815 {
61ff5d6f 4816 dst = label_to_block (cfun, c->label);
1d65f45c
RH
4817 e = find_edge (src, dst);
4818 if (e == NULL)
4819 {
4820 error ("BB %i is missing an edge", src->index);
4821 return true;
4822 }
4823 e->aux = (void *)e;
4824
4825 /* A catch-all handler doesn't have a fallthru. */
4826 if (c->type_list == NULL)
4827 {
4828 want_fallthru = false;
4829 break;
4830 }
4831 }
4832 break;
4833
4834 case ERT_ALLOWED_EXCEPTIONS:
61ff5d6f 4835 dst = label_to_block (cfun, r->u.allowed.label);
1d65f45c
RH
4836 e = find_edge (src, dst);
4837 if (e == NULL)
4838 {
4839 error ("BB %i is missing an edge", src->index);
4840 return true;
4841 }
4842 e->aux = (void *)e;
4843 break;
4844
4845 default:
4846 gcc_unreachable ();
4847 }
4848
4849 fall_edge = NULL;
4850 FOR_EACH_EDGE (e, ei, src->succs)
4851 {
4852 if (e->flags & EDGE_FALLTHRU)
4853 {
4854 if (fall_edge != NULL)
4855 {
4856 error ("BB %i too many fallthru edges", src->index);
4857 return true;
4858 }
4859 fall_edge = e;
4860 }
4861 else if (e->aux)
4862 e->aux = NULL;
4863 else
4864 {
4865 error ("BB %i has incorrect edge", src->index);
4866 return true;
4867 }
4868 }
4869 if ((fall_edge != NULL) ^ want_fallthru)
4870 {
4871 error ("BB %i has incorrect fallthru edge", src->index);
4872 return true;
4873 }
4874
4875 return false;
4876}