/* Implements exception handling.
   Copyright (C) 1989-2021 Free Software Foundation, Inc.
   Contributed by Mike Stump <mrs@cygnus.com>.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */


/* An exception is an event that can be "thrown" from within a
   function.  This event can then be "caught" by the callers of
   the function.

   The representation of exceptions changes several times during
   the compilation process:

   In the beginning, in the front end, we have the GENERIC trees
   TRY_CATCH_EXPR, TRY_FINALLY_EXPR, EH_ELSE_EXPR, WITH_CLEANUP_EXPR,
   CLEANUP_POINT_EXPR, CATCH_EXPR, and EH_FILTER_EXPR.

   During initial gimplification (gimplify.c) these are lowered to the
   GIMPLE_TRY, GIMPLE_CATCH, GIMPLE_EH_ELSE, and GIMPLE_EH_FILTER
   nodes.  The WITH_CLEANUP_EXPR and CLEANUP_POINT_EXPR nodes are
   converted into GIMPLE_TRY_FINALLY nodes; the others are a more
   direct 1-1 conversion.

   During pass_lower_eh (tree-eh.c) we record the nested structure
   of the TRY nodes in EH_REGION nodes in CFUN->EH->REGION_TREE.
   We expand the eh_protect_cleanup_actions langhook into MUST_NOT_THROW
   regions at this time.  We can then flatten the statements within
   the TRY nodes to straight-line code.  Statements that had been within
   TRY nodes that can throw are recorded within CFUN->EH->THROW_STMT_TABLE,
   so that we may remember what action is supposed to be taken if
   a given statement does throw.  During this lowering process,
   we create an EH_LANDING_PAD node for each EH_REGION that has
   some code within the function that needs to be executed if a
   throw does happen.  We also create RESX statements that are
   used to transfer control from an inner EH_REGION to an outer
   EH_REGION.  We also create EH_DISPATCH statements as placeholders
   for a runtime type comparison that should be made in order to
   select the action to perform among different CATCH and EH_FILTER
   regions.

   During pass_lower_eh_dispatch (tree-eh.c), which is run after
   all inlining is complete, we are able to run assign_filter_values,
   which allows us to map the set of types manipulated by all of the
   CATCH and EH_FILTER regions to a set of integers.  This set of integers
   will be how the exception runtime communicates with the code generated
   within the function.  We then expand the GIMPLE_EH_DISPATCH statements
   to a switch or conditional branches that use the argument provided by
   the runtime (__builtin_eh_filter) and the set of integers we computed
   in assign_filter_values.

   During pass_lower_resx (tree-eh.c), which is run near the end
   of optimization, we expand RESX statements.  If the eh region
   that is outer to the RESX statement is a MUST_NOT_THROW, then
   the RESX expands to some form of abort statement.  If the eh
   region that is outer to the RESX statement is within the current
   function, then the RESX expands to a bookkeeping call
   (__builtin_eh_copy_values) and a goto.  Otherwise, the next
   handler for the exception must be within a function somewhere
   up the call chain, so we call back into the exception runtime
   (__builtin_unwind_resume).

   During pass_expand (cfgexpand.c), we generate REG_EH_REGION notes
   that create an rtl to eh_region mapping that corresponds to the
   gimple to eh_region mapping that had been recorded in the
   THROW_STMT_TABLE.

   Then, via finish_eh_generation, we generate the real landing pads
   to which the runtime will actually transfer control.  These new
   landing pads perform whatever bookkeeping is needed by the target
   backend in order to resume execution within the current function.
   Each of these new landing pads falls through into the post_landing_pad
   label which had been used within the CFG up to this point.  All
   exception edges within the CFG are redirected to the new landing pads.
   If the target uses setjmp to implement exceptions, the various extra
   calls into the runtime to register and unregister the current stack
   frame are emitted at this time.

   During pass_convert_to_eh_region_ranges (except.c), we transform
   the REG_EH_REGION notes attached to individual insns into
   non-overlapping ranges of insns bounded by NOTE_INSN_EH_REGION_BEG
   and NOTE_INSN_EH_REGION_END.  Each insn within such ranges has the
   same associated action within the exception region tree, meaning
   that (1) the exception is caught by the same landing pad within the
   current function, (2) the exception is blocked by the runtime with
   a MUST_NOT_THROW region, or (3) the exception is not handled at all
   within the current function.

   Finally, during assembly generation, we call
   output_function_exception_table (except.c) to emit the tables with
   which the exception runtime can determine if a given stack frame
   handles a given exception, and if so what filter value to provide
   to the function when the non-local control transfer is effected.
   If the target uses dwarf2 unwinding to implement exceptions, then
   output_call_frame_info (dwarf2out.c) emits the required unwind data.  */
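/* As a rough illustration of the flow described above, a C++ fragment
   such as

     try { foo (); } catch (const E &) { bar (); }

   enters the middle end as a TRY_CATCH_EXPR, is gimplified into a
   GIMPLE_TRY with a GIMPLE_CATCH handler, is recorded by pass_lower_eh
   as an ERT_TRY region with one eh_catch and an EH_LANDING_PAD, and
   ultimately contributes one call-site entry and one action-table chain
   to the LSDA emitted by output_function_exception_table.  */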

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "backend.h"
#include "target.h"
#include "rtl.h"
#include "tree.h"
#include "cfghooks.h"
#include "tree-pass.h"
#include "memmodel.h"
#include "tm_p.h"
#include "stringpool.h"
#include "expmed.h"
#include "optabs.h"
#include "emit-rtl.h"
#include "cgraph.h"
#include "diagnostic.h"
#include "fold-const.h"
#include "stor-layout.h"
#include "explow.h"
#include "stmt.h"
#include "expr.h"
#include "calls.h"
#include "libfuncs.h"
#include "except.h"
#include "output.h"
#include "dwarf2asm.h"
#include "dwarf2out.h"
#include "common/common-target.h"
#include "langhooks.h"
#include "cfgrtl.h"
#include "tree-pretty-print.h"
#include "cfgloop.h"
#include "builtins.h"
#include "tree-hash-traits.h"

static GTY(()) int call_site_base;

static GTY(()) hash_map<tree_hash, tree> *type_to_runtime_map;

static GTY(()) tree setjmp_fn;

/* Describe the SjLj_Function_Context structure.  */
static GTY(()) tree sjlj_fc_type_node;
static int sjlj_fc_call_site_ofs;
static int sjlj_fc_data_ofs;
static int sjlj_fc_personality_ofs;
static int sjlj_fc_lsda_ofs;
static int sjlj_fc_jbuf_ofs;


struct GTY(()) call_site_record_d
{
  rtx landing_pad;
  int action;
};

/* In the following structure and associated functions,
   we represent entries in the action table as 1-based indices.
   Special cases are:

     0:  null action record, non-null landing pad; implies cleanups
    -1:  null action record, null landing pad; implies no action
    -2:  no call-site entry; implies must_not_throw
    -3:  we have yet to process outer regions

   Further, no special cases apply to the "next" field of the record.
   For next, 0 means end of list.  */

struct action_record
{
  int offset;
  int filter;
  int next;
};

/* Hashtable helpers.  */

struct action_record_hasher : free_ptr_hash <action_record>
{
  static inline hashval_t hash (const action_record *);
  static inline bool equal (const action_record *, const action_record *);
};

inline hashval_t
action_record_hasher::hash (const action_record *entry)
{
  return entry->next * 1009 + entry->filter;
}

inline bool
action_record_hasher::equal (const action_record *entry,
                             const action_record *data)
{
  return entry->filter == data->filter && entry->next == data->next;
}

typedef hash_table<action_record_hasher> action_hash_type;

static bool get_eh_region_and_lp_from_rtx (const_rtx, eh_region *,
                                           eh_landing_pad *);

static void dw2_build_landing_pads (void);

static int collect_one_action_chain (action_hash_type *, eh_region);
static int add_call_site (rtx, int, int);

static void push_uleb128 (vec<uchar, va_gc> **, unsigned int);
static void push_sleb128 (vec<uchar, va_gc> **, int);
static int dw2_size_of_call_site_table (int);
static int sjlj_size_of_call_site_table (void);
static void dw2_output_call_site_table (int, int);
static void sjlj_output_call_site_table (void);

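/* One-time initialization of the exception handling machinery; in
   particular, lay out the SjLj_Function_Context record used by the
   setjmp/longjmp based unwinder.  */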
void
init_eh (void)
{
  if (! flag_exceptions)
    return;

  type_to_runtime_map = hash_map<tree_hash, tree>::create_ggc (31);

  /* Create the SjLj_Function_Context structure.  This should match
     the definition in unwind-sjlj.c.  */
  if (targetm_common.except_unwind_info (&global_options) == UI_SJLJ)
    {
      tree f_jbuf, f_per, f_lsda, f_prev, f_cs, f_data, tmp;

      sjlj_fc_type_node = lang_hooks.types.make_type (RECORD_TYPE);

      f_prev = build_decl (BUILTINS_LOCATION,
                           FIELD_DECL, get_identifier ("__prev"),
                           build_pointer_type (sjlj_fc_type_node));
      DECL_FIELD_CONTEXT (f_prev) = sjlj_fc_type_node;

      f_cs = build_decl (BUILTINS_LOCATION,
                         FIELD_DECL, get_identifier ("__call_site"),
                         integer_type_node);
      DECL_FIELD_CONTEXT (f_cs) = sjlj_fc_type_node;

      tmp = build_index_type (size_int (4 - 1));
      tmp = build_array_type (lang_hooks.types.type_for_mode
                                (targetm.unwind_word_mode (), 1),
                              tmp);
      f_data = build_decl (BUILTINS_LOCATION,
                           FIELD_DECL, get_identifier ("__data"), tmp);
      DECL_FIELD_CONTEXT (f_data) = sjlj_fc_type_node;

      f_per = build_decl (BUILTINS_LOCATION,
                          FIELD_DECL, get_identifier ("__personality"),
                          ptr_type_node);
      DECL_FIELD_CONTEXT (f_per) = sjlj_fc_type_node;

      f_lsda = build_decl (BUILTINS_LOCATION,
                           FIELD_DECL, get_identifier ("__lsda"),
                           ptr_type_node);
      DECL_FIELD_CONTEXT (f_lsda) = sjlj_fc_type_node;

#ifdef DONT_USE_BUILTIN_SETJMP
#ifdef JMP_BUF_SIZE
      tmp = size_int (JMP_BUF_SIZE - 1);
#else
      /* Should be large enough for most systems; if it is not,
         JMP_BUF_SIZE should be defined with the proper value.  It will
         also tend to be larger than necessary for most systems, a more
         optimal port will define JMP_BUF_SIZE.  */
      tmp = size_int (FIRST_PSEUDO_REGISTER + 2 - 1);
#endif
#else
      /* Compute a minimally sized jump buffer.  We need room to store at
         least 3 pointers - stack pointer, frame pointer and return address.
         Plus for some targets we need room for an extra pointer - in the
         case of MIPS this is the global pointer.  This makes a total of four
         pointers, but to be safe we actually allocate room for 5.

         If pointers are smaller than words then we allocate enough room for
         5 words, just in case the backend needs this much room.  For more
         discussion on this issue see:
         http://gcc.gnu.org/ml/gcc-patches/2014-05/msg00313.html.  */
      if (POINTER_SIZE > BITS_PER_WORD)
        tmp = size_int (5 - 1);
      else
        tmp = size_int ((5 * BITS_PER_WORD / POINTER_SIZE) - 1);
#endif

      tmp = build_index_type (tmp);
      tmp = build_array_type (ptr_type_node, tmp);
      f_jbuf = build_decl (BUILTINS_LOCATION,
                           FIELD_DECL, get_identifier ("__jbuf"), tmp);
#ifdef DONT_USE_BUILTIN_SETJMP
      /* We don't know what the alignment requirements of the
         runtime's jmp_buf are.  Overestimate.  */
      SET_DECL_ALIGN (f_jbuf, BIGGEST_ALIGNMENT);
      DECL_USER_ALIGN (f_jbuf) = 1;
#endif
      DECL_FIELD_CONTEXT (f_jbuf) = sjlj_fc_type_node;

      TYPE_FIELDS (sjlj_fc_type_node) = f_prev;
      TREE_CHAIN (f_prev) = f_cs;
      TREE_CHAIN (f_cs) = f_data;
      TREE_CHAIN (f_data) = f_per;
      TREE_CHAIN (f_per) = f_lsda;
      TREE_CHAIN (f_lsda) = f_jbuf;

      layout_type (sjlj_fc_type_node);

      /* Cache the interesting field offsets so that we have
         easy access from rtl.  */
      sjlj_fc_call_site_ofs
        = (tree_to_uhwi (DECL_FIELD_OFFSET (f_cs))
           + tree_to_uhwi (DECL_FIELD_BIT_OFFSET (f_cs)) / BITS_PER_UNIT);
      sjlj_fc_data_ofs
        = (tree_to_uhwi (DECL_FIELD_OFFSET (f_data))
           + tree_to_uhwi (DECL_FIELD_BIT_OFFSET (f_data)) / BITS_PER_UNIT);
      sjlj_fc_personality_ofs
        = (tree_to_uhwi (DECL_FIELD_OFFSET (f_per))
           + tree_to_uhwi (DECL_FIELD_BIT_OFFSET (f_per)) / BITS_PER_UNIT);
      sjlj_fc_lsda_ofs
        = (tree_to_uhwi (DECL_FIELD_OFFSET (f_lsda))
           + tree_to_uhwi (DECL_FIELD_BIT_OFFSET (f_lsda)) / BITS_PER_UNIT);
      sjlj_fc_jbuf_ofs
        = (tree_to_uhwi (DECL_FIELD_OFFSET (f_jbuf))
           + tree_to_uhwi (DECL_FIELD_BIT_OFFSET (f_jbuf)) / BITS_PER_UNIT);

#ifdef DONT_USE_BUILTIN_SETJMP
      tmp = build_function_type_list (integer_type_node, TREE_TYPE (f_jbuf),
                                      NULL);
      setjmp_fn = build_decl (BUILTINS_LOCATION, FUNCTION_DECL,
                              get_identifier ("setjmp"), tmp);
      TREE_PUBLIC (setjmp_fn) = 1;
      DECL_EXTERNAL (setjmp_fn) = 1;
      DECL_ASSEMBLER_NAME (setjmp_fn);
#endif
    }
}

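/* Allocate and zero the per-function exception handling data.  */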
void
init_eh_for_function (void)
{
  cfun->eh = ggc_cleared_alloc<eh_status> ();

  /* Make sure zero'th entries are used.  */
  vec_safe_push (cfun->eh->region_array, (eh_region)0);
  vec_safe_push (cfun->eh->lp_array, (eh_landing_pad)0);
}

/* Routines to generate the exception tree somewhat directly.
   These are used from tree-eh.c when processing exception related
   nodes during tree optimization.  */

static eh_region
gen_eh_region (enum eh_region_type type, eh_region outer)
{
  eh_region new_eh;

  /* Insert a new blank region as a leaf in the tree.  */
  new_eh = ggc_cleared_alloc<eh_region_d> ();
  new_eh->type = type;
  new_eh->outer = outer;
  if (outer)
    {
      new_eh->next_peer = outer->inner;
      outer->inner = new_eh;
    }
  else
    {
      new_eh->next_peer = cfun->eh->region_tree;
      cfun->eh->region_tree = new_eh;
    }

  new_eh->index = vec_safe_length (cfun->eh->region_array);
  vec_safe_push (cfun->eh->region_array, new_eh);

  /* Copy the language's notion of whether to use __cxa_end_cleanup.  */
  if (targetm.arm_eabi_unwinder && lang_hooks.eh_use_cxa_end_cleanup)
    new_eh->use_cxa_end_cleanup = true;

  return new_eh;
}

eh_region
gen_eh_region_cleanup (eh_region outer)
{
  return gen_eh_region (ERT_CLEANUP, outer);
}

eh_region
gen_eh_region_try (eh_region outer)
{
  return gen_eh_region (ERT_TRY, outer);
}

eh_catch
gen_eh_region_catch (eh_region t, tree type_or_list)
{
  eh_catch c, l;
  tree type_list, type_node;

  gcc_assert (t->type == ERT_TRY);

  /* Ensure to always end up with a type list to normalize further
     processing, then register each type against the runtime types map.  */
  type_list = type_or_list;
  if (type_or_list)
    {
      if (TREE_CODE (type_or_list) != TREE_LIST)
        type_list = tree_cons (NULL_TREE, type_or_list, NULL_TREE);

      type_node = type_list;
      for (; type_node; type_node = TREE_CHAIN (type_node))
        add_type_for_runtime (TREE_VALUE (type_node));
    }

  c = ggc_cleared_alloc<eh_catch_d> ();
  c->type_list = type_list;
  l = t->u.eh_try.last_catch;
  c->prev_catch = l;
  if (l)
    l->next_catch = c;
  else
    t->u.eh_try.first_catch = c;
  t->u.eh_try.last_catch = c;

  return c;
}

eh_region
gen_eh_region_allowed (eh_region outer, tree allowed)
{
  eh_region region = gen_eh_region (ERT_ALLOWED_EXCEPTIONS, outer);
  region->u.allowed.type_list = allowed;

  for (; allowed ; allowed = TREE_CHAIN (allowed))
    add_type_for_runtime (TREE_VALUE (allowed));

  return region;
}

eh_region
gen_eh_region_must_not_throw (eh_region outer)
{
  return gen_eh_region (ERT_MUST_NOT_THROW, outer);
}

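/* Create a new landing pad within REGION and link it onto the region's
   list of landing pads.  */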
eh_landing_pad
gen_eh_landing_pad (eh_region region)
{
  eh_landing_pad lp = ggc_cleared_alloc<eh_landing_pad_d> ();

  lp->next_lp = region->landing_pads;
  lp->region = region;
  lp->index = vec_safe_length (cfun->eh->lp_array);
  region->landing_pads = lp;

  vec_safe_push (cfun->eh->lp_array, lp);

  return lp;
}

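/* Accessors mapping region and landing-pad indices back to the
   corresponding structures, either in an explicit function IFUN or in
   the current function.  */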
eh_region
get_eh_region_from_number_fn (struct function *ifun, int i)
{
  return (*ifun->eh->region_array)[i];
}

eh_region
get_eh_region_from_number (int i)
{
  return get_eh_region_from_number_fn (cfun, i);
}

eh_landing_pad
get_eh_landing_pad_from_number_fn (struct function *ifun, int i)
{
  return (*ifun->eh->lp_array)[i];
}

eh_landing_pad
get_eh_landing_pad_from_number (int i)
{
  return get_eh_landing_pad_from_number_fn (cfun, i);
}

eh_region
get_eh_region_from_lp_number_fn (struct function *ifun, int i)
{
  if (i < 0)
    return (*ifun->eh->region_array)[-i];
  else if (i == 0)
    return NULL;
  else
    {
      eh_landing_pad lp;
      lp = (*ifun->eh->lp_array)[i];
      return lp->region;
    }
}

eh_region
get_eh_region_from_lp_number (int i)
{
  return get_eh_region_from_lp_number_fn (cfun, i);
}

/* Returns true if the current function has exception handling regions.  */

bool
current_function_has_exception_handlers (void)
{
  return cfun->eh->region_tree != NULL;
}

/* A subroutine of duplicate_eh_regions.  Copy the eh_region tree at OLD.
   Root it at OUTER, and apply LP_OFFSET to the lp numbers.  */

struct duplicate_eh_regions_data
{
  duplicate_eh_regions_map label_map;
  void *label_map_data;
  hash_map<void *, void *> *eh_map;
};

static void
duplicate_eh_regions_1 (struct duplicate_eh_regions_data *data,
                        eh_region old_r, eh_region outer)
{
  eh_landing_pad old_lp, new_lp;
  eh_region new_r;

  new_r = gen_eh_region (old_r->type, outer);
  gcc_assert (!data->eh_map->put (old_r, new_r));

  switch (old_r->type)
    {
    case ERT_CLEANUP:
      break;

    case ERT_TRY:
      {
        eh_catch oc, nc;
        for (oc = old_r->u.eh_try.first_catch; oc ; oc = oc->next_catch)
          {
            /* We should be doing all our region duplication before and
               during inlining, which is before filter lists are created.  */
            gcc_assert (oc->filter_list == NULL);
            nc = gen_eh_region_catch (new_r, oc->type_list);
            nc->label = data->label_map (oc->label, data->label_map_data);
          }
      }
      break;

    case ERT_ALLOWED_EXCEPTIONS:
      new_r->u.allowed.type_list = old_r->u.allowed.type_list;
      if (old_r->u.allowed.label)
        new_r->u.allowed.label
          = data->label_map (old_r->u.allowed.label, data->label_map_data);
      else
        new_r->u.allowed.label = NULL_TREE;
      break;

    case ERT_MUST_NOT_THROW:
      new_r->u.must_not_throw.failure_loc =
        LOCATION_LOCUS (old_r->u.must_not_throw.failure_loc);
      new_r->u.must_not_throw.failure_decl =
        old_r->u.must_not_throw.failure_decl;
      break;
    }

  for (old_lp = old_r->landing_pads; old_lp ; old_lp = old_lp->next_lp)
    {
      /* Don't bother copying unused landing pads.  */
      if (old_lp->post_landing_pad == NULL)
        continue;

      new_lp = gen_eh_landing_pad (new_r);
      gcc_assert (!data->eh_map->put (old_lp, new_lp));

      new_lp->post_landing_pad
        = data->label_map (old_lp->post_landing_pad, data->label_map_data);
      EH_LANDING_PAD_NR (new_lp->post_landing_pad) = new_lp->index;
    }

  /* Make sure to preserve the original use of __cxa_end_cleanup.  */
  new_r->use_cxa_end_cleanup = old_r->use_cxa_end_cleanup;

  for (old_r = old_r->inner; old_r ; old_r = old_r->next_peer)
    duplicate_eh_regions_1 (data, old_r, new_r);
}

/* Duplicate the EH regions from IFUN rooted at COPY_REGION into
   the current function and root the tree below OUTER_REGION.
   The special case of COPY_REGION of NULL means all regions.
   Remap labels using MAP/MAP_DATA callback.  Return a pointer map
   that allows the caller to remap uses of both EH regions and
   EH landing pads.  */

hash_map<void *, void *> *
duplicate_eh_regions (struct function *ifun,
                      eh_region copy_region, int outer_lp,
                      duplicate_eh_regions_map map, void *map_data)
{
  struct duplicate_eh_regions_data data;
  eh_region outer_region;

  if (flag_checking)
    verify_eh_tree (ifun);

  data.label_map = map;
  data.label_map_data = map_data;
  data.eh_map = new hash_map<void *, void *>;

  outer_region = get_eh_region_from_lp_number_fn (cfun, outer_lp);

  /* Copy all the regions in the subtree.  */
  if (copy_region)
    duplicate_eh_regions_1 (&data, copy_region, outer_region);
  else
    {
      eh_region r;
      for (r = ifun->eh->region_tree; r ; r = r->next_peer)
        duplicate_eh_regions_1 (&data, r, outer_region);
    }

  if (flag_checking)
    verify_eh_tree (cfun);

  return data.eh_map;
}

/* Return the region that is outer to both REGION_A and REGION_B in IFUN.  */

eh_region
eh_region_outermost (struct function *ifun, eh_region region_a,
                     eh_region region_b)
{
  gcc_assert (ifun->eh->region_array);
  gcc_assert (ifun->eh->region_tree);

  auto_sbitmap b_outer (ifun->eh->region_array->length ());
  bitmap_clear (b_outer);

  do
    {
      bitmap_set_bit (b_outer, region_b->index);
      region_b = region_b->outer;
    }
  while (region_b);

  do
    {
      if (bitmap_bit_p (b_outer, region_a->index))
        break;
      region_a = region_a->outer;
    }
  while (region_a);

  return region_a;
}

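/* Record TYPE in type_to_runtime_map, converting it with the language's
   eh_runtime_type hook the first time it is seen.  */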
void
add_type_for_runtime (tree type)
{
  /* If TYPE is NOP_EXPR, it means that it already is a runtime type.  */
  if (TREE_CODE (type) == NOP_EXPR)
    return;

  bool existed = false;
  tree *slot = &type_to_runtime_map->get_or_insert (type, &existed);
  if (!existed)
    *slot = lang_hooks.eh_runtime_type (type);
}

tree
lookup_type_for_runtime (tree type)
{
  /* If TYPE is NOP_EXPR, it means that it already is a runtime type.  */
  if (TREE_CODE (type) == NOP_EXPR)
    return type;

  /* We should have always inserted the data earlier.  */
  return *type_to_runtime_map->get (type);
}

/* Represent an entry in @TTypes for either catch actions
   or exception filter actions.  */
struct ttypes_filter {
  tree t;
  int filter;
};

/* Helper for ttypes_filter hashing.  */

struct ttypes_filter_hasher : free_ptr_hash <ttypes_filter>
{
  typedef tree_node *compare_type;
  static inline hashval_t hash (const ttypes_filter *);
  static inline bool equal (const ttypes_filter *, const tree_node *);
};

/* Compare ENTRY (a ttypes_filter entry in the hash table) with DATA
   (a tree) for a @TTypes type node we are thinking about adding.  */

inline bool
ttypes_filter_hasher::equal (const ttypes_filter *entry, const tree_node *data)
{
  return entry->t == data;
}

inline hashval_t
ttypes_filter_hasher::hash (const ttypes_filter *entry)
{
  return TREE_HASH (entry->t);
}

typedef hash_table<ttypes_filter_hasher> ttypes_hash_type;


/* Helper for ehspec hashing.  */

struct ehspec_hasher : free_ptr_hash <ttypes_filter>
{
  static inline hashval_t hash (const ttypes_filter *);
  static inline bool equal (const ttypes_filter *, const ttypes_filter *);
};

/* Compare ENTRY with DATA (both struct ttypes_filter) for a @TTypes
   exception specification list we are thinking about adding.  */
/* ??? Currently we use the type lists in the order given.  Someone
   should put these in some canonical order.  */

inline bool
ehspec_hasher::equal (const ttypes_filter *entry, const ttypes_filter *data)
{
  return type_list_equal (entry->t, data->t);
}

/* Hash function for exception specification lists.  */

inline hashval_t
ehspec_hasher::hash (const ttypes_filter *entry)
{
  hashval_t h = 0;
  tree list;

  for (list = entry->t; list ; list = TREE_CHAIN (list))
    h = (h << 5) + (h >> 27) + TREE_HASH (TREE_VALUE (list));
  return h;
}

typedef hash_table<ehspec_hasher> ehspec_hash_type;


/* Add TYPE (which may be NULL) to cfun->eh->ttype_data, using TYPES_HASH
   to speed up the search.  Return the filter value to be used.  */

static int
add_ttypes_entry (ttypes_hash_type *ttypes_hash, tree type)
{
  struct ttypes_filter **slot, *n;

  slot = ttypes_hash->find_slot_with_hash (type, (hashval_t) TREE_HASH (type),
                                           INSERT);

  if ((n = *slot) == NULL)
    {
      /* Filter value is a 1 based table index.  */

      n = XNEW (struct ttypes_filter);
      n->t = type;
      n->filter = vec_safe_length (cfun->eh->ttype_data) + 1;
      *slot = n;

      vec_safe_push (cfun->eh->ttype_data, type);
    }

  return n->filter;
}

/* Add LIST to cfun->eh->ehspec_data, using EHSPEC_HASH and TYPES_HASH
   to speed up the search.  Return the filter value to be used.  */

static int
add_ehspec_entry (ehspec_hash_type *ehspec_hash, ttypes_hash_type *ttypes_hash,
                  tree list)
{
  struct ttypes_filter **slot, *n;
  struct ttypes_filter dummy;

  dummy.t = list;
  slot = ehspec_hash->find_slot (&dummy, INSERT);

  if ((n = *slot) == NULL)
    {
      int len;

      if (targetm.arm_eabi_unwinder)
        len = vec_safe_length (cfun->eh->ehspec_data.arm_eabi);
      else
        len = vec_safe_length (cfun->eh->ehspec_data.other);

      /* Filter value is a -1 based byte index into a uleb128 buffer.  */

      n = XNEW (struct ttypes_filter);
      n->t = list;
      n->filter = -(len + 1);
      *slot = n;

      /* Generate a 0 terminated list of filter values.  */
      for (; list ; list = TREE_CHAIN (list))
        {
          if (targetm.arm_eabi_unwinder)
            vec_safe_push (cfun->eh->ehspec_data.arm_eabi, TREE_VALUE (list));
          else
            {
              /* Look up each type in the list and encode its filter
                 value as a uleb128.  */
              push_uleb128 (&cfun->eh->ehspec_data.other,
                            add_ttypes_entry (ttypes_hash, TREE_VALUE (list)));
            }
        }
      if (targetm.arm_eabi_unwinder)
        vec_safe_push (cfun->eh->ehspec_data.arm_eabi, NULL_TREE);
      else
        vec_safe_push (cfun->eh->ehspec_data.other, (uchar)0);
    }

  return n->filter;
}

/* Generate the action filter values to be used for CATCH and
   ALLOWED_EXCEPTIONS regions.  When using dwarf2 exception regions,
   we use lots of landing pads, and so every type or list can share
   the same filter value, which saves table space.  */

void
assign_filter_values (void)
{
  int i;
  eh_region r;
  eh_catch c;

  vec_alloc (cfun->eh->ttype_data, 16);
  if (targetm.arm_eabi_unwinder)
    vec_alloc (cfun->eh->ehspec_data.arm_eabi, 64);
  else
    vec_alloc (cfun->eh->ehspec_data.other, 64);

  ehspec_hash_type ehspec (31);
  ttypes_hash_type ttypes (31);

  for (i = 1; vec_safe_iterate (cfun->eh->region_array, i, &r); ++i)
    {
      if (r == NULL)
        continue;

      switch (r->type)
        {
        case ERT_TRY:
          for (c = r->u.eh_try.first_catch; c ; c = c->next_catch)
            {
              /* Whatever type_list is (NULL or true list), we build a list
                 of filters for the region.  */
              c->filter_list = NULL_TREE;

              if (c->type_list != NULL)
                {
                  /* Get a filter value for each of the types caught and store
                     them in the region's dedicated list.  */
                  tree tp_node = c->type_list;

                  for ( ; tp_node; tp_node = TREE_CHAIN (tp_node))
                    {
                      int flt
                        = add_ttypes_entry (&ttypes, TREE_VALUE (tp_node));
                      tree flt_node = build_int_cst (integer_type_node, flt);

                      c->filter_list
                        = tree_cons (NULL_TREE, flt_node, c->filter_list);
                    }
                }
              else
                {
                  /* Get a filter value for the NULL list also since it
                     will need an action record anyway.  */
                  int flt = add_ttypes_entry (&ttypes, NULL);
                  tree flt_node = build_int_cst (integer_type_node, flt);

                  c->filter_list
                    = tree_cons (NULL_TREE, flt_node, NULL);
                }
            }
          break;

        case ERT_ALLOWED_EXCEPTIONS:
          r->u.allowed.filter
            = add_ehspec_entry (&ehspec, &ttypes, r->u.allowed.type_list);
          break;

        default:
          break;
        }
    }
}

/* Emit SEQ into a basic block just before INSN (which is assumed to be
   the first instruction of some existing BB) and return the newly
   produced block.  */
static basic_block
emit_to_new_bb_before (rtx_insn *seq, rtx_insn *insn)
{
  rtx_insn *next, *last;
  basic_block bb;
  edge e;
  edge_iterator ei;

  /* If there happens to be a fallthru edge (possibly created by cleanup_cfg
     call), we don't want it to go into newly created landing pad or other EH
     construct.  */
  for (ei = ei_start (BLOCK_FOR_INSN (insn)->preds); (e = ei_safe_edge (ei)); )
    if (e->flags & EDGE_FALLTHRU)
      force_nonfallthru (e);
    else
      ei_next (&ei);

  /* Make sure to put the location of INSN or a subsequent instruction on SEQ
     to avoid inheriting the location of the previous instruction.  */
  next = insn;
  while (next && !NONDEBUG_INSN_P (next))
    next = NEXT_INSN (next);
  if (next)
    last = emit_insn_before_setloc (seq, insn, INSN_LOCATION (next));
  else
    last = emit_insn_before (seq, insn);
  if (BARRIER_P (last))
    last = PREV_INSN (last);
  bb = create_basic_block (seq, last, BLOCK_FOR_INSN (insn)->prev_bb);
  update_bb_for_insn (bb);
  bb->flags |= BB_SUPERBLOCK;
  return bb;
}

/* A subroutine of dw2_build_landing_pads, also used for edge splitting
   at the rtl level.  Emit the code required by the target at a landing
   pad for the given region.  */

static void
expand_dw2_landing_pad_for_region (eh_region region)
{
  if (targetm.have_exception_receiver ())
    emit_insn (targetm.gen_exception_receiver ());
  else if (targetm.have_nonlocal_goto_receiver ())
    emit_insn (targetm.gen_nonlocal_goto_receiver ());
  else
    { /* Nothing */ }

  if (region->exc_ptr_reg)
    emit_move_insn (region->exc_ptr_reg,
                    gen_rtx_REG (ptr_mode, EH_RETURN_DATA_REGNO (0)));
  if (region->filter_reg)
    emit_move_insn (region->filter_reg,
                    gen_rtx_REG (targetm.eh_return_filter_mode (),
                                 EH_RETURN_DATA_REGNO (1)));
}

/* Expand the extra code needed at landing pads for dwarf2 unwinding.  */

static void
dw2_build_landing_pads (void)
{
  int i;
  eh_landing_pad lp;
  int e_flags = EDGE_FALLTHRU;

  /* If we're going to partition blocks, we need to be able to add
     new landing pads later, which means that we need to hold on to
     the post-landing-pad block.  Prevent it from being merged away.
     We'll remove this bit after partitioning.  */
  if (flag_reorder_blocks_and_partition)
    e_flags |= EDGE_PRESERVE;

  for (i = 1; vec_safe_iterate (cfun->eh->lp_array, i, &lp); ++i)
    {
      basic_block bb;
      rtx_insn *seq;

      if (lp == NULL || lp->post_landing_pad == NULL)
        continue;

      start_sequence ();

      lp->landing_pad = gen_label_rtx ();
      emit_label (lp->landing_pad);
      LABEL_PRESERVE_P (lp->landing_pad) = 1;

      expand_dw2_landing_pad_for_region (lp->region);

      seq = get_insns ();
      end_sequence ();

      bb = emit_to_new_bb_before (seq, label_rtx (lp->post_landing_pad));
      bb->count = bb->next_bb->count;
      make_single_succ_edge (bb, bb->next_bb, e_flags);
      if (current_loops)
        {
          class loop *loop = bb->next_bb->loop_father;
          /* If we created a pre-header block, add the new block to the
             outer loop, otherwise to the loop itself.  */
          if (bb->next_bb == loop->header)
            add_bb_to_loop (bb, loop_outer (loop));
          else
            add_bb_to_loop (bb, loop);
        }
    }
}

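/* Call-site index assigned to each landing pad by
   sjlj_assign_call_site_values, indexed by landing-pad number.  */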
static vec<int> sjlj_lp_call_site_index;

/* Process all active landing pads.  Assign each one a compact dispatch
   index, and a call-site index.  */

static int
sjlj_assign_call_site_values (void)
{
  action_hash_type ar_hash (31);
  int i, disp_index;
  eh_landing_pad lp;

  vec_alloc (crtl->eh.action_record_data, 64);

  disp_index = 0;
  call_site_base = 1;
  for (i = 1; vec_safe_iterate (cfun->eh->lp_array, i, &lp); ++i)
    if (lp && lp->post_landing_pad)
      {
        int action, call_site;

        /* First: build the action table.  */
        action = collect_one_action_chain (&ar_hash, lp->region);

        /* Next: assign call-site values.  In dwarf2 terms, this would be
           the region number assigned by convert_to_eh_region_ranges, but
           handles no-action and must-not-throw differently.  */
        /* Map must-not-throw to otherwise unused call-site index 0.  */
        if (action == -2)
          call_site = 0;
        /* Map no-action to otherwise unused call-site index -1.  */
        else if (action == -1)
          call_site = -1;
        /* Otherwise, look it up in the table.  */
        else
          call_site = add_call_site (GEN_INT (disp_index), action, 0);
        sjlj_lp_call_site_index[i] = call_site;

        disp_index++;
      }

  return disp_index;
}

/* Emit code to record the current call-site index before every
   insn that can throw.  */

static void
sjlj_mark_call_sites (void)
{
  int last_call_site = -2;
  rtx_insn *insn;
  rtx mem;

  for (insn = get_insns (); insn ; insn = NEXT_INSN (insn))
    {
      eh_landing_pad lp;
      eh_region r;
      bool nothrow;
      int this_call_site;
      rtx_insn *before, *p;

      /* Reset value tracking at extended basic block boundaries.  */
      if (LABEL_P (insn))
        last_call_site = -2;

      /* If the function allocates dynamic stack space, the context must
         be updated after every allocation/deallocation accordingly.  */
      if (NOTE_P (insn) && NOTE_KIND (insn) == NOTE_INSN_UPDATE_SJLJ_CONTEXT)
        {
          rtx buf_addr;

          start_sequence ();
          buf_addr = plus_constant (Pmode, XEXP (crtl->eh.sjlj_fc, 0),
                                    sjlj_fc_jbuf_ofs);
          expand_builtin_update_setjmp_buf (buf_addr);
          p = get_insns ();
          end_sequence ();
          emit_insn_before (p, insn);
        }

      if (! INSN_P (insn))
        continue;

      nothrow = get_eh_region_and_lp_from_rtx (insn, &r, &lp);
      if (nothrow)
        continue;
      if (lp)
        this_call_site = sjlj_lp_call_site_index[lp->index];
      else if (r == NULL)
        {
          /* Calls (and trapping insns) without notes are outside any
             exception handling region in this function.  Mark them as
             no action.  */
          this_call_site = -1;
        }
      else
        {
          gcc_assert (r->type == ERT_MUST_NOT_THROW);
          this_call_site = 0;
        }

      if (this_call_site != -1)
        crtl->uses_eh_lsda = 1;

      if (this_call_site == last_call_site)
        continue;

      /* Don't separate a call from its argument loads.  */
      before = insn;
      if (CALL_P (insn))
        before = find_first_parameter_load (insn, NULL);

      start_sequence ();
      mem = adjust_address (crtl->eh.sjlj_fc, TYPE_MODE (integer_type_node),
                            sjlj_fc_call_site_ofs);
      emit_move_insn (mem, gen_int_mode (this_call_site, GET_MODE (mem)));
      p = get_insns ();
      end_sequence ();

      emit_insn_before (p, before);
      last_call_site = this_call_site;
    }
}

/* Construct the SjLj_Function_Context.  */

static void
sjlj_emit_function_enter (rtx_code_label *dispatch_label)
{
  rtx_insn *fn_begin, *seq;
  rtx fc, mem;
  bool fn_begin_outside_block;
  rtx personality = get_personality_function (current_function_decl);

  fc = crtl->eh.sjlj_fc;

  start_sequence ();

  /* We're storing this libcall's address into memory instead of
     calling it directly.  Thus, we must call assemble_external_libcall
     here, as we cannot depend on emit_library_call to do it for us.  */
  assemble_external_libcall (personality);
  mem = adjust_address (fc, Pmode, sjlj_fc_personality_ofs);
  emit_move_insn (mem, personality);

  mem = adjust_address (fc, Pmode, sjlj_fc_lsda_ofs);
  if (crtl->uses_eh_lsda)
    {
      char buf[20];
      rtx sym;

      ASM_GENERATE_INTERNAL_LABEL (buf, "LLSDA", current_function_funcdef_no);
      sym = gen_rtx_SYMBOL_REF (Pmode, ggc_strdup (buf));
      SYMBOL_REF_FLAGS (sym) = SYMBOL_FLAG_LOCAL;
      emit_move_insn (mem, sym);
    }
  else
    emit_move_insn (mem, const0_rtx);

  if (dispatch_label)
    {
      rtx addr = plus_constant (Pmode, XEXP (fc, 0), sjlj_fc_jbuf_ofs);

#ifdef DONT_USE_BUILTIN_SETJMP
      addr = copy_addr_to_reg (addr);
      addr = convert_memory_address (ptr_mode, addr);
      tree addr_tree = make_tree (ptr_type_node, addr);

      tree call_expr = build_call_expr (setjmp_fn, 1, addr_tree);
      rtx x = expand_call (call_expr, NULL_RTX, false);

      emit_cmp_and_jump_insns (x, const0_rtx, NE, 0,
                               TYPE_MODE (integer_type_node), 0,
                               dispatch_label,
                               profile_probability::unlikely ());
#else
      expand_builtin_setjmp_setup (addr, dispatch_label);
#endif
    }

  emit_library_call (unwind_sjlj_register_libfunc, LCT_NORMAL, VOIDmode,
                     XEXP (fc, 0), Pmode);

  seq = get_insns ();
  end_sequence ();

  /* ??? Instead of doing this at the beginning of the function,
     do this in a block that is at loop level 0 and dominates all
     can_throw_internal instructions.  */

  fn_begin_outside_block = true;
  for (fn_begin = get_insns (); ; fn_begin = NEXT_INSN (fn_begin))
    if (NOTE_P (fn_begin))
      {
        if (NOTE_KIND (fn_begin) == NOTE_INSN_FUNCTION_BEG)
          break;
        else if (NOTE_INSN_BASIC_BLOCK_P (fn_begin))
          fn_begin_outside_block = false;
      }

#ifdef DONT_USE_BUILTIN_SETJMP
  if (dispatch_label)
    {
      /* The sequence contains a branch in the middle so we need to force
         the creation of a new basic block by means of BB_SUPERBLOCK.  */
      if (fn_begin_outside_block)
        {
          basic_block bb
            = split_edge (single_succ_edge (ENTRY_BLOCK_PTR_FOR_FN (cfun)));
          if (JUMP_P (BB_END (bb)))
            emit_insn_before (seq, BB_END (bb));
          else
            emit_insn_after (seq, BB_END (bb));
        }
      else
        emit_insn_after (seq, fn_begin);

      single_succ (ENTRY_BLOCK_PTR_FOR_FN (cfun))->flags |= BB_SUPERBLOCK;
      return;
    }
#endif

  if (fn_begin_outside_block)
    insert_insn_on_edge (seq, single_succ_edge (ENTRY_BLOCK_PTR_FOR_FN (cfun)));
  else
    emit_insn_after (seq, fn_begin);
}

/* Call back from expand_function_end to know where we should put
   the call to unwind_sjlj_unregister_libfunc if needed.  */

void
sjlj_emit_function_exit_after (rtx_insn *after)
{
  crtl->eh.sjlj_exit_after = after;
}

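/* Emit the call to unwind_sjlj_unregister_libfunc at the point recorded
   by sjlj_emit_function_exit_after.  */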
1268static void
502b8322 1269sjlj_emit_function_exit (void)
52a11cbf 1270{
f8b23302 1271 rtx_insn *seq, *insn;
4956d07c 1272
52a11cbf 1273 start_sequence ();
ce152ef8 1274
52a11cbf 1275 emit_library_call (unwind_sjlj_unregister_libfunc, LCT_NORMAL, VOIDmode,
db69559b 1276 XEXP (crtl->eh.sjlj_fc, 0), Pmode);
e6cfb550 1277
52a11cbf
RH
1278 seq = get_insns ();
1279 end_sequence ();
4956d07c 1280
52a11cbf
RH
1281 /* ??? Really this can be done in any block at loop level 0 that
1282 post-dominates all can_throw_internal instructions. This is
1283 the last possible moment. */
9a0d1e1b 1284
4c33221c
UW
1285 insn = crtl->eh.sjlj_exit_after;
1286 if (LABEL_P (insn))
1287 insn = NEXT_INSN (insn);
12c3874e 1288
4c33221c 1289 emit_insn_after (seq, insn);
9a0d1e1b
AM
1290}
1291
52a11cbf 1292static void
be7457df 1293sjlj_emit_dispatch_table (rtx_code_label *dispatch_label, int num_dispatch)
ce152ef8 1294{
095a2d76
RS
1295 scalar_int_mode unwind_word_mode = targetm.unwind_word_mode ();
1296 scalar_int_mode filter_mode = targetm.eh_return_filter_mode ();
1d65f45c 1297 eh_landing_pad lp;
e67d1102 1298 rtx mem, fc, exc_ptr_reg, filter_reg;
f8b23302 1299 rtx_insn *seq;
12c3874e 1300 basic_block bb;
1d65f45c 1301 eh_region r;
1d65f45c 1302 int i, disp_index;
6e1aa848 1303 vec<tree> dispatch_labels = vNULL;
52a11cbf 1304
69c32ec8 1305 fc = crtl->eh.sjlj_fc;
52a11cbf
RH
1306
1307 start_sequence ();
1308
1309 emit_label (dispatch_label);
3f2c5d1a 1310
52a11cbf
RH
1311#ifndef DONT_USE_BUILTIN_SETJMP
1312 expand_builtin_setjmp_receiver (dispatch_label);
52a11cbf 1313
1d65f45c
RH
1314 /* The caller of expand_builtin_setjmp_receiver is responsible for
1315 making sure that the label doesn't vanish. The only other caller
1316 is the expander for __builtin_setjmp_receiver, which places this
1317 label on the nonlocal_goto_label list. Since we're modeling these
1318 CFG edges more exactly, we can use the forced_labels list instead. */
1319 LABEL_PRESERVE_P (dispatch_label) = 1;
6f7eba34 1320 vec_safe_push<rtx_insn *> (forced_labels, dispatch_label);
1d65f45c 1321#endif
52a11cbf 1322
1d65f45c 1323 /* Load up exc_ptr and filter values from the function context. */
7b0518e3
UW
1324 mem = adjust_address (fc, unwind_word_mode, sjlj_fc_data_ofs);
1325 if (unwind_word_mode != ptr_mode)
52a11cbf
RH
1326 {
1327#ifdef POINTERS_EXTEND_UNSIGNED
f920765d 1328 mem = convert_memory_address (ptr_mode, mem);
52a11cbf 1329#else
f920765d 1330 mem = convert_to_mode (ptr_mode, mem, 0);
52a11cbf
RH
1331#endif
1332 }
1d65f45c 1333 exc_ptr_reg = force_reg (ptr_mode, mem);
52a11cbf 1334
7b0518e3
UW
1335 mem = adjust_address (fc, unwind_word_mode,
1336 sjlj_fc_data_ofs + GET_MODE_SIZE (unwind_word_mode));
1337 if (unwind_word_mode != filter_mode)
1338 mem = convert_to_mode (filter_mode, mem, 0);
1d65f45c 1339 filter_reg = force_reg (filter_mode, mem);
4956d07c 1340
52a11cbf 1341 /* Jump to one of the directly reachable regions. */
52a11cbf 1342
1d65f45c 1343 disp_index = 0;
e67d1102 1344 rtx_code_label *first_reachable_label = NULL;
1d65f45c
RH
1345
1346 /* If there's exactly one call site in the function, don't bother
1347 generating a switch statement. */
1d65f45c 1348 if (num_dispatch > 1)
9771b263 1349 dispatch_labels.create (num_dispatch);
1d65f45c 1350
9771b263 1351 for (i = 1; vec_safe_iterate (cfun->eh->lp_array, i, &lp); ++i)
1d65f45c
RH
1352 if (lp && lp->post_landing_pad)
1353 {
f8b23302 1354 rtx_insn *seq2;
1476d1bd 1355 rtx_code_label *label;
1d65f45c
RH
1356
1357 start_sequence ();
1358
1359 lp->landing_pad = dispatch_label;
1360
1361 if (num_dispatch > 1)
1362 {
3d528853 1363 tree t_label, case_elt, t;
1d65f45c
RH
1364
1365 t_label = create_artificial_label (UNKNOWN_LOCATION);
3d528853
NF
1366 t = build_int_cst (integer_type_node, disp_index);
1367 case_elt = build_case_label (t, NULL, t_label);
9771b263 1368 dispatch_labels.quick_push (case_elt);
1476d1bd 1369 label = jump_target_rtx (t_label);
1d65f45c
RH
1370 }
1371 else
1372 label = gen_label_rtx ();
1373
1374 if (disp_index == 0)
1375 first_reachable_label = label;
1376 emit_label (label);
1377
1378 r = lp->region;
1379 if (r->exc_ptr_reg)
1380 emit_move_insn (r->exc_ptr_reg, exc_ptr_reg);
1381 if (r->filter_reg)
1382 emit_move_insn (r->filter_reg, filter_reg);
1383
1384 seq2 = get_insns ();
1385 end_sequence ();
1386
e67d1102 1387 rtx_insn *before = label_rtx (lp->post_landing_pad);
1d65f45c 1388 bb = emit_to_new_bb_before (seq2, before);
357067f2 1389 make_single_succ_edge (bb, bb->next_bb, EDGE_FALLTHRU);
405af656
RG
1390 if (current_loops)
1391 {
99b1c316 1392 class loop *loop = bb->next_bb->loop_father;
405af656
RG
1393 /* If we created a pre-header block, add the new block to the
1394 outer loop, otherwise to the loop itself. */
1395 if (bb->next_bb == loop->header)
1396 add_bb_to_loop (bb, loop_outer (loop));
1397 else
1398 add_bb_to_loop (bb, loop);
1399 /* ??? For multiple dispatches we will end up with edges
1400 from the loop tree root into this loop, making it a
1401 multiple-entry loop. Discard all affected loops. */
1402 if (num_dispatch > 1)
1403 {
1404 for (loop = bb->loop_father;
1405 loop_outer (loop); loop = loop_outer (loop))
08c13199 1406 mark_loop_for_removal (loop);
405af656
RG
1407 }
1408 }
e6cfb550 1409
1d65f45c
RH
1410 disp_index++;
1411 }
1412 gcc_assert (disp_index == num_dispatch);
1413
1414 if (num_dispatch > 1)
1415 {
fd8d363e
SB
1416 rtx disp = adjust_address (fc, TYPE_MODE (integer_type_node),
1417 sjlj_fc_call_site_ofs);
9a1b6b7a 1418 expand_sjlj_dispatch_table (disp, dispatch_labels);
a1622f83 1419 }
9a0d1e1b 1420
52a11cbf
RH
1421 seq = get_insns ();
1422 end_sequence ();
4956d07c 1423
1d65f45c
RH
1424 bb = emit_to_new_bb_before (seq, first_reachable_label);
1425 if (num_dispatch == 1)
1426 {
357067f2 1427 make_single_succ_edge (bb, bb->next_bb, EDGE_FALLTHRU);
405af656
RG
1428 if (current_loops)
1429 {
99b1c316 1430 class loop *loop = bb->next_bb->loop_father;
405af656
RG
1431 /* If we created a pre-header block, add the new block to the
1432 outer loop, otherwise to the loop itself. */
1433 if (bb->next_bb == loop->header)
1434 add_bb_to_loop (bb, loop_outer (loop));
1435 else
1436 add_bb_to_loop (bb, loop);
1437 }
1438 }
1439 else
1440 {
1441 /* We are not wiring up edges here, but as the dispatcher call
1442 is at function begin simply associate the block with the
1443 outermost (non-)loop. */
1444 if (current_loops)
1445 add_bb_to_loop (bb, current_loops->tree_root);
1d65f45c 1446 }
ce152ef8
AM
1447}
1448
52a11cbf 1449static void
502b8322 1450sjlj_build_landing_pads (void)
ce152ef8 1451{
1d65f45c 1452 int num_dispatch;
ce152ef8 1453
9771b263 1454 num_dispatch = vec_safe_length (cfun->eh->lp_array);
1d65f45c
RH
1455 if (num_dispatch == 0)
1456 return;
cb3874dc 1457 sjlj_lp_call_site_index.safe_grow_cleared (num_dispatch, true);
ce152ef8 1458
1d65f45c
RH
1459 num_dispatch = sjlj_assign_call_site_values ();
1460 if (num_dispatch > 0)
52a11cbf 1461 {
f8b23302 1462 rtx_code_label *dispatch_label = gen_label_rtx ();
3a695389
UW
1463 int align = STACK_SLOT_ALIGNMENT (sjlj_fc_type_node,
1464 TYPE_MODE (sjlj_fc_type_node),
1465 TYPE_ALIGN (sjlj_fc_type_node));
69c32ec8 1466 crtl->eh.sjlj_fc
52a11cbf
RH
1467 = assign_stack_local (TYPE_MODE (sjlj_fc_type_node),
1468 int_size_in_bytes (sjlj_fc_type_node),
3a695389 1469 align);
4956d07c 1470
1d65f45c 1471 sjlj_mark_call_sites ();
52a11cbf 1472 sjlj_emit_function_enter (dispatch_label);
1d65f45c 1473 sjlj_emit_dispatch_table (dispatch_label, num_dispatch);
52a11cbf
RH
1474 sjlj_emit_function_exit ();
1475 }
a1622f83 1476
0cecee06
UW
1477 /* If we do not have any landing pads, we may still need to register a
1478 personality routine and (empty) LSDA to handle must-not-throw regions. */
1479 else if (function_needs_eh_personality (cfun) != eh_personality_none)
1480 {
1481 int align = STACK_SLOT_ALIGNMENT (sjlj_fc_type_node,
1482 TYPE_MODE (sjlj_fc_type_node),
1483 TYPE_ALIGN (sjlj_fc_type_node));
1484 crtl->eh.sjlj_fc
1485 = assign_stack_local (TYPE_MODE (sjlj_fc_type_node),
1486 int_size_in_bytes (sjlj_fc_type_node),
1487 align);
1488
1489 sjlj_mark_call_sites ();
f8b23302 1490 sjlj_emit_function_enter (NULL);
0cecee06
UW
1491 sjlj_emit_function_exit ();
1492 }
1493
9771b263 1494 sjlj_lp_call_site_index.release ();
4956d07c 1495}
ce152ef8 1496
d33606c3
EB
1497/* Update the sjlj function context. This function should be called
1498 whenever we allocate or deallocate dynamic stack space. */
1499
1500void
1501update_sjlj_context (void)
1502{
1503 if (!flag_exceptions)
1504 return;
1505
1506 emit_note (NOTE_INSN_UPDATE_SJLJ_CONTEXT);
1507}
1508
3cabd6d1
LB
1509/* After initial rtl generation, call back to finish generating
1510 exception support code. */
1511
dac1fbf8 1512void
502b8322 1513finish_eh_generation (void)
ce152ef8 1514{
12c3874e
JH
1515 basic_block bb;
1516
52a11cbf 1517 /* Construct the landing pads. */
677f3fa8 1518 if (targetm_common.except_unwind_info (&global_options) == UI_SJLJ)
52a11cbf
RH
1519 sjlj_build_landing_pads ();
1520 else
1521 dw2_build_landing_pads ();
1d65f45c 1522
56b97603 1523 break_superblocks ();
1d65f45c
RH
1524
1525 /* Redirect all EH edges from the post_landing_pad to the landing pad. */
11cd3bed 1526 FOR_EACH_BB_FN (bb, cfun)
12c3874e 1527 {
1d65f45c 1528 eh_landing_pad lp;
628f6a4e 1529 edge_iterator ei;
1d65f45c 1530 edge e;
496a4ef5 1531
1d65f45c 1532 lp = get_eh_landing_pad_from_rtx (BB_END (bb));
6a58eee9 1533
1d65f45c
RH
1534 FOR_EACH_EDGE (e, ei, bb->succs)
1535 if (e->flags & EDGE_EH)
1536 break;
87c476a2 1537
1d65f45c
RH
1538 /* We should not have generated any new throwing insns during this
1539 pass, and we should not have lost any EH edges, so we only need
1540 to handle two cases here:
1541 (1) reachable handler and an existing edge to post-landing-pad,
1542 (2) no reachable handler and no edge. */
1543 gcc_assert ((lp != NULL) == (e != NULL));
1544 if (lp != NULL)
87c476a2 1545 {
1d65f45c 1546 gcc_assert (BB_HEAD (e->dest) == label_rtx (lp->post_landing_pad));
f19c9228 1547
1d65f45c
RH
1548 redirect_edge_succ (e, BLOCK_FOR_INSN (lp->landing_pad));
1549 e->flags |= (CALL_P (BB_END (bb))
1550 ? EDGE_ABNORMAL | EDGE_ABNORMAL_CALL
1551 : EDGE_ABNORMAL);
52a11cbf
RH
1552 }
1553 }
56b97603
EB
1554
1555 if (targetm_common.except_unwind_info (&global_options) == UI_SJLJ
1556 /* Kludge for Alpha (see alpha_gp_save_rtx). */
1557 || single_succ_edge (ENTRY_BLOCK_PTR_FOR_FN (cfun))->insns.r)
1558 commit_edge_insertions ();
4956d07c 1559}
1d65f45c
RH
1560\f
1561/* This section handles removing dead code for flow. */
a8da523f
JH
1562
1563void
1d65f45c 1564remove_eh_landing_pad (eh_landing_pad lp)
a8da523f 1565{
1d65f45c 1566 eh_landing_pad *pp;
a8da523f 1567
1d65f45c
RH
1568 for (pp = &lp->region->landing_pads; *pp != lp; pp = &(*pp)->next_lp)
1569 continue;
1570 *pp = lp->next_lp;
b8698a0f 1571
1d65f45c
RH
1572 if (lp->post_landing_pad)
1573 EH_LANDING_PAD_NR (lp->post_landing_pad) = 0;
9771b263 1574 (*cfun->eh->lp_array)[lp->index] = NULL;
a8da523f
JH
1575}
1576
d273b176 1577/* Splice the EH region at PP from the region tree. */
496a4ef5 1578
d273b176
SB
1579static void
1580remove_eh_handler_splicer (eh_region *pp)
496a4ef5 1581{
d273b176 1582 eh_region region = *pp;
1d65f45c 1583 eh_landing_pad lp;
496a4ef5 1584
1d65f45c 1585 for (lp = region->landing_pads; lp ; lp = lp->next_lp)
6d07ad98 1586 {
1d65f45c
RH
1587 if (lp->post_landing_pad)
1588 EH_LANDING_PAD_NR (lp->post_landing_pad) = 0;
9771b263 1589 (*cfun->eh->lp_array)[lp->index] = NULL;
6d07ad98 1590 }
f698d217 1591
1d65f45c 1592 if (region->inner)
f698d217 1593 {
d273b176
SB
1594 eh_region p, outer;
1595 outer = region->outer;
1596
1d65f45c
RH
1597 *pp = p = region->inner;
1598 do
1599 {
1600 p->outer = outer;
1601 pp = &p->next_peer;
1602 p = *pp;
1603 }
1604 while (p);
f698d217 1605 }
1d65f45c 1606 *pp = region->next_peer;
87ff9c8e 1607
9771b263 1608 (*cfun->eh->region_array)[region->index] = NULL;
1d65f45c 1609}
87ff9c8e 1610
d273b176
SB
1611/* Splice a single EH region REGION from the region tree.
1612
1613 To unlink REGION, we need to find the pointer to it with a relatively
1614 expensive search in REGION's outer region. If you are going to
1615 remove a number of handlers, using remove_unreachable_eh_regions may
1616 be a better option. */
1617
1618void
1619remove_eh_handler (eh_region region)
1620{
1621 eh_region *pp, *pp_start, p, outer;
1622
1623 outer = region->outer;
1624 if (outer)
1625 pp_start = &outer->inner;
1626 else
1627 pp_start = &cfun->eh->region_tree;
1628 for (pp = pp_start, p = *pp; p != region; pp = &p->next_peer, p = *pp)
1629 continue;
1630
1631 remove_eh_handler_splicer (pp);
1632}
1633
1634/* Worker for remove_unreachable_eh_regions.
1635 PP is a pointer to the region to start a region tree depth-first
1636 search from. R_REACHABLE is the set of regions that have to be
1637 preserved. */
1638
1639static void
1640remove_unreachable_eh_regions_worker (eh_region *pp, sbitmap r_reachable)
1641{
1642 while (*pp)
1643 {
1644 eh_region region = *pp;
1645 remove_unreachable_eh_regions_worker (&region->inner, r_reachable);
1646 if (!bitmap_bit_p (r_reachable, region->index))
1647 remove_eh_handler_splicer (pp);
1648 else
1649 pp = &region->next_peer;
1650 }
1651}
1652
1653/* Splice all EH regions *not* marked in R_REACHABLE from the region tree.
1654 Do this by traversing the EH tree top-down and splicing out regions that
1655 are not marked. By removing regions from the leaves, we avoid costly
1656 searches in the region tree. */
1657
1658void
1659remove_unreachable_eh_regions (sbitmap r_reachable)
1660{
1661 remove_unreachable_eh_regions_worker (&cfun->eh->region_tree, r_reachable);
1662}
1663
1d65f45c
RH
1664/* Invokes CALLBACK for every exception handler landing pad label.
1665 Only used by reload hackery; should not be used by new code. */
87ff9c8e 1666
1d65f45c
RH
1667void
1668for_each_eh_label (void (*callback) (rtx))
87ff9c8e 1669{
1d65f45c
RH
1670 eh_landing_pad lp;
1671 int i;
52a11cbf 1672
9771b263 1673 for (i = 1; vec_safe_iterate (cfun->eh->lp_array, i, &lp); ++i)
f54a7f6f 1674 {
1d65f45c 1675 if (lp)
4537ec0c 1676 {
e67d1102 1677 rtx_code_label *lab = lp->landing_pad;
1d65f45c
RH
1678 if (lab && LABEL_P (lab))
1679 (*callback) (lab);
4537ec0c 1680 }
52a11cbf 1681 }
87ff9c8e 1682}
1d65f45c
RH
1683\f
1684/* Create the REG_EH_REGION note for INSN, given its ECF_FLAGS for a
b8698a0f 1685 call insn.
1d65f45c
RH
1686
1687 At the gimple level, we use LP_NR
1688 > 0 : The statement transfers to landing pad LP_NR
1689 = 0 : The statement is outside any EH region
1690 < 0 : The statement is within MUST_NOT_THROW region -LP_NR.
1691
1692 At the rtl level, we use LP_NR
1693 > 0 : The insn transfers to landing pad LP_NR
1694 = 0 : The insn cannot throw
1695 < 0 : The insn is within MUST_NOT_THROW region -LP_NR
1696 = INT_MIN : The insn cannot throw or execute a nonlocal-goto.
1697 missing note: The insn is outside any EH region.
1698
1699 ??? This difference probably ought to be avoided. We could stand
1700 to record nothrow for arbitrary gimple statements, and so avoid
1701 some moderately complex lookups in stmt_could_throw_p. Perhaps
1702 NOTHROW should be mapped on both sides to INT_MIN. Perhaps the
1703 no-nonlocal-goto property should be recorded elsewhere as a bit
1704 on the call_insn directly. Perhaps we should make more use of
1705 attaching the trees to call_insns (reachable via symbol_ref in
1706 direct call cases) and just pull the data out of the trees. */
87ff9c8e 1707
1d65f45c 1708void
84f16edb 1709make_reg_eh_region_note (rtx_insn *insn, int ecf_flags, int lp_nr)
87ff9c8e 1710{
1d65f45c
RH
1711 rtx value;
1712 if (ecf_flags & ECF_NOTHROW)
1713 value = const0_rtx;
1714 else if (lp_nr != 0)
1715 value = GEN_INT (lp_nr);
52a11cbf 1716 else
1d65f45c
RH
1717 return;
1718 add_reg_note (insn, REG_EH_REGION, value);
87ff9c8e
RH
1719}
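
   A minimal usage sketch of the RTL-level encoding described above (the insn
   "call" and the numbers are hypothetical, chosen only to show the cases that
   make_reg_eh_region_note distinguishes):

       /* "call" is an assumed rtx_insn * for a call instruction.  */
       make_reg_eh_region_note (call, ECF_NOTHROW, 0); /* note value 0: cannot throw             */
       make_reg_eh_region_note (call, 0, 3);           /* note value 3: goes to landing pad 3    */
       make_reg_eh_region_note (call, 0, -2);          /* note value -2: MUST_NOT_THROW region 2 */
       make_reg_eh_region_note (call, 0, 0);           /* no note: outside any EH region         */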
1720
1d65f45c
RH
1721/* Create a REG_EH_REGION note for a CALL_INSN that cannot throw
1722 nor perform a non-local goto. Replace the region note if it
1723 already exists. */
87ff9c8e 1724
1d65f45c 1725void
84f16edb 1726make_reg_eh_region_note_nothrow_nononlocal (rtx_insn *insn)
87ff9c8e 1727{
1d65f45c
RH
1728 rtx note = find_reg_note (insn, REG_EH_REGION, NULL_RTX);
1729 rtx intmin = GEN_INT (INT_MIN);
52a11cbf 1730
1d65f45c
RH
1731 if (note != 0)
1732 XEXP (note, 0) = intmin;
1733 else
1734 add_reg_note (insn, REG_EH_REGION, intmin);
1735}
21cd906e 1736
1d65f45c
RH
1737/* Return true if INSN could throw, assuming no REG_EH_REGION note
1738 to the contrary. */
21cd906e 1739
1d65f45c
RH
1740bool
1741insn_could_throw_p (const_rtx insn)
1742{
642d55de
EB
1743 if (!flag_exceptions)
1744 return false;
1d65f45c
RH
1745 if (CALL_P (insn))
1746 return true;
8f4f502f 1747 if (INSN_P (insn) && cfun->can_throw_non_call_exceptions)
1d65f45c
RH
1748 return may_trap_p (PATTERN (insn));
1749 return false;
1750}
98f464e0 1751
1d65f45c
RH
1752/* Copy a REG_EH_REGION note to each insn that might throw beginning
1753 at FIRST and ending at LAST. NOTE_OR_INSN is either the source insn
1754 to look for a note, or the note itself. */
98f464e0 1755
1d65f45c 1756void
dc01c3d1 1757copy_reg_eh_region_note_forward (rtx note_or_insn, rtx_insn *first, rtx last)
1d65f45c 1758{
dc01c3d1
DM
1759 rtx_insn *insn;
1760 rtx note = note_or_insn;
21cd906e 1761
1d65f45c
RH
1762 if (INSN_P (note_or_insn))
1763 {
1764 note = find_reg_note (note_or_insn, REG_EH_REGION, NULL_RTX);
1765 if (note == NULL)
1766 return;
21cd906e 1767 }
89cfdb7e
JJ
1768 else if (is_a <rtx_insn *> (note_or_insn))
1769 return;
1d65f45c
RH
1770 note = XEXP (note, 0);
1771
1772 for (insn = first; insn != last ; insn = NEXT_INSN (insn))
1773 if (!find_reg_note (insn, REG_EH_REGION, NULL_RTX)
1774 && insn_could_throw_p (insn))
1775 add_reg_note (insn, REG_EH_REGION, note);
fa51b01b 1776}
4956d07c 1777
1d65f45c 1778/* Likewise, but iterate backward. */
4956d07c 1779
6de9cd9a 1780void
dc01c3d1 1781copy_reg_eh_region_note_backward (rtx note_or_insn, rtx_insn *last, rtx first)
4956d07c 1782{
dc01c3d1
DM
1783 rtx_insn *insn;
1784 rtx note = note_or_insn;
fb13d4d0 1785
1d65f45c 1786 if (INSN_P (note_or_insn))
7f206d8f 1787 {
1d65f45c
RH
1788 note = find_reg_note (note_or_insn, REG_EH_REGION, NULL_RTX);
1789 if (note == NULL)
6de9cd9a 1790 return;
52a11cbf 1791 }
89cfdb7e
JJ
1792 else if (is_a <rtx_insn *> (note_or_insn))
1793 return;
1d65f45c 1794 note = XEXP (note, 0);
fac62ecf 1795
1d65f45c
RH
1796 for (insn = last; insn != first; insn = PREV_INSN (insn))
1797 if (insn_could_throw_p (insn))
1798 add_reg_note (insn, REG_EH_REGION, note);
6de9cd9a
DN
1799}
1800
6de9cd9a 1801
1d65f45c
RH
1802/* Extract all EH information from INSN. Return true if the insn
1803 was marked NOTHROW. */
6de9cd9a 1804
1d65f45c
RH
1805static bool
1806get_eh_region_and_lp_from_rtx (const_rtx insn, eh_region *pr,
1807 eh_landing_pad *plp)
6de9cd9a 1808{
1d65f45c
RH
1809 eh_landing_pad lp = NULL;
1810 eh_region r = NULL;
1811 bool ret = false;
1812 rtx note;
1813 int lp_nr;
6de9cd9a 1814
1d65f45c
RH
1815 if (! INSN_P (insn))
1816 goto egress;
1817
1818 if (NONJUMP_INSN_P (insn)
1819 && GET_CODE (PATTERN (insn)) == SEQUENCE)
1820 insn = XVECEXP (PATTERN (insn), 0, 0);
6de9cd9a 1821
1d65f45c
RH
1822 note = find_reg_note (insn, REG_EH_REGION, NULL_RTX);
1823 if (!note)
6de9cd9a 1824 {
1d65f45c
RH
1825 ret = !insn_could_throw_p (insn);
1826 goto egress;
6de9cd9a 1827 }
1d65f45c
RH
1828
1829 lp_nr = INTVAL (XEXP (note, 0));
1830 if (lp_nr == 0 || lp_nr == INT_MIN)
6de9cd9a 1831 {
1d65f45c
RH
1832 ret = true;
1833 goto egress;
6de9cd9a 1834 }
502b8322 1835
1d65f45c 1836 if (lp_nr < 0)
9771b263 1837 r = (*cfun->eh->region_array)[-lp_nr];
1d65f45c
RH
1838 else
1839 {
9771b263 1840 lp = (*cfun->eh->lp_array)[lp_nr];
1d65f45c
RH
1841 r = lp->region;
1842 }
6de9cd9a 1843
1d65f45c
RH
1844 egress:
1845 *plp = lp;
1846 *pr = r;
1847 return ret;
fb13d4d0
JM
1848}
1849
1d65f45c
RH
1850/* Return the landing pad to which INSN may go, or NULL if it does not
1851 have a reachable landing pad within this function. */
4956d07c 1852
1d65f45c
RH
1853eh_landing_pad
1854get_eh_landing_pad_from_rtx (const_rtx insn)
4956d07c 1855{
1d65f45c
RH
1856 eh_landing_pad lp;
1857 eh_region r;
6de9cd9a 1858
1d65f45c
RH
1859 get_eh_region_and_lp_from_rtx (insn, &r, &lp);
1860 return lp;
1861}
6de9cd9a 1862
1d65f45c
RH
1863/* Return the region to which INSN may go, or NULL if it does not
1864 have a reachable region within this function. */
6de9cd9a 1865
1d65f45c
RH
1866eh_region
1867get_eh_region_from_rtx (const_rtx insn)
1868{
1869 eh_landing_pad lp;
1870 eh_region r;
6de9cd9a 1871
1d65f45c
RH
1872 get_eh_region_and_lp_from_rtx (insn, &r, &lp);
1873 return r;
6de9cd9a
DN
1874}
1875
1d65f45c
RH
1876/* Return true if INSN throws and is caught by something in this function. */
1877
6de9cd9a 1878bool
ed7a4b4b 1879can_throw_internal (const_rtx insn)
6de9cd9a 1880{
1d65f45c
RH
1881 return get_eh_landing_pad_from_rtx (insn) != NULL;
1882}
1883
1884/* Return true if INSN throws and escapes from the current function. */
1885
1886bool
1887can_throw_external (const_rtx insn)
1888{
1889 eh_landing_pad lp;
1890 eh_region r;
1891 bool nothrow;
e6cfb550 1892
52a11cbf
RH
1893 if (! INSN_P (insn))
1894 return false;
12670d88 1895
4b4bf941 1896 if (NONJUMP_INSN_P (insn)
52a11cbf 1897 && GET_CODE (PATTERN (insn)) == SEQUENCE)
1d65f45c 1898 {
2a62e439
DM
1899 rtx_sequence *seq = as_a <rtx_sequence *> (PATTERN (insn));
1900 int i, n = seq->len ();
4956d07c 1901
1d65f45c 1902 for (i = 0; i < n; i++)
2a62e439 1903 if (can_throw_external (seq->element (i)))
1d65f45c 1904 return true;
6de9cd9a 1905
1d65f45c
RH
1906 return false;
1907 }
6de9cd9a 1908
1d65f45c 1909 nothrow = get_eh_region_and_lp_from_rtx (insn, &r, &lp);
6de9cd9a 1910
1d65f45c
RH
1911 /* If we can't throw, we obviously can't throw external. */
1912 if (nothrow)
1913 return false;
4956d07c 1914
1d65f45c
RH
1915 /* If we have an internal landing pad, then we're not external. */
1916 if (lp != NULL)
1917 return false;
4956d07c 1918
1d65f45c
RH
1919 /* If we're not within an EH region, then we are external. */
1920 if (r == NULL)
1921 return true;
4956d07c 1922
1d65f45c
RH
1923 /* The only thing that ought to be left is MUST_NOT_THROW regions,
1924 which don't always have landing pads. */
1925 gcc_assert (r->type == ERT_MUST_NOT_THROW);
1926 return false;
52a11cbf 1927}
4956d07c 1928
1d65f45c
RH
1929/* Return true if INSN cannot throw at all. */
1930
52a11cbf 1931bool
1d65f45c 1932insn_nothrow_p (const_rtx insn)
4956d07c 1933{
1d65f45c
RH
1934 eh_landing_pad lp;
1935 eh_region r;
4956d07c 1936
52a11cbf 1937 if (! INSN_P (insn))
1d65f45c 1938 return true;
b59fa6cf 1939
4b4bf941 1940 if (NONJUMP_INSN_P (insn)
52a11cbf 1941 && GET_CODE (PATTERN (insn)) == SEQUENCE)
efaadb93 1942 {
2a62e439
DM
1943 rtx_sequence *seq = as_a <rtx_sequence *> (PATTERN (insn));
1944 int i, n = seq->len ();
efaadb93
AN
1945
1946 for (i = 0; i < n; i++)
2a62e439 1947 if (!insn_nothrow_p (seq->element (i)))
1d65f45c 1948 return false;
efaadb93 1949
1d65f45c 1950 return true;
efaadb93 1951 }
52a11cbf 1952
1d65f45c
RH
1953 return get_eh_region_and_lp_from_rtx (insn, &r, &lp);
1954}
1955
1956/* Return true if INSN can perform a non-local goto. */
1957/* ??? This test is here in this file because it (ab)uses REG_EH_REGION. */
1958
1959bool
d8c1e9b6 1960can_nonlocal_goto (const rtx_insn *insn)
1d65f45c
RH
1961{
1962 if (nonlocal_goto_handler_labels && CALL_P (insn))
52a11cbf 1963 {
1d65f45c
RH
1964 rtx note = find_reg_note (insn, REG_EH_REGION, NULL_RTX);
1965 if (!note || INTVAL (XEXP (note, 0)) != INT_MIN)
1966 return true;
52a11cbf 1967 }
1d65f45c 1968 return false;
4956d07c 1969}
1d65f45c 1970\f
e3b5732b 1971/* Set TREE_NOTHROW and crtl->all_throwers_are_sibcalls. */
6814a8a0 1972
20cdc2be 1973static unsigned int
502b8322 1974set_nothrow_function_flags (void)
1ef1bf06 1975{
f8b23302 1976 rtx_insn *insn;
502b8322 1977
fe89fbc5 1978 crtl->nothrow = 1;
1ef1bf06 1979
e3b5732b 1980 /* Assume crtl->all_throwers_are_sibcalls until we encounter
b6128b8c
SH
1981 something that can throw an exception. We specifically exempt
1982 CALL_INSNs that are SIBLING_CALL_P, as these are really jumps,
1983 and can't throw. Most CALL_INSNs are not SIBLING_CALL_P, so this
1984 is optimistic. */
1ef1bf06 1985
e3b5732b 1986 crtl->all_throwers_are_sibcalls = 1;
b6128b8c 1987
fe89fbc5
JH
1988 /* If we don't know that this implementation of the function will
1989 actually be used, then we must not set TREE_NOTHROW, since
1990 callers must not assume that this function does not throw. */
1991 if (TREE_NOTHROW (current_function_decl))
1992 return 0;
1993
b6128b8c 1994 if (! flag_exceptions)
c2924966 1995 return 0;
502b8322 1996
1ef1bf06 1997 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
52a11cbf 1998 if (can_throw_external (insn))
b6128b8c 1999 {
fe89fbc5 2000 crtl->nothrow = 0;
b6128b8c 2001
4b4bf941 2002 if (!CALL_P (insn) || !SIBLING_CALL_P (insn))
b6128b8c 2003 {
e3b5732b 2004 crtl->all_throwers_are_sibcalls = 0;
c2924966 2005 return 0;
b6128b8c
SH
2006 }
2007 }
2008
fe89fbc5 2009 if (crtl->nothrow
d52f5295 2010 && (cgraph_node::get (current_function_decl)->get_availability ()
fe89fbc5 2011 >= AVAIL_AVAILABLE))
f7dd1864 2012 {
d52f5295 2013 struct cgraph_node *node = cgraph_node::get (current_function_decl);
2505c5ed
JH
2014 struct cgraph_edge *e;
2015 for (e = node->callers; e; e = e->next_caller)
2016 e->can_throw_external = false;
d52f5295 2017 node->set_nothrow_flag (true);
f7dd1864
AN
2018
2019 if (dump_file)
2020 fprintf (dump_file, "Marking function nothrow: %s\n\n",
2021 current_function_name ());
2022 }
c2924966 2023 return 0;
1ef1bf06 2024}
52a11cbf 2025
27a4cd48
DM
2026namespace {
2027
2028const pass_data pass_data_set_nothrow_function_flags =
2029{
2030 RTL_PASS, /* type */
2031 "nothrow", /* name */
2032 OPTGROUP_NONE, /* optinfo_flags */
27a4cd48
DM
2033 TV_NONE, /* tv_id */
2034 0, /* properties_required */
2035 0, /* properties_provided */
2036 0, /* properties_destroyed */
2037 0, /* todo_flags_start */
2038 0, /* todo_flags_finish */
ef330312
PB
2039};
2040
27a4cd48
DM
2041class pass_set_nothrow_function_flags : public rtl_opt_pass
2042{
2043public:
c3284718
RS
2044 pass_set_nothrow_function_flags (gcc::context *ctxt)
2045 : rtl_opt_pass (pass_data_set_nothrow_function_flags, ctxt)
27a4cd48
DM
2046 {}
2047
2048 /* opt_pass methods: */
be55bfe6
TS
2049 virtual unsigned int execute (function *)
2050 {
2051 return set_nothrow_function_flags ();
2052 }
27a4cd48
DM
2053
2054}; // class pass_set_nothrow_function_flags
2055
2056} // anon namespace
2057
2058rtl_opt_pass *
2059make_pass_set_nothrow_function_flags (gcc::context *ctxt)
2060{
2061 return new pass_set_nothrow_function_flags (ctxt);
2062}
2063
1d65f45c
RH
2064\f
2065/* Various hooks for unwind library. */
2066
2067/* Expand the EH support builtin functions:
2068 __builtin_eh_pointer and __builtin_eh_filter. */
2069
2070static eh_region
2071expand_builtin_eh_common (tree region_nr_t)
2072{
2073 HOST_WIDE_INT region_nr;
2074 eh_region region;
2075
9541ffee 2076 gcc_assert (tree_fits_shwi_p (region_nr_t));
9439e9a1 2077 region_nr = tree_to_shwi (region_nr_t);
1d65f45c 2078
9771b263 2079 region = (*cfun->eh->region_array)[region_nr];
1d65f45c
RH
2080
2081 /* ??? We shouldn't have been able to delete an eh region without
2082 deleting all the code that depended on it. */
2083 gcc_assert (region != NULL);
2084
2085 return region;
2086}
2087
2088/* Expand to the exc_ptr value from the given eh region. */
2089
2090rtx
2091expand_builtin_eh_pointer (tree exp)
2092{
2093 eh_region region
2094 = expand_builtin_eh_common (CALL_EXPR_ARG (exp, 0));
2095 if (region->exc_ptr_reg == NULL)
2096 region->exc_ptr_reg = gen_reg_rtx (ptr_mode);
2097 return region->exc_ptr_reg;
2098}
2099
2100/* Expand to the filter value from the given eh region. */
2101
2102rtx
2103expand_builtin_eh_filter (tree exp)
2104{
2105 eh_region region
2106 = expand_builtin_eh_common (CALL_EXPR_ARG (exp, 0));
2107 if (region->filter_reg == NULL)
2108 region->filter_reg = gen_reg_rtx (targetm.eh_return_filter_mode ());
2109 return region->filter_reg;
2110}
2111
2112/* Copy the exc_ptr and filter values from one landing pad's registers
2113 to another. This is used to inline the resx statement. */
2114
2115rtx
2116expand_builtin_eh_copy_values (tree exp)
2117{
2118 eh_region dst
2119 = expand_builtin_eh_common (CALL_EXPR_ARG (exp, 0));
2120 eh_region src
2121 = expand_builtin_eh_common (CALL_EXPR_ARG (exp, 1));
095a2d76 2122 scalar_int_mode fmode = targetm.eh_return_filter_mode ();
1d65f45c
RH
2123
2124 if (dst->exc_ptr_reg == NULL)
2125 dst->exc_ptr_reg = gen_reg_rtx (ptr_mode);
2126 if (src->exc_ptr_reg == NULL)
2127 src->exc_ptr_reg = gen_reg_rtx (ptr_mode);
2128
2129 if (dst->filter_reg == NULL)
2130 dst->filter_reg = gen_reg_rtx (fmode);
2131 if (src->filter_reg == NULL)
2132 src->filter_reg = gen_reg_rtx (fmode);
2133
2134 emit_move_insn (dst->exc_ptr_reg, src->exc_ptr_reg);
2135 emit_move_insn (dst->filter_reg, src->filter_reg);
2136
2137 return const0_rtx;
2138}
ca55abae
JM
2139
2140/* Do any necessary initialization to access arbitrary stack frames.
2141 On the SPARC, this means flushing the register windows. */
2142
2143void
502b8322 2144expand_builtin_unwind_init (void)
ca55abae
JM
2145{
2146 /* Set this so all the registers get saved in our frame; we need to be
30f7a378 2147 able to copy the saved values for any registers from frames we unwind. */
e3b5732b 2148 crtl->saves_all_registers = 1;
ca55abae 2149
ca55abae 2150 SETUP_FRAME_ADDRESSES ();
ca55abae
JM
2151}
2152
1d65f45c
RH
2153/* Map a non-negative number to an eh return data register number; expands
2154 to -1 if no return data register is associated with the input number.
2155 At least the inputs 0 and 1 must be mapped; the target may provide more. */
2156
52a11cbf 2157rtx
5039610b 2158expand_builtin_eh_return_data_regno (tree exp)
52a11cbf 2159{
5039610b 2160 tree which = CALL_EXPR_ARG (exp, 0);
52a11cbf
RH
2161 unsigned HOST_WIDE_INT iwhich;
2162
2163 if (TREE_CODE (which) != INTEGER_CST)
2164 {
971801ff 2165 error ("argument of %<__builtin_eh_return_regno%> must be constant");
52a11cbf
RH
2166 return constm1_rtx;
2167 }
2168
ae7e9ddd 2169 iwhich = tree_to_uhwi (which);
52a11cbf
RH
2170 iwhich = EH_RETURN_DATA_REGNO (iwhich);
2171 if (iwhich == INVALID_REGNUM)
2172 return constm1_rtx;
2173
2174#ifdef DWARF_FRAME_REGNUM
2175 iwhich = DWARF_FRAME_REGNUM (iwhich);
2176#else
2177 iwhich = DBX_REGISTER_NUMBER (iwhich);
2178#endif
2179
3f2c5d1a 2180 return GEN_INT (iwhich);
52a11cbf
RH
2181}
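
   A hedged usage sketch of the builtin expanded above: unwinder support code
   asks for the (DWARF-numbered) registers used to pass the exception data; the
   argument must be a compile-time constant, and a result of -1 means the target
   provides no such register.  Variable names are illustrative only.

       int reg0 = __builtin_eh_return_data_regno (0); /* first EH return data register */
       int reg1 = __builtin_eh_return_data_regno (1); /* second; -1 if not provided    */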
2182
ca55abae
JM
2183/* Given a value extracted from the return address register or stack slot,
2184 return the actual address encoded in that value. */
2185
2186rtx
502b8322 2187expand_builtin_extract_return_addr (tree addr_tree)
ca55abae 2188{
49452c07 2189 rtx addr = expand_expr (addr_tree, NULL_RTX, Pmode, EXPAND_NORMAL);
52a11cbf 2190
0ab38418
EC
2191 if (GET_MODE (addr) != Pmode
2192 && GET_MODE (addr) != VOIDmode)
2193 {
2194#ifdef POINTERS_EXTEND_UNSIGNED
2195 addr = convert_memory_address (Pmode, addr);
2196#else
2197 addr = convert_to_mode (Pmode, addr, 0);
2198#endif
2199 }
2200
52a11cbf 2201 /* First mask out any unwanted bits. */
cbc7d031
TS
2202 rtx mask = MASK_RETURN_ADDR;
2203 if (mask)
2204 expand_and (Pmode, addr, mask, addr);
52a11cbf
RH
2205
2206 /* Then adjust to find the real return address. */
a8a6b3df
TS
2207 if (RETURN_ADDR_OFFSET)
2208 addr = plus_constant (Pmode, addr, RETURN_ADDR_OFFSET);
52a11cbf
RH
2209
2210 return addr;
ca55abae
JM
2211}
2212
2213/* Given an actual address in addr_tree, do any necessary encoding
2214 and return the value to be stored in the return address register or
2215 stack slot so the epilogue will return to that address. */
2216
2217rtx
502b8322 2218expand_builtin_frob_return_addr (tree addr_tree)
ca55abae 2219{
49452c07 2220 rtx addr = expand_expr (addr_tree, NULL_RTX, ptr_mode, EXPAND_NORMAL);
52a11cbf 2221
5ae6cd0d 2222 addr = convert_memory_address (Pmode, addr);
be128cd9 2223
a8a6b3df
TS
2224 if (RETURN_ADDR_OFFSET)
2225 {
2226 addr = force_reg (Pmode, addr);
2227 addr = plus_constant (Pmode, addr, -RETURN_ADDR_OFFSET);
2228 }
52a11cbf 2229
ca55abae
JM
2230 return addr;
2231}
2232
52a11cbf
RH
2233/* Set up the epilogue with the magic bits we'll need to return to the
2234 exception handler. */
ca55abae 2235
52a11cbf 2236void
502b8322
AJ
2237expand_builtin_eh_return (tree stackadj_tree ATTRIBUTE_UNUSED,
2238 tree handler_tree)
ca55abae 2239{
34dc173c 2240 rtx tmp;
ca55abae 2241
34dc173c 2242#ifdef EH_RETURN_STACKADJ_RTX
69c32ec8 2243 tmp = expand_expr (stackadj_tree, crtl->eh.ehr_stackadj,
49452c07 2244 VOIDmode, EXPAND_NORMAL);
5ae6cd0d 2245 tmp = convert_memory_address (Pmode, tmp);
69c32ec8 2246 if (!crtl->eh.ehr_stackadj)
dcd7a7df 2247 crtl->eh.ehr_stackadj = copy_addr_to_reg (tmp);
69c32ec8
JH
2248 else if (tmp != crtl->eh.ehr_stackadj)
2249 emit_move_insn (crtl->eh.ehr_stackadj, tmp);
be128cd9
RK
2250#endif
2251
69c32ec8 2252 tmp = expand_expr (handler_tree, crtl->eh.ehr_handler,
49452c07 2253 VOIDmode, EXPAND_NORMAL);
5ae6cd0d 2254 tmp = convert_memory_address (Pmode, tmp);
69c32ec8 2255 if (!crtl->eh.ehr_handler)
dcd7a7df 2256 crtl->eh.ehr_handler = copy_addr_to_reg (tmp);
69c32ec8
JH
2257 else if (tmp != crtl->eh.ehr_handler)
2258 emit_move_insn (crtl->eh.ehr_handler, tmp);
ca55abae 2259
69c32ec8
JH
2260 if (!crtl->eh.ehr_label)
2261 crtl->eh.ehr_label = gen_label_rtx ();
2262 emit_jump (crtl->eh.ehr_label);
a1622f83
AM
2263}
2264
1d65f45c
RH
2265/* Expand __builtin_eh_return. This exit path from the function loads up
2266 the eh return data registers, adjusts the stack, and branches to a
2267 given PC other than the normal return address. */
2268
71038426 2269void
502b8322 2270expand_eh_return (void)
ca55abae 2271{
f8b23302 2272 rtx_code_label *around_label;
ca55abae 2273
69c32ec8 2274 if (! crtl->eh.ehr_label)
71038426 2275 return;
ca55abae 2276
e3b5732b 2277 crtl->calls_eh_return = 1;
ca55abae 2278
34dc173c
UW
2279#ifdef EH_RETURN_STACKADJ_RTX
2280 emit_move_insn (EH_RETURN_STACKADJ_RTX, const0_rtx);
2281#endif
2282
52a11cbf 2283 around_label = gen_label_rtx ();
52a11cbf 2284 emit_jump (around_label);
ca55abae 2285
69c32ec8 2286 emit_label (crtl->eh.ehr_label);
52a11cbf 2287 clobber_return_register ();
ca55abae 2288
34dc173c 2289#ifdef EH_RETURN_STACKADJ_RTX
69c32ec8 2290 emit_move_insn (EH_RETURN_STACKADJ_RTX, crtl->eh.ehr_stackadj);
34dc173c
UW
2291#endif
2292
3b0b0013
RS
2293 if (targetm.have_eh_return ())
2294 emit_insn (targetm.gen_eh_return (crtl->eh.ehr_handler));
52a11cbf 2295 else
52a11cbf 2296 {
dd4fb609
TS
2297 if (rtx handler = EH_RETURN_HANDLER_RTX)
2298 emit_move_insn (handler, crtl->eh.ehr_handler);
2299 else
a3f9f006 2300 error ("%<__builtin_eh_return%> not supported on this target");
52a11cbf 2301 }
71038426 2302
52a11cbf 2303 emit_label (around_label);
71038426 2304}
c76362b4
JW
2305
2306/* Convert a ptr_mode address ADDR_TREE to a Pmode address controlled by
2307 POINTERS_EXTEND_UNSIGNED and return it. */
2308
2309rtx
2310expand_builtin_extend_pointer (tree addr_tree)
2311{
49452c07 2312 rtx addr = expand_expr (addr_tree, NULL_RTX, ptr_mode, EXPAND_NORMAL);
c76362b4
JW
2313 int extend;
2314
2315#ifdef POINTERS_EXTEND_UNSIGNED
2316 extend = POINTERS_EXTEND_UNSIGNED;
2317#else
2318 /* The previous EH code did an unsigned extend by default, so we do this also
2319 for consistency. */
2320 extend = 1;
2321#endif
2322
7b0518e3 2323 return convert_modes (targetm.unwind_word_mode (), ptr_mode, addr, extend);
c76362b4 2324}
77d33a84 2325\f
52a11cbf 2326static int
c203e8a7 2327add_action_record (action_hash_type *ar_hash, int filter, int next)
77d33a84 2328{
d858f359 2329 struct action_record **slot, *new_ar, tmp;
52a11cbf
RH
2330
2331 tmp.filter = filter;
2332 tmp.next = next;
c203e8a7 2333 slot = ar_hash->find_slot (&tmp, INSERT);
77d33a84 2334
d858f359 2335 if ((new_ar = *slot) == NULL)
77d33a84 2336 {
d858f359 2337 new_ar = XNEW (struct action_record);
9771b263 2338 new_ar->offset = crtl->eh.action_record_data->length () + 1;
d858f359
KG
2339 new_ar->filter = filter;
2340 new_ar->next = next;
2341 *slot = new_ar;
52a11cbf
RH
2342
2343 /* The filter value goes in untouched. The link to the next
2344 record is a "self-relative" byte offset, or zero to indicate
2345 that there is no next record. So convert the absolute 1 based
eaec9b3d 2346 indices we've been carrying around into a displacement. */
52a11cbf 2347
69c32ec8 2348 push_sleb128 (&crtl->eh.action_record_data, filter);
52a11cbf 2349 if (next)
9771b263 2350 next -= crtl->eh.action_record_data->length () + 1;
69c32ec8 2351 push_sleb128 (&crtl->eh.action_record_data, next);
77d33a84 2352 }
77d33a84 2353
d858f359 2354 return new_ar->offset;
52a11cbf 2355}
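
   A worked example of the self-relative chaining above, with illustrative
   values rather than data from a real compilation:

       /* add_action_record (&ar_hash, 2, 0): offset 1, pushes sleb128(2), sleb128(0).
          add_action_record (&ar_hash, 1, 1): offset 3; after its filter byte the
          vector holds 3 bytes, so the stored link is 1 - (3 + 1) = -3, a
          self-relative displacement pointing back to the first record.  */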
77d33a84 2356
52a11cbf 2357static int
c203e8a7 2358collect_one_action_chain (action_hash_type *ar_hash, eh_region region)
77d33a84 2359{
52a11cbf 2360 int next;
77d33a84 2361
52a11cbf
RH
2362 /* If we've reached the top of the region chain, then we have
2363 no actions, and require no landing pad. */
2364 if (region == NULL)
2365 return -1;
2366
2367 switch (region->type)
77d33a84 2368 {
52a11cbf 2369 case ERT_CLEANUP:
1d65f45c
RH
2370 {
2371 eh_region r;
2372 /* A cleanup adds a zero filter to the beginning of the chain, but
2373 there are special cases to look out for. If there are *only*
2374 cleanups along a path, then it compresses to a zero action.
2375 Further, if there are multiple cleanups along a path, we only
2376 need to represent one of them, as that is enough to trigger
2377 entry to the landing pad at runtime. */
2378 next = collect_one_action_chain (ar_hash, region->outer);
2379 if (next <= 0)
2380 return 0;
2381 for (r = region->outer; r ; r = r->outer)
2382 if (r->type == ERT_CLEANUP)
2383 return next;
2384 return add_action_record (ar_hash, 0, next);
2385 }
52a11cbf
RH
2386
2387 case ERT_TRY:
1d65f45c
RH
2388 {
2389 eh_catch c;
2390
2391 /* Process the associated catch regions in reverse order.
2392 If there's a catch-all handler, then we don't need to
2393 search outer regions. Use a magic -3 value to record
2394 that we haven't done the outer search. */
2395 next = -3;
2396 for (c = region->u.eh_try.last_catch; c ; c = c->prev_catch)
2397 {
2398 if (c->type_list == NULL)
2399 {
2400 /* Retrieve the filter from the head of the filter list
2401 where we have stored it (see assign_filter_values). */
2402 int filter = TREE_INT_CST_LOW (TREE_VALUE (c->filter_list));
2403 next = add_action_record (ar_hash, filter, 0);
2404 }
2405 else
2406 {
2407 /* Once the outer search is done, trigger an action record for
2408 each filter we have. */
2409 tree flt_node;
6d41a92f 2410
1d65f45c
RH
2411 if (next == -3)
2412 {
2413 next = collect_one_action_chain (ar_hash, region->outer);
2414
2415 /* If there is no next action, terminate the chain. */
2416 if (next == -1)
2417 next = 0;
2418 /* If all outer actions are cleanups or must_not_throw,
2419 we'll have no action record for it, since we had wanted
2420 to encode these states in the call-site record directly.
2421 Add a cleanup action to the chain to catch these. */
2422 else if (next <= 0)
2423 next = add_action_record (ar_hash, 0, 0);
2424 }
3f2c5d1a 2425
1d65f45c
RH
2426 flt_node = c->filter_list;
2427 for (; flt_node; flt_node = TREE_CHAIN (flt_node))
2428 {
2429 int filter = TREE_INT_CST_LOW (TREE_VALUE (flt_node));
2430 next = add_action_record (ar_hash, filter, next);
2431 }
2432 }
2433 }
2434 return next;
2435 }
52a11cbf
RH
2436
2437 case ERT_ALLOWED_EXCEPTIONS:
2438 /* An exception specification adds its filter to the
2439 beginning of the chain. */
2440 next = collect_one_action_chain (ar_hash, region->outer);
0977ab3a
RH
2441
2442 /* If there is no next action, terminate the chain. */
2443 if (next == -1)
2444 next = 0;
2445 /* If all outer actions are cleanups or must_not_throw,
2446 we'll have no action record for it, since we had wanted
2447 to encode these states in the call-site record directly.
2448 Add a cleanup action to the chain to catch these. */
2449 else if (next <= 0)
2450 next = add_action_record (ar_hash, 0, 0);
083cad55 2451
0977ab3a 2452 return add_action_record (ar_hash, region->u.allowed.filter, next);
52a11cbf
RH
2453
2454 case ERT_MUST_NOT_THROW:
2455 /* A must-not-throw region with no inner handlers or cleanups
2456 requires no call-site entry. Note that this differs from
2457 the no handler or cleanup case in that we do require an lsda
2458 to be generated. Return a magic -2 value to record this. */
2459 return -2;
77d33a84 2460 }
1d65f45c
RH
2461
2462 gcc_unreachable ();
77d33a84
AM
2463}
2464
52a11cbf 2465static int
17f6e37d 2466add_call_site (rtx landing_pad, int action, int section)
77d33a84 2467{
69c32ec8 2468 call_site_record record;
1d65f45c 2469
766090c2 2470 record = ggc_alloc<call_site_record_d> ();
69c32ec8
JH
2471 record->landing_pad = landing_pad;
2472 record->action = action;
77d33a84 2473
9771b263 2474 vec_safe_push (crtl->eh.call_site_record_v[section], record);
77d33a84 2475
9771b263 2476 return call_site_base + crtl->eh.call_site_record_v[section]->length () - 1;
77d33a84
AM
2477}
2478
66e8df53 2479static rtx_note *
b32d5189 2480emit_note_eh_region_end (rtx_insn *insn)
a4a51a52 2481{
a4a51a52
UB
2482 return emit_note_after (NOTE_INSN_EH_REGION_END, insn);
2483}
2484
3fa410c0
JH
2485/* Add a NOP after NOTE_INSN_SWITCH_TEXT_SECTIONS when the cold section starts
2486 with a landing pad.
2487 With the landing pad at offset 0 from the start label of the section,
2488 we would miss EH delivery because 0 is special and means no landing pad. */
2489
2490static bool
2491maybe_add_nop_after_section_switch (void)
2492{
2493 if (!crtl->uses_eh_lsda
2494 || !crtl->eh.call_site_record_v[1])
2495 return false;
2496 int n = vec_safe_length (crtl->eh.call_site_record_v[1]);
2497 hash_set<rtx_insn *> visited;
2498
2499 for (int i = 0; i < n; ++i)
2500 {
2501 struct call_site_record_d *cs
2502 = (*crtl->eh.call_site_record_v[1])[i];
2503 if (cs->landing_pad)
2504 {
2505 rtx_insn *insn = as_a <rtx_insn *> (cs->landing_pad);
2506 while (true)
2507 {
2508 /* Landing pads have LABEL_PRESERVE_P flag set. This check makes
2509 sure that we do not walk past a landing pad visited earlier,
2510 which would result in possible quadratic behaviour. */
2511 if (LABEL_P (insn) && LABEL_PRESERVE_P (insn)
2512 && visited.add (insn))
2513 break;
2514
2515 /* Conservatively assume that ASM insns may be empty. We have
2516 no way to tell what they contain. */
2517 if (active_insn_p (insn)
2518 && GET_CODE (PATTERN (insn)) != ASM_INPUT
2519 && GET_CODE (PATTERN (insn)) != ASM_OPERANDS)
2520 break;
2521
2522 /* If we reached the start of hot section, then NOP will be
2523 needed. */
2524 if (GET_CODE (insn) == NOTE
2525 && NOTE_KIND (insn) == NOTE_INSN_SWITCH_TEXT_SECTIONS)
2526 {
2527 emit_insn_after (gen_nop (), insn);
2528 break;
2529 }
2530
2531 /* We visit only labels from the cold section. We should never hit
2532 the beginning of the insn stream here. */
2533 insn = PREV_INSN (insn);
2534 }
2535 }
2536 }
2537 return false;
2538}
2539
52a11cbf
RH
2540/* Turn REG_EH_REGION notes back into NOTE_INSN_EH_REGION notes.
2541 The new note numbers will not refer to region numbers, but
2542 instead to call site entries. */
77d33a84 2543
1d65f45c 2544static unsigned int
502b8322 2545convert_to_eh_region_ranges (void)
77d33a84 2546{
f8b23302
DM
2547 rtx insn;
2548 rtx_insn *iter;
66e8df53 2549 rtx_note *note;
c203e8a7 2550 action_hash_type ar_hash (31);
52a11cbf 2551 int last_action = -3;
f8b23302 2552 rtx_insn *last_action_insn = NULL;
52a11cbf 2553 rtx last_landing_pad = NULL_RTX;
f8b23302 2554 rtx_insn *first_no_action_insn = NULL;
ae0ed63a 2555 int call_site = 0;
17f6e37d 2556 int cur_sec = 0;
e67d1102 2557 rtx_insn *section_switch_note = NULL;
f8b23302
DM
2558 rtx_insn *first_no_action_insn_before_switch = NULL;
2559 rtx_insn *last_no_action_insn_before_switch = NULL;
17f6e37d 2560 int saved_call_site_base = call_site_base;
77d33a84 2561
9771b263 2562 vec_alloc (crtl->eh.action_record_data, 64);
77d33a84 2563
52a11cbf
RH
2564 for (iter = get_insns (); iter ; iter = NEXT_INSN (iter))
2565 if (INSN_P (iter))
2566 {
1d65f45c
RH
2567 eh_landing_pad lp;
2568 eh_region region;
2569 bool nothrow;
52a11cbf 2570 int this_action;
e67d1102 2571 rtx_code_label *this_landing_pad;
77d33a84 2572
52a11cbf 2573 insn = iter;
4b4bf941 2574 if (NONJUMP_INSN_P (insn)
52a11cbf
RH
2575 && GET_CODE (PATTERN (insn)) == SEQUENCE)
2576 insn = XVECEXP (PATTERN (insn), 0, 0);
1ef1bf06 2577
1d65f45c
RH
2578 nothrow = get_eh_region_and_lp_from_rtx (insn, &region, &lp);
2579 if (nothrow)
2580 continue;
2581 if (region)
c203e8a7 2582 this_action = collect_one_action_chain (&ar_hash, region);
52a11cbf 2583 else
1d65f45c 2584 this_action = -1;
52a11cbf
RH
2585
2586 /* Existence of catch handlers, or must-not-throw regions
2587 implies that an lsda is needed (even if empty). */
2588 if (this_action != -1)
e3b5732b 2589 crtl->uses_eh_lsda = 1;
52a11cbf
RH
2590
2591 /* Delay creation of region notes for no-action regions
2592 until we're sure that an lsda will be required. */
2593 else if (last_action == -3)
2594 {
2595 first_no_action_insn = iter;
2596 last_action = -1;
2597 }
1ef1bf06 2598
52a11cbf 2599 if (this_action >= 0)
1d65f45c 2600 this_landing_pad = lp->landing_pad;
52a11cbf 2601 else
e67d1102 2602 this_landing_pad = NULL;
1ef1bf06 2603
52a11cbf
RH
2604 /* Differing actions or landing pads implies a change in call-site
2605 info, which implies some EH_REGION note should be emitted. */
2606 if (last_action != this_action
2607 || last_landing_pad != this_landing_pad)
2608 {
aaa52a96
JJ
2609 /* If there is a queued no-action region in the other section
2610 with hot/cold partitioning, emit it now. */
2611 if (first_no_action_insn_before_switch)
2612 {
2613 gcc_assert (this_action != -1
2614 && last_action == (first_no_action_insn
2615 ? -1 : -3));
2616 call_site = add_call_site (NULL_RTX, 0, 0);
2617 note = emit_note_before (NOTE_INSN_EH_REGION_BEG,
2618 first_no_action_insn_before_switch);
2619 NOTE_EH_HANDLER (note) = call_site;
a4a51a52
UB
2620 note
2621 = emit_note_eh_region_end (last_no_action_insn_before_switch);
aaa52a96
JJ
2622 NOTE_EH_HANDLER (note) = call_site;
2623 gcc_assert (last_action != -3
2624 || (last_action_insn
2625 == last_no_action_insn_before_switch));
f8b23302
DM
2626 first_no_action_insn_before_switch = NULL;
2627 last_no_action_insn_before_switch = NULL;
aaa52a96
JJ
2628 call_site_base++;
2629 }
52a11cbf
RH
2630 /* If we'd not seen a previous action (-3) or the previous
2631 action was must-not-throw (-2), then we do not need an
2632 end note. */
2633 if (last_action >= -1)
2634 {
2635 /* If we delayed the creation of the begin, do it now. */
2636 if (first_no_action_insn)
2637 {
17f6e37d 2638 call_site = add_call_site (NULL_RTX, 0, cur_sec);
52a11cbf
RH
2639 note = emit_note_before (NOTE_INSN_EH_REGION_BEG,
2640 first_no_action_insn);
2641 NOTE_EH_HANDLER (note) = call_site;
f8b23302 2642 first_no_action_insn = NULL;
52a11cbf
RH
2643 }
2644
a4a51a52 2645 note = emit_note_eh_region_end (last_action_insn);
52a11cbf
RH
2646 NOTE_EH_HANDLER (note) = call_site;
2647 }
2648
2649 /* If the new action is must-not-throw, then no region notes
2650 are created. */
2651 if (this_action >= -1)
2652 {
3f2c5d1a 2653 call_site = add_call_site (this_landing_pad,
17f6e37d
JJ
2654 this_action < 0 ? 0 : this_action,
2655 cur_sec);
52a11cbf
RH
2656 note = emit_note_before (NOTE_INSN_EH_REGION_BEG, iter);
2657 NOTE_EH_HANDLER (note) = call_site;
2658 }
2659
2660 last_action = this_action;
2661 last_landing_pad = this_landing_pad;
2662 }
2663 last_action_insn = iter;
2664 }
17f6e37d
JJ
2665 else if (NOTE_P (iter)
2666 && NOTE_KIND (iter) == NOTE_INSN_SWITCH_TEXT_SECTIONS)
2667 {
2668 gcc_assert (section_switch_note == NULL_RTX);
2669 gcc_assert (flag_reorder_blocks_and_partition);
2670 section_switch_note = iter;
2671 if (first_no_action_insn)
2672 {
2673 first_no_action_insn_before_switch = first_no_action_insn;
2674 last_no_action_insn_before_switch = last_action_insn;
f8b23302 2675 first_no_action_insn = NULL;
17f6e37d
JJ
2676 gcc_assert (last_action == -1);
2677 last_action = -3;
2678 }
2679 /* Force closing of current EH region before section switch and
2680 opening a new one afterwards. */
2681 else if (last_action != -3)
2682 last_landing_pad = pc_rtx;
9771b263
DN
2683 if (crtl->eh.call_site_record_v[cur_sec])
2684 call_site_base += crtl->eh.call_site_record_v[cur_sec]->length ();
17f6e37d 2685 cur_sec++;
0823efed 2686 gcc_assert (crtl->eh.call_site_record_v[cur_sec] == NULL);
9771b263 2687 vec_alloc (crtl->eh.call_site_record_v[cur_sec], 10);
17f6e37d 2688 }
1ef1bf06 2689
52a11cbf 2690 if (last_action >= -1 && ! first_no_action_insn)
1ef1bf06 2691 {
a4a51a52 2692 note = emit_note_eh_region_end (last_action_insn);
52a11cbf 2693 NOTE_EH_HANDLER (note) = call_site;
1ef1bf06
AM
2694 }
2695
17f6e37d
JJ
2696 call_site_base = saved_call_site_base;
2697
c2924966 2698 return 0;
52a11cbf 2699}
1ef1bf06 2700
27a4cd48
DM
2701namespace {
2702
2703const pass_data pass_data_convert_to_eh_region_ranges =
2704{
2705 RTL_PASS, /* type */
2706 "eh_ranges", /* name */
2707 OPTGROUP_NONE, /* optinfo_flags */
27a4cd48
DM
2708 TV_NONE, /* tv_id */
2709 0, /* properties_required */
2710 0, /* properties_provided */
2711 0, /* properties_destroyed */
2712 0, /* todo_flags_start */
2713 0, /* todo_flags_finish */
ef330312 2714};
27a4cd48
DM
2715
2716class pass_convert_to_eh_region_ranges : public rtl_opt_pass
2717{
2718public:
c3284718
RS
2719 pass_convert_to_eh_region_ranges (gcc::context *ctxt)
2720 : rtl_opt_pass (pass_data_convert_to_eh_region_ranges, ctxt)
27a4cd48
DM
2721 {}
2722
2723 /* opt_pass methods: */
1a3d085c 2724 virtual bool gate (function *);
be55bfe6
TS
2725 virtual unsigned int execute (function *)
2726 {
205b6026 2727 int ret = convert_to_eh_region_ranges ();
3fa410c0 2728 maybe_add_nop_after_section_switch ();
205b6026 2729 return ret;
be55bfe6 2730 }
27a4cd48
DM
2731
2732}; // class pass_convert_to_eh_region_ranges
2733
1a3d085c
TS
2734bool
2735pass_convert_to_eh_region_ranges::gate (function *)
2736{
2737 /* Nothing to do for SJLJ exceptions or if no regions created. */
2738 if (cfun->eh->region_tree == NULL)
2739 return false;
2740 if (targetm_common.except_unwind_info (&global_options) == UI_SJLJ)
2741 return false;
2742 return true;
2743}
2744
27a4cd48
DM
2745} // anon namespace
2746
2747rtl_opt_pass *
2748make_pass_convert_to_eh_region_ranges (gcc::context *ctxt)
2749{
2750 return new pass_convert_to_eh_region_ranges (ctxt);
2751}
52a11cbf
RH
2752\f
2753static void
9771b263 2754push_uleb128 (vec<uchar, va_gc> **data_area, unsigned int value)
52a11cbf
RH
2755{
2756 do
2757 {
2758 unsigned char byte = value & 0x7f;
2759 value >>= 7;
2760 if (value)
2761 byte |= 0x80;
9771b263 2762 vec_safe_push (*data_area, byte);
52a11cbf
RH
2763 }
2764 while (value);
2765}
1ef1bf06 2766
52a11cbf 2767static void
9771b263 2768push_sleb128 (vec<uchar, va_gc> **data_area, int value)
52a11cbf
RH
2769{
2770 unsigned char byte;
2771 int more;
1ef1bf06 2772
52a11cbf 2773 do
1ef1bf06 2774 {
52a11cbf
RH
2775 byte = value & 0x7f;
2776 value >>= 7;
2777 more = ! ((value == 0 && (byte & 0x40) == 0)
2778 || (value == -1 && (byte & 0x40) != 0));
2779 if (more)
2780 byte |= 0x80;
9771b263 2781 vec_safe_push (*data_area, byte);
1ef1bf06 2782 }
52a11cbf
RH
2783 while (more);
2784}
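
   As a sanity check on the two encoders above, a hedged sketch of the bytes they
   append (the vector name is an assumption; the values follow from the loops
   above and match the usual DWARF LEB128 examples):

       vec<uchar, va_gc> *bytes = NULL;   /* hypothetical scratch vector */
       push_uleb128 (&bytes, 624485);     /* appends 0xe5, 0x8e, 0x26 */
       push_sleb128 (&bytes, -2);         /* appends the single byte 0x7e */
       push_sleb128 (&bytes, 64);         /* appends 0xc0, 0x00 (sign bit forces a second byte) */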
1ef1bf06 2785
52a11cbf 2786\f
52a11cbf 2787static int
17f6e37d 2788dw2_size_of_call_site_table (int section)
1ef1bf06 2789{
9771b263 2790 int n = vec_safe_length (crtl->eh.call_site_record_v[section]);
52a11cbf
RH
2791 int size = n * (4 + 4 + 4);
2792 int i;
1ef1bf06 2793
52a11cbf
RH
2794 for (i = 0; i < n; ++i)
2795 {
7e5487a2 2796 struct call_site_record_d *cs =
9771b263 2797 (*crtl->eh.call_site_record_v[section])[i];
52a11cbf
RH
2798 size += size_of_uleb128 (cs->action);
2799 }
fac62ecf 2800
52a11cbf
RH
2801 return size;
2802}
2803
2804static int
502b8322 2805sjlj_size_of_call_site_table (void)
52a11cbf 2806{
9771b263 2807 int n = vec_safe_length (crtl->eh.call_site_record_v[0]);
52a11cbf
RH
2808 int size = 0;
2809 int i;
77d33a84 2810
52a11cbf 2811 for (i = 0; i < n; ++i)
1ef1bf06 2812 {
7e5487a2 2813 struct call_site_record_d *cs =
9771b263 2814 (*crtl->eh.call_site_record_v[0])[i];
52a11cbf
RH
2815 size += size_of_uleb128 (INTVAL (cs->landing_pad));
2816 size += size_of_uleb128 (cs->action);
1ef1bf06 2817 }
52a11cbf
RH
2818
2819 return size;
2820}
52a11cbf
RH
2821
2822static void
17f6e37d 2823dw2_output_call_site_table (int cs_format, int section)
52a11cbf 2824{
9771b263 2825 int n = vec_safe_length (crtl->eh.call_site_record_v[section]);
52a11cbf 2826 int i;
17f6e37d
JJ
2827 const char *begin;
2828
2829 if (section == 0)
2830 begin = current_function_func_begin_label;
2831 else if (first_function_block_is_cold)
2832 begin = crtl->subsections.hot_section_label;
2833 else
2834 begin = crtl->subsections.cold_section_label;
52a11cbf
RH
2835
2836 for (i = 0; i < n; ++i)
1ef1bf06 2837 {
9771b263 2838 struct call_site_record_d *cs = (*crtl->eh.call_site_record_v[section])[i];
52a11cbf
RH
2839 char reg_start_lab[32];
2840 char reg_end_lab[32];
2841 char landing_pad_lab[32];
2842
2843 ASM_GENERATE_INTERNAL_LABEL (reg_start_lab, "LEHB", call_site_base + i);
2844 ASM_GENERATE_INTERNAL_LABEL (reg_end_lab, "LEHE", call_site_base + i);
2845
2846 if (cs->landing_pad)
2847 ASM_GENERATE_INTERNAL_LABEL (landing_pad_lab, "L",
2848 CODE_LABEL_NUMBER (cs->landing_pad));
2849
2850 /* ??? Perhaps use insn length scaling if the assembler supports
2851 generic arithmetic. */
2852 /* ??? Perhaps use attr_length to choose data1 or data2 instead of
2853 data4 if the function is small enough. */
17f6e37d
JJ
2854 if (cs_format == DW_EH_PE_uleb128)
2855 {
2856 dw2_asm_output_delta_uleb128 (reg_start_lab, begin,
2857 "region %d start", i);
2858 dw2_asm_output_delta_uleb128 (reg_end_lab, reg_start_lab,
2859 "length");
2860 if (cs->landing_pad)
2861 dw2_asm_output_delta_uleb128 (landing_pad_lab, begin,
2862 "landing pad");
2863 else
2864 dw2_asm_output_data_uleb128 (0, "landing pad");
2865 }
52a11cbf 2866 else
17f6e37d
JJ
2867 {
2868 dw2_asm_output_delta (4, reg_start_lab, begin,
2869 "region %d start", i);
2870 dw2_asm_output_delta (4, reg_end_lab, reg_start_lab, "length");
2871 if (cs->landing_pad)
2872 dw2_asm_output_delta (4, landing_pad_lab, begin,
2873 "landing pad");
2874 else
2875 dw2_asm_output_data (4, 0, "landing pad");
2876 }
52a11cbf 2877 dw2_asm_output_data_uleb128 (cs->action, "action");
1ef1bf06
AM
2878 }
2879
52a11cbf
RH
2880 call_site_base += n;
2881}
2882
2883static void
502b8322 2884sjlj_output_call_site_table (void)
52a11cbf 2885{
9771b263 2886 int n = vec_safe_length (crtl->eh.call_site_record_v[0]);
52a11cbf 2887 int i;
1ef1bf06 2888
52a11cbf 2889 for (i = 0; i < n; ++i)
1ef1bf06 2890 {
9771b263 2891 struct call_site_record_d *cs = (*crtl->eh.call_site_record_v[0])[i];
4da896b2 2892
52a11cbf
RH
2893 dw2_asm_output_data_uleb128 (INTVAL (cs->landing_pad),
2894 "region %d landing pad", i);
2895 dw2_asm_output_data_uleb128 (cs->action, "action");
2896 }
4da896b2 2897
52a11cbf 2898 call_site_base += n;
1ef1bf06
AM
2899}
2900
0f67af1c 2901/* Switch to the section that should be used for exception tables. */
96d0f4dc 2902
0f67af1c 2903static void
22ba88ef 2904switch_to_exception_section (const char * ARG_UNUSED (fnname))
96d0f4dc 2905{
55fc9e87
EB
2906 section *s;
2907
2908 if (exception_section)
2909 s = exception_section;
2910 else
96d0f4dc 2911 {
04218b35
AD
2912 int flags;
2913
2914 if (EH_TABLES_CAN_BE_READ_ONLY)
2915 {
2916 int tt_format =
2917 ASM_PREFERRED_EH_DATA_FORMAT (/*code=*/0, /*global=*/1);
2918 flags = ((! flag_pic
2919 || ((tt_format & 0x70) != DW_EH_PE_absptr
2920 && (tt_format & 0x70) != DW_EH_PE_aligned))
2921 ? 0 : SECTION_WRITE);
2922 }
2923 else
2924 flags = SECTION_WRITE;
2925
55fc9e87
EB
2926 /* Compute the section and cache it into exception_section,
2927 unless it depends on the function name. */
677f3fa8 2928 if (targetm_common.have_named_sections)
1a35e62d 2929 {
22ba88ef 2930#ifdef HAVE_LD_EH_GC_SECTIONS
3e6011cf 2931 if (flag_function_sections
cf288ed3 2932 || (DECL_COMDAT_GROUP (current_function_decl) && HAVE_COMDAT_GROUP))
22ba88ef 2933 {
1b4572a8 2934 char *section_name = XNEWVEC (char, strlen (fnname) + 32);
3e6011cf
PB
2935 /* The EH table must match the code section, so only mark
2936 it linkonce if we have COMDAT groups to tie them together. */
cf288ed3 2937 if (DECL_COMDAT_GROUP (current_function_decl) && HAVE_COMDAT_GROUP)
3e6011cf 2938 flags |= SECTION_LINKONCE;
22ba88ef 2939 sprintf (section_name, ".gcc_except_table.%s", fnname);
3e6011cf 2940 s = get_section (section_name, flags, current_function_decl);
22ba88ef
EB
2941 free (section_name);
2942 }
2943 else
2944#endif
55fc9e87
EB
2945 exception_section
2946 = s = get_section (".gcc_except_table", flags, NULL);
1a35e62d
MM
2947 }
2948 else
55fc9e87 2949 exception_section
04218b35 2950 = s = flags == SECTION_WRITE ? data_section : readonly_data_section;
96d0f4dc 2951 }
55fc9e87
EB
2952
2953 switch_to_section (s);
96d0f4dc
JJ
2954}
2955
617a1b71 2956/* Output a reference from an exception table to the type_info object TYPE.
6fc0bb99 2957 TT_FORMAT and TT_FORMAT_SIZE describe the DWARF encoding method used for
617a1b71
PB
2958 the value. */
2959
2960static void
2961output_ttype (tree type, int tt_format, int tt_format_size)
2962{
2963 rtx value;
d858f359 2964 bool is_public = true;
617a1b71
PB
2965
2966 if (type == NULL_TREE)
2967 value = const0_rtx;
2968 else
2969 {
d7f09764
DN
2970 /* FIXME lto. pass_ipa_free_lang_data changes all types to
2971 runtime types so TYPE should already be a runtime type
2972 reference. When pass_ipa_free_lang_data is made a default
2973 pass, we can then remove the call to lookup_type_for_runtime
2974 below. */
2975 if (TYPE_P (type))
2976 type = lookup_type_for_runtime (type);
2977
cda5bf39 2978 value = expand_expr (type, NULL_RTX, VOIDmode, EXPAND_INITIALIZER);
617a1b71
PB
2979
2980 /* Let cgraph know that the rtti decl is used. Not all of the
2981 paths below go through assemble_integer, which would take
2982 care of this for us. */
2983 STRIP_NOPS (type);
2984 if (TREE_CODE (type) == ADDR_EXPR)
2985 {
2986 type = TREE_OPERAND (type, 0);
8813a647 2987 if (VAR_P (type))
66058468 2988 is_public = TREE_PUBLIC (type);
617a1b71 2989 }
dd2c9f74
VR
2990 else
2991 gcc_assert (TREE_CODE (type) == INTEGER_CST);
617a1b71
PB
2992 }
2993
2994 /* Allow the target to override the type table entry format. */
2995 if (targetm.asm_out.ttype (value))
2996 return;
2997
2998 if (tt_format == DW_EH_PE_absptr || tt_format == DW_EH_PE_aligned)
2999 assemble_integer (value, tt_format_size,
3000 tt_format_size * BITS_PER_UNIT, 1);
3001 else
d858f359 3002 dw2_asm_output_encoded_addr_rtx (tt_format, value, is_public, NULL);
617a1b71
PB
3003}
3004
b78b513e
EB
3005/* Output an exception table for the current function according to SECTION.
3006
3007 If the function has been partitioned into hot and cold parts, value 0 for
3008 SECTION refers to the table associated with the hot part while value 1
3009 refers to the table associated with the cold part. If the function has
3010 not been partitioned, value 0 refers to the single exception table. */
3011
17f6e37d 3012static void
a68b5e52 3013output_one_function_exception_table (int section)
52a11cbf 3014{
1d65f45c 3015 int tt_format, cs_format, lp_format, i;
52a11cbf
RH
3016 char ttype_label[32];
3017 char cs_after_size_label[32];
3018 char cs_end_label[32];
52a11cbf 3019 int call_site_len;
52a11cbf 3020 int have_tt_data;
ae0ed63a 3021 int tt_format_size = 0;
1ef1bf06 3022
9771b263 3023 have_tt_data = (vec_safe_length (cfun->eh->ttype_data)
1d65f45c 3024 || (targetm.arm_eabi_unwinder
9771b263
DN
3025 ? vec_safe_length (cfun->eh->ehspec_data.arm_eabi)
3026 : vec_safe_length (cfun->eh->ehspec_data.other)));
52a11cbf 3027
b627d6fe
RH
3028 /* Indicate the format of the @TType entries. */
3029 if (! have_tt_data)
3030 tt_format = DW_EH_PE_omit;
3031 else
3032 {
3033 tt_format = ASM_PREFERRED_EH_DATA_FORMAT (/*code=*/0, /*global=*/1);
61214be1
TS
3034 if (HAVE_AS_LEB128)
3035 ASM_GENERATE_INTERNAL_LABEL (ttype_label,
3036 section ? "LLSDATTC" : "LLSDATT",
3037 current_function_funcdef_no);
3038
b627d6fe
RH
3039 tt_format_size = size_of_encoded_value (tt_format);
3040
7a900ebc 3041 assemble_align (tt_format_size * BITS_PER_UNIT);
b627d6fe 3042 }
52a11cbf 3043
17f6e37d
JJ
3044 targetm.asm_out.internal_label (asm_out_file, section ? "LLSDAC" : "LLSDA",
3045 current_function_funcdef_no);
52a11cbf
RH
3046
3047 /* The LSDA header. */
3048
3049 /* Indicate the format of the landing pad start pointer. An omitted
3050 field implies @LPStart == @Start. */
3051 /* Currently we always put @LPStart == @Start. This field would
3052 be most useful in moving the landing pads completely out of
3053 line to another section, but it could also be used to minimize
3054 the size of uleb128 landing pad offsets. */
2a1ee410
RH
3055 lp_format = DW_EH_PE_omit;
3056 dw2_asm_output_data (1, lp_format, "@LPStart format (%s)",
3057 eh_data_format_name (lp_format));
52a11cbf
RH
3058
3059 /* @LPStart pointer would go here. */
3060
2a1ee410
RH
3061 dw2_asm_output_data (1, tt_format, "@TType format (%s)",
3062 eh_data_format_name (tt_format));
52a11cbf 3063
61214be1
TS
3064 if (!HAVE_AS_LEB128)
3065 {
3066 if (targetm_common.except_unwind_info (&global_options) == UI_SJLJ)
3067 call_site_len = sjlj_size_of_call_site_table ();
3068 else
3069 call_site_len = dw2_size_of_call_site_table (section);
3070 }
52a11cbf
RH
3071
3072 /* A pc-relative 4-byte displacement to the @TType data. */
3073 if (have_tt_data)
3074 {
61214be1 3075 if (HAVE_AS_LEB128)
1ef1bf06 3076 {
61214be1
TS
3077 char ttype_after_disp_label[32];
3078 ASM_GENERATE_INTERNAL_LABEL (ttype_after_disp_label,
3079 section ? "LLSDATTDC" : "LLSDATTD",
3080 current_function_funcdef_no);
3081 dw2_asm_output_delta_uleb128 (ttype_label, ttype_after_disp_label,
3082 "@TType base offset");
3083 ASM_OUTPUT_LABEL (asm_out_file, ttype_after_disp_label);
1ef1bf06 3084 }
61214be1
TS
3085 else
3086 {
3087 /* Ug. Alignment queers things. */
3088 unsigned int before_disp, after_disp, last_disp, disp;
3089
3090 before_disp = 1 + 1;
3091 after_disp = (1 + size_of_uleb128 (call_site_len)
3092 + call_site_len
3093 + vec_safe_length (crtl->eh.action_record_data)
3094 + (vec_safe_length (cfun->eh->ttype_data)
3095 * tt_format_size));
3096
3097 disp = after_disp;
3098 do
3099 {
3100 unsigned int disp_size, pad;
52a11cbf 3101
61214be1
TS
3102 last_disp = disp;
3103 disp_size = size_of_uleb128 (disp);
3104 pad = before_disp + disp_size + after_disp;
3105 if (pad % tt_format_size)
3106 pad = tt_format_size - (pad % tt_format_size);
3107 else
3108 pad = 0;
3109 disp = after_disp + pad;
3110 }
3111 while (disp != last_disp);
3112
3113 dw2_asm_output_data_uleb128 (disp, "@TType base offset");
3114 }
3115 }
1ef1bf06 3116
52a11cbf 3117 /* Indicate the format of the call-site offsets. */
61214be1
TS
3118 if (HAVE_AS_LEB128)
3119 cs_format = DW_EH_PE_uleb128;
3120 else
3121 cs_format = DW_EH_PE_udata4;
3122
2a1ee410
RH
3123 dw2_asm_output_data (1, cs_format, "call-site format (%s)",
3124 eh_data_format_name (cs_format));
52a11cbf 3125
61214be1
TS
3126 if (HAVE_AS_LEB128)
3127 {
3128 ASM_GENERATE_INTERNAL_LABEL (cs_after_size_label,
3129 section ? "LLSDACSBC" : "LLSDACSB",
3130 current_function_funcdef_no);
3131 ASM_GENERATE_INTERNAL_LABEL (cs_end_label,
3132 section ? "LLSDACSEC" : "LLSDACSE",
3133 current_function_funcdef_no);
3134 dw2_asm_output_delta_uleb128 (cs_end_label, cs_after_size_label,
3135 "Call-site table length");
3136 ASM_OUTPUT_LABEL (asm_out_file, cs_after_size_label);
3137 if (targetm_common.except_unwind_info (&global_options) == UI_SJLJ)
3138 sjlj_output_call_site_table ();
3139 else
3140 dw2_output_call_site_table (cs_format, section);
3141 ASM_OUTPUT_LABEL (asm_out_file, cs_end_label);
3142 }
52a11cbf 3143 else
61214be1
TS
3144 {
3145 dw2_asm_output_data_uleb128 (call_site_len, "Call-site table length");
3146 if (targetm_common.except_unwind_info (&global_options) == UI_SJLJ)
3147 sjlj_output_call_site_table ();
3148 else
3149 dw2_output_call_site_table (cs_format, section);
3150 }
52a11cbf
RH
3151
3152 /* ??? Decode and interpret the data for flag_debug_asm. */
1d65f45c
RH
3153 {
3154 uchar uc;
9771b263 3155 FOR_EACH_VEC_ELT (*crtl->eh.action_record_data, i, uc)
1d65f45c
RH
3156 dw2_asm_output_data (1, uc, i ? NULL : "Action record table");
3157 }
1ef1bf06 3158
52a11cbf 3159 if (have_tt_data)
7a900ebc 3160 assemble_align (tt_format_size * BITS_PER_UNIT);
1ef1bf06 3161
9771b263 3162 i = vec_safe_length (cfun->eh->ttype_data);
52a11cbf 3163 while (i-- > 0)
1ef1bf06 3164 {
9771b263 3165 tree type = (*cfun->eh->ttype_data)[i];
617a1b71 3166 output_ttype (type, tt_format, tt_format_size);
1ef1bf06 3167 }
52a11cbf 3168
61214be1
TS
3169 if (HAVE_AS_LEB128 && have_tt_data)
3170 ASM_OUTPUT_LABEL (asm_out_file, ttype_label);
52a11cbf
RH
3171
3172 /* ??? Decode and interpret the data for flag_debug_asm. */
1d65f45c 3173 if (targetm.arm_eabi_unwinder)
617a1b71 3174 {
1d65f45c
RH
3175 tree type;
3176 for (i = 0;
9771b263 3177 vec_safe_iterate (cfun->eh->ehspec_data.arm_eabi, i, &type); ++i)
1d65f45c
RH
3178 output_ttype (type, tt_format, tt_format_size);
3179 }
3180 else
3181 {
3182 uchar uc;
3183 for (i = 0;
9771b263 3184 vec_safe_iterate (cfun->eh->ehspec_data.other, i, &uc); ++i)
1d65f45c
RH
3185 dw2_asm_output_data (1, uc,
3186 i ? NULL : "Exception specification table");
617a1b71 3187 }
17f6e37d
JJ
3188}
3189
b78b513e
EB
3190/* Output an exception table for the current function according to SECTION,
3191 switching back and forth from the function section appropriately.
3192
3193 If the function has been partitioned into hot and cold parts, value 0 for
3194 SECTION refers to the table associated with the hot part while value 1
3195 refers to the table associated with the cold part. If the function has
3196 not been partitioned, value 0 refers to the single exception table. */
3197
17f6e37d 3198void
b78b513e 3199output_function_exception_table (int section)
17f6e37d 3200{
b78b513e 3201 const char *fnname = get_fnname_from_decl (current_function_decl);
f9417da1
RG
3202 rtx personality = get_personality_function (current_function_decl);
3203
17f6e37d 3204 /* Not all functions need anything. */
d989dba8
TV
3205 if (!crtl->uses_eh_lsda
3206 || targetm_common.except_unwind_info (&global_options) == UI_NONE)
b78b513e
EB
3207 return;
3208
3209 /* No need to emit any boilerplate stuff for the cold part. */
3210 if (section == 1 && !crtl->eh.call_site_record_v[1])
17f6e37d
JJ
3211 return;
3212
f9417da1 3213 if (personality)
a68b5e52
RH
3214 {
3215 assemble_external_libcall (personality);
3216
3217 if (targetm.asm_out.emit_except_personality)
3218 targetm.asm_out.emit_except_personality (personality);
3219 }
3220
3221 switch_to_exception_section (fnname);
3222
3223 /* If the target wants a label to begin the table, emit it here. */
3224 targetm.asm_out.emit_except_table_label (asm_out_file);
17f6e37d 3225
b78b513e
EB
3226 /* Do the real work. */
3227 output_one_function_exception_table (section);
52a11cbf 3228
d6b5193b 3229 switch_to_section (current_function_section ());
1ef1bf06 3230}
e2500fed 3231
b4660e5a 3232void
355fe088 3233set_eh_throw_stmt_table (function *fun, hash_map<gimple *, int> *table)
b4660e5a
JH
3234{
3235 fun->eh->throw_stmt_table = table;
3236}
3237
355fe088 3238hash_map<gimple *, int> *
b4660e5a
JH
3239get_eh_throw_stmt_table (struct function *fun)
3240{
3241 return fun->eh->throw_stmt_table;
3242}
1d65f45c
RH
3243\f
3244/* Determine if the function needs an EH personality function. */
f9417da1
RG
3245
3246enum eh_personality_kind
3247function_needs_eh_personality (struct function *fn)
3248{
f9417da1 3249 enum eh_personality_kind kind = eh_personality_none;
1d65f45c 3250 eh_region i;
f9417da1 3251
1d65f45c 3252 FOR_ALL_EH_REGION_FN (i, fn)
f9417da1
RG
3253 {
3254 switch (i->type)
3255 {
f9417da1
RG
3256 case ERT_CLEANUP:
3257 /* Can do with any personality including the generic C one. */
3258 kind = eh_personality_any;
3259 break;
3260
1d65f45c 3261 case ERT_TRY:
f9417da1
RG
3262 case ERT_ALLOWED_EXCEPTIONS:
3263	  /* Always needs an EH personality function.  The generic C
3264 personality doesn't handle these even for empty type lists. */
3265 return eh_personality_lang;
3266
1d65f45c
RH
3267 case ERT_MUST_NOT_THROW:
3268	  /* Always needs an EH personality function.  The language may specify
3269	     which abort routine must be used, e.g. std::terminate.  */
f9417da1
RG
3270 return eh_personality_lang;
3271 }
f9417da1
RG
3272 }
3273
3274 return kind;
3275}
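The classification above amounts to a small lattice: cleanup regions alone can be served by any personality, including the generic C one, while try, allowed-exceptions, and must-not-throw regions force the language-specific personality. A standalone sketch of that decision, using hypothetical types rather than GCC's eh_region, is:

/* Standalone sketch, not GCC code: the language personality wins as
   soon as any non-cleanup region appears.  */
#include <stdio.h>

enum region_kind { CLEANUP, TRY, ALLOWED_EXCEPTIONS, MUST_NOT_THROW };
enum personality_kind { PERS_NONE, PERS_ANY, PERS_LANG };

static enum personality_kind
classify (const enum region_kind *regions, int n)
{
  enum personality_kind kind = PERS_NONE;
  for (int i = 0; i < n; i++)
    switch (regions[i])
      {
      case CLEANUP:
	kind = PERS_ANY;	/* generic C personality suffices */
	break;
      default:
	return PERS_LANG;	/* needs the language's personality */
      }
  return kind;
}

int
main (void)
{
  enum region_kind r[] = { CLEANUP, TRY };
  printf ("%d\n", (int) classify (r, 2));  /* prints 2 == PERS_LANG */
  return 0;
}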
1d65f45c 3276\f
cc7220fd 3277/* Dump EH information to OUT. */
13a9fa44 3278
083cad55 3279void
13a9fa44 3280dump_eh_tree (FILE * out, struct function *fun)
cc7220fd 3281{
1d65f45c 3282 eh_region i;
cc7220fd 3283 int depth = 0;
1d65f45c
RH
3284 static const char *const type_name[] = {
3285 "cleanup", "try", "allowed_exceptions", "must_not_throw"
3286 };
cc7220fd
JH
3287
3288 i = fun->eh->region_tree;
13a9fa44 3289 if (!i)
cc7220fd
JH
3290 return;
3291
3292 fprintf (out, "Eh tree:\n");
3293 while (1)
3294 {
3295 fprintf (out, " %*s %i %s", depth * 2, "",
1d65f45c
RH
3296 i->index, type_name[(int) i->type]);
3297
3298 if (i->landing_pads)
0c0efb33 3299 {
1d65f45c
RH
3300 eh_landing_pad lp;
3301
3302 fprintf (out, " land:");
3303 if (current_ir_type () == IR_GIMPLE)
3304 {
3305 for (lp = i->landing_pads; lp ; lp = lp->next_lp)
3306 {
3307 fprintf (out, "{%i,", lp->index);
ef6cb4c7 3308 print_generic_expr (out, lp->post_landing_pad);
1d65f45c
RH
3309 fputc ('}', out);
3310 if (lp->next_lp)
3311 fputc (',', out);
3312 }
3313 }
3314 else
f645e9a2 3315 {
d7fde127 3316 for (lp = i->landing_pads; lp ; lp = lp->next_lp)
1d65f45c
RH
3317 {
3318 fprintf (out, "{%i,", lp->index);
3319 if (lp->landing_pad)
3320 fprintf (out, "%i%s,", INSN_UID (lp->landing_pad),
3321 NOTE_P (lp->landing_pad) ? "(del)" : "");
3322 else
3323 fprintf (out, "(nil),");
3324 if (lp->post_landing_pad)
3325 {
e67d1102 3326 rtx_insn *lab = label_rtx (lp->post_landing_pad);
1d65f45c
RH
3327 fprintf (out, "%i%s}", INSN_UID (lab),
3328 NOTE_P (lab) ? "(del)" : "");
3329 }
3330 else
3331 fprintf (out, "(nil)}");
3332 if (lp->next_lp)
3333 fputc (',', out);
3334 }
f645e9a2 3335 }
0c0efb33 3336 }
1d65f45c 3337
13a9fa44
JH
3338 switch (i->type)
3339 {
3340 case ERT_CLEANUP:
1d65f45c 3341 case ERT_MUST_NOT_THROW:
13a9fa44
JH
3342 break;
3343
3344 case ERT_TRY:
3345 {
1d65f45c
RH
3346 eh_catch c;
3347 fprintf (out, " catch:");
3348 for (c = i->u.eh_try.first_catch; c; c = c->next_catch)
3349 {
3350 fputc ('{', out);
3351 if (c->label)
3352 {
3353 fprintf (out, "lab:");
ef6cb4c7 3354 print_generic_expr (out, c->label);
1d65f45c
RH
3355 fputc (';', out);
3356 }
ef6cb4c7 3357 print_generic_expr (out, c->type_list);
1d65f45c
RH
3358 fputc ('}', out);
3359 if (c->next_catch)
3360 fputc (',', out);
3361 }
13a9fa44
JH
3362 }
3363 break;
3364
13a9fa44 3365 case ERT_ALLOWED_EXCEPTIONS:
0c0efb33 3366 fprintf (out, " filter :%i types:", i->u.allowed.filter);
ef6cb4c7 3367 print_generic_expr (out, i->u.allowed.type_list);
13a9fa44 3368 break;
13a9fa44 3369 }
1d65f45c
RH
3370 fputc ('\n', out);
3371
cc7220fd
JH
3372 /* If there are sub-regions, process them. */
3373 if (i->inner)
3374 i = i->inner, depth++;
3375 /* If there are peers, process them. */
3376 else if (i->next_peer)
3377 i = i->next_peer;
3378 /* Otherwise, step back up the tree to the next peer. */
3379 else
3380 {
13a9fa44
JH
3381 do
3382 {
3383 i = i->outer;
3384 depth--;
3385 if (i == NULL)
3386 return;
3387 }
3388 while (i->next_peer == NULL);
cc7220fd
JH
3389 i = i->next_peer;
3390 }
3391 }
3392}
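dump_eh_tree walks the region tree without recursion, descending through inner, moving across next_peer, and climbing back up through outer while tracking the depth for indentation. A standalone sketch of that traversal pattern, with illustrative names rather than GCC's, is:

/* Standalone sketch of the iterative child/sibling/parent walk used by
   the dumper above; not GCC code.  */
#include <stdio.h>

struct node { int index; struct node *inner, *next_peer, *outer; };

static void
walk (struct node *i)
{
  int depth = 0;
  while (1)
    {
      printf ("%*s%d\n", depth * 2, "", i->index);
      if (i->inner)
	i = i->inner, depth++;		/* descend to first child */
      else if (i->next_peer)
	i = i->next_peer;		/* move to next sibling */
      else
	{
	  do				/* climb until a peer exists */
	    {
	      i = i->outer;
	      depth--;
	      if (i == NULL)
		return;
	    }
	  while (i->next_peer == NULL);
	  i = i->next_peer;
	}
    }
}

int
main (void)
{
  struct node root = { 1, NULL, NULL, NULL };
  struct node a = { 2, NULL, NULL, &root };
  struct node b = { 3, NULL, NULL, &root };
  root.inner = &a;
  a.next_peer = &b;
  walk (&root);		/* prints 1, then 2 and 3 indented one level */
  return 0;
}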
3393
9994a182
DN
3394/* Dump the EH tree for FN on stderr. */
3395
24e47c76 3396DEBUG_FUNCTION void
9994a182
DN
3397debug_eh_tree (struct function *fn)
3398{
3399 dump_eh_tree (stderr, fn);
3400}
3401
0c0efb33
JH
3402/* Verify invariants on EH data structures.  */
3403
24e47c76 3404DEBUG_FUNCTION void
cc7220fd
JH
3405verify_eh_tree (struct function *fun)
3406{
1d65f45c
RH
3407 eh_region r, outer;
3408 int nvisited_lp, nvisited_r;
3409 int count_lp, count_r, depth, i;
3410 eh_landing_pad lp;
cc7220fd 3411 bool err = false;
cc7220fd 3412
98f358e5 3413 if (!fun->eh->region_tree)
cc7220fd 3414 return;
1d65f45c
RH
3415
3416 count_r = 0;
9771b263 3417 for (i = 1; vec_safe_iterate (fun->eh->region_array, i, &r); ++i)
1d65f45c 3418 if (r)
cc7220fd 3419 {
1d65f45c
RH
3420 if (r->index == i)
3421 count_r++;
3422 else
cc7220fd 3423 {
a9c697b8 3424 error ("%<region_array%> is corrupted for region %i", r->index);
cc7220fd
JH
3425 err = true;
3426 }
3427 }
3428
1d65f45c 3429 count_lp = 0;
9771b263 3430 for (i = 1; vec_safe_iterate (fun->eh->lp_array, i, &lp); ++i)
1d65f45c
RH
3431 if (lp)
3432 {
3433 if (lp->index == i)
3434 count_lp++;
3435 else
3436 {
a9c697b8 3437 error ("%<lp_array%> is corrupted for lp %i", lp->index);
1d65f45c
RH
3438 err = true;
3439 }
3440 }
3441
3442 depth = nvisited_lp = nvisited_r = 0;
3443 outer = NULL;
3444 r = fun->eh->region_tree;
cc7220fd
JH
3445 while (1)
3446 {
9771b263 3447 if ((*fun->eh->region_array)[r->index] != r)
cc7220fd 3448 {
a9c697b8 3449 error ("%<region_array%> is corrupted for region %i", r->index);
cc7220fd
JH
3450 err = true;
3451 }
1d65f45c 3452 if (r->outer != outer)
cc7220fd 3453 {
1d65f45c 3454 error ("outer block of region %i is wrong", r->index);
cc7220fd
JH
3455 err = true;
3456 }
1d65f45c 3457 if (depth < 0)
cc7220fd 3458 {
1d65f45c 3459 error ("negative nesting depth of region %i", r->index);
cc7220fd
JH
3460 err = true;
3461 }
1d65f45c
RH
3462 nvisited_r++;
3463
3464 for (lp = r->landing_pads; lp ; lp = lp->next_lp)
cc7220fd 3465 {
9771b263 3466 if ((*fun->eh->lp_array)[lp->index] != lp)
1d65f45c 3467 {
a9c697b8 3468 error ("%<lp_array%> is corrupted for lp %i", lp->index);
1d65f45c
RH
3469 err = true;
3470 }
3471 if (lp->region != r)
3472 {
3473 error ("region of lp %i is wrong", lp->index);
3474 err = true;
3475 }
3476 nvisited_lp++;
cc7220fd 3477 }
1d65f45c
RH
3478
3479 if (r->inner)
3480 outer = r, r = r->inner, depth++;
3481 else if (r->next_peer)
3482 r = r->next_peer;
cc7220fd
JH
3483 else
3484 {
98f358e5
JH
3485 do
3486 {
1d65f45c
RH
3487 r = r->outer;
3488 if (r == NULL)
3489 goto region_done;
98f358e5 3490 depth--;
1d65f45c 3491 outer = r->outer;
98f358e5 3492 }
1d65f45c
RH
3493 while (r->next_peer == NULL);
3494 r = r->next_peer;
cc7220fd
JH
3495 }
3496 }
1d65f45c
RH
3497 region_done:
3498 if (depth != 0)
3499 {
3500 error ("tree list ends on depth %i", depth);
3501 err = true;
3502 }
3503 if (count_r != nvisited_r)
3504 {
a9c697b8 3505 error ("%<region_array%> does not match %<region_tree%>");
1d65f45c
RH
3506 err = true;
3507 }
3508 if (count_lp != nvisited_lp)
3509 {
a9c697b8 3510 error ("%<lp_array%> does not match %<region_tree%>");
1d65f45c
RH
3511 err = true;
3512 }
617a1b71 3513
1d65f45c
RH
3514 if (err)
3515 {
3516 dump_eh_tree (stderr, fun);
a9c697b8 3517 internal_error ("%qs failed", __func__);
1d65f45c 3518 }
617a1b71 3519}
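verify_eh_tree cross-checks two views of the same data: every non-null slot of the index arrays must hold a node whose recorded index equals its position, and the number of such slots must match the number of nodes reached by walking the region tree. A standalone sketch of that invariant, with hypothetical types rather than GCC's, is:

/* Standalone sketch of the array/tree consistency check; not GCC code,
   all names are illustrative.  */
#include <stdio.h>

struct node { int index; struct node *inner, *next_peer, *outer; };

/* Count nodes by walking siblings iteratively and children recursively.  */
static int
count_tree (struct node *r)
{
  int n = 0;
  for (; r; r = r->next_peer)
    n += 1 + count_tree (r->inner);
  return n;
}

/* Return 1 if the index array and the tree agree, 0 otherwise.  */
static int
verify (struct node **array, int len, struct node *root)
{
  int count_array = 0;
  for (int i = 1; i < len; i++)
    if (array[i])
      {
	if (array[i]->index != i)
	  return 0;		/* slot points at a mis-indexed node */
	count_array++;
      }
  return count_array == count_tree (root);
}

int
main (void)
{
  struct node root = { 1, NULL, NULL, NULL };
  struct node child = { 2, NULL, NULL, &root };
  root.inner = &child;
  struct node *array[3] = { NULL, &root, &child };
  printf ("consistent: %d\n", verify (array, 3, &root));
  return 0;
}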
ef330312 3520\f
e2500fed 3521#include "gt-except.h"