/* Implements exception handling.
   Copyright (C) 1989-2015 Free Software Foundation, Inc.
   Contributed by Mike Stump <mrs@cygnus.com>.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
/* An exception is an event that can be "thrown" from within a
   function.  This event can then be "caught" by the callers of
   the function.

   The representation of exceptions changes several times during
   the compilation process:

   In the beginning, in the front end, we have the GENERIC trees
   TRY_CATCH_EXPR, TRY_FINALLY_EXPR, WITH_CLEANUP_EXPR,
   CLEANUP_POINT_EXPR, CATCH_EXPR, and EH_FILTER_EXPR.

   During initial gimplification (gimplify.c) these are lowered to the
   GIMPLE_TRY, GIMPLE_CATCH, and GIMPLE_EH_FILTER nodes.  The
   WITH_CLEANUP_EXPR and CLEANUP_POINT_EXPR nodes are converted into
   GIMPLE_TRY_FINALLY nodes; the others are a more direct 1-1
   conversion.

   During pass_lower_eh (tree-eh.c) we record the nested structure
   of the TRY nodes in EH_REGION nodes in CFUN->EH->REGION_TREE.
   We expand the eh_protect_cleanup_actions langhook into MUST_NOT_THROW
   regions at this time.  We can then flatten the statements within
   the TRY nodes to straight-line code.  Statements that had been within
   TRY nodes that can throw are recorded within CFUN->EH->THROW_STMT_TABLE,
   so that we may remember what action is supposed to be taken if
   a given statement does throw.  During this lowering process,
   we create an EH_LANDING_PAD node for each EH_REGION that has
   some code within the function that needs to be executed if a
   throw does happen.  We also create RESX statements that are
   used to transfer control from an inner EH_REGION to an outer
   EH_REGION.  We also create EH_DISPATCH statements as placeholders
   for a runtime type comparison that should be made in order to
   select the action to perform among different CATCH and EH_FILTER
   regions.

   During pass_lower_eh_dispatch (tree-eh.c), which is run after
   all inlining is complete, we are able to run assign_filter_values,
   which allows us to map the set of types manipulated by all of the
   CATCH and EH_FILTER regions to a set of integers.  This set of integers
   will be how the exception runtime communicates with the code generated
   within the function.  We then expand the GIMPLE_EH_DISPATCH statements
   to a switch or conditional branches that use the argument provided by
   the runtime (__builtin_eh_filter) and the set of integers we computed
   in assign_filter_values.

   During pass_lower_resx (tree-eh.c), which is run near the end
   of optimization, we expand RESX statements.  If the eh region
   that is outer to the RESX statement is a MUST_NOT_THROW, then
   the RESX expands to some form of abort statement.  If the eh
   region that is outer to the RESX statement is within the current
   function, then the RESX expands to a bookkeeping call
   (__builtin_eh_copy_values) and a goto.  Otherwise, the next
   handler for the exception must be within a function somewhere
   up the call chain, so we call back into the exception runtime
   (__builtin_unwind_resume).

   During pass_expand (cfgexpand.c), we generate REG_EH_REGION notes
   that create an rtl to eh_region mapping that corresponds to the
   gimple to eh_region mapping that had been recorded in the
   THROW_STMT_TABLE.

   Then, via finish_eh_generation, we generate the real landing pads
   to which the runtime will actually transfer control.  These new
   landing pads perform whatever bookkeeping is needed by the target
   backend in order to resume execution within the current function.
   Each of these new landing pads falls through into the post_landing_pad
   label which had been used within the CFG up to this point.  All
   exception edges within the CFG are redirected to the new landing pads.
   If the target uses setjmp to implement exceptions, the various extra
   calls into the runtime to register and unregister the current stack
   frame are emitted at this time.

   During pass_convert_to_eh_region_ranges (except.c), we transform
   the REG_EH_REGION notes attached to individual insns into
   non-overlapping ranges of insns bounded by NOTE_INSN_EH_REGION_BEG
   and NOTE_INSN_EH_REGION_END.  Each insn within such ranges has the
   same associated action within the exception region tree, meaning
   that (1) the exception is caught by the same landing pad within the
   current function, (2) the exception is blocked by the runtime with
   a MUST_NOT_THROW region, or (3) the exception is not handled at all
   within the current function.

   Finally, during assembly generation, we call
   output_function_exception_table (except.c) to emit the tables with
   which the exception runtime can determine if a given stack frame
   handles a given exception, and if so what filter value to provide
   to the function when the non-local control transfer is effected.
   If the target uses dwarf2 unwinding to implement exceptions, then
   output_call_frame_info (dwarf2out.c) emits the required unwind data.  */
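/* As a purely illustrative sketch of the pipeline above (not part of
   the implementation), a C++ fragment such as

	try { foo (); } catch (const E &) { bar (); }

   begins as a TRY_CATCH_EXPR whose handler contains a CATCH_EXPR for E,
   is gimplified into GIMPLE_TRY / GIMPLE_CATCH, acquires an ERT_TRY
   EH_REGION with one eh_catch and an EH_LANDING_PAD in pass_lower_eh,
   and ultimately contributes one call-site entry and one action record
   to the LSDA emitted by output_function_exception_table.  */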
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "cfghooks.h"
#include "fold-const.h"
#include "stringpool.h"
#include "stor-layout.h"
#include "insn-codes.h"
#include "insn-config.h"
#include "emit-rtl.h"
#include "libfuncs.h"
#include "dwarf2asm.h"
#include "dwarf2out.h"
#include "common/common-target.h"
#include "langhooks.h"
#include "diagnostic.h"
#include "tree-pretty-print.h"
#include "tree-pass.h"
#include "builtins.h"
#include "tree-hash-traits.h"
static GTY(()) int call_site_base;

static GTY (()) hash_map<tree_hash, tree> *type_to_runtime_map;

/* Describe the SjLj_Function_Context structure.  */
static GTY(()) tree sjlj_fc_type_node;
static int sjlj_fc_call_site_ofs;
static int sjlj_fc_data_ofs;
static int sjlj_fc_personality_ofs;
static int sjlj_fc_lsda_ofs;
static int sjlj_fc_jbuf_ofs;
struct GTY(()) call_site_record_d
{
  rtx landing_pad;
  int action;
};

/* In the following structure and associated functions,
   we represent entries in the action table as 1-based indices.
   Special cases are:

	 0:	null action record, non-null landing pad; implies cleanups
	-1:	null action record, null landing pad; implies no action
	-2:	no call-site entry; implies must_not_throw
	-3:	we have yet to process outer regions

   Further, no special cases apply to the "next" field of the record.
   For next, 0 means end of list.  */

struct action_record
{
  int offset;
  int filter;
  int next;
};
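/* Illustrative example: a cleanup region nested inside a try region is
   collected (see collect_one_action_chain) as a record with filter 0
   whose "next" field holds the 1-based index of the record carrying
   the catch handler's filter; a "next" of 0 terminates the chain, and
   the special indices above never appear inside a record itself.  */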
/* Hashtable helpers.  */

struct action_record_hasher : free_ptr_hash <action_record>
{
  static inline hashval_t hash (const action_record *);
  static inline bool equal (const action_record *, const action_record *);
};

inline hashval_t
action_record_hasher::hash (const action_record *entry)
{
  return entry->next * 1009 + entry->filter;
}

inline bool
action_record_hasher::equal (const action_record *entry,
			     const action_record *data)
{
  return entry->filter == data->filter && entry->next == data->next;
}

typedef hash_table<action_record_hasher> action_hash_type;
static bool get_eh_region_and_lp_from_rtx (const_rtx, eh_region *,
					   eh_landing_pad *);

static void dw2_build_landing_pads (void);

static int collect_one_action_chain (action_hash_type *, eh_region);
static int add_call_site (rtx, int, int);

static void push_uleb128 (vec<uchar, va_gc> **, unsigned int);
static void push_sleb128 (vec<uchar, va_gc> **, int);
#ifndef HAVE_AS_LEB128
static int dw2_size_of_call_site_table (int);
static int sjlj_size_of_call_site_table (void);
#endif
static void dw2_output_call_site_table (int, int);
static void sjlj_output_call_site_table (void);
void
init_eh (void)
{
  if (! flag_exceptions)
    return;

  type_to_runtime_map = hash_map<tree_hash, tree>::create_ggc (31);

  /* Create the SjLj_Function_Context structure.  This should match
     the definition in unwind-sjlj.c.  */
  if (targetm_common.except_unwind_info (&global_options) == UI_SJLJ)
    {
      tree f_jbuf, f_per, f_lsda, f_prev, f_cs, f_data, tmp;
      sjlj_fc_type_node = lang_hooks.types.make_type (RECORD_TYPE);

      f_prev = build_decl (BUILTINS_LOCATION,
			   FIELD_DECL, get_identifier ("__prev"),
			   build_pointer_type (sjlj_fc_type_node));
      DECL_FIELD_CONTEXT (f_prev) = sjlj_fc_type_node;

      f_cs = build_decl (BUILTINS_LOCATION,
			 FIELD_DECL, get_identifier ("__call_site"),
			 integer_type_node);
      DECL_FIELD_CONTEXT (f_cs) = sjlj_fc_type_node;

      tmp = build_index_type (size_int (4 - 1));
      tmp = build_array_type (lang_hooks.types.type_for_mode
			      (targetm.unwind_word_mode (), 1),
			      tmp);
      f_data = build_decl (BUILTINS_LOCATION,
			   FIELD_DECL, get_identifier ("__data"), tmp);
      DECL_FIELD_CONTEXT (f_data) = sjlj_fc_type_node;

      f_per = build_decl (BUILTINS_LOCATION,
			  FIELD_DECL, get_identifier ("__personality"),
			  ptr_type_node);
      DECL_FIELD_CONTEXT (f_per) = sjlj_fc_type_node;

      f_lsda = build_decl (BUILTINS_LOCATION,
			   FIELD_DECL, get_identifier ("__lsda"),
			   ptr_type_node);
      DECL_FIELD_CONTEXT (f_lsda) = sjlj_fc_type_node;

#ifdef DONT_USE_BUILTIN_SETJMP
#ifdef JMP_BUF_SIZE
      tmp = size_int (JMP_BUF_SIZE - 1);
#else
      /* Should be large enough for most systems, if it is not,
	 JMP_BUF_SIZE should be defined with the proper value.  It will
	 also tend to be larger than necessary for most systems, a more
	 optimal port will define JMP_BUF_SIZE.  */
      tmp = size_int (FIRST_PSEUDO_REGISTER + 2 - 1);
#endif
#else
      /* Compute a minimally sized jump buffer.  We need room to store at
	 least 3 pointers - stack pointer, frame pointer and return address.
	 Plus for some targets we need room for an extra pointer - in the
	 case of MIPS this is the global pointer.  This makes a total of four
	 pointers, but to be safe we actually allocate room for 5.

	 If pointers are smaller than words then we allocate enough room for
	 5 words, just in case the backend needs this much room.  For more
	 discussion on this issue see:
	 http://gcc.gnu.org/ml/gcc-patches/2014-05/msg00313.html.  */
      if (POINTER_SIZE > BITS_PER_WORD)
	tmp = size_int (5 - 1);
      else
	tmp = size_int ((5 * BITS_PER_WORD / POINTER_SIZE) - 1);
#endif
      tmp = build_index_type (tmp);
      tmp = build_array_type (ptr_type_node, tmp);
      f_jbuf = build_decl (BUILTINS_LOCATION,
			   FIELD_DECL, get_identifier ("__jbuf"), tmp);
#ifdef DONT_USE_BUILTIN_SETJMP
      /* We don't know what the alignment requirements of the
	 runtime's jmp_buf are.  Overestimate.  */
      DECL_ALIGN (f_jbuf) = BIGGEST_ALIGNMENT;
      DECL_USER_ALIGN (f_jbuf) = 1;
#endif
      DECL_FIELD_CONTEXT (f_jbuf) = sjlj_fc_type_node;
      TYPE_FIELDS (sjlj_fc_type_node) = f_prev;
      TREE_CHAIN (f_prev) = f_cs;
      TREE_CHAIN (f_cs) = f_data;
      TREE_CHAIN (f_data) = f_per;
      TREE_CHAIN (f_per) = f_lsda;
      TREE_CHAIN (f_lsda) = f_jbuf;

      layout_type (sjlj_fc_type_node);
      /* Cache the interesting field offsets so that we have
	 easy access from rtl.  */
      sjlj_fc_call_site_ofs
	= (tree_to_uhwi (DECL_FIELD_OFFSET (f_cs))
	   + tree_to_uhwi (DECL_FIELD_BIT_OFFSET (f_cs)) / BITS_PER_UNIT);
      sjlj_fc_data_ofs
	= (tree_to_uhwi (DECL_FIELD_OFFSET (f_data))
	   + tree_to_uhwi (DECL_FIELD_BIT_OFFSET (f_data)) / BITS_PER_UNIT);
      sjlj_fc_personality_ofs
	= (tree_to_uhwi (DECL_FIELD_OFFSET (f_per))
	   + tree_to_uhwi (DECL_FIELD_BIT_OFFSET (f_per)) / BITS_PER_UNIT);
      sjlj_fc_lsda_ofs
	= (tree_to_uhwi (DECL_FIELD_OFFSET (f_lsda))
	   + tree_to_uhwi (DECL_FIELD_BIT_OFFSET (f_lsda)) / BITS_PER_UNIT);
      sjlj_fc_jbuf_ofs
	= (tree_to_uhwi (DECL_FIELD_OFFSET (f_jbuf))
	   + tree_to_uhwi (DECL_FIELD_BIT_OFFSET (f_jbuf)) / BITS_PER_UNIT);
    }
}
void
init_eh_for_function (void)
{
  cfun->eh = ggc_cleared_alloc<eh_status> ();

  /* Make sure zero'th entries are used.  */
  vec_safe_push (cfun->eh->region_array, (eh_region)0);
  vec_safe_push (cfun->eh->lp_array, (eh_landing_pad)0);
}

/* Routines to generate the exception tree somewhat directly.
   These are used from tree-eh.c when processing exception related
   nodes during tree optimization.  */
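/* As an illustrative sketch (assuming the usual calling pattern from
   tree-eh.c), a try statement wrapped in an outer cleanup might be
   built roughly as:

	eh_region outer = gen_eh_region_cleanup (NULL);
	eh_region try_r = gen_eh_region_try (outer);
	eh_catch  c     = gen_eh_region_catch (try_r, type_or_list);
	eh_landing_pad lp = gen_eh_landing_pad (try_r);

   after which lp->post_landing_pad is filled in by the lowering code.  */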
/* Insert a new, blank region into the region tree as a child of OUTER.  */

static eh_region
gen_eh_region (enum eh_region_type type, eh_region outer)
{
  eh_region new_eh;

  /* Insert a new blank region as a leaf in the tree.  */
  new_eh = ggc_cleared_alloc<eh_region_d> ();
  new_eh->type = type;
  new_eh->outer = outer;
  if (outer)
    {
      new_eh->next_peer = outer->inner;
      outer->inner = new_eh;
    }
  else
    {
      new_eh->next_peer = cfun->eh->region_tree;
      cfun->eh->region_tree = new_eh;
    }

  new_eh->index = vec_safe_length (cfun->eh->region_array);
  vec_safe_push (cfun->eh->region_array, new_eh);

  /* Copy the language's notion of whether to use __cxa_end_cleanup.  */
  if (targetm.arm_eabi_unwinder && lang_hooks.eh_use_cxa_end_cleanup)
    new_eh->use_cxa_end_cleanup = true;

  return new_eh;
}

eh_region
gen_eh_region_cleanup (eh_region outer)
{
  return gen_eh_region (ERT_CLEANUP, outer);
}

eh_region
gen_eh_region_try (eh_region outer)
{
  return gen_eh_region (ERT_TRY, outer);
}
eh_catch
gen_eh_region_catch (eh_region t, tree type_or_list)
{
  eh_catch c, l;
  tree type_list, type_node;

  gcc_assert (t->type == ERT_TRY);

  /* Ensure to always end up with a type list to normalize further
     processing, then register each type against the runtime types map.  */
  type_list = type_or_list;
  if (type_or_list)
    {
      if (TREE_CODE (type_or_list) != TREE_LIST)
	type_list = tree_cons (NULL_TREE, type_or_list, NULL_TREE);

      type_node = type_list;
      for (; type_node; type_node = TREE_CHAIN (type_node))
	add_type_for_runtime (TREE_VALUE (type_node));
    }

  c = ggc_cleared_alloc<eh_catch_d> ();
  c->type_list = type_list;
  l = t->u.eh_try.last_catch;
  c->prev_catch = l;
  if (l)
    l->next_catch = c;
  else
    t->u.eh_try.first_catch = c;
  t->u.eh_try.last_catch = c;

  return c;
}
eh_region
gen_eh_region_allowed (eh_region outer, tree allowed)
{
  eh_region region = gen_eh_region (ERT_ALLOWED_EXCEPTIONS, outer);
  region->u.allowed.type_list = allowed;

  for (; allowed ; allowed = TREE_CHAIN (allowed))
    add_type_for_runtime (TREE_VALUE (allowed));

  return region;
}

eh_region
gen_eh_region_must_not_throw (eh_region outer)
{
  return gen_eh_region (ERT_MUST_NOT_THROW, outer);
}
eh_landing_pad
gen_eh_landing_pad (eh_region region)
{
  eh_landing_pad lp = ggc_cleared_alloc<eh_landing_pad_d> ();

  lp->next_lp = region->landing_pads;
  lp->region = region;
  lp->index = vec_safe_length (cfun->eh->lp_array);
  region->landing_pads = lp;

  vec_safe_push (cfun->eh->lp_array, lp);

  return lp;
}
eh_region
get_eh_region_from_number_fn (struct function *ifun, int i)
{
  return (*ifun->eh->region_array)[i];
}

eh_region
get_eh_region_from_number (int i)
{
  return get_eh_region_from_number_fn (cfun, i);
}

eh_landing_pad
get_eh_landing_pad_from_number_fn (struct function *ifun, int i)
{
  return (*ifun->eh->lp_array)[i];
}

eh_landing_pad
get_eh_landing_pad_from_number (int i)
{
  return get_eh_landing_pad_from_number_fn (cfun, i);
}

eh_region
get_eh_region_from_lp_number_fn (struct function *ifun, int i)
{
  eh_landing_pad lp;

  if (i < 0)
    return (*ifun->eh->region_array)[-i];
  else if (i == 0)
    return NULL;
  else
    {
      lp = (*ifun->eh->lp_array)[i];
      return lp->region;
    }
}

eh_region
get_eh_region_from_lp_number (int i)
{
  return get_eh_region_from_lp_number_fn (cfun, i);
}
/* Returns true if the current function has exception handling regions.  */

bool
current_function_has_exception_handlers (void)
{
  return cfun->eh->region_tree != NULL;
}
/* A subroutine of duplicate_eh_regions.  Copy the eh_region tree at OLD.
   Root it at OUTER, and apply LP_OFFSET to the lp numbers.  */

struct duplicate_eh_regions_data
{
  duplicate_eh_regions_map label_map;
  void *label_map_data;
  hash_map<void *, void *> *eh_map;
};
static void
duplicate_eh_regions_1 (struct duplicate_eh_regions_data *data,
			eh_region old_r, eh_region outer)
{
  eh_landing_pad old_lp, new_lp;
  eh_region new_r;

  new_r = gen_eh_region (old_r->type, outer);
  gcc_assert (!data->eh_map->put (old_r, new_r));

  switch (old_r->type)
    {
    case ERT_CLEANUP:
      break;

    case ERT_TRY:
      {
	eh_catch oc, nc;
	for (oc = old_r->u.eh_try.first_catch; oc; oc = oc->next_catch)
	  {
	    /* We should be doing all our region duplication before and
	       during inlining, which is before filter lists are created.  */
	    gcc_assert (oc->filter_list == NULL);
	    nc = gen_eh_region_catch (new_r, oc->type_list);
	    nc->label = data->label_map (oc->label, data->label_map_data);
	  }
      }
      break;

    case ERT_ALLOWED_EXCEPTIONS:
      new_r->u.allowed.type_list = old_r->u.allowed.type_list;
      if (old_r->u.allowed.label)
	new_r->u.allowed.label
	  = data->label_map (old_r->u.allowed.label, data->label_map_data);
      else
	new_r->u.allowed.label = NULL_TREE;
      break;

    case ERT_MUST_NOT_THROW:
      new_r->u.must_not_throw.failure_loc =
	LOCATION_LOCUS (old_r->u.must_not_throw.failure_loc);
      new_r->u.must_not_throw.failure_decl =
	old_r->u.must_not_throw.failure_decl;
      break;
    }

  for (old_lp = old_r->landing_pads; old_lp ; old_lp = old_lp->next_lp)
    {
      /* Don't bother copying unused landing pads.  */
      if (old_lp->post_landing_pad == NULL)
	continue;

      new_lp = gen_eh_landing_pad (new_r);
      gcc_assert (!data->eh_map->put (old_lp, new_lp));

      new_lp->post_landing_pad
	= data->label_map (old_lp->post_landing_pad, data->label_map_data);
      EH_LANDING_PAD_NR (new_lp->post_landing_pad) = new_lp->index;
    }

  /* Make sure to preserve the original use of __cxa_end_cleanup.  */
  new_r->use_cxa_end_cleanup = old_r->use_cxa_end_cleanup;

  for (old_r = old_r->inner; old_r ; old_r = old_r->next_peer)
    duplicate_eh_regions_1 (data, old_r, new_r);
}
/* Duplicate the EH regions from IFUN rooted at COPY_REGION into
   the current function and root the tree below OUTER_REGION.
   The special case of COPY_REGION of NULL means all regions.
   Remap labels using MAP/MAP_DATA callback.  Return a pointer map
   that allows the caller to remap uses of both EH regions and
   EH landing pads.  */

hash_map<void *, void *> *
duplicate_eh_regions (struct function *ifun,
		      eh_region copy_region, int outer_lp,
		      duplicate_eh_regions_map map, void *map_data)
{
  struct duplicate_eh_regions_data data;
  eh_region outer_region;

  if (flag_checking)
    verify_eh_tree (ifun);

  data.label_map = map;
  data.label_map_data = map_data;
  data.eh_map = new hash_map<void *, void *>;

  outer_region = get_eh_region_from_lp_number_fn (cfun, outer_lp);

  /* Copy all the regions in the subtree.  */
  if (copy_region)
    duplicate_eh_regions_1 (&data, copy_region, outer_region);
  else
    {
      eh_region r;
      for (r = ifun->eh->region_tree; r ; r = r->next_peer)
	duplicate_eh_regions_1 (&data, r, outer_region);
    }

  if (flag_checking)
    verify_eh_tree (cfun);

  return data.eh_map;
}
/* Return the region that is outer to both REGION_A and REGION_B in IFUN.  */

eh_region
eh_region_outermost (struct function *ifun, eh_region region_a,
		     eh_region region_b)
{
  sbitmap b_outer;

  gcc_assert (ifun->eh->region_array);
  gcc_assert (ifun->eh->region_tree);

  b_outer = sbitmap_alloc (ifun->eh->region_array->length ());
  bitmap_clear (b_outer);

  while (region_b)
    {
      bitmap_set_bit (b_outer, region_b->index);
      region_b = region_b->outer;
    }

  while (region_a)
    {
      if (bitmap_bit_p (b_outer, region_a->index))
	break;
      region_a = region_a->outer;
    }

  sbitmap_free (b_outer);
  return region_a;
}
void
add_type_for_runtime (tree type)
{
  /* If TYPE is NOP_EXPR, it means that it already is a runtime type.  */
  if (TREE_CODE (type) == NOP_EXPR)
    return;

  bool existed = false;
  tree *slot = &type_to_runtime_map->get_or_insert (type, &existed);
  if (!existed)
    *slot = lang_hooks.eh_runtime_type (type);
}

tree
lookup_type_for_runtime (tree type)
{
  /* If TYPE is NOP_EXPR, it means that it already is a runtime type.  */
  if (TREE_CODE (type) == NOP_EXPR)
    return type;

  /* We should have always inserted the data earlier.  */
  return *type_to_runtime_map->get (type);
}
/* Represent an entry in @TTypes for either catch actions
   or exception filter actions.  */
struct ttypes_filter {
  tree t;
  int filter;
};

/* Helper for ttypes_filter hashing.  */

struct ttypes_filter_hasher : free_ptr_hash <ttypes_filter>
{
  typedef tree_node *compare_type;
  static inline hashval_t hash (const ttypes_filter *);
  static inline bool equal (const ttypes_filter *, const tree_node *);
};

/* Compare ENTRY (a ttypes_filter entry in the hash table) with DATA
   (a tree) for a @TTypes type node we are thinking about adding.  */

inline bool
ttypes_filter_hasher::equal (const ttypes_filter *entry, const tree_node *data)
{
  return entry->t == data;
}

/* Hash function for a @TTypes type node.  */

inline hashval_t
ttypes_filter_hasher::hash (const ttypes_filter *entry)
{
  return TREE_HASH (entry->t);
}

typedef hash_table<ttypes_filter_hasher> ttypes_hash_type;
/* Helper for ehspec hashing.  */

struct ehspec_hasher : free_ptr_hash <ttypes_filter>
{
  static inline hashval_t hash (const ttypes_filter *);
  static inline bool equal (const ttypes_filter *, const ttypes_filter *);
};

/* Compare ENTRY with DATA (both struct ttypes_filter) for a @TTypes
   exception specification list we are thinking about adding.  */
/* ??? Currently we use the type lists in the order given.  Someone
   should put these in some canonical order.  */

inline bool
ehspec_hasher::equal (const ttypes_filter *entry, const ttypes_filter *data)
{
  return type_list_equal (entry->t, data->t);
}

/* Hash function for exception specification lists.  */

inline hashval_t
ehspec_hasher::hash (const ttypes_filter *entry)
{
  hashval_t h = 0;
  tree list;

  for (list = entry->t; list ; list = TREE_CHAIN (list))
    h = (h << 5) + (h >> 27) + TREE_HASH (TREE_VALUE (list));
  return h;
}

typedef hash_table<ehspec_hasher> ehspec_hash_type;
/* Add TYPE (which may be NULL) to cfun->eh->ttype_data, using TYPES_HASH
   to speed up the search.  Return the filter value to be used.  */

static int
add_ttypes_entry (ttypes_hash_type *ttypes_hash, tree type)
{
  struct ttypes_filter **slot, *n;

  slot = ttypes_hash->find_slot_with_hash (type, (hashval_t) TREE_HASH (type),
					   INSERT);

  if ((n = *slot) == NULL)
    {
      /* Filter value is a 1 based table index.  */

      n = XNEW (struct ttypes_filter);
      n->t = type;
      n->filter = vec_safe_length (cfun->eh->ttype_data) + 1;
      *slot = n;

      vec_safe_push (cfun->eh->ttype_data, type);
    }

  return n->filter;
}
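/* For example (illustrative only), the first two distinct types passed
   to add_ttypes_entry receive the filter values 1 and 2, and a NULL
   type (used for catch-all handlers) is assigned a filter in exactly
   the same way.  The runtime later indexes the emitted @TTypes table
   with these 1-based values.  */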
/* Add LIST to cfun->eh->ehspec_data, using EHSPEC_HASH and TYPES_HASH
   to speed up the search.  Return the filter value to be used.  */

static int
add_ehspec_entry (ehspec_hash_type *ehspec_hash, ttypes_hash_type *ttypes_hash,
		  tree list)
{
  struct ttypes_filter **slot, *n;
  struct ttypes_filter dummy;

  dummy.t = list;
  slot = ehspec_hash->find_slot (&dummy, INSERT);

  if ((n = *slot) == NULL)
    {
      int len;

      if (targetm.arm_eabi_unwinder)
	len = vec_safe_length (cfun->eh->ehspec_data.arm_eabi);
      else
	len = vec_safe_length (cfun->eh->ehspec_data.other);

      /* Filter value is a -1 based byte index into a uleb128 buffer.  */

      n = XNEW (struct ttypes_filter);
      n->t = list;
      n->filter = -(len + 1);
      *slot = n;

      /* Generate a 0 terminated list of filter values.  */
      for (; list ; list = TREE_CHAIN (list))
	{
	  if (targetm.arm_eabi_unwinder)
	    vec_safe_push (cfun->eh->ehspec_data.arm_eabi, TREE_VALUE (list));
	  else
	    {
	      /* Look up each type in the list and encode its filter
		 value as a uleb128.  */
	      push_uleb128 (&cfun->eh->ehspec_data.other,
			    add_ttypes_entry (ttypes_hash, TREE_VALUE (list)));
	    }
	}
      if (targetm.arm_eabi_unwinder)
	vec_safe_push (cfun->eh->ehspec_data.arm_eabi, NULL_TREE);
      else
	vec_safe_push (cfun->eh->ehspec_data.other, (uchar)0);
    }

  return n->filter;
}
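/* As a sketch for the non-ARM path: a C++98 specification such as
   "throw (A, B)" is encoded into ehspec_data.other as
   uleb128(filter-of-A), uleb128(filter-of-B), 0, and the value
   returned is the negated 1-based byte index of the list's first byte,
   e.g. -1 for the first list added.  */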
/* Generate the action filter values to be used for CATCH and
   ALLOWED_EXCEPTIONS regions.  When using dwarf2 exception regions,
   we use lots of landing pads, and so every type or list can share
   the same filter value, which saves table space.  */

void
assign_filter_values (void)
{
  int i;
  eh_region r;
  eh_catch c;

  vec_alloc (cfun->eh->ttype_data, 16);
  if (targetm.arm_eabi_unwinder)
    vec_alloc (cfun->eh->ehspec_data.arm_eabi, 64);
  else
    vec_alloc (cfun->eh->ehspec_data.other, 64);

  ehspec_hash_type ehspec (31);
  ttypes_hash_type ttypes (31);

  for (i = 1; vec_safe_iterate (cfun->eh->region_array, i, &r); ++i)
    {
      if (r == NULL)
	continue;

      switch (r->type)
	{
	case ERT_TRY:
	  for (c = r->u.eh_try.first_catch; c ; c = c->next_catch)
	    {
	      /* Whatever type_list is (NULL or true list), we build a list
		 of filters for the region.  */
	      c->filter_list = NULL_TREE;

	      if (c->type_list != NULL)
		{
		  /* Get a filter value for each of the types caught and store
		     them in the region's dedicated list.  */
		  tree tp_node = c->type_list;

		  for ( ; tp_node; tp_node = TREE_CHAIN (tp_node))
		    {
		      int flt
			= add_ttypes_entry (&ttypes, TREE_VALUE (tp_node));
		      tree flt_node = build_int_cst (integer_type_node, flt);

		      c->filter_list
			= tree_cons (NULL_TREE, flt_node, c->filter_list);
		    }
		}
	      else
		{
		  /* Get a filter value for the NULL list also since it
		     will need an action record anyway.  */
		  int flt = add_ttypes_entry (&ttypes, NULL);
		  tree flt_node = build_int_cst (integer_type_node, flt);

		  c->filter_list
		    = tree_cons (NULL_TREE, flt_node, NULL);
		}
	    }
	  break;

	case ERT_ALLOWED_EXCEPTIONS:
	  r->u.allowed.filter
	    = add_ehspec_entry (&ehspec, &ttypes, r->u.allowed.type_list);
	  break;

	default:
	  break;
	}
    }
}
/* Emit SEQ into a new basic block just before INSN (which is assumed to
   be the first instruction of some existing BB) and return the newly
   produced block.  */

static basic_block
emit_to_new_bb_before (rtx_insn *seq, rtx insn)
{
  rtx_insn *last;
  basic_block bb;
  edge e;
  edge_iterator ei;

  /* If there happens to be a fallthru edge (possibly created by cleanup_cfg
     call), we don't want it to go into newly created landing pad or other EH
     construct.  */
  for (ei = ei_start (BLOCK_FOR_INSN (insn)->preds); (e = ei_safe_edge (ei)); )
    if (e->flags & EDGE_FALLTHRU)
      force_nonfallthru (e);
    else
      ei_next (&ei);
  last = emit_insn_before (seq, insn);
  if (BARRIER_P (last))
    last = PREV_INSN (last);
  bb = create_basic_block (seq, last, BLOCK_FOR_INSN (insn)->prev_bb);
  update_bb_for_insn (bb);
  bb->flags |= BB_SUPERBLOCK;
  return bb;
}
/* A subroutine of dw2_build_landing_pads, also used for edge splitting
   at the rtl level.  Emit the code required by the target at a landing
   pad for the given region.  */

void
expand_dw2_landing_pad_for_region (eh_region region)
{
  if (targetm.have_exception_receiver ())
    emit_insn (targetm.gen_exception_receiver ());
  else if (targetm.have_nonlocal_goto_receiver ())
    emit_insn (targetm.gen_nonlocal_goto_receiver ());
  else
    { /* Nothing */ }

  if (region->exc_ptr_reg)
    emit_move_insn (region->exc_ptr_reg,
		    gen_rtx_REG (ptr_mode, EH_RETURN_DATA_REGNO (0)));
  if (region->filter_reg)
    emit_move_insn (region->filter_reg,
		    gen_rtx_REG (targetm.eh_return_filter_mode (),
				 EH_RETURN_DATA_REGNO (1)));
}
/* Expand the extra code needed at landing pads for dwarf2 unwinding.  */

static void
dw2_build_landing_pads (void)
{
  int i;
  eh_landing_pad lp;
  int e_flags = EDGE_FALLTHRU;

  /* If we're going to partition blocks, we need to be able to add
     new landing pads later, which means that we need to hold on to
     the post-landing-pad block.  Prevent it from being merged away.
     We'll remove this bit after partitioning.  */
  if (flag_reorder_blocks_and_partition)
    e_flags |= EDGE_PRESERVE;

  for (i = 1; vec_safe_iterate (cfun->eh->lp_array, i, &lp); ++i)
    {
      basic_block bb;
      rtx_insn *seq;
      edge e;

      if (lp == NULL || lp->post_landing_pad == NULL)
	continue;

      start_sequence ();

      lp->landing_pad = gen_label_rtx ();
      emit_label (lp->landing_pad);
      LABEL_PRESERVE_P (lp->landing_pad) = 1;

      expand_dw2_landing_pad_for_region (lp->region);

      seq = get_insns ();
      end_sequence ();

      bb = emit_to_new_bb_before (seq, label_rtx (lp->post_landing_pad));
      e = make_edge (bb, bb->next_bb, e_flags);
      e->count = bb->count;
      e->probability = REG_BR_PROB_BASE;
      if (current_loops)
	{
	  struct loop *loop = bb->next_bb->loop_father;
	  /* If we created a pre-header block, add the new block to the
	     outer loop, otherwise to the loop itself.  */
	  if (bb->next_bb == loop->header)
	    add_bb_to_loop (bb, loop_outer (loop));
	  else
	    add_bb_to_loop (bb, loop);
	}
    }
}
static vec<int> sjlj_lp_call_site_index;
/* Process all active landing pads.  Assign each one a compact dispatch
   index, and a call-site index.  */

static int
sjlj_assign_call_site_values (void)
{
  action_hash_type ar_hash (31);
  int i, disp_index;
  eh_landing_pad lp;

  vec_alloc (crtl->eh.action_record_data, 64);

  disp_index = 0;
  call_site_base = 1;
  for (i = 1; vec_safe_iterate (cfun->eh->lp_array, i, &lp); ++i)
    if (lp && lp->post_landing_pad)
      {
	int action, call_site;

	/* First: build the action table.  */
	action = collect_one_action_chain (&ar_hash, lp->region);

	/* Next: assign call-site values.  In dwarf2 terms, this would be
	   the region number assigned by convert_to_eh_region_ranges, but
	   handles no-action and must-not-throw differently.  */
	/* Map must-not-throw to otherwise unused call-site index 0.  */
	if (action == -2)
	  call_site = 0;
	/* Map no-action to otherwise unused call-site index -1.  */
	else if (action == -1)
	  call_site = -1;
	/* Otherwise, look it up in the table.  */
	else
	  call_site = add_call_site (GEN_INT (disp_index), action, 0);
	sjlj_lp_call_site_index[i] = call_site;

	disp_index++;
      }

  return disp_index;
}
/* Emit code to record the current call-site index before every
   insn that can throw.  */

static void
sjlj_mark_call_sites (void)
{
  int last_call_site = -2;
  rtx_insn *insn;
  rtx mem;

  for (insn = get_insns (); insn ; insn = NEXT_INSN (insn))
    {
      eh_landing_pad lp;
      eh_region r;
      bool nothrow;
      int this_call_site;
      rtx_insn *before, *p;

      /* Reset value tracking at extended basic block boundaries.  */
      if (LABEL_P (insn))
	last_call_site = -2;

      /* If the function allocates dynamic stack space, the context must
	 be updated after every allocation/deallocation accordingly.  */
      if (NOTE_P (insn) && NOTE_KIND (insn) == NOTE_INSN_UPDATE_SJLJ_CONTEXT)
	{
	  rtx buf_addr;

	  start_sequence ();
	  buf_addr = plus_constant (Pmode, XEXP (crtl->eh.sjlj_fc, 0),
				    sjlj_fc_jbuf_ofs);
	  expand_builtin_update_setjmp_buf (buf_addr);
	  p = get_insns ();
	  end_sequence ();

	  emit_insn_before (p, insn);
	}

      if (! INSN_P (insn))
	continue;

      nothrow = get_eh_region_and_lp_from_rtx (insn, &r, &lp);
      if (nothrow)
	continue;
      if (lp)
	this_call_site = sjlj_lp_call_site_index[lp->index];
      else if (r == NULL)
	{
	  /* Calls (and trapping insns) without notes are outside any
	     exception handling region in this function.  Mark them as
	     no action.  */
	  this_call_site = -1;
	}
      else
	{
	  gcc_assert (r->type == ERT_MUST_NOT_THROW);
	  this_call_site = 0;
	}

      if (this_call_site != -1)
	crtl->uses_eh_lsda = 1;

      if (this_call_site == last_call_site)
	continue;

      /* Don't separate a call from its argument loads.  */
      before = insn;
      if (CALL_P (insn))
	before = find_first_parameter_load (insn, NULL);

      start_sequence ();
      mem = adjust_address (crtl->eh.sjlj_fc, TYPE_MODE (integer_type_node),
			    sjlj_fc_call_site_ofs);
      emit_move_insn (mem, gen_int_mode (this_call_site, GET_MODE (mem)));
      p = get_insns ();
      end_sequence ();

      emit_insn_before (p, before);
      last_call_site = this_call_site;
    }
}
/* Construct the SjLj_Function_Context.  */

static void
sjlj_emit_function_enter (rtx_code_label *dispatch_label)
{
  rtx_insn *fn_begin, *seq;
  rtx fc, mem;
  bool fn_begin_outside_block;
  rtx personality = get_personality_function (current_function_decl);

  fc = crtl->eh.sjlj_fc;

  start_sequence ();

  /* We're storing this libcall's address into memory instead of
     calling it directly.  Thus, we must call assemble_external_libcall
     here, as we cannot depend on emit_library_call to do it for us.  */
  assemble_external_libcall (personality);
  mem = adjust_address (fc, Pmode, sjlj_fc_personality_ofs);
  emit_move_insn (mem, personality);

  mem = adjust_address (fc, Pmode, sjlj_fc_lsda_ofs);
  if (crtl->uses_eh_lsda)
    {
      char buf[20];
      rtx sym;

      ASM_GENERATE_INTERNAL_LABEL (buf, "LLSDA", current_function_funcdef_no);
      sym = gen_rtx_SYMBOL_REF (Pmode, ggc_strdup (buf));
      SYMBOL_REF_FLAGS (sym) = SYMBOL_FLAG_LOCAL;
      emit_move_insn (mem, sym);
    }
  else
    emit_move_insn (mem, const0_rtx);

  if (dispatch_label)
    {
#ifdef DONT_USE_BUILTIN_SETJMP
      rtx x;
      x = emit_library_call_value (setjmp_libfunc, NULL_RTX, LCT_RETURNS_TWICE,
				   TYPE_MODE (integer_type_node), 1,
				   plus_constant (Pmode, XEXP (fc, 0),
						  sjlj_fc_jbuf_ofs), Pmode);

      emit_cmp_and_jump_insns (x, const0_rtx, NE, 0,
			       TYPE_MODE (integer_type_node), 0,
			       dispatch_label, REG_BR_PROB_BASE / 100);
#else
      expand_builtin_setjmp_setup (plus_constant (Pmode, XEXP (fc, 0),
						  sjlj_fc_jbuf_ofs),
				   dispatch_label);
#endif
    }

  emit_library_call (unwind_sjlj_register_libfunc, LCT_NORMAL, VOIDmode,
		     1, XEXP (fc, 0), Pmode);

  seq = get_insns ();
  end_sequence ();

  /* ??? Instead of doing this at the beginning of the function,
     do this in a block that is at loop level 0 and dominates all
     can_throw_internal instructions.  */

  fn_begin_outside_block = true;
  for (fn_begin = get_insns (); ; fn_begin = NEXT_INSN (fn_begin))
    if (NOTE_P (fn_begin))
      {
	if (NOTE_KIND (fn_begin) == NOTE_INSN_FUNCTION_BEG)
	  break;
	else if (NOTE_INSN_BASIC_BLOCK_P (fn_begin))
	  fn_begin_outside_block = false;
      }

  if (fn_begin_outside_block)
    insert_insn_on_edge (seq, single_succ_edge (ENTRY_BLOCK_PTR_FOR_FN (cfun)));
  else
    emit_insn_after (seq, fn_begin);
}
/* Call back from expand_function_end to know where we should put
   the call to unwind_sjlj_unregister_libfunc if needed.  */

void
sjlj_emit_function_exit_after (rtx_insn *after)
{
  crtl->eh.sjlj_exit_after = after;
}

static void
sjlj_emit_function_exit (void)
{
  rtx_insn *seq, *insn;

  start_sequence ();

  emit_library_call (unwind_sjlj_unregister_libfunc, LCT_NORMAL, VOIDmode,
		     1, XEXP (crtl->eh.sjlj_fc, 0), Pmode);

  seq = get_insns ();
  end_sequence ();

  /* ??? Really this can be done in any block at loop level 0 that
     post-dominates all can_throw_internal instructions.  This is
     the last possible moment.  */

  insn = crtl->eh.sjlj_exit_after;
  if (LABEL_P (insn))
    insn = NEXT_INSN (insn);

  emit_insn_after (seq, insn);
}
static void
sjlj_emit_dispatch_table (rtx_code_label *dispatch_label, int num_dispatch)
{
  machine_mode unwind_word_mode = targetm.unwind_word_mode ();
  machine_mode filter_mode = targetm.eh_return_filter_mode ();
  eh_landing_pad lp;
  rtx mem, fc, exc_ptr_reg, filter_reg;
  rtx_insn *seq;
  basic_block bb;
  eh_region r;
  edge e;
  int i, disp_index;
  vec<tree> dispatch_labels = vNULL;

  fc = crtl->eh.sjlj_fc;

  start_sequence ();

  emit_label (dispatch_label);

#ifndef DONT_USE_BUILTIN_SETJMP
  expand_builtin_setjmp_receiver (dispatch_label);

  /* The caller of expand_builtin_setjmp_receiver is responsible for
     making sure that the label doesn't vanish.  The only other caller
     is the expander for __builtin_setjmp_receiver, which places this
     label on the nonlocal_goto_label list.  Since we're modeling these
     CFG edges more exactly, we can use the forced_labels list instead.  */
  LABEL_PRESERVE_P (dispatch_label) = 1;
  forced_labels
    = gen_rtx_INSN_LIST (VOIDmode, dispatch_label, forced_labels);
#endif

  /* Load up exc_ptr and filter values from the function context.  */
  mem = adjust_address (fc, unwind_word_mode, sjlj_fc_data_ofs);
  if (unwind_word_mode != ptr_mode)
    {
#ifdef POINTERS_EXTEND_UNSIGNED
      mem = convert_memory_address (ptr_mode, mem);
#else
      mem = convert_to_mode (ptr_mode, mem, 0);
#endif
    }
  exc_ptr_reg = force_reg (ptr_mode, mem);

  mem = adjust_address (fc, unwind_word_mode,
			sjlj_fc_data_ofs + GET_MODE_SIZE (unwind_word_mode));
  if (unwind_word_mode != filter_mode)
    mem = convert_to_mode (filter_mode, mem, 0);
  filter_reg = force_reg (filter_mode, mem);

  /* Jump to one of the directly reachable regions.  */

  disp_index = 0;
  rtx_code_label *first_reachable_label = NULL;

  /* If there's exactly one call site in the function, don't bother
     generating a switch statement.  */
  if (num_dispatch > 1)
    dispatch_labels.create (num_dispatch);

  for (i = 1; vec_safe_iterate (cfun->eh->lp_array, i, &lp); ++i)
    if (lp && lp->post_landing_pad)
      {
	rtx_insn *seq2;
	rtx_code_label *label;

	start_sequence ();

	lp->landing_pad = dispatch_label;

	if (num_dispatch > 1)
	  {
	    tree t_label, case_elt, t;

	    t_label = create_artificial_label (UNKNOWN_LOCATION);
	    t = build_int_cst (integer_type_node, disp_index);
	    case_elt = build_case_label (t, NULL, t_label);
	    dispatch_labels.quick_push (case_elt);
	    label = jump_target_rtx (t_label);
	  }
	else
	  label = gen_label_rtx ();

	if (disp_index == 0)
	  first_reachable_label = label;
	emit_label (label);

	r = lp->region;
	if (r->exc_ptr_reg)
	  emit_move_insn (r->exc_ptr_reg, exc_ptr_reg);
	if (r->filter_reg)
	  emit_move_insn (r->filter_reg, filter_reg);

	seq2 = get_insns ();
	end_sequence ();

	rtx_insn *before = label_rtx (lp->post_landing_pad);
	bb = emit_to_new_bb_before (seq2, before);
	e = make_edge (bb, bb->next_bb, EDGE_FALLTHRU);
	e->count = bb->count;
	e->probability = REG_BR_PROB_BASE;
	if (current_loops)
	  {
	    struct loop *loop = bb->next_bb->loop_father;
	    /* If we created a pre-header block, add the new block to the
	       outer loop, otherwise to the loop itself.  */
	    if (bb->next_bb == loop->header)
	      add_bb_to_loop (bb, loop_outer (loop));
	    else
	      add_bb_to_loop (bb, loop);
	    /* ??? For multiple dispatches we will end up with edges
	       from the loop tree root into this loop, making it a
	       multiple-entry loop.  Discard all affected loops.  */
	    if (num_dispatch > 1)
	      {
		for (loop = bb->loop_father;
		     loop_outer (loop); loop = loop_outer (loop))
		  mark_loop_for_removal (loop);
	      }
	  }

	disp_index++;
      }
  gcc_assert (disp_index == num_dispatch);

  if (num_dispatch > 1)
    {
      rtx disp = adjust_address (fc, TYPE_MODE (integer_type_node),
				 sjlj_fc_call_site_ofs);
      expand_sjlj_dispatch_table (disp, dispatch_labels);
    }

  seq = get_insns ();
  end_sequence ();

  bb = emit_to_new_bb_before (seq, first_reachable_label);
  if (num_dispatch == 1)
    {
      e = make_edge (bb, bb->next_bb, EDGE_FALLTHRU);
      e->count = bb->count;
      e->probability = REG_BR_PROB_BASE;
      if (current_loops)
	{
	  struct loop *loop = bb->next_bb->loop_father;
	  /* If we created a pre-header block, add the new block to the
	     outer loop, otherwise to the loop itself.  */
	  if (bb->next_bb == loop->header)
	    add_bb_to_loop (bb, loop_outer (loop));
	  else
	    add_bb_to_loop (bb, loop);
	}
    }
  else
    {
      /* We are not wiring up edges here, but as the dispatcher call
	 is at function begin simply associate the block with the
	 outermost (non-)loop.  */
      if (current_loops)
	add_bb_to_loop (bb, current_loops->tree_root);
    }
}
static void
sjlj_build_landing_pads (void)
{
  int num_dispatch;

  num_dispatch = vec_safe_length (cfun->eh->lp_array);
  if (num_dispatch == 0)
    return;
  sjlj_lp_call_site_index.safe_grow_cleared (num_dispatch);

  num_dispatch = sjlj_assign_call_site_values ();
  if (num_dispatch > 0)
    {
      rtx_code_label *dispatch_label = gen_label_rtx ();
      int align = STACK_SLOT_ALIGNMENT (sjlj_fc_type_node,
					TYPE_MODE (sjlj_fc_type_node),
					TYPE_ALIGN (sjlj_fc_type_node));
      crtl->eh.sjlj_fc
	= assign_stack_local (TYPE_MODE (sjlj_fc_type_node),
			      int_size_in_bytes (sjlj_fc_type_node),
			      align);

      sjlj_mark_call_sites ();
      sjlj_emit_function_enter (dispatch_label);
      sjlj_emit_dispatch_table (dispatch_label, num_dispatch);
      sjlj_emit_function_exit ();
    }

  /* If we do not have any landing pads, we may still need to register a
     personality routine and (empty) LSDA to handle must-not-throw regions.  */
  else if (function_needs_eh_personality (cfun) != eh_personality_none)
    {
      int align = STACK_SLOT_ALIGNMENT (sjlj_fc_type_node,
					TYPE_MODE (sjlj_fc_type_node),
					TYPE_ALIGN (sjlj_fc_type_node));
      crtl->eh.sjlj_fc
	= assign_stack_local (TYPE_MODE (sjlj_fc_type_node),
			      int_size_in_bytes (sjlj_fc_type_node),
			      align);

      sjlj_mark_call_sites ();
      sjlj_emit_function_enter (NULL);
      sjlj_emit_function_exit ();
    }

  sjlj_lp_call_site_index.release ();
}
/* Update the sjlj function context.  This function should be called
   whenever we allocate or deallocate dynamic stack space.  */

void
update_sjlj_context (void)
{
  if (!flag_exceptions)
    return;

  emit_note (NOTE_INSN_UPDATE_SJLJ_CONTEXT);
}
/* After initial rtl generation, call back to finish generating
   exception support code.  */

void
finish_eh_generation (void)
{
  basic_block bb;

  /* Construct the landing pads.  */
  if (targetm_common.except_unwind_info (&global_options) == UI_SJLJ)
    sjlj_build_landing_pads ();
  else
    dw2_build_landing_pads ();
  break_superblocks ();

  if (targetm_common.except_unwind_info (&global_options) == UI_SJLJ
      /* Kludge for Alpha (see alpha_gp_save_rtx).  */
      || single_succ_edge (ENTRY_BLOCK_PTR_FOR_FN (cfun))->insns.r)
    commit_edge_insertions ();

  /* Redirect all EH edges from the post_landing_pad to the landing pad.  */
  FOR_EACH_BB_FN (bb, cfun)
    {
      eh_landing_pad lp;
      edge_iterator ei;
      edge e;

      lp = get_eh_landing_pad_from_rtx (BB_END (bb));

      FOR_EACH_EDGE (e, ei, bb->succs)
	if (e->flags & EDGE_EH)
	  break;

      /* We should not have generated any new throwing insns during this
	 pass, and we should not have lost any EH edges, so we only need
	 to handle two cases here:
	 (1) reachable handler and an existing edge to post-landing-pad,
	 (2) no reachable handler and no edge.  */
      gcc_assert ((lp != NULL) == (e != NULL));
      if (lp != NULL)
	{
	  gcc_assert (BB_HEAD (e->dest) == label_rtx (lp->post_landing_pad));

	  redirect_edge_succ (e, BLOCK_FOR_INSN (lp->landing_pad));
	  e->flags |= (CALL_P (BB_END (bb))
		       ? EDGE_ABNORMAL | EDGE_ABNORMAL_CALL
		       : EDGE_ABNORMAL);
	}
    }
}
/* This section handles removing dead code for flow.  */

void
remove_eh_landing_pad (eh_landing_pad lp)
{
  eh_landing_pad *pp;

  for (pp = &lp->region->landing_pads; *pp != lp; pp = &(*pp)->next_lp)
    continue;
  *pp = lp->next_lp;

  if (lp->post_landing_pad)
    EH_LANDING_PAD_NR (lp->post_landing_pad) = 0;
  (*cfun->eh->lp_array)[lp->index] = NULL;
}
/* Splice the EH region at PP from the region tree.  */

static void
remove_eh_handler_splicer (eh_region *pp)
{
  eh_region region = *pp;
  eh_landing_pad lp;

  for (lp = region->landing_pads; lp ; lp = lp->next_lp)
    {
      if (lp->post_landing_pad)
	EH_LANDING_PAD_NR (lp->post_landing_pad) = 0;
      (*cfun->eh->lp_array)[lp->index] = NULL;
    }

  if (region->inner)
    {
      eh_region p, outer;

      outer = region->outer;

      *pp = p = region->inner;
      do
	{
	  p->outer = outer;
	  pp = &p->next_peer;
	  p = *pp;
	}
      while (p);
    }
  *pp = region->next_peer;

  (*cfun->eh->region_array)[region->index] = NULL;
}
/* Splice a single EH region REGION from the region tree.

   To unlink REGION, we need to find the pointer to it with a relatively
   expensive search in REGION's outer region.  If you are going to
   remove a number of handlers, using remove_unreachable_eh_regions may
   be a better option.  */

void
remove_eh_handler (eh_region region)
{
  eh_region *pp, *pp_start, p, outer;

  outer = region->outer;
  if (outer)
    pp_start = &outer->inner;
  else
    pp_start = &cfun->eh->region_tree;
  for (pp = pp_start, p = *pp; p != region; pp = &p->next_peer, p = *pp)
    continue;

  remove_eh_handler_splicer (pp);
}
/* Worker for remove_unreachable_eh_regions.
   PP is a pointer to the region to start a region tree depth-first
   search from.  R_REACHABLE is the set of regions that have to be
   preserved.  */

static void
remove_unreachable_eh_regions_worker (eh_region *pp, sbitmap r_reachable)
{
  while (*pp)
    {
      eh_region region = *pp;
      remove_unreachable_eh_regions_worker (&region->inner, r_reachable);
      if (!bitmap_bit_p (r_reachable, region->index))
	remove_eh_handler_splicer (pp);
      else
	pp = &region->next_peer;
    }
}

/* Splice all EH regions *not* marked in R_REACHABLE from the region tree.
   Do this by traversing the EH tree top-down and splice out regions that
   are not marked.  By removing regions from the leaves, we avoid costly
   searches in the region tree.  */

void
remove_unreachable_eh_regions (sbitmap r_reachable)
{
  remove_unreachable_eh_regions_worker (&cfun->eh->region_tree, r_reachable);
}
/* Invokes CALLBACK for every exception handler landing pad label.
   Only used by reload hackery; should not be used by new code.  */

void
for_each_eh_label (void (*callback) (rtx))
{
  eh_landing_pad lp;
  int i;

  for (i = 1; vec_safe_iterate (cfun->eh->lp_array, i, &lp); ++i)
    {
      if (lp)
	{
	  rtx_code_label *lab = lp->landing_pad;
	  if (lab && LABEL_P (lab))
	    (*callback) (lab);
	}
    }
}
/* Create the REG_EH_REGION note for INSN, given its ECF_FLAGS for a
   call insn.

   At the gimple level, we use LP_NR
       > 0 : The statement transfers to landing pad LP_NR
       = 0 : The statement is outside any EH region
       < 0 : The statement is within MUST_NOT_THROW region -LP_NR.

   At the rtl level, we use LP_NR
       > 0 : The insn transfers to landing pad LP_NR
       = 0 : The insn cannot throw
       < 0 : The insn is within MUST_NOT_THROW region -LP_NR
       = INT_MIN : The insn cannot throw or execute a nonlocal-goto.
       missing note: The insn is outside any EH region.

   ??? This difference probably ought to be avoided.  We could stand
   to record nothrow for arbitrary gimple statements, and so avoid
   some moderately complex lookups in stmt_could_throw_p.  Perhaps
   NOTHROW should be mapped on both sides to INT_MIN.  Perhaps the
   no-nonlocal-goto property should be recorded elsewhere as a bit
   on the call_insn directly.  Perhaps we should make more use of
   attaching the trees to call_insns (reachable via symbol_ref in
   direct call cases) and just pull the data out of the trees.  */
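/* Concretely (an illustrative example), a call insn carrying the note
   (REG_EH_REGION 2) may transfer to landing pad 2; one carrying
   (REG_EH_REGION -1) sits within MUST_NOT_THROW region 1; and a call
   insn with no note at all lies outside any EH region.  */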
void
make_reg_eh_region_note (rtx_insn *insn, int ecf_flags, int lp_nr)
{
  rtx value;

  if (ecf_flags & ECF_NOTHROW)
    value = const0_rtx;
  else if (lp_nr != 0)
    value = GEN_INT (lp_nr);
  else
    return;

  add_reg_note (insn, REG_EH_REGION, value);
}

/* Create a REG_EH_REGION note for a CALL_INSN that cannot throw
   nor perform a non-local goto.  Replace the region note if it
   already exists.  */

void
make_reg_eh_region_note_nothrow_nononlocal (rtx_insn *insn)
{
  rtx note = find_reg_note (insn, REG_EH_REGION, NULL_RTX);
  rtx intmin = GEN_INT (INT_MIN);

  if (note != 0)
    XEXP (note, 0) = intmin;
  else
    add_reg_note (insn, REG_EH_REGION, intmin);
}

/* Return true if INSN could throw, assuming no REG_EH_REGION note
   to the contrary.  */

bool
insn_could_throw_p (const_rtx insn)
{
  if (!flag_exceptions)
    return false;
  if (CALL_P (insn))
    return true;
  if (INSN_P (insn) && cfun->can_throw_non_call_exceptions)
    return may_trap_p (PATTERN (insn));
  return false;
}
/* Copy a REG_EH_REGION note to each insn that might throw beginning
   at FIRST and ending at LAST.  NOTE_OR_INSN is either the source insn
   to look for a note, or the note itself.  */

void
copy_reg_eh_region_note_forward (rtx note_or_insn, rtx_insn *first, rtx last)
{
  rtx_insn *insn;
  rtx note = note_or_insn;

  if (INSN_P (note_or_insn))
    {
      note = find_reg_note (note_or_insn, REG_EH_REGION, NULL_RTX);
      if (note == NULL)
	return;
    }
  note = XEXP (note, 0);

  for (insn = first; insn != last ; insn = NEXT_INSN (insn))
    if (!find_reg_note (insn, REG_EH_REGION, NULL_RTX)
	&& insn_could_throw_p (insn))
      add_reg_note (insn, REG_EH_REGION, note);
}

/* Likewise, but iterate backward.  */

void
copy_reg_eh_region_note_backward (rtx note_or_insn, rtx_insn *last, rtx first)
{
  rtx_insn *insn;
  rtx note = note_or_insn;

  if (INSN_P (note_or_insn))
    {
      note = find_reg_note (note_or_insn, REG_EH_REGION, NULL_RTX);
      if (note == NULL)
	return;
    }
  note = XEXP (note, 0);

  for (insn = last; insn != first; insn = PREV_INSN (insn))
    if (insn_could_throw_p (insn))
      add_reg_note (insn, REG_EH_REGION, note);
}
/* Extract all EH information from INSN.  Return true if the insn
   was marked NOTHROW.  */

static bool
get_eh_region_and_lp_from_rtx (const_rtx insn, eh_region *pr,
			       eh_landing_pad *plp)
{
  eh_landing_pad lp = NULL;
  eh_region r = NULL;
  bool ret = false;
  rtx note;
  int lp_nr;

  if (! INSN_P (insn))
    goto egress;

  if (NONJUMP_INSN_P (insn)
      && GET_CODE (PATTERN (insn)) == SEQUENCE)
    insn = XVECEXP (PATTERN (insn), 0, 0);

  note = find_reg_note (insn, REG_EH_REGION, NULL_RTX);
  if (!note)
    {
      ret = !insn_could_throw_p (insn);
      goto egress;
    }

  lp_nr = INTVAL (XEXP (note, 0));
  if (lp_nr == 0 || lp_nr == INT_MIN)
    {
      ret = true;
      goto egress;
    }

  if (lp_nr < 0)
    r = (*cfun->eh->region_array)[-lp_nr];
  else
    {
      lp = (*cfun->eh->lp_array)[lp_nr];
      r = lp->region;
    }

 egress:
  *plp = lp;
  *pr = r;
  return ret;
}
/* Return the landing pad to which INSN may go, or NULL if it does not
   have a reachable landing pad within this function.  */

eh_landing_pad
get_eh_landing_pad_from_rtx (const_rtx insn)
{
  eh_landing_pad lp;
  eh_region r;

  get_eh_region_and_lp_from_rtx (insn, &r, &lp);
  return lp;
}

/* Return the region to which INSN may go, or NULL if it does not
   have a reachable region within this function.  */

eh_region
get_eh_region_from_rtx (const_rtx insn)
{
  eh_landing_pad lp;
  eh_region r;

  get_eh_region_and_lp_from_rtx (insn, &r, &lp);
  return r;
}

/* Return true if INSN throws and is caught by something in this function.  */

bool
can_throw_internal (const_rtx insn)
{
  return get_eh_landing_pad_from_rtx (insn) != NULL;
}
/* Return true if INSN throws and escapes from the current function.  */

bool
can_throw_external (const_rtx insn)
{
  eh_landing_pad lp;
  eh_region r;
  bool nothrow;

  if (! INSN_P (insn))
    return false;

  if (NONJUMP_INSN_P (insn)
      && GET_CODE (PATTERN (insn)) == SEQUENCE)
    {
      rtx_sequence *seq = as_a <rtx_sequence *> (PATTERN (insn));
      int i, n = seq->len ();

      for (i = 0; i < n; i++)
	if (can_throw_external (seq->element (i)))
	  return true;

      return false;
    }

  nothrow = get_eh_region_and_lp_from_rtx (insn, &r, &lp);

  /* If we can't throw, we obviously can't throw external.  */
  if (nothrow)
    return false;

  /* If we have an internal landing pad, then we're not external.  */
  if (lp != NULL)
    return false;

  /* If we're not within an EH region, then we are external.  */
  if (r == NULL)
    return true;

  /* The only thing that ought to be left is MUST_NOT_THROW regions,
     which don't always have landing pads.  */
  gcc_assert (r->type == ERT_MUST_NOT_THROW);
  return false;
}
/* Return true if INSN cannot throw at all.  */

bool
insn_nothrow_p (const_rtx insn)
{
  eh_landing_pad lp;
  eh_region r;

  if (! INSN_P (insn))
    return true;

  if (NONJUMP_INSN_P (insn)
      && GET_CODE (PATTERN (insn)) == SEQUENCE)
    {
      rtx_sequence *seq = as_a <rtx_sequence *> (PATTERN (insn));
      int i, n = seq->len ();

      for (i = 0; i < n; i++)
	if (!insn_nothrow_p (seq->element (i)))
	  return false;

      return true;
    }

  return get_eh_region_and_lp_from_rtx (insn, &r, &lp);
}

/* Return true if INSN can perform a non-local goto.  */
/* ??? This test is here in this file because it (ab)uses REG_EH_REGION.  */

bool
can_nonlocal_goto (const rtx_insn *insn)
{
  if (nonlocal_goto_handler_labels && CALL_P (insn))
    {
      rtx note = find_reg_note (insn, REG_EH_REGION, NULL_RTX);
      if (!note || INTVAL (XEXP (note, 0)) != INT_MIN)
	return true;
    }
  return false;
}
/* Set TREE_NOTHROW and crtl->all_throwers_are_sibcalls.  */

static unsigned int
set_nothrow_function_flags (void)
{
  rtx_insn *insn;

  crtl->nothrow = 1;

  /* Assume crtl->all_throwers_are_sibcalls until we encounter
     something that can throw an exception.  We specifically exempt
     CALL_INSNs that are SIBLING_CALL_P, as these are really jumps,
     and can't throw.  Most CALL_INSNs are not SIBLING_CALL_P, so this
     is optimistic.  */

  crtl->all_throwers_are_sibcalls = 1;

  /* If we don't know that this implementation of the function will
     actually be used, then we must not set TREE_NOTHROW, since
     callers must not assume that this function does not throw.  */
  if (TREE_NOTHROW (current_function_decl))
    return 0;

  if (! flag_exceptions)
    return 0;

  for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
    if (can_throw_external (insn))
      {
	crtl->nothrow = 0;

	if (!CALL_P (insn) || !SIBLING_CALL_P (insn))
	  {
	    crtl->all_throwers_are_sibcalls = 0;
	    return 0;
	  }
      }

  if (crtl->nothrow
      && (cgraph_node::get (current_function_decl)->get_availability ()
	  >= AVAIL_AVAILABLE))
    {
      struct cgraph_node *node = cgraph_node::get (current_function_decl);
      struct cgraph_edge *e;
      for (e = node->callers; e; e = e->next_caller)
	e->can_throw_external = false;
      node->set_nothrow_flag (true);

      if (dump_file)
	fprintf (dump_file, "Marking function nothrow: %s\n\n",
		 current_function_name ());
    }
  return 0;
}
namespace {

const pass_data pass_data_set_nothrow_function_flags =
{
  RTL_PASS, /* type */
  "nothrow", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_NONE, /* tv_id */
  0, /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};

class pass_set_nothrow_function_flags : public rtl_opt_pass
{
public:
  pass_set_nothrow_function_flags (gcc::context *ctxt)
    : rtl_opt_pass (pass_data_set_nothrow_function_flags, ctxt)
  {}

  /* opt_pass methods: */
  virtual unsigned int execute (function *)
    {
      return set_nothrow_function_flags ();
    }

}; // class pass_set_nothrow_function_flags

} // anon namespace

rtl_opt_pass *
make_pass_set_nothrow_function_flags (gcc::context *ctxt)
{
  return new pass_set_nothrow_function_flags (ctxt);
}
/* Various hooks for unwind library.  */

/* Expand the EH support builtin functions:
   __builtin_eh_pointer and __builtin_eh_filter.  */

static eh_region
expand_builtin_eh_common (tree region_nr_t)
{
  HOST_WIDE_INT region_nr;
  eh_region region;

  gcc_assert (tree_fits_shwi_p (region_nr_t));
  region_nr = tree_to_shwi (region_nr_t);

  region = (*cfun->eh->region_array)[region_nr];

  /* ??? We shouldn't have been able to delete an eh region without
     deleting all the code that depended on it.  */
  gcc_assert (region != NULL);

  return region;
}

/* Expand to the exc_ptr value from the given eh region.  */

rtx
expand_builtin_eh_pointer (tree exp)
{
  eh_region region
    = expand_builtin_eh_common (CALL_EXPR_ARG (exp, 0));
  if (region->exc_ptr_reg == NULL)
    region->exc_ptr_reg = gen_reg_rtx (ptr_mode);
  return region->exc_ptr_reg;
}

/* Expand to the filter value from the given eh region.  */

rtx
expand_builtin_eh_filter (tree exp)
{
  eh_region region
    = expand_builtin_eh_common (CALL_EXPR_ARG (exp, 0));
  if (region->filter_reg == NULL)
    region->filter_reg = gen_reg_rtx (targetm.eh_return_filter_mode ());
  return region->filter_reg;
}
/* Copy the exc_ptr and filter values from one landing pad's registers
   to another.  This is used to inline the resx statement.  */

rtx
expand_builtin_eh_copy_values (tree exp)
{
  eh_region dst
    = expand_builtin_eh_common (CALL_EXPR_ARG (exp, 0));
  eh_region src
    = expand_builtin_eh_common (CALL_EXPR_ARG (exp, 1));
  machine_mode fmode = targetm.eh_return_filter_mode ();

  if (dst->exc_ptr_reg == NULL)
    dst->exc_ptr_reg = gen_reg_rtx (ptr_mode);
  if (src->exc_ptr_reg == NULL)
    src->exc_ptr_reg = gen_reg_rtx (ptr_mode);

  if (dst->filter_reg == NULL)
    dst->filter_reg = gen_reg_rtx (fmode);
  if (src->filter_reg == NULL)
    src->filter_reg = gen_reg_rtx (fmode);

  emit_move_insn (dst->exc_ptr_reg, src->exc_ptr_reg);
  emit_move_insn (dst->filter_reg, src->filter_reg);

  return const0_rtx;
}

/* Do any necessary initialization to access arbitrary stack frames.
   On the SPARC, this means flushing the register windows.  */

void
expand_builtin_unwind_init (void)
{
  /* Set this so all the registers get saved in our frame; we need to be
     able to copy the saved values for any registers from frames we unwind.  */
  crtl->saves_all_registers = 1;

  SETUP_FRAME_ADDRESSES ();
}
/* Map a non-negative number to an eh return data register number; expands
   to -1 if no return data register is associated with the input number.
   At least the inputs 0 and 1 must be mapped; the target may provide more.  */

rtx
expand_builtin_eh_return_data_regno (tree exp)
{
  tree which = CALL_EXPR_ARG (exp, 0);
  unsigned HOST_WIDE_INT iwhich;

  if (TREE_CODE (which) != INTEGER_CST)
    {
      error ("argument of %<__builtin_eh_return_regno%> must be constant");
      return constm1_rtx;
    }

  iwhich = tree_to_uhwi (which);
  iwhich = EH_RETURN_DATA_REGNO (iwhich);
  if (iwhich == INVALID_REGNUM)
    return constm1_rtx;

#ifdef DWARF_FRAME_REGNUM
  iwhich = DWARF_FRAME_REGNUM (iwhich);
#else
  iwhich = DBX_REGISTER_NUMBER (iwhich);
#endif

  return GEN_INT (iwhich);
}
/* Given a value extracted from the return address register or stack slot,
   return the actual address encoded in that value.  */

rtx
expand_builtin_extract_return_addr (tree addr_tree)
{
  rtx addr = expand_expr (addr_tree, NULL_RTX, Pmode, EXPAND_NORMAL);

  if (GET_MODE (addr) != Pmode
      && GET_MODE (addr) != VOIDmode)
    {
#ifdef POINTERS_EXTEND_UNSIGNED
      addr = convert_memory_address (Pmode, addr);
#else
      addr = convert_to_mode (Pmode, addr, 0);
#endif
    }

  /* First mask out any unwanted bits.  */
  rtx mask = MASK_RETURN_ADDR;
  if (mask)
    expand_and (Pmode, addr, mask, addr);

  /* Then adjust to find the real return address.  */
  if (RETURN_ADDR_OFFSET)
    addr = plus_constant (Pmode, addr, RETURN_ADDR_OFFSET);

  return addr;
}

/* Given an actual address in addr_tree, do any necessary encoding
   and return the value to be stored in the return address register or
   stack slot so the epilogue will return to that address.  */

rtx
expand_builtin_frob_return_addr (tree addr_tree)
{
  rtx addr = expand_expr (addr_tree, NULL_RTX, ptr_mode, EXPAND_NORMAL);

  addr = convert_memory_address (Pmode, addr);

  if (RETURN_ADDR_OFFSET)
    {
      addr = force_reg (Pmode, addr);
      addr = plus_constant (Pmode, addr, -RETURN_ADDR_OFFSET);
    }

  return addr;
}
/* Set up the epilogue with the magic bits we'll need to return to the
   exception handler.  */

void
expand_builtin_eh_return (tree stackadj_tree ATTRIBUTE_UNUSED,
			  tree handler_tree)
{
  rtx tmp;

#ifdef EH_RETURN_STACKADJ_RTX
  tmp = expand_expr (stackadj_tree, crtl->eh.ehr_stackadj,
		     VOIDmode, EXPAND_NORMAL);
  tmp = convert_memory_address (Pmode, tmp);
  if (!crtl->eh.ehr_stackadj)
    crtl->eh.ehr_stackadj = copy_addr_to_reg (tmp);
  else if (tmp != crtl->eh.ehr_stackadj)
    emit_move_insn (crtl->eh.ehr_stackadj, tmp);
#endif

  tmp = expand_expr (handler_tree, crtl->eh.ehr_handler,
		     VOIDmode, EXPAND_NORMAL);
  tmp = convert_memory_address (Pmode, tmp);
  if (!crtl->eh.ehr_handler)
    crtl->eh.ehr_handler = copy_addr_to_reg (tmp);
  else if (tmp != crtl->eh.ehr_handler)
    emit_move_insn (crtl->eh.ehr_handler, tmp);

  if (!crtl->eh.ehr_label)
    crtl->eh.ehr_label = gen_label_rtx ();
  emit_jump (crtl->eh.ehr_label);
}
/* Expand __builtin_eh_return.  This exit path from the function loads up
   the eh return data registers, adjusts the stack, and branches to a
   given PC other than the normal return address.  */

void
expand_eh_return (void)
{
  rtx_code_label *around_label;

  if (! crtl->eh.ehr_label)
    return;

  crtl->calls_eh_return = 1;

#ifdef EH_RETURN_STACKADJ_RTX
  emit_move_insn (EH_RETURN_STACKADJ_RTX, const0_rtx);
#endif

  around_label = gen_label_rtx ();
  emit_jump (around_label);

  emit_label (crtl->eh.ehr_label);
  clobber_return_register ();

#ifdef EH_RETURN_STACKADJ_RTX
  emit_move_insn (EH_RETURN_STACKADJ_RTX, crtl->eh.ehr_stackadj);
#endif

  if (targetm.have_eh_return ())
    emit_insn (targetm.gen_eh_return (crtl->eh.ehr_handler));
  else
    {
#ifdef EH_RETURN_HANDLER_RTX
      emit_move_insn (EH_RETURN_HANDLER_RTX, crtl->eh.ehr_handler);
#else
      error ("__builtin_eh_return not supported on this target");
#endif
    }

  emit_label (around_label);
}

/* Convert a ptr_mode address ADDR_TREE to a Pmode address controlled by
   POINTERS_EXTEND_UNSIGNED and return it.  */

rtx
expand_builtin_extend_pointer (tree addr_tree)
{
  rtx addr = expand_expr (addr_tree, NULL_RTX, ptr_mode, EXPAND_NORMAL);
  int extend;

#ifdef POINTERS_EXTEND_UNSIGNED
  extend = POINTERS_EXTEND_UNSIGNED;
#else
  /* The previous EH code did an unsigned extend by default, so we do this also
     for code compatibility.  */
  extend = 1;
#endif

  return convert_modes (targetm.unwind_word_mode (), ptr_mode, addr, extend);
}

static int
add_action_record (action_hash_type *ar_hash, int filter, int next)
{
  struct action_record **slot, *new_ar, tmp;

  tmp.filter = filter;
  tmp.next = next;
  slot = ar_hash->find_slot (&tmp, INSERT);

  if ((new_ar = *slot) == NULL)
    {
      new_ar = XNEW (struct action_record);
      new_ar->offset = crtl->eh.action_record_data->length () + 1;
      new_ar->filter = filter;
      new_ar->next = next;
      *slot = new_ar;

      /* The filter value goes in untouched.  The link to the next
         record is a "self-relative" byte offset, or zero to indicate
         that there is no next record.  So convert the absolute 1 based
         indices we've been carrying around into a displacement.  */

      push_sleb128 (&crtl->eh.action_record_data, filter);
      if (next)
        next -= crtl->eh.action_record_data->length () + 1;
      push_sleb128 (&crtl->eh.action_record_data, next);
    }

  return new_ar->offset;
}

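/* Worked example (indices invented for illustration): if the data
   vector holds six bytes when a new record links to an existing record
   at absolute 1-based index 3, then after the filter byte is pushed
   length () is 7 and the stored link is 3 - (7 + 1) = -5: a
   self-relative displacement measured from the link field itself, with
   zero still meaning "no next record".  */
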
static int
collect_one_action_chain (action_hash_type *ar_hash, eh_region region)
{
  int next;

  /* If we've reached the top of the region chain, then we have
     no actions, and require no landing pad.  */
  if (region == NULL)
    return -1;

  switch (region->type)
    {
    case ERT_CLEANUP:
      {
        eh_region r;

        /* A cleanup adds a zero filter to the beginning of the chain, but
           there are special cases to look out for.  If there are *only*
           cleanups along a path, then it compresses to a zero action.
           Further, if there are multiple cleanups along a path, we only
           need to represent one of them, as that is enough to trigger
           entry to the landing pad at runtime.  */
        next = collect_one_action_chain (ar_hash, region->outer);
        if (next <= 0)
          return 0;
        for (r = region->outer; r ; r = r->outer)
          if (r->type == ERT_CLEANUP)
            return next;
        return add_action_record (ar_hash, 0, next);
      }

    case ERT_TRY:
      {
        eh_catch c;

        /* Process the associated catch regions in reverse order.
           If there's a catch-all handler, then we don't need to
           search outer regions.  Use a magic -3 value to record
           that we haven't done the outer search.  */
        next = -3;
        for (c = region->u.eh_try.last_catch; c ; c = c->prev_catch)
          {
            if (c->type_list == NULL)
              {
                /* Retrieve the filter from the head of the filter list
                   where we have stored it (see assign_filter_values).  */
                int filter = TREE_INT_CST_LOW (TREE_VALUE (c->filter_list));
                next = add_action_record (ar_hash, filter, 0);
              }
            else
              {
                /* Once the outer search is done, trigger an action record for
                   each filter we have.  */
                tree flt_node;

                if (next == -3)
                  {
                    next = collect_one_action_chain (ar_hash, region->outer);

                    /* If there is no next action, terminate the chain.  */
                    if (next == -1)
                      next = 0;
                    /* If all outer actions are cleanups or must_not_throw,
                       we'll have no action record for it, since we had wanted
                       to encode these states in the call-site record directly.
                       Add a cleanup action to the chain to catch these.  */
                    else if (next <= 0)
                      next = add_action_record (ar_hash, 0, 0);
                  }

                flt_node = c->filter_list;
                for (; flt_node; flt_node = TREE_CHAIN (flt_node))
                  {
                    int filter = TREE_INT_CST_LOW (TREE_VALUE (flt_node));
                    next = add_action_record (ar_hash, filter, next);
                  }
              }
          }
        return next;
      }

    case ERT_ALLOWED_EXCEPTIONS:
      /* An exception specification adds its filter to the
         beginning of the chain.  */
      next = collect_one_action_chain (ar_hash, region->outer);

      /* If there is no next action, terminate the chain.  */
      if (next == -1)
        next = 0;
      /* If all outer actions are cleanups or must_not_throw,
         we'll have no action record for it, since we had wanted
         to encode these states in the call-site record directly.
         Add a cleanup action to the chain to catch these.  */
      else if (next <= 0)
        next = add_action_record (ar_hash, 0, 0);

      return add_action_record (ar_hash, region->u.allowed.filter, next);

    case ERT_MUST_NOT_THROW:
      /* A must-not-throw region with no inner handlers or cleanups
         requires no call-site entry.  Note that this differs from
         the no handler or cleanup case in that we do require an lsda
         to be generated.  Return a magic -2 value to record this.  */
      return -2;
    }

  gcc_unreachable ();
}

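/* Illustrative chain (region shapes invented): for a throw inside a
   cleanup nested within a try whose catch was assigned filter value 1,
   the try contributes add_action_record (ar_hash, 1, 0) and the
   cleanup then prepends add_action_record (ar_hash, 0, <try record>),
   so at runtime the cleanup action is considered before the catch.  */
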
static int
add_call_site (rtx landing_pad, int action, int section)
{
  call_site_record record;

  record = ggc_alloc<call_site_record_d> ();
  record->landing_pad = landing_pad;
  record->action = action;

  vec_safe_push (crtl->eh.call_site_record_v[section], record);

  return call_site_base + crtl->eh.call_site_record_v[section]->length () - 1;
}

static rtx_note *
emit_note_eh_region_end (rtx_insn *insn)
{
  rtx_insn *next = NEXT_INSN (insn);

  /* Make sure we do not split a call and its corresponding
     CALL_ARG_LOCATION note.  */
  if (next && NOTE_P (next)
      && NOTE_KIND (next) == NOTE_INSN_CALL_ARG_LOCATION)
    insn = next;

  return emit_note_after (NOTE_INSN_EH_REGION_END, insn);
}

/* Turn REG_EH_REGION notes back into NOTE_INSN_EH_REGION notes.
   The new note numbers will not refer to region numbers, but
   instead to call site entries.  */

static unsigned int
convert_to_eh_region_ranges (void)
{
  rtx insn;
  rtx_insn *iter;
  rtx_note *note;
  action_hash_type ar_hash (31);
  int last_action = -3;
  rtx_insn *last_action_insn = NULL;
  rtx last_landing_pad = NULL_RTX;
  rtx_insn *first_no_action_insn = NULL;
  int call_site = 0;
  int cur_sec = 0;
  rtx_insn *section_switch_note = NULL;
  rtx_insn *first_no_action_insn_before_switch = NULL;
  rtx_insn *last_no_action_insn_before_switch = NULL;
  int saved_call_site_base = call_site_base;

  vec_alloc (crtl->eh.action_record_data, 64);

  for (iter = get_insns (); iter ; iter = NEXT_INSN (iter))
    if (INSN_P (iter))
      {
        eh_landing_pad lp;
        eh_region region;
        bool nothrow;
        int this_action;
        rtx_code_label *this_landing_pad;

        insn = iter;
        if (NONJUMP_INSN_P (insn)
            && GET_CODE (PATTERN (insn)) == SEQUENCE)
          insn = XVECEXP (PATTERN (insn), 0, 0);

        nothrow = get_eh_region_and_lp_from_rtx (insn, &region, &lp);
        if (nothrow)
          continue;
        if (region)
          this_action = collect_one_action_chain (&ar_hash, region);
        else
          this_action = -1;

        /* Existence of catch handlers, or must-not-throw regions
           implies that an lsda is needed (even if empty).  */
        if (this_action != -1)
          crtl->uses_eh_lsda = 1;

        /* Delay creation of region notes for no-action regions
           until we're sure that an lsda will be required.  */
        else if (last_action == -3)
          {
            first_no_action_insn = iter;
            last_action = -1;
          }

        if (this_action >= 0)
          this_landing_pad = lp->landing_pad;
        else
          this_landing_pad = NULL;

        /* Differing actions or landing pads imply a change in call-site
           info, which implies some EH_REGION note should be emitted.  */
        if (last_action != this_action
            || last_landing_pad != this_landing_pad)
          {
            /* If there is a queued no-action region in the other section
               with hot/cold partitioning, emit it now.  */
            if (first_no_action_insn_before_switch)
              {
                gcc_assert (this_action != -1
                            && last_action == (first_no_action_insn
                                               ? -1 : -3));
                call_site = add_call_site (NULL_RTX, 0, 0);
                note = emit_note_before (NOTE_INSN_EH_REGION_BEG,
                                         first_no_action_insn_before_switch);
                NOTE_EH_HANDLER (note) = call_site;
                note
                  = emit_note_eh_region_end (last_no_action_insn_before_switch);
                NOTE_EH_HANDLER (note) = call_site;
                gcc_assert (last_action != -3
                            || (last_action_insn
                                == last_no_action_insn_before_switch));
                first_no_action_insn_before_switch = NULL;
                last_no_action_insn_before_switch = NULL;
                call_site_base++;
              }

            /* If we'd not seen a previous action (-3) or the previous
               action was must-not-throw (-2), then we do not need an
               end note.  */
            if (last_action >= -1)
              {
                /* If we delayed the creation of the begin, do it now.  */
                if (first_no_action_insn)
                  {
                    call_site = add_call_site (NULL_RTX, 0, cur_sec);
                    note = emit_note_before (NOTE_INSN_EH_REGION_BEG,
                                             first_no_action_insn);
                    NOTE_EH_HANDLER (note) = call_site;
                    first_no_action_insn = NULL;
                  }

                note = emit_note_eh_region_end (last_action_insn);
                NOTE_EH_HANDLER (note) = call_site;
              }

            /* If the new action is must-not-throw, then no region notes
               are created.  */
            if (this_action >= -1)
              {
                call_site = add_call_site (this_landing_pad,
                                           this_action < 0 ? 0 : this_action,
                                           cur_sec);
                note = emit_note_before (NOTE_INSN_EH_REGION_BEG, iter);
                NOTE_EH_HANDLER (note) = call_site;
              }

            last_action = this_action;
            last_landing_pad = this_landing_pad;
          }

        last_action_insn = iter;
      }
    else if (NOTE_P (iter)
             && NOTE_KIND (iter) == NOTE_INSN_SWITCH_TEXT_SECTIONS)
      {
        gcc_assert (section_switch_note == NULL_RTX);
        gcc_assert (flag_reorder_blocks_and_partition);
        section_switch_note = iter;
        if (first_no_action_insn)
          {
            first_no_action_insn_before_switch = first_no_action_insn;
            last_no_action_insn_before_switch = last_action_insn;
            first_no_action_insn = NULL;
            gcc_assert (last_action == -1);
            last_action = -3;
          }
        /* Force closing of current EH region before section switch and
           opening a new one afterwards.  */
        else if (last_action != -3)
          last_landing_pad = pc_rtx;

        if (crtl->eh.call_site_record_v[cur_sec])
          call_site_base += crtl->eh.call_site_record_v[cur_sec]->length ();
        cur_sec++;
        gcc_assert (crtl->eh.call_site_record_v[cur_sec] == NULL);
        vec_alloc (crtl->eh.call_site_record_v[cur_sec], 10);
      }

  if (last_action >= -1 && ! first_no_action_insn)
    {
      note = emit_note_eh_region_end (last_action_insn);
      NOTE_EH_HANDLER (note) = call_site;
    }

  call_site_base = saved_call_site_base;

  return 0;
}

namespace {

const pass_data pass_data_convert_to_eh_region_ranges =
{
  RTL_PASS, /* type */
  "eh_ranges", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_NONE, /* tv_id */
  0, /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};

class pass_convert_to_eh_region_ranges : public rtl_opt_pass
{
public:
  pass_convert_to_eh_region_ranges (gcc::context *ctxt)
    : rtl_opt_pass (pass_data_convert_to_eh_region_ranges, ctxt)
  {}

  /* opt_pass methods: */
  virtual bool gate (function *);
  virtual unsigned int execute (function *)
    {
      return convert_to_eh_region_ranges ();
    }

}; // class pass_convert_to_eh_region_ranges

bool
pass_convert_to_eh_region_ranges::gate (function *)
{
  /* Nothing to do for SJLJ exceptions or if no regions created.  */
  if (cfun->eh->region_tree == NULL)
    return false;
  if (targetm_common.except_unwind_info (&global_options) == UI_SJLJ)
    return false;
  return true;
}

} // anon namespace

rtl_opt_pass *
make_pass_convert_to_eh_region_ranges (gcc::context *ctxt)
{
  return new pass_convert_to_eh_region_ranges (ctxt);
}

static void
push_uleb128 (vec<uchar, va_gc> **data_area, unsigned int value)
{
  do
    {
      unsigned char byte = value & 0x7f;
      value >>= 7;
      if (value)
        byte |= 0x80;
      vec_safe_push (*data_area, byte);
    }
  while (value);
}

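/* For example (standard uleb128 arithmetic): the value 624485
   (0x98765) is pushed low seven bits first as the three bytes
   0xe5 0x8e 0x26, the high bit of each byte flagging that another
   byte follows.  */
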
static void
push_sleb128 (vec<uchar, va_gc> **data_area, int value)
{
  unsigned char byte;
  int more;

  do
    {
      byte = value & 0x7f;
      value >>= 7;
      more = ! ((value == 0 && (byte & 0x40) == 0)
                || (value == -1 && (byte & 0x40) != 0));
      if (more)
        byte |= 0x80;
      vec_safe_push (*data_area, byte);
    }
  while (more);
}

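/* For example (standard sleb128 arithmetic): -129 is pushed as
   0xff 0x7e, while -2 needs only the single byte 0x7e.  The loop
   stops once the remaining value is pure sign extension (0 or -1)
   and bit 0x40 of the last byte already encodes that sign.  */
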
#ifndef HAVE_AS_LEB128
static int
dw2_size_of_call_site_table (int section)
{
  int n = vec_safe_length (crtl->eh.call_site_record_v[section]);
  int size = n * (4 + 4 + 4);
  int i;

  for (i = 0; i < n; ++i)
    {
      struct call_site_record_d *cs =
        (*crtl->eh.call_site_record_v[section])[i];
      size += size_of_uleb128 (cs->action);
    }

  return size;
}

static int
sjlj_size_of_call_site_table (void)
{
  int n = vec_safe_length (crtl->eh.call_site_record_v[0]);
  int size = 0;
  int i;

  for (i = 0; i < n; ++i)
    {
      struct call_site_record_d *cs =
        (*crtl->eh.call_site_record_v[0])[i];
      size += size_of_uleb128 (INTVAL (cs->landing_pad));
      size += size_of_uleb128 (cs->action);
    }

  return size;
}
#endif

static void
dw2_output_call_site_table (int cs_format, int section)
{
  int n = vec_safe_length (crtl->eh.call_site_record_v[section]);
  int i;
  const char *begin;

  if (section == 0)
    begin = current_function_func_begin_label;
  else if (first_function_block_is_cold)
    begin = crtl->subsections.hot_section_label;
  else
    begin = crtl->subsections.cold_section_label;

  for (i = 0; i < n; ++i)
    {
      struct call_site_record_d *cs =
        (*crtl->eh.call_site_record_v[section])[i];
      char reg_start_lab[32];
      char reg_end_lab[32];
      char landing_pad_lab[32];

      ASM_GENERATE_INTERNAL_LABEL (reg_start_lab, "LEHB", call_site_base + i);
      ASM_GENERATE_INTERNAL_LABEL (reg_end_lab, "LEHE", call_site_base + i);

      if (cs->landing_pad)
        ASM_GENERATE_INTERNAL_LABEL (landing_pad_lab, "L",
                                     CODE_LABEL_NUMBER (cs->landing_pad));

      /* ??? Perhaps use insn length scaling if the assembler supports
         generic arithmetic.  */
      /* ??? Perhaps use attr_length to choose data1 or data2 instead of
         data4 if the function is small enough.  */
      if (cs_format == DW_EH_PE_uleb128)
        {
          dw2_asm_output_delta_uleb128 (reg_start_lab, begin,
                                        "region %d start", i);
          dw2_asm_output_delta_uleb128 (reg_end_lab, reg_start_lab,
                                        "length");
          if (cs->landing_pad)
            dw2_asm_output_delta_uleb128 (landing_pad_lab, begin,
                                          "landing pad");
          else
            dw2_asm_output_data_uleb128 (0, "landing pad");
        }
      else
        {
          dw2_asm_output_delta (4, reg_start_lab, begin,
                                "region %d start", i);
          dw2_asm_output_delta (4, reg_end_lab, reg_start_lab, "length");
          if (cs->landing_pad)
            dw2_asm_output_delta (4, landing_pad_lab, begin,
                                  "landing pad");
          else
            dw2_asm_output_data (4, 0, "landing pad");
        }
      dw2_asm_output_data_uleb128 (cs->action, "action");
    }

  call_site_base += n;
}

static void
sjlj_output_call_site_table (void)
{
  int n = vec_safe_length (crtl->eh.call_site_record_v[0]);
  int i;

  for (i = 0; i < n; ++i)
    {
      struct call_site_record_d *cs =
        (*crtl->eh.call_site_record_v[0])[i];

      dw2_asm_output_data_uleb128 (INTVAL (cs->landing_pad),
                                   "region %d landing pad", i);
      dw2_asm_output_data_uleb128 (cs->action, "action");
    }

  call_site_base += n;
}

/* Switch to the section that should be used for exception tables.  */

static void
switch_to_exception_section (const char * ARG_UNUSED (fnname))
{
  section *s;

  if (exception_section)
    s = exception_section;
  else
    {
      int flags;

      if (EH_TABLES_CAN_BE_READ_ONLY)
        {
          int tt_format =
            ASM_PREFERRED_EH_DATA_FORMAT (/*code=*/0, /*global=*/1);
          flags = ((! flag_pic
                    || ((tt_format & 0x70) != DW_EH_PE_absptr
                        && (tt_format & 0x70) != DW_EH_PE_aligned))
                   ? 0 : SECTION_WRITE);
        }
      else
        flags = SECTION_WRITE;

      /* Compute the section and cache it into exception_section,
         unless it depends on the function name.  */
      if (targetm_common.have_named_sections)
        {
#ifdef HAVE_LD_EH_GC_SECTIONS
          if (flag_function_sections
              || (DECL_COMDAT_GROUP (current_function_decl)
                  && HAVE_COMDAT_GROUP))
            {
              char *section_name = XNEWVEC (char, strlen (fnname) + 32);
              /* The EH table must match the code section, so only mark
                 it linkonce if we have COMDAT groups to tie them together.  */
              if (DECL_COMDAT_GROUP (current_function_decl)
                  && HAVE_COMDAT_GROUP)
                flags |= SECTION_LINKONCE;
              sprintf (section_name, ".gcc_except_table.%s", fnname);
              s = get_section (section_name, flags, current_function_decl);
              free (section_name);
            }
          else
#endif
            exception_section
              = s = get_section (".gcc_except_table", flags, NULL);
        }
      else
        exception_section
          = s = flags == SECTION_WRITE ? data_section : readonly_data_section;
    }

  switch_to_section (s);
}

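/* For example, with -ffunction-sections (or a COMDAT copy of the
   function) a function foo gets a table section named
   ".gcc_except_table.foo", matching its code section so that the
   linker can garbage-collect the two together.  */
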
/* Output a reference from an exception table to the type_info object TYPE.
   TT_FORMAT and TT_FORMAT_SIZE describe the DWARF encoding method used for
   the value.  */

static void
output_ttype (tree type, int tt_format, int tt_format_size)
{
  rtx value;
  bool is_public = true;

  if (type == NULL_TREE)
    value = const0_rtx;
  else
    {
      /* FIXME lto.  pass_ipa_free_lang_data changes all types to
         runtime types so TYPE should already be a runtime type
         reference.  When pass_ipa_free_lang data is made a default
         pass, we can then remove the call to lookup_type_for_runtime
         below.  */
      if (TYPE_P (type))
        type = lookup_type_for_runtime (type);

      value = expand_expr (type, NULL_RTX, VOIDmode, EXPAND_INITIALIZER);

      /* Let cgraph know that the rtti decl is used.  Not all of the
         paths below go through assemble_integer, which would take
         care of this for us.  */
      STRIP_NOPS (type);
      if (TREE_CODE (type) == ADDR_EXPR)
        {
          type = TREE_OPERAND (type, 0);
          if (TREE_CODE (type) == VAR_DECL)
            is_public = TREE_PUBLIC (type);
        }
      else
        gcc_assert (TREE_CODE (type) == INTEGER_CST);
    }

  /* Allow the target to override the type table entry format.  */
  if (targetm.asm_out.ttype (value))
    return;

  if (tt_format == DW_EH_PE_absptr || tt_format == DW_EH_PE_aligned)
    assemble_integer (value, tt_format_size,
                      tt_format_size * BITS_PER_UNIT, 1);
  else
    dw2_asm_output_encoded_addr_rtx (tt_format, value, is_public, NULL);
}

static void
output_one_function_exception_table (int section)
{
  int tt_format, cs_format, lp_format, i;
#ifdef HAVE_AS_LEB128
  char ttype_label[32];
  char cs_after_size_label[32];
  char cs_end_label[32];
#else
  int call_site_len;
#endif
  int have_tt_data;
  int tt_format_size = 0;

  have_tt_data = (vec_safe_length (cfun->eh->ttype_data)
                  || (targetm.arm_eabi_unwinder
                      ? vec_safe_length (cfun->eh->ehspec_data.arm_eabi)
                      : vec_safe_length (cfun->eh->ehspec_data.other)));

  /* Indicate the format of the @TType entries.  */
  if (! have_tt_data)
    tt_format = DW_EH_PE_omit;
  else
    {
      tt_format = ASM_PREFERRED_EH_DATA_FORMAT (/*code=*/0, /*global=*/1);
#ifdef HAVE_AS_LEB128
      ASM_GENERATE_INTERNAL_LABEL (ttype_label,
                                   section ? "LLSDATTC" : "LLSDATT",
                                   current_function_funcdef_no);
#endif
      tt_format_size = size_of_encoded_value (tt_format);

      assemble_align (tt_format_size * BITS_PER_UNIT);
    }

  targetm.asm_out.internal_label (asm_out_file, section ? "LLSDAC" : "LLSDA",
                                  current_function_funcdef_no);

  /* The LSDA header.  */

  /* Indicate the format of the landing pad start pointer.  An omitted
     field implies @LPStart == @Start.  */
  /* Currently we always put @LPStart == @Start.  This field would
     be most useful in moving the landing pads completely out of
     line to another section, but it could also be used to minimize
     the size of uleb128 landing pad offsets.  */
  lp_format = DW_EH_PE_omit;
  dw2_asm_output_data (1, lp_format, "@LPStart format (%s)",
                       eh_data_format_name (lp_format));

  /* @LPStart pointer would go here.  */

  dw2_asm_output_data (1, tt_format, "@TType format (%s)",
                       eh_data_format_name (tt_format));

#ifndef HAVE_AS_LEB128
  if (targetm_common.except_unwind_info (&global_options) == UI_SJLJ)
    call_site_len = sjlj_size_of_call_site_table ();
  else
    call_site_len = dw2_size_of_call_site_table (section);
#endif

  /* A pc-relative 4-byte displacement to the @TType data.  */
  if (have_tt_data)
    {
#ifdef HAVE_AS_LEB128
      char ttype_after_disp_label[32];
      ASM_GENERATE_INTERNAL_LABEL (ttype_after_disp_label,
                                   section ? "LLSDATTDC" : "LLSDATTD",
                                   current_function_funcdef_no);
      dw2_asm_output_delta_uleb128 (ttype_label, ttype_after_disp_label,
                                    "@TType base offset");
      ASM_OUTPUT_LABEL (asm_out_file, ttype_after_disp_label);
#else
      /* Ug.  Alignment queers things.  */
      unsigned int before_disp, after_disp, last_disp, disp;

      before_disp = 1 + 1;
      after_disp = (1 + size_of_uleb128 (call_site_len)
                    + call_site_len
                    + vec_safe_length (crtl->eh.action_record_data)
                    + (vec_safe_length (cfun->eh->ttype_data)
                       * tt_format_size));

      disp = after_disp;
      do
        {
          unsigned int disp_size, pad;

          last_disp = disp;
          disp_size = size_of_uleb128 (disp);
          pad = before_disp + disp_size + after_disp;
          if (pad % tt_format_size)
            pad = tt_format_size - (pad % tt_format_size);
          else
            pad = 0;
          disp = after_disp + pad;
        }
      while (disp != last_disp);

      dw2_asm_output_data_uleb128 (disp, "@TType base offset");
#endif
    }

  /* Indicate the format of the call-site offsets.  */
#ifdef HAVE_AS_LEB128
  cs_format = DW_EH_PE_uleb128;
#else
  cs_format = DW_EH_PE_udata4;
#endif
  dw2_asm_output_data (1, cs_format, "call-site format (%s)",
                       eh_data_format_name (cs_format));

#ifdef HAVE_AS_LEB128
  ASM_GENERATE_INTERNAL_LABEL (cs_after_size_label,
                               section ? "LLSDACSBC" : "LLSDACSB",
                               current_function_funcdef_no);
  ASM_GENERATE_INTERNAL_LABEL (cs_end_label,
                               section ? "LLSDACSEC" : "LLSDACSE",
                               current_function_funcdef_no);
  dw2_asm_output_delta_uleb128 (cs_end_label, cs_after_size_label,
                                "Call-site table length");
  ASM_OUTPUT_LABEL (asm_out_file, cs_after_size_label);
  if (targetm_common.except_unwind_info (&global_options) == UI_SJLJ)
    sjlj_output_call_site_table ();
  else
    dw2_output_call_site_table (cs_format, section);
  ASM_OUTPUT_LABEL (asm_out_file, cs_end_label);
#else
  dw2_asm_output_data_uleb128 (call_site_len, "Call-site table length");
  if (targetm_common.except_unwind_info (&global_options) == UI_SJLJ)
    sjlj_output_call_site_table ();
  else
    dw2_output_call_site_table (cs_format, section);
#endif

  /* ??? Decode and interpret the data for flag_debug_asm.  */
  {
    uchar uc;
    FOR_EACH_VEC_ELT (*crtl->eh.action_record_data, i, uc)
      dw2_asm_output_data (1, uc, i ? NULL : "Action record table");
  }

  if (have_tt_data)
    assemble_align (tt_format_size * BITS_PER_UNIT);

  i = vec_safe_length (cfun->eh->ttype_data);
  while (i-- > 0)
    {
      tree type = (*cfun->eh->ttype_data)[i];
      output_ttype (type, tt_format, tt_format_size);
    }

#ifdef HAVE_AS_LEB128
  if (have_tt_data)
    ASM_OUTPUT_LABEL (asm_out_file, ttype_label);
#endif

  /* ??? Decode and interpret the data for flag_debug_asm.  */
  if (targetm.arm_eabi_unwinder)
    {
      tree type;
      for (i = 0;
           vec_safe_iterate (cfun->eh->ehspec_data.arm_eabi, i, &type); ++i)
        output_ttype (type, tt_format, tt_format_size);
    }
  else
    {
      uchar uc;
      for (i = 0;
           vec_safe_iterate (cfun->eh->ehspec_data.other, i, &uc); ++i)
        dw2_asm_output_data (1, uc,
                             i ? NULL : "Exception specification table");
    }
}

void
output_function_exception_table (const char *fnname)
{
  rtx personality = get_personality_function (current_function_decl);

  /* Not all functions need anything.  */
  if (! crtl->uses_eh_lsda)
    return;

  if (personality)
    {
      assemble_external_libcall (personality);

      if (targetm.asm_out.emit_except_personality)
        targetm.asm_out.emit_except_personality (personality);
    }

  switch_to_exception_section (fnname);

  /* If the target wants a label to begin the table, emit it here.  */
  targetm.asm_out.emit_except_table_label (asm_out_file);

  output_one_function_exception_table (0);
  if (crtl->eh.call_site_record_v[1])
    output_one_function_exception_table (1);

  switch_to_section (current_function_section ());
}

void
set_eh_throw_stmt_table (function *fun, hash_map<gimple *, int> *table)
{
  fun->eh->throw_stmt_table = table;
}

hash_map<gimple *, int> *
get_eh_throw_stmt_table (struct function *fun)
{
  return fun->eh->throw_stmt_table;
}

/* Determine if the function needs an EH personality function.  */

enum eh_personality_kind
function_needs_eh_personality (struct function *fn)
{
  enum eh_personality_kind kind = eh_personality_none;
  eh_region i;

  FOR_ALL_EH_REGION_FN (i, fn)
    {
      switch (i->type)
        {
        case ERT_CLEANUP:
          /* Can do with any personality including the generic C one.  */
          kind = eh_personality_any;
          break;

        case ERT_TRY:
        case ERT_ALLOWED_EXCEPTIONS:
          /* Always needs an EH personality function.  The generic C
             personality doesn't handle these even for empty type lists.  */
          return eh_personality_lang;

        case ERT_MUST_NOT_THROW:
          /* Always needs an EH personality function.  The language may
             specify which abort routine must be used, e.g. std::terminate.  */
          return eh_personality_lang;
        }
    }

  return kind;
}

/* Dump EH information to OUT.  */

void
dump_eh_tree (FILE * out, struct function *fun)
{
  eh_region i;
  int depth = 0;
  static const char *const type_name[] = {
    "cleanup", "try", "allowed_exceptions", "must_not_throw"
  };

  i = fun->eh->region_tree;
  if (!i)
    return;

  fprintf (out, "Eh tree:\n");
  while (1)
    {
      fprintf (out, " %*s %i %s", depth * 2, "",
               i->index, type_name[(int) i->type]);
      if (i->landing_pads)
        {
          eh_landing_pad lp;

          fprintf (out, " land:");
          if (current_ir_type () == IR_GIMPLE)
            {
              for (lp = i->landing_pads; lp ; lp = lp->next_lp)
                {
                  fprintf (out, "{%i,", lp->index);
                  print_generic_expr (out, lp->post_landing_pad, 0);
                  fputc ('}', out);
                  if (lp->next_lp)
                    fputc (',', out);
                }
            }
          else
            {
              for (lp = i->landing_pads; lp ; lp = lp->next_lp)
                {
                  fprintf (out, "{%i,", lp->index);
                  if (lp->landing_pad)
                    fprintf (out, "%i%s,", INSN_UID (lp->landing_pad),
                             NOTE_P (lp->landing_pad) ? "(del)" : "");
                  else
                    fprintf (out, "(nil),");
                  if (lp->post_landing_pad)
                    {
                      rtx_insn *lab = label_rtx (lp->post_landing_pad);
                      fprintf (out, "%i%s}", INSN_UID (lab),
                               NOTE_P (lab) ? "(del)" : "");
                    }
                  else
                    fprintf (out, "(nil)}");
                  if (lp->next_lp)
                    fputc (',', out);
                }
            }
        }

      switch (i->type)
        {
        case ERT_CLEANUP:
        case ERT_MUST_NOT_THROW:
          break;

        case ERT_TRY:
          {
            eh_catch c;
            fprintf (out, " catch:");
            for (c = i->u.eh_try.first_catch; c; c = c->next_catch)
              {
                fputc ('{', out);
                if (c->label)
                  {
                    fprintf (out, "lab:");
                    print_generic_expr (out, c->label, 0);
                    fputc (';', out);
                  }
                print_generic_expr (out, c->type_list, 0);
                fputc ('}', out);
                if (c->next_catch)
                  fputc (',', out);
              }
          }
          break;

        case ERT_ALLOWED_EXCEPTIONS:
          fprintf (out, " filter :%i types:", i->u.allowed.filter);
          print_generic_expr (out, i->u.allowed.type_list, 0);
          break;
        }
      fputc ('\n', out);

      /* If there are sub-regions, process them.  */
      if (i->inner)
        i = i->inner, depth++;
      /* If there are peers, process them.  */
      else if (i->next_peer)
        i = i->next_peer;
      /* Otherwise, step back up the tree to the next peer.  */
      else
        {
          do
            {
              i = i->outer;
              depth--;
              if (i == NULL)
                return;
            }
          while (i->next_peer == NULL);
          i = i->next_peer;
        }
    }
}

/* Dump the EH tree for FN on stderr.  */

DEBUG_FUNCTION void
debug_eh_tree (struct function *fn)
{
  dump_eh_tree (stderr, fn);
}

/* Verify invariants on EH datastructures.  */

DEBUG_FUNCTION void
verify_eh_tree (struct function *fun)
{
  eh_region r, outer;
  int nvisited_lp, nvisited_r;
  int count_lp, count_r, depth, i;
  eh_landing_pad lp;
  bool err = false;

  if (!fun->eh->region_tree)
    return;

  count_r = 0;
  for (i = 1; vec_safe_iterate (fun->eh->region_array, i, &r); ++i)
    if (r)
      {
        if (r->index == i)
          count_r++;
        else
          {
            error ("region_array is corrupted for region %i", r->index);
            err = true;
          }
      }

  count_lp = 0;
  for (i = 1; vec_safe_iterate (fun->eh->lp_array, i, &lp); ++i)
    if (lp)
      {
        if (lp->index == i)
          count_lp++;
        else
          {
            error ("lp_array is corrupted for lp %i", lp->index);
            err = true;
          }
      }

  depth = nvisited_lp = nvisited_r = 0;
  outer = NULL;
  r = fun->eh->region_tree;
  while (1)
    {
      if ((*fun->eh->region_array)[r->index] != r)
        {
          error ("region_array is corrupted for region %i", r->index);
          err = true;
        }
      if (r->outer != outer)
        {
          error ("outer block of region %i is wrong", r->index);
          err = true;
        }
      if (depth < 0)
        {
          error ("negative nesting depth of region %i", r->index);
          err = true;
        }
      nvisited_r++;

      for (lp = r->landing_pads; lp ; lp = lp->next_lp)
        {
          if ((*fun->eh->lp_array)[lp->index] != lp)
            {
              error ("lp_array is corrupted for lp %i", lp->index);
              err = true;
            }
          if (lp->region != r)
            {
              error ("region of lp %i is wrong", lp->index);
              err = true;
            }
          nvisited_lp++;
        }

      if (r->inner)
        outer = r, r = r->inner, depth++;
      else if (r->next_peer)
        r = r->next_peer;
      else
        {
          do
            {
              r = r->outer;
              if (r == NULL)
                goto region_done;
              depth--;
              outer = r->outer;
            }
          while (r->next_peer == NULL);
          r = r->next_peer;
        }
    }
 region_done:
  if (depth != 0)
    {
      error ("tree list ends on depth %i", depth);
      err = true;
    }
  if (count_r != nvisited_r)
    {
      error ("region_array does not match region_tree");
      err = true;
    }
  if (count_lp != nvisited_lp)
    {
      error ("lp_array does not match region_tree");
      err = true;
    }

  if (err)
    {
      dump_eh_tree (stderr, fun);
      internal_error ("verify_eh_tree failed");
    }
}

3425 #include "gt-except.h"