1 /* Implements exception handling.
2 Copyright (C) 1989-2015 Free Software Foundation, Inc.
3 Contributed by Mike Stump <mrs@cygnus.com>.
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify it under
8 the terms of the GNU General Public License as published by the Free
9 Software Foundation; either version 3, or (at your option) any later
12 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13 WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
22 /* An exception is an event that can be "thrown" from within a
23 function. This event can then be "caught" by the callers of the function.
26 The representation of exceptions changes several times during
27 the compilation process:
29 In the beginning, in the front end, we have the GENERIC trees
30 TRY_CATCH_EXPR, TRY_FINALLY_EXPR, WITH_CLEANUP_EXPR,
31 CLEANUP_POINT_EXPR, CATCH_EXPR, and EH_FILTER_EXPR.
33 During initial gimplification (gimplify.c) these are lowered
34 to the GIMPLE_TRY, GIMPLE_CATCH, and GIMPLE_EH_FILTER nodes.
35 The WITH_CLEANUP_EXPR and CLEANUP_POINT_EXPR nodes are converted
36 into GIMPLE_TRY_FINALLY nodes; the others are a more direct 1-1 conversion.
39 During pass_lower_eh (tree-eh.c) we record the nested structure
40 of the TRY nodes in EH_REGION nodes in CFUN->EH->REGION_TREE.
41 We expand the eh_protect_cleanup_actions langhook into MUST_NOT_THROW
42 regions at this time. We can then flatten the statements within
43 the TRY nodes to straight-line code. Statements that had been within
44 TRY nodes that can throw are recorded within CFUN->EH->THROW_STMT_TABLE,
45 so that we may remember what action is supposed to be taken if
46 a given statement does throw. During this lowering process,
47 we create an EH_LANDING_PAD node for each EH_REGION that has
48 some code within the function that needs to be executed if a
49 throw does happen. We also create RESX statements that are
50 used to transfer control from an inner EH_REGION to an outer
51 EH_REGION. We also create EH_DISPATCH statements as placeholders
52 for a runtime type comparison that should be made in order to
53 select the action to perform among different CATCH and EH_FILTER
56 During pass_lower_eh_dispatch (tree-eh.c), which is run after
57 all inlining is complete, we are able to run assign_filter_values,
58 which allows us to map the set of types manipulated by all of the
59 CATCH and EH_FILTER regions to a set of integers. This set of integers
60 will be how the exception runtime communicates with the code generated
61 within the function. We then expand the GIMPLE_EH_DISPATCH statements
62 to a switch or conditional branches that use the argument provided by
63 the runtime (__builtin_eh_filter) and the set of integers we computed
64 in assign_filter_values.
66 During pass_lower_resx (tree-eh.c), which is run near the end
67 of optimization, we expand RESX statements. If the eh region
68 that is outer to the RESX statement is a MUST_NOT_THROW, then
69 the RESX expands to some form of abort statement. If the eh
70 region that is outer to the RESX statement is within the current
71 function, then the RESX expands to a bookkeeping call
72 (__builtin_eh_copy_values) and a goto. Otherwise, the next
73 handler for the exception must be within a function somewhere
74 up the call chain, so we call back into the exception runtime
75 (__builtin_unwind_resume).
77 During pass_expand (cfgexpand.c), we generate REG_EH_REGION notes
78 that create an rtl to eh_region mapping that corresponds to the
79 gimple to eh_region mapping that had been recorded in the THROW_STMT_TABLE.
82 Then, via finish_eh_generation, we generate the real landing pads
83 to which the runtime will actually transfer control. These new
84 landing pads perform whatever bookkeeping is needed by the target
85 backend in order to resume execution within the current function.
86 Each of these new landing pads falls through into the post_landing_pad
87 label which had been used within the CFG up to this point. All
88 exception edges within the CFG are redirected to the new landing pads.
89 If the target uses setjmp to implement exceptions, the various extra
90 calls into the runtime to register and unregister the current stack
91 frame are emitted at this time.
93 During pass_convert_to_eh_region_ranges (except.c), we transform
94 the REG_EH_REGION notes attached to individual insns into
95 non-overlapping ranges of insns bounded by NOTE_INSN_EH_REGION_BEG
96 and NOTE_INSN_EH_REGION_END. Each insn within such ranges has the
97 same associated action within the exception region tree, meaning
98 that (1) the exception is caught by the same landing pad within the
99 current function, (2) the exception is blocked by the runtime with
100 a MUST_NOT_THROW region, or (3) the exception is not handled at all
101 within the current function.
103 Finally, during assembly generation, we call
104 output_function_exception_table (except.c) to emit the tables with
105 which the exception runtime can determine if a given stack frame
106 handles a given exception, and if so what filter value to provide
107 to the function when the non-local control transfer is effected.
108 If the target uses dwarf2 unwinding to implement exceptions, then
109 output_call_frame_info (dwarf2out.c) emits the required unwind data. */
114 #include "coretypes.h"
119 #include "cfghooks.h"
120 #include "tree-pass.h"
122 #include "stringpool.h"
125 #include "emit-rtl.h"
127 #include "diagnostic.h"
129 #include "fold-const.h"
130 #include "stor-layout.h"
138 #include "libfuncs.h"
141 #include "dwarf2asm.h"
142 #include "dwarf2out.h"
145 #include "common/common-target.h"
146 #include "langhooks.h"
148 #include "tree-pretty-print.h"
150 #include "builtins.h"
151 #include "tree-hash-traits.h"
/* Running count of call-site table entries emitted so far; presumably
   used to bias indices across successive tables — TODO confirm against
   output_function_exception_table.  */
153 static GTY(()) int call_site_base
;
/* Map from a front-end type node to the runtime type object produced by
   lang_hooks.eh_runtime_type; populated by add_type_for_runtime and read
   by lookup_type_for_runtime.  */
155 static GTY (()) hash_map
<tree_hash
, tree
> *type_to_runtime_map
;
157 /* Describe the SjLj_Function_Context structure. */
158 static GTY(()) tree sjlj_fc_type_node
;
/* Byte offsets of the interesting SjLj_Function_Context fields, cached
   from the laid-out type (see the DECL_FIELD_OFFSET computations in the
   initialization code below).  */
159 static int sjlj_fc_call_site_ofs
;
160 static int sjlj_fc_data_ofs
;
161 static int sjlj_fc_personality_ofs
;
162 static int sjlj_fc_lsda_ofs
;
163 static int sjlj_fc_jbuf_ofs
;
166 struct GTY(()) call_site_record_d
172 /* In the following structure and associated functions,
173 we represent entries in the action table as 1-based indices.
176 0: null action record, non-null landing pad; implies cleanups
177 -1: null action record, null landing pad; implies no action
178 -2: no call-site entry; implies must_not_throw
179 -3: we have yet to process outer regions
181 Further, no special cases apply to the "next" field of the record.
182 For next, 0 means end of list. */
191 /* Hashtable helpers. */
193 struct action_record_hasher
: free_ptr_hash
<action_record
>
195 static inline hashval_t
hash (const action_record
*);
196 static inline bool equal (const action_record
*, const action_record
*);
200 action_record_hasher::hash (const action_record
*entry
)
202 return entry
->next
* 1009 + entry
->filter
;
206 action_record_hasher::equal (const action_record
*entry
,
207 const action_record
*data
)
209 return entry
->filter
== data
->filter
&& entry
->next
== data
->next
;
212 typedef hash_table
<action_record_hasher
> action_hash_type
;
214 static bool get_eh_region_and_lp_from_rtx (const_rtx
, eh_region
*,
217 static void dw2_build_landing_pads (void);
219 static int collect_one_action_chain (action_hash_type
*, eh_region
);
220 static int add_call_site (rtx
, int, int);
222 static void push_uleb128 (vec
<uchar
, va_gc
> **, unsigned int);
223 static void push_sleb128 (vec
<uchar
, va_gc
> **, int);
224 #ifndef HAVE_AS_LEB128
225 static int dw2_size_of_call_site_table (int);
226 static int sjlj_size_of_call_site_table (void);
228 static void dw2_output_call_site_table (int, int);
229 static void sjlj_output_call_site_table (void);
235 if (! flag_exceptions
)
238 type_to_runtime_map
= hash_map
<tree_hash
, tree
>::create_ggc (31);
240 /* Create the SjLj_Function_Context structure. This should match
241 the definition in unwind-sjlj.c. */
242 if (targetm_common
.except_unwind_info (&global_options
) == UI_SJLJ
)
244 tree f_jbuf
, f_per
, f_lsda
, f_prev
, f_cs
, f_data
, tmp
;
246 sjlj_fc_type_node
= lang_hooks
.types
.make_type (RECORD_TYPE
);
248 f_prev
= build_decl (BUILTINS_LOCATION
,
249 FIELD_DECL
, get_identifier ("__prev"),
250 build_pointer_type (sjlj_fc_type_node
));
251 DECL_FIELD_CONTEXT (f_prev
) = sjlj_fc_type_node
;
253 f_cs
= build_decl (BUILTINS_LOCATION
,
254 FIELD_DECL
, get_identifier ("__call_site"),
256 DECL_FIELD_CONTEXT (f_cs
) = sjlj_fc_type_node
;
258 tmp
= build_index_type (size_int (4 - 1));
259 tmp
= build_array_type (lang_hooks
.types
.type_for_mode
260 (targetm
.unwind_word_mode (), 1),
262 f_data
= build_decl (BUILTINS_LOCATION
,
263 FIELD_DECL
, get_identifier ("__data"), tmp
);
264 DECL_FIELD_CONTEXT (f_data
) = sjlj_fc_type_node
;
266 f_per
= build_decl (BUILTINS_LOCATION
,
267 FIELD_DECL
, get_identifier ("__personality"),
269 DECL_FIELD_CONTEXT (f_per
) = sjlj_fc_type_node
;
271 f_lsda
= build_decl (BUILTINS_LOCATION
,
272 FIELD_DECL
, get_identifier ("__lsda"),
274 DECL_FIELD_CONTEXT (f_lsda
) = sjlj_fc_type_node
;
276 #ifdef DONT_USE_BUILTIN_SETJMP
278 tmp
= size_int (JMP_BUF_SIZE
- 1);
280 /* Should be large enough for most systems, if it is not,
281 JMP_BUF_SIZE should be defined with the proper value. It will
282 also tend to be larger than necessary for most systems, a more
283 optimal port will define JMP_BUF_SIZE. */
284 tmp
= size_int (FIRST_PSEUDO_REGISTER
+ 2 - 1);
287 /* Compute a minimally sized jump buffer. We need room to store at
288 least 3 pointers - stack pointer, frame pointer and return address.
289 Plus for some targets we need room for an extra pointer - in the
290 case of MIPS this is the global pointer. This makes a total of four
291 pointers, but to be safe we actually allocate room for 5.
293 If pointers are smaller than words then we allocate enough room for
294 5 words, just in case the backend needs this much room. For more
295 discussion on this issue see:
296 http://gcc.gnu.org/ml/gcc-patches/2014-05/msg00313.html. */
297 if (POINTER_SIZE
> BITS_PER_WORD
)
298 tmp
= size_int (5 - 1);
300 tmp
= size_int ((5 * BITS_PER_WORD
/ POINTER_SIZE
) - 1);
303 tmp
= build_index_type (tmp
);
304 tmp
= build_array_type (ptr_type_node
, tmp
);
305 f_jbuf
= build_decl (BUILTINS_LOCATION
,
306 FIELD_DECL
, get_identifier ("__jbuf"), tmp
);
307 #ifdef DONT_USE_BUILTIN_SETJMP
308 /* We don't know what the alignment requirements of the
309 runtime's jmp_buf has. Overestimate. */
310 DECL_ALIGN (f_jbuf
) = BIGGEST_ALIGNMENT
;
311 DECL_USER_ALIGN (f_jbuf
) = 1;
313 DECL_FIELD_CONTEXT (f_jbuf
) = sjlj_fc_type_node
;
315 TYPE_FIELDS (sjlj_fc_type_node
) = f_prev
;
316 TREE_CHAIN (f_prev
) = f_cs
;
317 TREE_CHAIN (f_cs
) = f_data
;
318 TREE_CHAIN (f_data
) = f_per
;
319 TREE_CHAIN (f_per
) = f_lsda
;
320 TREE_CHAIN (f_lsda
) = f_jbuf
;
322 layout_type (sjlj_fc_type_node
);
324 /* Cache the interesting field offsets so that we have
325 easy access from rtl. */
326 sjlj_fc_call_site_ofs
327 = (tree_to_uhwi (DECL_FIELD_OFFSET (f_cs
))
328 + tree_to_uhwi (DECL_FIELD_BIT_OFFSET (f_cs
)) / BITS_PER_UNIT
);
330 = (tree_to_uhwi (DECL_FIELD_OFFSET (f_data
))
331 + tree_to_uhwi (DECL_FIELD_BIT_OFFSET (f_data
)) / BITS_PER_UNIT
);
332 sjlj_fc_personality_ofs
333 = (tree_to_uhwi (DECL_FIELD_OFFSET (f_per
))
334 + tree_to_uhwi (DECL_FIELD_BIT_OFFSET (f_per
)) / BITS_PER_UNIT
);
336 = (tree_to_uhwi (DECL_FIELD_OFFSET (f_lsda
))
337 + tree_to_uhwi (DECL_FIELD_BIT_OFFSET (f_lsda
)) / BITS_PER_UNIT
);
339 = (tree_to_uhwi (DECL_FIELD_OFFSET (f_jbuf
))
340 + tree_to_uhwi (DECL_FIELD_BIT_OFFSET (f_jbuf
)) / BITS_PER_UNIT
);
345 init_eh_for_function (void)
347 cfun
->eh
= ggc_cleared_alloc
<eh_status
> ();
349 /* Make sure zero'th entries are used. */
350 vec_safe_push (cfun
->eh
->region_array
, (eh_region
)0);
351 vec_safe_push (cfun
->eh
->lp_array
, (eh_landing_pad
)0);
354 /* Routines to generate the exception tree somewhat directly.
355 These are used from tree-eh.c when processing exception related
356 nodes during tree optimization. */
359 gen_eh_region (enum eh_region_type type
, eh_region outer
)
363 /* Insert a new blank region as a leaf in the tree. */
364 new_eh
= ggc_cleared_alloc
<eh_region_d
> ();
366 new_eh
->outer
= outer
;
369 new_eh
->next_peer
= outer
->inner
;
370 outer
->inner
= new_eh
;
374 new_eh
->next_peer
= cfun
->eh
->region_tree
;
375 cfun
->eh
->region_tree
= new_eh
;
378 new_eh
->index
= vec_safe_length (cfun
->eh
->region_array
);
379 vec_safe_push (cfun
->eh
->region_array
, new_eh
);
381 /* Copy the language's notion of whether to use __cxa_end_cleanup. */
382 if (targetm
.arm_eabi_unwinder
&& lang_hooks
.eh_use_cxa_end_cleanup
)
383 new_eh
->use_cxa_end_cleanup
= true;
389 gen_eh_region_cleanup (eh_region outer
)
391 return gen_eh_region (ERT_CLEANUP
, outer
);
395 gen_eh_region_try (eh_region outer
)
397 return gen_eh_region (ERT_TRY
, outer
);
401 gen_eh_region_catch (eh_region t
, tree type_or_list
)
404 tree type_list
, type_node
;
406 gcc_assert (t
->type
== ERT_TRY
);
408 /* Ensure to always end up with a type list to normalize further
409 processing, then register each type against the runtime types map. */
410 type_list
= type_or_list
;
413 if (TREE_CODE (type_or_list
) != TREE_LIST
)
414 type_list
= tree_cons (NULL_TREE
, type_or_list
, NULL_TREE
);
416 type_node
= type_list
;
417 for (; type_node
; type_node
= TREE_CHAIN (type_node
))
418 add_type_for_runtime (TREE_VALUE (type_node
));
421 c
= ggc_cleared_alloc
<eh_catch_d
> ();
422 c
->type_list
= type_list
;
423 l
= t
->u
.eh_try
.last_catch
;
428 t
->u
.eh_try
.first_catch
= c
;
429 t
->u
.eh_try
.last_catch
= c
;
435 gen_eh_region_allowed (eh_region outer
, tree allowed
)
437 eh_region region
= gen_eh_region (ERT_ALLOWED_EXCEPTIONS
, outer
);
438 region
->u
.allowed
.type_list
= allowed
;
440 for (; allowed
; allowed
= TREE_CHAIN (allowed
))
441 add_type_for_runtime (TREE_VALUE (allowed
));
447 gen_eh_region_must_not_throw (eh_region outer
)
449 return gen_eh_region (ERT_MUST_NOT_THROW
, outer
);
453 gen_eh_landing_pad (eh_region region
)
455 eh_landing_pad lp
= ggc_cleared_alloc
<eh_landing_pad_d
> ();
457 lp
->next_lp
= region
->landing_pads
;
459 lp
->index
= vec_safe_length (cfun
->eh
->lp_array
);
460 region
->landing_pads
= lp
;
462 vec_safe_push (cfun
->eh
->lp_array
, lp
);
468 get_eh_region_from_number_fn (struct function
*ifun
, int i
)
470 return (*ifun
->eh
->region_array
)[i
];
474 get_eh_region_from_number (int i
)
476 return get_eh_region_from_number_fn (cfun
, i
);
480 get_eh_landing_pad_from_number_fn (struct function
*ifun
, int i
)
482 return (*ifun
->eh
->lp_array
)[i
];
486 get_eh_landing_pad_from_number (int i
)
488 return get_eh_landing_pad_from_number_fn (cfun
, i
);
/* Return the EH region for landing-pad number I in function IFUN.
   NOTE(review): interior lines of this definition appear to be missing
   from this extraction; the visible code shows a negative index mapped
   directly into region_array and a positive index going through
   lp_array.  */
492 get_eh_region_from_lp_number_fn (struct function
*ifun
, int i
)
/* Negative lp numbers index regions directly — presumably the
   MUST_NOT_THROW encoding; TODO confirm.  */
495 return (*ifun
->eh
->region_array
)[-i
];
/* Positive lp numbers look up the landing pad itself.  */
501 lp
= (*ifun
->eh
->lp_array
)[i
];
507 get_eh_region_from_lp_number (int i
)
509 return get_eh_region_from_lp_number_fn (cfun
, i
);
512 /* Returns true if the current function has exception handling regions. */
515 current_function_has_exception_handlers (void)
517 return cfun
->eh
->region_tree
!= NULL
;
520 /* A subroutine of duplicate_eh_regions. Copy the eh_region tree at OLD.
521 Root it at OUTER, and apply LP_OFFSET to the lp numbers. */
523 struct duplicate_eh_regions_data
525 duplicate_eh_regions_map label_map
;
526 void *label_map_data
;
527 hash_map
<void *, void *> *eh_map
;
531 duplicate_eh_regions_1 (struct duplicate_eh_regions_data
*data
,
532 eh_region old_r
, eh_region outer
)
534 eh_landing_pad old_lp
, new_lp
;
537 new_r
= gen_eh_region (old_r
->type
, outer
);
538 gcc_assert (!data
->eh_map
->put (old_r
, new_r
));
548 for (oc
= old_r
->u
.eh_try
.first_catch
; oc
; oc
= oc
->next_catch
)
550 /* We should be doing all our region duplication before and
551 during inlining, which is before filter lists are created. */
552 gcc_assert (oc
->filter_list
== NULL
);
553 nc
= gen_eh_region_catch (new_r
, oc
->type_list
);
554 nc
->label
= data
->label_map (oc
->label
, data
->label_map_data
);
559 case ERT_ALLOWED_EXCEPTIONS
:
560 new_r
->u
.allowed
.type_list
= old_r
->u
.allowed
.type_list
;
561 if (old_r
->u
.allowed
.label
)
562 new_r
->u
.allowed
.label
563 = data
->label_map (old_r
->u
.allowed
.label
, data
->label_map_data
);
565 new_r
->u
.allowed
.label
= NULL_TREE
;
568 case ERT_MUST_NOT_THROW
:
569 new_r
->u
.must_not_throw
.failure_loc
=
570 LOCATION_LOCUS (old_r
->u
.must_not_throw
.failure_loc
);
571 new_r
->u
.must_not_throw
.failure_decl
=
572 old_r
->u
.must_not_throw
.failure_decl
;
576 for (old_lp
= old_r
->landing_pads
; old_lp
; old_lp
= old_lp
->next_lp
)
578 /* Don't bother copying unused landing pads. */
579 if (old_lp
->post_landing_pad
== NULL
)
582 new_lp
= gen_eh_landing_pad (new_r
);
583 gcc_assert (!data
->eh_map
->put (old_lp
, new_lp
));
585 new_lp
->post_landing_pad
586 = data
->label_map (old_lp
->post_landing_pad
, data
->label_map_data
);
587 EH_LANDING_PAD_NR (new_lp
->post_landing_pad
) = new_lp
->index
;
590 /* Make sure to preserve the original use of __cxa_end_cleanup. */
591 new_r
->use_cxa_end_cleanup
= old_r
->use_cxa_end_cleanup
;
593 for (old_r
= old_r
->inner
; old_r
; old_r
= old_r
->next_peer
)
594 duplicate_eh_regions_1 (data
, old_r
, new_r
);
597 /* Duplicate the EH regions from IFUN rooted at COPY_REGION into
598 the current function and root the tree below OUTER_REGION.
599 The special case of COPY_REGION of NULL means all regions.
600 Remap labels using MAP/MAP_DATA callback. Return a pointer map
601 that allows the caller to remap uses of both EH regions and
604 hash_map
<void *, void *> *
605 duplicate_eh_regions (struct function
*ifun
,
606 eh_region copy_region
, int outer_lp
,
607 duplicate_eh_regions_map map
, void *map_data
)
609 struct duplicate_eh_regions_data data
;
610 eh_region outer_region
;
613 verify_eh_tree (ifun
);
615 data
.label_map
= map
;
616 data
.label_map_data
= map_data
;
617 data
.eh_map
= new hash_map
<void *, void *>;
619 outer_region
= get_eh_region_from_lp_number_fn (cfun
, outer_lp
);
621 /* Copy all the regions in the subtree. */
623 duplicate_eh_regions_1 (&data
, copy_region
, outer_region
);
627 for (r
= ifun
->eh
->region_tree
; r
; r
= r
->next_peer
)
628 duplicate_eh_regions_1 (&data
, r
, outer_region
);
632 verify_eh_tree (cfun
);
637 /* Return the region that is outer to both REGION_A and REGION_B in IFUN. */
640 eh_region_outermost (struct function
*ifun
, eh_region region_a
,
645 gcc_assert (ifun
->eh
->region_array
);
646 gcc_assert (ifun
->eh
->region_tree
);
648 b_outer
= sbitmap_alloc (ifun
->eh
->region_array
->length ());
649 bitmap_clear (b_outer
);
653 bitmap_set_bit (b_outer
, region_b
->index
);
654 region_b
= region_b
->outer
;
660 if (bitmap_bit_p (b_outer
, region_a
->index
))
662 region_a
= region_a
->outer
;
666 sbitmap_free (b_outer
);
671 add_type_for_runtime (tree type
)
673 /* If TYPE is NOP_EXPR, it means that it already is a runtime type. */
674 if (TREE_CODE (type
) == NOP_EXPR
)
677 bool existed
= false;
678 tree
*slot
= &type_to_runtime_map
->get_or_insert (type
, &existed
);
680 *slot
= lang_hooks
.eh_runtime_type (type
);
684 lookup_type_for_runtime (tree type
)
686 /* If TYPE is NOP_EXPR, it means that it already is a runtime type. */
687 if (TREE_CODE (type
) == NOP_EXPR
)
690 /* We should have always inserted the data earlier. */
691 return *type_to_runtime_map
->get (type
);
695 /* Represent an entry in @TTypes for either catch actions
696 or exception filter actions. */
697 struct ttypes_filter
{
702 /* Helper for ttypes_filter hashing. */
704 struct ttypes_filter_hasher
: free_ptr_hash
<ttypes_filter
>
706 typedef tree_node
*compare_type
;
707 static inline hashval_t
hash (const ttypes_filter
*);
708 static inline bool equal (const ttypes_filter
*, const tree_node
*);
711 /* Compare ENTRY (a ttypes_filter entry in the hash table) with DATA
712 (a tree) for a @TTypes type node we are thinking about adding. */
715 ttypes_filter_hasher::equal (const ttypes_filter
*entry
, const tree_node
*data
)
717 return entry
->t
== data
;
721 ttypes_filter_hasher::hash (const ttypes_filter
*entry
)
723 return TREE_HASH (entry
->t
);
726 typedef hash_table
<ttypes_filter_hasher
> ttypes_hash_type
;
729 /* Helper for ehspec hashing. */
731 struct ehspec_hasher
: free_ptr_hash
<ttypes_filter
>
733 static inline hashval_t
hash (const ttypes_filter
*);
734 static inline bool equal (const ttypes_filter
*, const ttypes_filter
*);
737 /* Compare ENTRY with DATA (both struct ttypes_filter) for a @TTypes
738 exception specification list we are thinking about adding. */
739 /* ??? Currently we use the type lists in the order given. Someone
740 should put these in some canonical order. */
743 ehspec_hasher::equal (const ttypes_filter
*entry
, const ttypes_filter
*data
)
745 return type_list_equal (entry
->t
, data
->t
);
748 /* Hash function for exception specification lists. */
751 ehspec_hasher::hash (const ttypes_filter
*entry
)
756 for (list
= entry
->t
; list
; list
= TREE_CHAIN (list
))
757 h
= (h
<< 5) + (h
>> 27) + TREE_HASH (TREE_VALUE (list
));
761 typedef hash_table
<ehspec_hasher
> ehspec_hash_type
;
764 /* Add TYPE (which may be NULL) to cfun->eh->ttype_data, using TYPES_HASH
765 to speed up the search. Return the filter value to be used. */
768 add_ttypes_entry (ttypes_hash_type
*ttypes_hash
, tree type
)
770 struct ttypes_filter
**slot
, *n
;
772 slot
= ttypes_hash
->find_slot_with_hash (type
, (hashval_t
) TREE_HASH (type
),
775 if ((n
= *slot
) == NULL
)
777 /* Filter value is a 1 based table index. */
779 n
= XNEW (struct ttypes_filter
);
781 n
->filter
= vec_safe_length (cfun
->eh
->ttype_data
) + 1;
784 vec_safe_push (cfun
->eh
->ttype_data
, type
);
790 /* Add LIST to cfun->eh->ehspec_data, using EHSPEC_HASH and TYPES_HASH
791 to speed up the search. Return the filter value to be used. */
794 add_ehspec_entry (ehspec_hash_type
*ehspec_hash
, ttypes_hash_type
*ttypes_hash
,
797 struct ttypes_filter
**slot
, *n
;
798 struct ttypes_filter dummy
;
801 slot
= ehspec_hash
->find_slot (&dummy
, INSERT
);
803 if ((n
= *slot
) == NULL
)
807 if (targetm
.arm_eabi_unwinder
)
808 len
= vec_safe_length (cfun
->eh
->ehspec_data
.arm_eabi
);
810 len
= vec_safe_length (cfun
->eh
->ehspec_data
.other
);
812 /* Filter value is a -1 based byte index into a uleb128 buffer. */
814 n
= XNEW (struct ttypes_filter
);
816 n
->filter
= -(len
+ 1);
819 /* Generate a 0 terminated list of filter values. */
820 for (; list
; list
= TREE_CHAIN (list
))
822 if (targetm
.arm_eabi_unwinder
)
823 vec_safe_push (cfun
->eh
->ehspec_data
.arm_eabi
, TREE_VALUE (list
));
826 /* Look up each type in the list and encode its filter
827 value as a uleb128. */
828 push_uleb128 (&cfun
->eh
->ehspec_data
.other
,
829 add_ttypes_entry (ttypes_hash
, TREE_VALUE (list
)));
832 if (targetm
.arm_eabi_unwinder
)
833 vec_safe_push (cfun
->eh
->ehspec_data
.arm_eabi
, NULL_TREE
);
835 vec_safe_push (cfun
->eh
->ehspec_data
.other
, (uchar
)0);
841 /* Generate the action filter values to be used for CATCH and
842 ALLOWED_EXCEPTIONS regions. When using dwarf2 exception regions,
843 we use lots of landing pads, and so every type or list can share
844 the same filter value, which saves table space. */
847 assign_filter_values (void)
853 vec_alloc (cfun
->eh
->ttype_data
, 16);
854 if (targetm
.arm_eabi_unwinder
)
855 vec_alloc (cfun
->eh
->ehspec_data
.arm_eabi
, 64);
857 vec_alloc (cfun
->eh
->ehspec_data
.other
, 64);
859 ehspec_hash_type
ehspec (31);
860 ttypes_hash_type
ttypes (31);
862 for (i
= 1; vec_safe_iterate (cfun
->eh
->region_array
, i
, &r
); ++i
)
870 for (c
= r
->u
.eh_try
.first_catch
; c
; c
= c
->next_catch
)
872 /* Whatever type_list is (NULL or true list), we build a list
873 of filters for the region. */
874 c
->filter_list
= NULL_TREE
;
876 if (c
->type_list
!= NULL
)
878 /* Get a filter value for each of the types caught and store
879 them in the region's dedicated list. */
880 tree tp_node
= c
->type_list
;
882 for ( ; tp_node
; tp_node
= TREE_CHAIN (tp_node
))
885 = add_ttypes_entry (&ttypes
, TREE_VALUE (tp_node
));
886 tree flt_node
= build_int_cst (integer_type_node
, flt
);
889 = tree_cons (NULL_TREE
, flt_node
, c
->filter_list
);
894 /* Get a filter value for the NULL list also since it
895 will need an action record anyway. */
896 int flt
= add_ttypes_entry (&ttypes
, NULL
);
897 tree flt_node
= build_int_cst (integer_type_node
, flt
);
900 = tree_cons (NULL_TREE
, flt_node
, NULL
);
905 case ERT_ALLOWED_EXCEPTIONS
:
907 = add_ehspec_entry (&ehspec
, &ttypes
, r
->u
.allowed
.type_list
);
916 /* Emit SEQ into basic block just before INSN (that is assumed to be
917 first instruction of some existing BB and return the newly
920 emit_to_new_bb_before (rtx_insn
*seq
, rtx insn
)
927 /* If there happens to be a fallthru edge (possibly created by cleanup_cfg
928 call), we don't want it to go into newly created landing pad or other EH
930 for (ei
= ei_start (BLOCK_FOR_INSN (insn
)->preds
); (e
= ei_safe_edge (ei
)); )
931 if (e
->flags
& EDGE_FALLTHRU
)
932 force_nonfallthru (e
);
935 last
= emit_insn_before (seq
, insn
);
936 if (BARRIER_P (last
))
937 last
= PREV_INSN (last
);
938 bb
= create_basic_block (seq
, last
, BLOCK_FOR_INSN (insn
)->prev_bb
);
939 update_bb_for_insn (bb
);
940 bb
->flags
|= BB_SUPERBLOCK
;
944 /* A subroutine of dw2_build_landing_pads, also used for edge splitting
945 at the rtl level. Emit the code required by the target at a landing
946 pad for the given region. */
949 expand_dw2_landing_pad_for_region (eh_region region
)
951 if (targetm
.have_exception_receiver ())
952 emit_insn (targetm
.gen_exception_receiver ());
953 else if (targetm
.have_nonlocal_goto_receiver ())
954 emit_insn (targetm
.gen_nonlocal_goto_receiver ());
958 if (region
->exc_ptr_reg
)
959 emit_move_insn (region
->exc_ptr_reg
,
960 gen_rtx_REG (ptr_mode
, EH_RETURN_DATA_REGNO (0)));
961 if (region
->filter_reg
)
962 emit_move_insn (region
->filter_reg
,
963 gen_rtx_REG (targetm
.eh_return_filter_mode (),
964 EH_RETURN_DATA_REGNO (1)));
967 /* Expand the extra code needed at landing pads for dwarf2 unwinding. */
970 dw2_build_landing_pads (void)
974 int e_flags
= EDGE_FALLTHRU
;
976 /* If we're going to partition blocks, we need to be able to add
977 new landing pads later, which means that we need to hold on to
978 the post-landing-pad block. Prevent it from being merged away.
979 We'll remove this bit after partitioning. */
980 if (flag_reorder_blocks_and_partition
)
981 e_flags
|= EDGE_PRESERVE
;
983 for (i
= 1; vec_safe_iterate (cfun
->eh
->lp_array
, i
, &lp
); ++i
)
989 if (lp
== NULL
|| lp
->post_landing_pad
== NULL
)
994 lp
->landing_pad
= gen_label_rtx ();
995 emit_label (lp
->landing_pad
);
996 LABEL_PRESERVE_P (lp
->landing_pad
) = 1;
998 expand_dw2_landing_pad_for_region (lp
->region
);
1003 bb
= emit_to_new_bb_before (seq
, label_rtx (lp
->post_landing_pad
));
1004 e
= make_edge (bb
, bb
->next_bb
, e_flags
);
1005 e
->count
= bb
->count
;
1006 e
->probability
= REG_BR_PROB_BASE
;
1009 struct loop
*loop
= bb
->next_bb
->loop_father
;
1010 /* If we created a pre-header block, add the new block to the
1011 outer loop, otherwise to the loop itself. */
1012 if (bb
->next_bb
== loop
->header
)
1013 add_bb_to_loop (bb
, loop_outer (loop
));
1015 add_bb_to_loop (bb
, loop
);
1021 static vec
<int> sjlj_lp_call_site_index
;
1023 /* Process all active landing pads. Assign each one a compact dispatch
1024 index, and a call-site index. */
1027 sjlj_assign_call_site_values (void)
1029 action_hash_type
ar_hash (31);
1033 vec_alloc (crtl
->eh
.action_record_data
, 64);
1037 for (i
= 1; vec_safe_iterate (cfun
->eh
->lp_array
, i
, &lp
); ++i
)
1038 if (lp
&& lp
->post_landing_pad
)
1040 int action
, call_site
;
1042 /* First: build the action table. */
1043 action
= collect_one_action_chain (&ar_hash
, lp
->region
);
1045 /* Next: assign call-site values. If dwarf2 terms, this would be
1046 the region number assigned by convert_to_eh_region_ranges, but
1047 handles no-action and must-not-throw differently. */
1048 /* Map must-not-throw to otherwise unused call-site index 0. */
1051 /* Map no-action to otherwise unused call-site index -1. */
1052 else if (action
== -1)
1054 /* Otherwise, look it up in the table. */
1056 call_site
= add_call_site (GEN_INT (disp_index
), action
, 0);
1057 sjlj_lp_call_site_index
[i
] = call_site
;
1065 /* Emit code to record the current call-site index before every
1066 insn that can throw. */
1069 sjlj_mark_call_sites (void)
1071 int last_call_site
= -2;
1075 for (insn
= get_insns (); insn
; insn
= NEXT_INSN (insn
))
1081 rtx_insn
*before
, *p
;
1083 /* Reset value tracking at extended basic block boundaries. */
1085 last_call_site
= -2;
1087 /* If the function allocates dynamic stack space, the context must
1088 be updated after every allocation/deallocation accordingly. */
1089 if (NOTE_P (insn
) && NOTE_KIND (insn
) == NOTE_INSN_UPDATE_SJLJ_CONTEXT
)
1094 buf_addr
= plus_constant (Pmode
, XEXP (crtl
->eh
.sjlj_fc
, 0),
1096 expand_builtin_update_setjmp_buf (buf_addr
);
1099 emit_insn_before (p
, insn
);
1102 if (! INSN_P (insn
))
1105 nothrow
= get_eh_region_and_lp_from_rtx (insn
, &r
, &lp
);
1109 this_call_site
= sjlj_lp_call_site_index
[lp
->index
];
1112 /* Calls (and trapping insns) without notes are outside any
1113 exception handling region in this function. Mark them as
1115 this_call_site
= -1;
1119 gcc_assert (r
->type
== ERT_MUST_NOT_THROW
);
1123 if (this_call_site
!= -1)
1124 crtl
->uses_eh_lsda
= 1;
1126 if (this_call_site
== last_call_site
)
1129 /* Don't separate a call from it's argument loads. */
1132 before
= find_first_parameter_load (insn
, NULL
);
1135 mem
= adjust_address (crtl
->eh
.sjlj_fc
, TYPE_MODE (integer_type_node
),
1136 sjlj_fc_call_site_ofs
);
1137 emit_move_insn (mem
, gen_int_mode (this_call_site
, GET_MODE (mem
)));
1141 emit_insn_before (p
, before
);
1142 last_call_site
= this_call_site
;
1146 /* Construct the SjLj_Function_Context. */
1149 sjlj_emit_function_enter (rtx_code_label
*dispatch_label
)
1151 rtx_insn
*fn_begin
, *seq
;
1153 bool fn_begin_outside_block
;
1154 rtx personality
= get_personality_function (current_function_decl
);
1156 fc
= crtl
->eh
.sjlj_fc
;
1160 /* We're storing this libcall's address into memory instead of
1161 calling it directly. Thus, we must call assemble_external_libcall
1162 here, as we can not depend on emit_library_call to do it for us. */
1163 assemble_external_libcall (personality
);
1164 mem
= adjust_address (fc
, Pmode
, sjlj_fc_personality_ofs
);
1165 emit_move_insn (mem
, personality
);
1167 mem
= adjust_address (fc
, Pmode
, sjlj_fc_lsda_ofs
);
1168 if (crtl
->uses_eh_lsda
)
1173 ASM_GENERATE_INTERNAL_LABEL (buf
, "LLSDA", current_function_funcdef_no
);
1174 sym
= gen_rtx_SYMBOL_REF (Pmode
, ggc_strdup (buf
));
1175 SYMBOL_REF_FLAGS (sym
) = SYMBOL_FLAG_LOCAL
;
1176 emit_move_insn (mem
, sym
);
1179 emit_move_insn (mem
, const0_rtx
);
1183 #ifdef DONT_USE_BUILTIN_SETJMP
1185 x
= emit_library_call_value (setjmp_libfunc
, NULL_RTX
, LCT_RETURNS_TWICE
,
1186 TYPE_MODE (integer_type_node
), 1,
1187 plus_constant (Pmode
, XEXP (fc
, 0),
1188 sjlj_fc_jbuf_ofs
), Pmode
);
1190 emit_cmp_and_jump_insns (x
, const0_rtx
, NE
, 0,
1191 TYPE_MODE (integer_type_node
), 0,
1192 dispatch_label
, REG_BR_PROB_BASE
/ 100);
1194 expand_builtin_setjmp_setup (plus_constant (Pmode
, XEXP (fc
, 0),
1200 emit_library_call (unwind_sjlj_register_libfunc
, LCT_NORMAL
, VOIDmode
,
1201 1, XEXP (fc
, 0), Pmode
);
1206 /* ??? Instead of doing this at the beginning of the function,
1207 do this in a block that is at loop level 0 and dominates all
1208 can_throw_internal instructions. */
1210 fn_begin_outside_block
= true;
1211 for (fn_begin
= get_insns (); ; fn_begin
= NEXT_INSN (fn_begin
))
1212 if (NOTE_P (fn_begin
))
1214 if (NOTE_KIND (fn_begin
) == NOTE_INSN_FUNCTION_BEG
)
1216 else if (NOTE_INSN_BASIC_BLOCK_P (fn_begin
))
1217 fn_begin_outside_block
= false;
1220 if (fn_begin_outside_block
)
1221 insert_insn_on_edge (seq
, single_succ_edge (ENTRY_BLOCK_PTR_FOR_FN (cfun
)));
1223 emit_insn_after (seq
, fn_begin
);
1226 /* Call back from expand_function_end to know where we should put
1227 the call to unwind_sjlj_unregister_libfunc if needed. */
1230 sjlj_emit_function_exit_after (rtx_insn
*after
)
1232 crtl
->eh
.sjlj_exit_after
= after
;
1236 sjlj_emit_function_exit (void)
1238 rtx_insn
*seq
, *insn
;
1242 emit_library_call (unwind_sjlj_unregister_libfunc
, LCT_NORMAL
, VOIDmode
,
1243 1, XEXP (crtl
->eh
.sjlj_fc
, 0), Pmode
);
1248 /* ??? Really this can be done in any block at loop level 0 that
1249 post-dominates all can_throw_internal instructions. This is
1250 the last possible moment. */
1252 insn
= crtl
->eh
.sjlj_exit_after
;
1254 insn
= NEXT_INSN (insn
);
1256 emit_insn_after (seq
, insn
);
1260 sjlj_emit_dispatch_table (rtx_code_label
*dispatch_label
, int num_dispatch
)
1262 machine_mode unwind_word_mode
= targetm
.unwind_word_mode ();
1263 machine_mode filter_mode
= targetm
.eh_return_filter_mode ();
1265 rtx mem
, fc
, exc_ptr_reg
, filter_reg
;
1271 vec
<tree
> dispatch_labels
= vNULL
;
1273 fc
= crtl
->eh
.sjlj_fc
;
1277 emit_label (dispatch_label
);
1279 #ifndef DONT_USE_BUILTIN_SETJMP
1280 expand_builtin_setjmp_receiver (dispatch_label
);
1282 /* The caller of expand_builtin_setjmp_receiver is responsible for
1283 making sure that the label doesn't vanish. The only other caller
1284 is the expander for __builtin_setjmp_receiver, which places this
1285 label on the nonlocal_goto_label list. Since we're modeling these
1286 CFG edges more exactly, we can use the forced_labels list instead. */
1287 LABEL_PRESERVE_P (dispatch_label
) = 1;
1289 = gen_rtx_INSN_LIST (VOIDmode
, dispatch_label
, forced_labels
);
1292 /* Load up exc_ptr and filter values from the function context. */
1293 mem
= adjust_address (fc
, unwind_word_mode
, sjlj_fc_data_ofs
);
1294 if (unwind_word_mode
!= ptr_mode
)
1296 #ifdef POINTERS_EXTEND_UNSIGNED
1297 mem
= convert_memory_address (ptr_mode
, mem
);
1299 mem
= convert_to_mode (ptr_mode
, mem
, 0);
1302 exc_ptr_reg
= force_reg (ptr_mode
, mem
);
1304 mem
= adjust_address (fc
, unwind_word_mode
,
1305 sjlj_fc_data_ofs
+ GET_MODE_SIZE (unwind_word_mode
));
1306 if (unwind_word_mode
!= filter_mode
)
1307 mem
= convert_to_mode (filter_mode
, mem
, 0);
1308 filter_reg
= force_reg (filter_mode
, mem
);
1310 /* Jump to one of the directly reachable regions. */
1313 rtx_code_label
*first_reachable_label
= NULL
;
1315 /* If there's exactly one call site in the function, don't bother
1316 generating a switch statement. */
1317 if (num_dispatch
> 1)
1318 dispatch_labels
.create (num_dispatch
);
1320 for (i
= 1; vec_safe_iterate (cfun
->eh
->lp_array
, i
, &lp
); ++i
)
1321 if (lp
&& lp
->post_landing_pad
)
1324 rtx_code_label
*label
;
1328 lp
->landing_pad
= dispatch_label
;
1330 if (num_dispatch
> 1)
1332 tree t_label
, case_elt
, t
;
1334 t_label
= create_artificial_label (UNKNOWN_LOCATION
);
1335 t
= build_int_cst (integer_type_node
, disp_index
);
1336 case_elt
= build_case_label (t
, NULL
, t_label
);
1337 dispatch_labels
.quick_push (case_elt
);
1338 label
= jump_target_rtx (t_label
);
1341 label
= gen_label_rtx ();
1343 if (disp_index
== 0)
1344 first_reachable_label
= label
;
1349 emit_move_insn (r
->exc_ptr_reg
, exc_ptr_reg
);
1351 emit_move_insn (r
->filter_reg
, filter_reg
);
1353 seq2
= get_insns ();
1356 rtx_insn
*before
= label_rtx (lp
->post_landing_pad
);
1357 bb
= emit_to_new_bb_before (seq2
, before
);
1358 e
= make_edge (bb
, bb
->next_bb
, EDGE_FALLTHRU
);
1359 e
->count
= bb
->count
;
1360 e
->probability
= REG_BR_PROB_BASE
;
1363 struct loop
*loop
= bb
->next_bb
->loop_father
;
1364 /* If we created a pre-header block, add the new block to the
1365 outer loop, otherwise to the loop itself. */
1366 if (bb
->next_bb
== loop
->header
)
1367 add_bb_to_loop (bb
, loop_outer (loop
));
1369 add_bb_to_loop (bb
, loop
);
1370 /* ??? For multiple dispatches we will end up with edges
1371 from the loop tree root into this loop, making it a
1372 multiple-entry loop. Discard all affected loops. */
1373 if (num_dispatch
> 1)
1375 for (loop
= bb
->loop_father
;
1376 loop_outer (loop
); loop
= loop_outer (loop
))
1377 mark_loop_for_removal (loop
);
1383 gcc_assert (disp_index
== num_dispatch
);
1385 if (num_dispatch
> 1)
1387 rtx disp
= adjust_address (fc
, TYPE_MODE (integer_type_node
),
1388 sjlj_fc_call_site_ofs
);
1389 expand_sjlj_dispatch_table (disp
, dispatch_labels
);
1395 bb
= emit_to_new_bb_before (seq
, first_reachable_label
);
1396 if (num_dispatch
== 1)
1398 e
= make_edge (bb
, bb
->next_bb
, EDGE_FALLTHRU
);
1399 e
->count
= bb
->count
;
1400 e
->probability
= REG_BR_PROB_BASE
;
1403 struct loop
*loop
= bb
->next_bb
->loop_father
;
1404 /* If we created a pre-header block, add the new block to the
1405 outer loop, otherwise to the loop itself. */
1406 if (bb
->next_bb
== loop
->header
)
1407 add_bb_to_loop (bb
, loop_outer (loop
));
1409 add_bb_to_loop (bb
, loop
);
1414 /* We are not wiring up edges here, but as the dispatcher call
1415 is at function begin simply associate the block with the
1416 outermost (non-)loop. */
1418 add_bb_to_loop (bb
, current_loops
->tree_root
);
1423 sjlj_build_landing_pads (void)
1427 num_dispatch
= vec_safe_length (cfun
->eh
->lp_array
);
1428 if (num_dispatch
== 0)
1430 sjlj_lp_call_site_index
.safe_grow_cleared (num_dispatch
);
1432 num_dispatch
= sjlj_assign_call_site_values ();
1433 if (num_dispatch
> 0)
1435 rtx_code_label
*dispatch_label
= gen_label_rtx ();
1436 int align
= STACK_SLOT_ALIGNMENT (sjlj_fc_type_node
,
1437 TYPE_MODE (sjlj_fc_type_node
),
1438 TYPE_ALIGN (sjlj_fc_type_node
));
1440 = assign_stack_local (TYPE_MODE (sjlj_fc_type_node
),
1441 int_size_in_bytes (sjlj_fc_type_node
),
1444 sjlj_mark_call_sites ();
1445 sjlj_emit_function_enter (dispatch_label
);
1446 sjlj_emit_dispatch_table (dispatch_label
, num_dispatch
);
1447 sjlj_emit_function_exit ();
1450 /* If we do not have any landing pads, we may still need to register a
1451 personality routine and (empty) LSDA to handle must-not-throw regions. */
1452 else if (function_needs_eh_personality (cfun
) != eh_personality_none
)
1454 int align
= STACK_SLOT_ALIGNMENT (sjlj_fc_type_node
,
1455 TYPE_MODE (sjlj_fc_type_node
),
1456 TYPE_ALIGN (sjlj_fc_type_node
));
1458 = assign_stack_local (TYPE_MODE (sjlj_fc_type_node
),
1459 int_size_in_bytes (sjlj_fc_type_node
),
1462 sjlj_mark_call_sites ();
1463 sjlj_emit_function_enter (NULL
);
1464 sjlj_emit_function_exit ();
1467 sjlj_lp_call_site_index
.release ();
1470 /* Update the sjlj function context. This function should be called
1471 whenever we allocate or deallocate dynamic stack space. */
1474 update_sjlj_context (void)
1476 if (!flag_exceptions
)
1479 emit_note (NOTE_INSN_UPDATE_SJLJ_CONTEXT
);
1482 /* After initial rtl generation, call back to finish generating
1483 exception support code. */
1486 finish_eh_generation (void)
1490 /* Construct the landing pads. */
1491 if (targetm_common
.except_unwind_info (&global_options
) == UI_SJLJ
)
1492 sjlj_build_landing_pads ();
1494 dw2_build_landing_pads ();
1495 break_superblocks ();
1497 if (targetm_common
.except_unwind_info (&global_options
) == UI_SJLJ
1498 /* Kludge for Alpha (see alpha_gp_save_rtx). */
1499 || single_succ_edge (ENTRY_BLOCK_PTR_FOR_FN (cfun
))->insns
.r
)
1500 commit_edge_insertions ();
1502 /* Redirect all EH edges from the post_landing_pad to the landing pad. */
1503 FOR_EACH_BB_FN (bb
, cfun
)
1509 lp
= get_eh_landing_pad_from_rtx (BB_END (bb
));
1511 FOR_EACH_EDGE (e
, ei
, bb
->succs
)
1512 if (e
->flags
& EDGE_EH
)
1515 /* We should not have generated any new throwing insns during this
1516 pass, and we should not have lost any EH edges, so we only need
1517 to handle two cases here:
1518 (1) reachable handler and an existing edge to post-landing-pad,
1519 (2) no reachable handler and no edge. */
1520 gcc_assert ((lp
!= NULL
) == (e
!= NULL
));
1523 gcc_assert (BB_HEAD (e
->dest
) == label_rtx (lp
->post_landing_pad
));
1525 redirect_edge_succ (e
, BLOCK_FOR_INSN (lp
->landing_pad
));
1526 e
->flags
|= (CALL_P (BB_END (bb
))
1527 ? EDGE_ABNORMAL
| EDGE_ABNORMAL_CALL
1533 /* This section handles removing dead code for flow. */
1536 remove_eh_landing_pad (eh_landing_pad lp
)
1540 for (pp
= &lp
->region
->landing_pads
; *pp
!= lp
; pp
= &(*pp
)->next_lp
)
1544 if (lp
->post_landing_pad
)
1545 EH_LANDING_PAD_NR (lp
->post_landing_pad
) = 0;
1546 (*cfun
->eh
->lp_array
)[lp
->index
] = NULL
;
1549 /* Splice the EH region at PP from the region tree. */
1552 remove_eh_handler_splicer (eh_region
*pp
)
1554 eh_region region
= *pp
;
1557 for (lp
= region
->landing_pads
; lp
; lp
= lp
->next_lp
)
1559 if (lp
->post_landing_pad
)
1560 EH_LANDING_PAD_NR (lp
->post_landing_pad
) = 0;
1561 (*cfun
->eh
->lp_array
)[lp
->index
] = NULL
;
1567 outer
= region
->outer
;
1569 *pp
= p
= region
->inner
;
1578 *pp
= region
->next_peer
;
1580 (*cfun
->eh
->region_array
)[region
->index
] = NULL
;
1583 /* Splice a single EH region REGION from the region tree.
1585 To unlink REGION, we need to find the pointer to it with a relatively
1586 expensive search in REGION's outer region. If you are going to
1587 remove a number of handlers, using remove_unreachable_eh_regions may
1588 be a better option. */
1591 remove_eh_handler (eh_region region
)
1593 eh_region
*pp
, *pp_start
, p
, outer
;
1595 outer
= region
->outer
;
1597 pp_start
= &outer
->inner
;
1599 pp_start
= &cfun
->eh
->region_tree
;
1600 for (pp
= pp_start
, p
= *pp
; p
!= region
; pp
= &p
->next_peer
, p
= *pp
)
1603 remove_eh_handler_splicer (pp
);
1606 /* Worker for remove_unreachable_eh_regions.
1607 PP is a pointer to the region to start a region tree depth-first
1608 search from. R_REACHABLE is the set of regions that have to be
1612 remove_unreachable_eh_regions_worker (eh_region
*pp
, sbitmap r_reachable
)
1616 eh_region region
= *pp
;
1617 remove_unreachable_eh_regions_worker (®ion
->inner
, r_reachable
);
1618 if (!bitmap_bit_p (r_reachable
, region
->index
))
1619 remove_eh_handler_splicer (pp
);
1621 pp
= ®ion
->next_peer
;
1625 /* Splice all EH regions *not* marked in R_REACHABLE from the region tree.
1626 Do this by traversing the EH tree top-down and splice out regions that
1627 are not marked. By removing regions from the leaves, we avoid costly
1628 searches in the region tree. */
1631 remove_unreachable_eh_regions (sbitmap r_reachable
)
1633 remove_unreachable_eh_regions_worker (&cfun
->eh
->region_tree
, r_reachable
);
1636 /* Invokes CALLBACK for every exception handler landing pad label.
1637 Only used by reload hackery; should not be used by new code. */
1640 for_each_eh_label (void (*callback
) (rtx
))
1645 for (i
= 1; vec_safe_iterate (cfun
->eh
->lp_array
, i
, &lp
); ++i
)
1649 rtx_code_label
*lab
= lp
->landing_pad
;
1650 if (lab
&& LABEL_P (lab
))
1656 /* Create the REG_EH_REGION note for INSN, given its ECF_FLAGS for a
1659 At the gimple level, we use LP_NR
1660 > 0 : The statement transfers to landing pad LP_NR
1661 = 0 : The statement is outside any EH region
1662 < 0 : The statement is within MUST_NOT_THROW region -LP_NR.
1664 At the rtl level, we use LP_NR
1665 > 0 : The insn transfers to landing pad LP_NR
1666 = 0 : The insn cannot throw
1667 < 0 : The insn is within MUST_NOT_THROW region -LP_NR
1668 = INT_MIN : The insn cannot throw or execute a nonlocal-goto.
1669 missing note: The insn is outside any EH region.
1671 ??? This difference probably ought to be avoided. We could stand
1672 to record nothrow for arbitrary gimple statements, and so avoid
1673 some moderately complex lookups in stmt_could_throw_p. Perhaps
1674 NOTHROW should be mapped on both sides to INT_MIN. Perhaps the
1675 no-nonlocal-goto property should be recorded elsewhere as a bit
1676 on the call_insn directly. Perhaps we should make more use of
1677 attaching the trees to call_insns (reachable via symbol_ref in
1678 direct call cases) and just pull the data out of the trees. */
1681 make_reg_eh_region_note (rtx_insn
*insn
, int ecf_flags
, int lp_nr
)
1684 if (ecf_flags
& ECF_NOTHROW
)
1686 else if (lp_nr
!= 0)
1687 value
= GEN_INT (lp_nr
);
1690 add_reg_note (insn
, REG_EH_REGION
, value
);
1693 /* Create a REG_EH_REGION note for a CALL_INSN that cannot throw
1694 nor perform a non-local goto. Replace the region note if it
1698 make_reg_eh_region_note_nothrow_nononlocal (rtx_insn
*insn
)
1700 rtx note
= find_reg_note (insn
, REG_EH_REGION
, NULL_RTX
);
1701 rtx intmin
= GEN_INT (INT_MIN
);
1704 XEXP (note
, 0) = intmin
;
1706 add_reg_note (insn
, REG_EH_REGION
, intmin
);
1709 /* Return true if INSN could throw, assuming no REG_EH_REGION note
1713 insn_could_throw_p (const_rtx insn
)
1715 if (!flag_exceptions
)
1719 if (INSN_P (insn
) && cfun
->can_throw_non_call_exceptions
)
1720 return may_trap_p (PATTERN (insn
));
1724 /* Copy an REG_EH_REGION note to each insn that might throw beginning
1725 at FIRST and ending at LAST. NOTE_OR_INSN is either the source insn
1726 to look for a note, or the note itself. */
1729 copy_reg_eh_region_note_forward (rtx note_or_insn
, rtx_insn
*first
, rtx last
)
1732 rtx note
= note_or_insn
;
1734 if (INSN_P (note_or_insn
))
1736 note
= find_reg_note (note_or_insn
, REG_EH_REGION
, NULL_RTX
);
1740 note
= XEXP (note
, 0);
1742 for (insn
= first
; insn
!= last
; insn
= NEXT_INSN (insn
))
1743 if (!find_reg_note (insn
, REG_EH_REGION
, NULL_RTX
)
1744 && insn_could_throw_p (insn
))
1745 add_reg_note (insn
, REG_EH_REGION
, note
);
1748 /* Likewise, but iterate backward. */
1751 copy_reg_eh_region_note_backward (rtx note_or_insn
, rtx_insn
*last
, rtx first
)
1754 rtx note
= note_or_insn
;
1756 if (INSN_P (note_or_insn
))
1758 note
= find_reg_note (note_or_insn
, REG_EH_REGION
, NULL_RTX
);
1762 note
= XEXP (note
, 0);
1764 for (insn
= last
; insn
!= first
; insn
= PREV_INSN (insn
))
1765 if (insn_could_throw_p (insn
))
1766 add_reg_note (insn
, REG_EH_REGION
, note
);
1770 /* Extract all EH information from INSN. Return true if the insn
1771 was marked NOTHROW. */
1774 get_eh_region_and_lp_from_rtx (const_rtx insn
, eh_region
*pr
,
1775 eh_landing_pad
*plp
)
1777 eh_landing_pad lp
= NULL
;
1783 if (! INSN_P (insn
))
1786 if (NONJUMP_INSN_P (insn
)
1787 && GET_CODE (PATTERN (insn
)) == SEQUENCE
)
1788 insn
= XVECEXP (PATTERN (insn
), 0, 0);
1790 note
= find_reg_note (insn
, REG_EH_REGION
, NULL_RTX
);
1793 ret
= !insn_could_throw_p (insn
);
1797 lp_nr
= INTVAL (XEXP (note
, 0));
1798 if (lp_nr
== 0 || lp_nr
== INT_MIN
)
1805 r
= (*cfun
->eh
->region_array
)[-lp_nr
];
1808 lp
= (*cfun
->eh
->lp_array
)[lp_nr
];
1818 /* Return the landing pad to which INSN may go, or NULL if it does not
1819 have a reachable landing pad within this function. */
1822 get_eh_landing_pad_from_rtx (const_rtx insn
)
1827 get_eh_region_and_lp_from_rtx (insn
, &r
, &lp
);
1831 /* Return the region to which INSN may go, or NULL if it does not
1832 have a reachable region within this function. */
1835 get_eh_region_from_rtx (const_rtx insn
)
1840 get_eh_region_and_lp_from_rtx (insn
, &r
, &lp
);
1844 /* Return true if INSN throws and is caught by something in this function. */
1847 can_throw_internal (const_rtx insn
)
1849 return get_eh_landing_pad_from_rtx (insn
) != NULL
;
1852 /* Return true if INSN throws and escapes from the current function. */
1855 can_throw_external (const_rtx insn
)
1861 if (! INSN_P (insn
))
1864 if (NONJUMP_INSN_P (insn
)
1865 && GET_CODE (PATTERN (insn
)) == SEQUENCE
)
1867 rtx_sequence
*seq
= as_a
<rtx_sequence
*> (PATTERN (insn
));
1868 int i
, n
= seq
->len ();
1870 for (i
= 0; i
< n
; i
++)
1871 if (can_throw_external (seq
->element (i
)))
1877 nothrow
= get_eh_region_and_lp_from_rtx (insn
, &r
, &lp
);
1879 /* If we can't throw, we obviously can't throw external. */
1883 /* If we have an internal landing pad, then we're not external. */
1887 /* If we're not within an EH region, then we are external. */
1891 /* The only thing that ought to be left is MUST_NOT_THROW regions,
1892 which don't always have landing pads. */
1893 gcc_assert (r
->type
== ERT_MUST_NOT_THROW
);
1897 /* Return true if INSN cannot throw at all. */
1900 insn_nothrow_p (const_rtx insn
)
1905 if (! INSN_P (insn
))
1908 if (NONJUMP_INSN_P (insn
)
1909 && GET_CODE (PATTERN (insn
)) == SEQUENCE
)
1911 rtx_sequence
*seq
= as_a
<rtx_sequence
*> (PATTERN (insn
));
1912 int i
, n
= seq
->len ();
1914 for (i
= 0; i
< n
; i
++)
1915 if (!insn_nothrow_p (seq
->element (i
)))
1921 return get_eh_region_and_lp_from_rtx (insn
, &r
, &lp
);
1924 /* Return true if INSN can perform a non-local goto. */
1925 /* ??? This test is here in this file because it (ab)uses REG_EH_REGION. */
1928 can_nonlocal_goto (const rtx_insn
*insn
)
1930 if (nonlocal_goto_handler_labels
&& CALL_P (insn
))
1932 rtx note
= find_reg_note (insn
, REG_EH_REGION
, NULL_RTX
);
1933 if (!note
|| INTVAL (XEXP (note
, 0)) != INT_MIN
)
1939 /* Set TREE_NOTHROW and crtl->all_throwers_are_sibcalls. */
1942 set_nothrow_function_flags (void)
1948 /* Assume crtl->all_throwers_are_sibcalls until we encounter
1949 something that can throw an exception. We specifically exempt
1950 CALL_INSNs that are SIBLING_CALL_P, as these are really jumps,
1951 and can't throw. Most CALL_INSNs are not SIBLING_CALL_P, so this
1954 crtl
->all_throwers_are_sibcalls
= 1;
1956 /* If we don't know that this implementation of the function will
1957 actually be used, then we must not set TREE_NOTHROW, since
1958 callers must not assume that this function does not throw. */
1959 if (TREE_NOTHROW (current_function_decl
))
1962 if (! flag_exceptions
)
1965 for (insn
= get_insns (); insn
; insn
= NEXT_INSN (insn
))
1966 if (can_throw_external (insn
))
1970 if (!CALL_P (insn
) || !SIBLING_CALL_P (insn
))
1972 crtl
->all_throwers_are_sibcalls
= 0;
1978 && (cgraph_node::get (current_function_decl
)->get_availability ()
1979 >= AVAIL_AVAILABLE
))
1981 struct cgraph_node
*node
= cgraph_node::get (current_function_decl
);
1982 struct cgraph_edge
*e
;
1983 for (e
= node
->callers
; e
; e
= e
->next_caller
)
1984 e
->can_throw_external
= false;
1985 node
->set_nothrow_flag (true);
1988 fprintf (dump_file
, "Marking function nothrow: %s\n\n",
1989 current_function_name ());
1996 const pass_data pass_data_set_nothrow_function_flags
=
1998 RTL_PASS
, /* type */
1999 "nothrow", /* name */
2000 OPTGROUP_NONE
, /* optinfo_flags */
2001 TV_NONE
, /* tv_id */
2002 0, /* properties_required */
2003 0, /* properties_provided */
2004 0, /* properties_destroyed */
2005 0, /* todo_flags_start */
2006 0, /* todo_flags_finish */
2009 class pass_set_nothrow_function_flags
: public rtl_opt_pass
2012 pass_set_nothrow_function_flags (gcc::context
*ctxt
)
2013 : rtl_opt_pass (pass_data_set_nothrow_function_flags
, ctxt
)
2016 /* opt_pass methods: */
2017 virtual unsigned int execute (function
*)
2019 return set_nothrow_function_flags ();
2022 }; // class pass_set_nothrow_function_flags
2027 make_pass_set_nothrow_function_flags (gcc::context
*ctxt
)
2029 return new pass_set_nothrow_function_flags (ctxt
);
2033 /* Various hooks for unwind library. */
2035 /* Expand the EH support builtin functions:
2036 __builtin_eh_pointer and __builtin_eh_filter. */
2039 expand_builtin_eh_common (tree region_nr_t
)
2041 HOST_WIDE_INT region_nr
;
2044 gcc_assert (tree_fits_shwi_p (region_nr_t
));
2045 region_nr
= tree_to_shwi (region_nr_t
);
2047 region
= (*cfun
->eh
->region_array
)[region_nr
];
2049 /* ??? We shouldn't have been able to delete a eh region without
2050 deleting all the code that depended on it. */
2051 gcc_assert (region
!= NULL
);
2056 /* Expand to the exc_ptr value from the given eh region. */
2059 expand_builtin_eh_pointer (tree exp
)
2062 = expand_builtin_eh_common (CALL_EXPR_ARG (exp
, 0));
2063 if (region
->exc_ptr_reg
== NULL
)
2064 region
->exc_ptr_reg
= gen_reg_rtx (ptr_mode
);
2065 return region
->exc_ptr_reg
;
2068 /* Expand to the filter value from the given eh region. */
2071 expand_builtin_eh_filter (tree exp
)
2074 = expand_builtin_eh_common (CALL_EXPR_ARG (exp
, 0));
2075 if (region
->filter_reg
== NULL
)
2076 region
->filter_reg
= gen_reg_rtx (targetm
.eh_return_filter_mode ());
2077 return region
->filter_reg
;
2080 /* Copy the exc_ptr and filter values from one landing pad's registers
2081 to another. This is used to inline the resx statement. */
2084 expand_builtin_eh_copy_values (tree exp
)
2087 = expand_builtin_eh_common (CALL_EXPR_ARG (exp
, 0));
2089 = expand_builtin_eh_common (CALL_EXPR_ARG (exp
, 1));
2090 machine_mode fmode
= targetm
.eh_return_filter_mode ();
2092 if (dst
->exc_ptr_reg
== NULL
)
2093 dst
->exc_ptr_reg
= gen_reg_rtx (ptr_mode
);
2094 if (src
->exc_ptr_reg
== NULL
)
2095 src
->exc_ptr_reg
= gen_reg_rtx (ptr_mode
);
2097 if (dst
->filter_reg
== NULL
)
2098 dst
->filter_reg
= gen_reg_rtx (fmode
);
2099 if (src
->filter_reg
== NULL
)
2100 src
->filter_reg
= gen_reg_rtx (fmode
);
2102 emit_move_insn (dst
->exc_ptr_reg
, src
->exc_ptr_reg
);
2103 emit_move_insn (dst
->filter_reg
, src
->filter_reg
);
2108 /* Do any necessary initialization to access arbitrary stack frames.
2109 On the SPARC, this means flushing the register windows. */
2112 expand_builtin_unwind_init (void)
2114 /* Set this so all the registers get saved in our frame; we need to be
2115 able to copy the saved values for any registers from frames we unwind. */
2116 crtl
->saves_all_registers
= 1;
2118 SETUP_FRAME_ADDRESSES ();
2121 /* Map a non-negative number to an eh return data register number; expands
2122 to -1 if no return data register is associated with the input number.
2123 At least the inputs 0 and 1 must be mapped; the target may provide more. */
2126 expand_builtin_eh_return_data_regno (tree exp
)
2128 tree which
= CALL_EXPR_ARG (exp
, 0);
2129 unsigned HOST_WIDE_INT iwhich
;
2131 if (TREE_CODE (which
) != INTEGER_CST
)
2133 error ("argument of %<__builtin_eh_return_regno%> must be constant");
2137 iwhich
= tree_to_uhwi (which
);
2138 iwhich
= EH_RETURN_DATA_REGNO (iwhich
);
2139 if (iwhich
== INVALID_REGNUM
)
2142 #ifdef DWARF_FRAME_REGNUM
2143 iwhich
= DWARF_FRAME_REGNUM (iwhich
);
2145 iwhich
= DBX_REGISTER_NUMBER (iwhich
);
2148 return GEN_INT (iwhich
);
2151 /* Given a value extracted from the return address register or stack slot,
2152 return the actual address encoded in that value. */
2155 expand_builtin_extract_return_addr (tree addr_tree
)
2157 rtx addr
= expand_expr (addr_tree
, NULL_RTX
, Pmode
, EXPAND_NORMAL
);
2159 if (GET_MODE (addr
) != Pmode
2160 && GET_MODE (addr
) != VOIDmode
)
2162 #ifdef POINTERS_EXTEND_UNSIGNED
2163 addr
= convert_memory_address (Pmode
, addr
);
2165 addr
= convert_to_mode (Pmode
, addr
, 0);
2169 /* First mask out any unwanted bits. */
2170 rtx mask
= MASK_RETURN_ADDR
;
2172 expand_and (Pmode
, addr
, mask
, addr
);
2174 /* Then adjust to find the real return address. */
2175 if (RETURN_ADDR_OFFSET
)
2176 addr
= plus_constant (Pmode
, addr
, RETURN_ADDR_OFFSET
);
2181 /* Given an actual address in addr_tree, do any necessary encoding
2182 and return the value to be stored in the return address register or
2183 stack slot so the epilogue will return to that address. */
2186 expand_builtin_frob_return_addr (tree addr_tree
)
2188 rtx addr
= expand_expr (addr_tree
, NULL_RTX
, ptr_mode
, EXPAND_NORMAL
);
2190 addr
= convert_memory_address (Pmode
, addr
);
2192 if (RETURN_ADDR_OFFSET
)
2194 addr
= force_reg (Pmode
, addr
);
2195 addr
= plus_constant (Pmode
, addr
, -RETURN_ADDR_OFFSET
);
2201 /* Set up the epilogue with the magic bits we'll need to return to the
2202 exception handler. */
2205 expand_builtin_eh_return (tree stackadj_tree ATTRIBUTE_UNUSED
,
2210 #ifdef EH_RETURN_STACKADJ_RTX
2211 tmp
= expand_expr (stackadj_tree
, crtl
->eh
.ehr_stackadj
,
2212 VOIDmode
, EXPAND_NORMAL
);
2213 tmp
= convert_memory_address (Pmode
, tmp
);
2214 if (!crtl
->eh
.ehr_stackadj
)
2215 crtl
->eh
.ehr_stackadj
= copy_addr_to_reg (tmp
);
2216 else if (tmp
!= crtl
->eh
.ehr_stackadj
)
2217 emit_move_insn (crtl
->eh
.ehr_stackadj
, tmp
);
2220 tmp
= expand_expr (handler_tree
, crtl
->eh
.ehr_handler
,
2221 VOIDmode
, EXPAND_NORMAL
);
2222 tmp
= convert_memory_address (Pmode
, tmp
);
2223 if (!crtl
->eh
.ehr_handler
)
2224 crtl
->eh
.ehr_handler
= copy_addr_to_reg (tmp
);
2225 else if (tmp
!= crtl
->eh
.ehr_handler
)
2226 emit_move_insn (crtl
->eh
.ehr_handler
, tmp
);
2228 if (!crtl
->eh
.ehr_label
)
2229 crtl
->eh
.ehr_label
= gen_label_rtx ();
2230 emit_jump (crtl
->eh
.ehr_label
);
2233 /* Expand __builtin_eh_return. This exit path from the function loads up
2234 the eh return data registers, adjusts the stack, and branches to a
2235 given PC other than the normal return address. */
2238 expand_eh_return (void)
2240 rtx_code_label
*around_label
;
2242 if (! crtl
->eh
.ehr_label
)
2245 crtl
->calls_eh_return
= 1;
2247 #ifdef EH_RETURN_STACKADJ_RTX
2248 emit_move_insn (EH_RETURN_STACKADJ_RTX
, const0_rtx
);
2251 around_label
= gen_label_rtx ();
2252 emit_jump (around_label
);
2254 emit_label (crtl
->eh
.ehr_label
);
2255 clobber_return_register ();
2257 #ifdef EH_RETURN_STACKADJ_RTX
2258 emit_move_insn (EH_RETURN_STACKADJ_RTX
, crtl
->eh
.ehr_stackadj
);
2261 if (targetm
.have_eh_return ())
2262 emit_insn (targetm
.gen_eh_return (crtl
->eh
.ehr_handler
));
2265 #ifdef EH_RETURN_HANDLER_RTX
2266 emit_move_insn (EH_RETURN_HANDLER_RTX
, crtl
->eh
.ehr_handler
);
2268 error ("__builtin_eh_return not supported on this target");
2272 emit_label (around_label
);
2275 /* Convert a ptr_mode address ADDR_TREE to a Pmode address controlled by
2276 POINTERS_EXTEND_UNSIGNED and return it. */
2279 expand_builtin_extend_pointer (tree addr_tree
)
2281 rtx addr
= expand_expr (addr_tree
, NULL_RTX
, ptr_mode
, EXPAND_NORMAL
);
2284 #ifdef POINTERS_EXTEND_UNSIGNED
2285 extend
= POINTERS_EXTEND_UNSIGNED
;
2287 /* The previous EH code did an unsigned extend by default, so we do this also
2292 return convert_modes (targetm
.unwind_word_mode (), ptr_mode
, addr
, extend
);
2296 add_action_record (action_hash_type
*ar_hash
, int filter
, int next
)
2298 struct action_record
**slot
, *new_ar
, tmp
;
2300 tmp
.filter
= filter
;
2302 slot
= ar_hash
->find_slot (&tmp
, INSERT
);
2304 if ((new_ar
= *slot
) == NULL
)
2306 new_ar
= XNEW (struct action_record
);
2307 new_ar
->offset
= crtl
->eh
.action_record_data
->length () + 1;
2308 new_ar
->filter
= filter
;
2309 new_ar
->next
= next
;
2312 /* The filter value goes in untouched. The link to the next
2313 record is a "self-relative" byte offset, or zero to indicate
2314 that there is no next record. So convert the absolute 1 based
2315 indices we've been carrying around into a displacement. */
2317 push_sleb128 (&crtl
->eh
.action_record_data
, filter
);
2319 next
-= crtl
->eh
.action_record_data
->length () + 1;
2320 push_sleb128 (&crtl
->eh
.action_record_data
, next
);
2323 return new_ar
->offset
;
2327 collect_one_action_chain (action_hash_type
*ar_hash
, eh_region region
)
2331 /* If we've reached the top of the region chain, then we have
2332 no actions, and require no landing pad. */
2336 switch (region
->type
)
2341 /* A cleanup adds a zero filter to the beginning of the chain, but
2342 there are special cases to look out for. If there are *only*
2343 cleanups along a path, then it compresses to a zero action.
2344 Further, if there are multiple cleanups along a path, we only
2345 need to represent one of them, as that is enough to trigger
2346 entry to the landing pad at runtime. */
2347 next
= collect_one_action_chain (ar_hash
, region
->outer
);
2350 for (r
= region
->outer
; r
; r
= r
->outer
)
2351 if (r
->type
== ERT_CLEANUP
)
2353 return add_action_record (ar_hash
, 0, next
);
2360 /* Process the associated catch regions in reverse order.
2361 If there's a catch-all handler, then we don't need to
2362 search outer regions. Use a magic -3 value to record
2363 that we haven't done the outer search. */
2365 for (c
= region
->u
.eh_try
.last_catch
; c
; c
= c
->prev_catch
)
2367 if (c
->type_list
== NULL
)
2369 /* Retrieve the filter from the head of the filter list
2370 where we have stored it (see assign_filter_values). */
2371 int filter
= TREE_INT_CST_LOW (TREE_VALUE (c
->filter_list
));
2372 next
= add_action_record (ar_hash
, filter
, 0);
2376 /* Once the outer search is done, trigger an action record for
2377 each filter we have. */
2382 next
= collect_one_action_chain (ar_hash
, region
->outer
);
2384 /* If there is no next action, terminate the chain. */
2387 /* If all outer actions are cleanups or must_not_throw,
2388 we'll have no action record for it, since we had wanted
2389 to encode these states in the call-site record directly.
2390 Add a cleanup action to the chain to catch these. */
2392 next
= add_action_record (ar_hash
, 0, 0);
2395 flt_node
= c
->filter_list
;
2396 for (; flt_node
; flt_node
= TREE_CHAIN (flt_node
))
2398 int filter
= TREE_INT_CST_LOW (TREE_VALUE (flt_node
));
2399 next
= add_action_record (ar_hash
, filter
, next
);
2406 case ERT_ALLOWED_EXCEPTIONS
:
2407 /* An exception specification adds its filter to the
2408 beginning of the chain. */
2409 next
= collect_one_action_chain (ar_hash
, region
->outer
);
2411 /* If there is no next action, terminate the chain. */
2414 /* If all outer actions are cleanups or must_not_throw,
2415 we'll have no action record for it, since we had wanted
2416 to encode these states in the call-site record directly.
2417 Add a cleanup action to the chain to catch these. */
2419 next
= add_action_record (ar_hash
, 0, 0);
2421 return add_action_record (ar_hash
, region
->u
.allowed
.filter
, next
);
2423 case ERT_MUST_NOT_THROW
:
2424 /* A must-not-throw region with no inner handlers or cleanups
2425 requires no call-site entry. Note that this differs from
2426 the no handler or cleanup case in that we do require an lsda
2427 to be generated. Return a magic -2 value to record this. */
2435 add_call_site (rtx landing_pad
, int action
, int section
)
2437 call_site_record record
;
2439 record
= ggc_alloc
<call_site_record_d
> ();
2440 record
->landing_pad
= landing_pad
;
2441 record
->action
= action
;
2443 vec_safe_push (crtl
->eh
.call_site_record_v
[section
], record
);
2445 return call_site_base
+ crtl
->eh
.call_site_record_v
[section
]->length () - 1;
2449 emit_note_eh_region_end (rtx_insn
*insn
)
2451 rtx_insn
*next
= NEXT_INSN (insn
);
2453 /* Make sure we do not split a call and its corresponding
2454 CALL_ARG_LOCATION note. */
2455 if (next
&& NOTE_P (next
)
2456 && NOTE_KIND (next
) == NOTE_INSN_CALL_ARG_LOCATION
)
2459 return emit_note_after (NOTE_INSN_EH_REGION_END
, insn
);
/* Turn REG_EH_REGION notes back into NOTE_INSN_EH_REGION notes.
   The new note numbers will not refer to region numbers, but
   instead to call site entries.  */

static unsigned int
convert_to_eh_region_ranges (void)
{
  rtx insn;
  rtx_insn *iter;
  rtx note;
  action_hash_type ar_hash (31);
  /* State encoding for the previous insn's EH action:
     -3 = nothing seen yet, -2 = must-not-throw, -1 = no action but an
     LSDA entry is still required, >= 0 = index of an action chain.  */
  int last_action = -3;
  rtx_insn *last_action_insn = NULL;
  rtx last_landing_pad = NULL_RTX;
  rtx_insn *first_no_action_insn = NULL;
  int call_site = 0;
  int cur_sec = 0;
  rtx_insn *section_switch_note = NULL;
  rtx_insn *first_no_action_insn_before_switch = NULL;
  rtx_insn *last_no_action_insn_before_switch = NULL;
  int saved_call_site_base = call_site_base;

  vec_alloc (crtl->eh.action_record_data, 64);

  for (iter = get_insns (); iter ; iter = NEXT_INSN (iter))
    if (INSN_P (iter))
      {
	eh_landing_pad lp;
	eh_region region;
	bool nothrow;
	int this_action;
	rtx_code_label *this_landing_pad;

	insn = iter;
	/* Look inside a delay-slot SEQUENCE at the real insn.  */
	if (NONJUMP_INSN_P (insn)
	    && GET_CODE (PATTERN (insn)) == SEQUENCE)
	  insn = XVECEXP (PATTERN (insn), 0, 0);

	nothrow = get_eh_region_and_lp_from_rtx (insn, &region, &lp);
	if (nothrow)
	  continue;
	if (region)
	  this_action = collect_one_action_chain (&ar_hash, region);
	else
	  this_action = -1;

	/* Existence of catch handlers, or must-not-throw regions
	   implies that an lsda is needed (even if empty).  */
	if (this_action != -1)
	  crtl->uses_eh_lsda = 1;

	/* Delay creation of region notes for no-action regions
	   until we're sure that an lsda will be required.  */
	else if (last_action == -3)
	  {
	    first_no_action_insn = iter;
	    last_action = -1;
	  }

	if (this_action >= 0)
	  this_landing_pad = lp->landing_pad;
	else
	  this_landing_pad = NULL;

	/* Differing actions or landing pads implies a change in call-site
	   info, which implies some EH_REGION note should be emitted.  */
	if (last_action != this_action
	    || last_landing_pad != this_landing_pad)
	  {
	    /* If there is a queued no-action region in the other section
	       with hot/cold partitioning, emit it now.  */
	    if (first_no_action_insn_before_switch)
	      {
		gcc_assert (this_action != -1
			    && last_action == (first_no_action_insn
					       ? -1 : -3));
		call_site = add_call_site (NULL_RTX, 0, 0);
		note = emit_note_before (NOTE_INSN_EH_REGION_BEG,
					 first_no_action_insn_before_switch);
		NOTE_EH_HANDLER (note) = call_site;
		note
		  = emit_note_eh_region_end (last_no_action_insn_before_switch);
		NOTE_EH_HANDLER (note) = call_site;
		gcc_assert (last_action != -3
			    || (last_action_insn
				== last_no_action_insn_before_switch));
		first_no_action_insn_before_switch = NULL;
		last_no_action_insn_before_switch = NULL;
		/* Account for the record just emitted into section 0.  */
		call_site_base++;
	      }

	    /* If we'd not seen a previous action (-3) or the previous
	       action was must-not-throw (-2), then we do not need an
	       end note.  */
	    if (last_action >= -1)
	      {
		/* If we delayed the creation of the begin, do it now.  */
		if (first_no_action_insn)
		  {
		    call_site = add_call_site (NULL_RTX, 0, cur_sec);
		    note = emit_note_before (NOTE_INSN_EH_REGION_BEG,
					     first_no_action_insn);
		    NOTE_EH_HANDLER (note) = call_site;
		    first_no_action_insn = NULL;
		  }

		note = emit_note_eh_region_end (last_action_insn);
		NOTE_EH_HANDLER (note) = call_site;
	      }

	    /* If the new action is must-not-throw, then no region notes
	       are created.  */
	    if (this_action >= -1)
	      {
		call_site = add_call_site (this_landing_pad,
					   this_action < 0 ? 0 : this_action,
					   cur_sec);
		note = emit_note_before (NOTE_INSN_EH_REGION_BEG, iter);
		NOTE_EH_HANDLER (note) = call_site;
	      }

	    last_action = this_action;
	    last_landing_pad = this_landing_pad;
	  }

	last_action_insn = iter;
      }
    else if (NOTE_P (iter)
	     && NOTE_KIND (iter) == NOTE_INSN_SWITCH_TEXT_SECTIONS)
      {
	/* Only one hot/cold section switch is expected per function.  */
	gcc_assert (section_switch_note == NULL_RTX);
	gcc_assert (flag_reorder_blocks_and_partition);
	section_switch_note = iter;
	if (first_no_action_insn)
	  {
	    first_no_action_insn_before_switch = first_no_action_insn;
	    last_no_action_insn_before_switch = last_action_insn;
	    first_no_action_insn = NULL;
	    gcc_assert (last_action == -1);
	    last_action = -3;
	  }
	/* Force closing of current EH region before section switch and
	   opening a new one afterwards.  */
	else if (last_action != -3)
	  last_landing_pad = pc_rtx;
	if (crtl->eh.call_site_record_v[cur_sec])
	  call_site_base += crtl->eh.call_site_record_v[cur_sec]->length ();
	cur_sec++;
	gcc_assert (crtl->eh.call_site_record_v[cur_sec] == NULL);
	vec_alloc (crtl->eh.call_site_record_v[cur_sec], 10);
      }

  /* Close the region still open at end of function, if any.  */
  if (last_action >= -1 && ! first_no_action_insn)
    {
      note = emit_note_eh_region_end (last_action_insn);
      NOTE_EH_HANDLER (note) = call_site;
    }

  call_site_base = saved_call_site_base;

  return 0;
}
/* Pass metadata for the eh_ranges RTL pass; consumed by the pass
   manager.  */

const pass_data pass_data_convert_to_eh_region_ranges =
{
  RTL_PASS, /* type */
  "eh_ranges", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_NONE, /* tv_id */
  0, /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};
/* RTL pass wrapper around convert_to_eh_region_ranges.  */

class pass_convert_to_eh_region_ranges : public rtl_opt_pass
{
public:
  pass_convert_to_eh_region_ranges (gcc::context *ctxt)
    : rtl_opt_pass (pass_data_convert_to_eh_region_ranges, ctxt)
  {}

  /* opt_pass methods: */
  virtual bool gate (function *);
  virtual unsigned int execute (function *)
    {
      return convert_to_eh_region_ranges ();
    }

}; // class pass_convert_to_eh_region_ranges
2655 pass_convert_to_eh_region_ranges::gate (function
*)
2657 /* Nothing to do for SJLJ exceptions or if no regions created. */
2658 if (cfun
->eh
->region_tree
== NULL
)
2660 if (targetm_common
.except_unwind_info (&global_options
) == UI_SJLJ
)
/* Factory used by the pass manager to instantiate the eh_ranges pass
   for context CTXT.  */

rtl_opt_pass *
make_pass_convert_to_eh_region_ranges (gcc::context *ctxt)
{
  return new pass_convert_to_eh_region_ranges (ctxt);
}
2674 push_uleb128 (vec
<uchar
, va_gc
> **data_area
, unsigned int value
)
2678 unsigned char byte
= value
& 0x7f;
2682 vec_safe_push (*data_area
, byte
);
2688 push_sleb128 (vec
<uchar
, va_gc
> **data_area
, int value
)
2695 byte
= value
& 0x7f;
2697 more
= ! ((value
== 0 && (byte
& 0x40) == 0)
2698 || (value
== -1 && (byte
& 0x40) != 0));
2701 vec_safe_push (*data_area
, byte
);
2707 #ifndef HAVE_AS_LEB128
2709 dw2_size_of_call_site_table (int section
)
2711 int n
= vec_safe_length (crtl
->eh
.call_site_record_v
[section
]);
2712 int size
= n
* (4 + 4 + 4);
2715 for (i
= 0; i
< n
; ++i
)
2717 struct call_site_record_d
*cs
=
2718 (*crtl
->eh
.call_site_record_v
[section
])[i
];
2719 size
+= size_of_uleb128 (cs
->action
);
2726 sjlj_size_of_call_site_table (void)
2728 int n
= vec_safe_length (crtl
->eh
.call_site_record_v
[0]);
2732 for (i
= 0; i
< n
; ++i
)
2734 struct call_site_record_d
*cs
=
2735 (*crtl
->eh
.call_site_record_v
[0])[i
];
2736 size
+= size_of_uleb128 (INTVAL (cs
->landing_pad
));
2737 size
+= size_of_uleb128 (cs
->action
);
/* Emit the DWARF2 call-site table for SECTION (0 = normal text,
   1 = cold partition) to the assembler output.  CS_FORMAT is either
   DW_EH_PE_uleb128 or a 4-byte data format, and must match what the
   LSDA header announced.  Region offsets are emitted relative to the
   appropriate section start label.  */

static void
dw2_output_call_site_table (int cs_format, int section)
{
  int n = vec_safe_length (crtl->eh.call_site_record_v[section]);
  int i;
  const char *begin;

  /* Choose the base label the region offsets are relative to.  */
  if (section == 0)
    begin = current_function_func_begin_label;
  else if (first_function_block_is_cold)
    begin = crtl->subsections.hot_section_label;
  else
    begin = crtl->subsections.cold_section_label;

  for (i = 0; i < n; ++i)
    {
      struct call_site_record_d *cs = (*crtl->eh.call_site_record_v[section])[i];
      char reg_start_lab[32];
      char reg_end_lab[32];
      char landing_pad_lab[32];

      /* LEHB/LEHE labels bracket the region covered by this record.  */
      ASM_GENERATE_INTERNAL_LABEL (reg_start_lab, "LEHB", call_site_base + i);
      ASM_GENERATE_INTERNAL_LABEL (reg_end_lab, "LEHE", call_site_base + i);

      if (cs->landing_pad)
	ASM_GENERATE_INTERNAL_LABEL (landing_pad_lab, "L",
				     CODE_LABEL_NUMBER (cs->landing_pad));

      /* ??? Perhaps use insn length scaling if the assembler supports
	 generic arithmetic.  */
      /* ??? Perhaps use attr_length to choose data1 or data2 instead of
	 data4 if the function is small enough.  */
      if (cs_format == DW_EH_PE_uleb128)
	{
	  dw2_asm_output_delta_uleb128 (reg_start_lab, begin,
					"region %d start", i);
	  dw2_asm_output_delta_uleb128 (reg_end_lab, reg_start_lab,
					"length");
	  if (cs->landing_pad)
	    dw2_asm_output_delta_uleb128 (landing_pad_lab, begin,
					  "landing pad");
	  else
	    dw2_asm_output_data_uleb128 (0, "landing pad");
	}
      else
	{
	  dw2_asm_output_delta (4, reg_start_lab, begin,
				"region %d start", i);
	  dw2_asm_output_delta (4, reg_end_lab, reg_start_lab, "length");
	  if (cs->landing_pad)
	    dw2_asm_output_delta (4, landing_pad_lab, begin,
				  "landing pad");
	  else
	    dw2_asm_output_data (4, 0, "landing pad");
	}
      dw2_asm_output_data_uleb128 (cs->action, "action");
    }

  /* Keep the global label counter in sync across sections.  */
  call_site_base += n;
}
2806 sjlj_output_call_site_table (void)
2808 int n
= vec_safe_length (crtl
->eh
.call_site_record_v
[0]);
2811 for (i
= 0; i
< n
; ++i
)
2813 struct call_site_record_d
*cs
= (*crtl
->eh
.call_site_record_v
[0])[i
];
2815 dw2_asm_output_data_uleb128 (INTVAL (cs
->landing_pad
),
2816 "region %d landing pad", i
);
2817 dw2_asm_output_data_uleb128 (cs
->action
, "action");
2820 call_site_base
+= n
;
/* Switch to the section that should be used for exception tables.
   FNNAME is only used when per-function EH sections are emitted
   (HAVE_LD_EH_GC_SECTIONS); otherwise the chosen section is cached in
   exception_section for reuse.  */

static void
switch_to_exception_section (const char * ARG_UNUSED (fnname))
{
  section *s;

  if (exception_section)
    s = exception_section;
  else
    {
      int flags;

      if (EH_TABLES_CAN_BE_READ_ONLY)
	{
	  /* The table can be read-only only if the type-info pointers it
	     contains need no runtime relocation (non-PIC, or an indirect
	     encoding).  */
	  int tt_format =
	    ASM_PREFERRED_EH_DATA_FORMAT (/*code=*/0, /*global=*/1);
	  flags = ((! flag_pic
		    || ((tt_format & 0x70) != DW_EH_PE_absptr
			&& (tt_format & 0x70) != DW_EH_PE_aligned))
		   ? 0 : SECTION_WRITE);
	}
      else
	flags = SECTION_WRITE;

      /* Compute the section and cache it into exception_section,
	 unless it depends on the function name.  */
      if (targetm_common.have_named_sections)
	{
#ifdef HAVE_LD_EH_GC_SECTIONS
	  if (flag_function_sections
	      || (DECL_COMDAT_GROUP (current_function_decl) && HAVE_COMDAT_GROUP))
	    {
	      char *section_name = XNEWVEC (char, strlen (fnname) + 32);
	      /* The EH table must match the code section, so only mark
		 it linkonce if we have COMDAT groups to tie them together.  */
	      if (DECL_COMDAT_GROUP (current_function_decl) && HAVE_COMDAT_GROUP)
		flags |= SECTION_LINKONCE;
	      sprintf (section_name, ".gcc_except_table.%s", fnname);
	      s = get_section (section_name, flags, current_function_decl);
	      free (section_name);
	    }
	  else
#endif
	    exception_section
	      = s = get_section (".gcc_except_table", flags, NULL);
	}
      else
	exception_section
	  = s = flags == SECTION_WRITE ? data_section : readonly_data_section;
    }

  switch_to_section (s);
}
/* Output a reference from an exception table to the type_info object TYPE.
   TT_FORMAT and TT_FORMAT_SIZE describe the DWARF encoding method used for
   the reference.  A NULL_TREE TYPE emits a zero entry (catch-all).  */

static void
output_ttype (tree type, int tt_format, int tt_format_size)
{
  rtx value;
  bool is_public = true;

  if (type == NULL_TREE)
    value = const0_rtx;
  else
    {
      /* FIXME lto.  pass_ipa_free_lang_data changes all types to
	 runtime types so TYPE should already be a runtime type
	 reference.  When pass_ipa_free_lang data is made a default
	 pass, we can then remove the call to lookup_type_for_runtime
	 below.  */
      if (TYPE_P (type))
	type = lookup_type_for_runtime (type);

      value = expand_expr (type, NULL_RTX, VOIDmode, EXPAND_INITIALIZER);

      /* Let cgraph know that the rtti decl is used.  Not all of the
	 paths below go through assemble_integer, which would take
	 care of this for us.  */
      STRIP_NOPS (type);
      if (TREE_CODE (type) == ADDR_EXPR)
	{
	  type = TREE_OPERAND (type, 0);
	  if (TREE_CODE (type) == VAR_DECL)
	    is_public = TREE_PUBLIC (type);
	}
      else
	gcc_assert (TREE_CODE (type) == INTEGER_CST);
    }

  /* Allow the target to override the type table entry format.  */
  if (targetm.asm_out.ttype (value))
    return;

  if (tt_format == DW_EH_PE_absptr || tt_format == DW_EH_PE_aligned)
    assemble_integer (value, tt_format_size,
		      tt_format_size * BITS_PER_UNIT, 1);
  else
    dw2_asm_output_encoded_addr_rtx (tt_format, value, is_public, NULL);
}
/* Output one LSDA (language-specific data area) for the current
   function: the @LPStart/@TType header, the call-site table, the
   action record table, the @TType (type info) table and the exception
   specification table.  SECTION is 0 for the normal text section,
   1 for the cold partition (which gets its own LSDA).  */

static void
output_one_function_exception_table (int section)
{
  int tt_format, cs_format, lp_format, i;
#ifdef HAVE_AS_LEB128
  char ttype_label[32];
  char cs_after_size_label[32];
  char cs_end_label[32];
#else
  int call_site_len;
#endif
  int have_tt_data;
  int tt_format_size = 0;

  have_tt_data = (vec_safe_length (cfun->eh->ttype_data)
		  || (targetm.arm_eabi_unwinder
		      ? vec_safe_length (cfun->eh->ehspec_data.arm_eabi)
		      : vec_safe_length (cfun->eh->ehspec_data.other)));

  /* Indicate the format of the @TType entries.  */
  if (! have_tt_data)
    tt_format = DW_EH_PE_omit;
  else
    {
      tt_format = ASM_PREFERRED_EH_DATA_FORMAT (/*code=*/0, /*global=*/1);
#ifdef HAVE_AS_LEB128
      ASM_GENERATE_INTERNAL_LABEL (ttype_label,
				   section ? "LLSDATTC" : "LLSDATT",
				   current_function_funcdef_no);
#endif
      tt_format_size = size_of_encoded_value (tt_format);

      assemble_align (tt_format_size * BITS_PER_UNIT);
    }

  targetm.asm_out.internal_label (asm_out_file, section ? "LLSDAC" : "LLSDA",
				  current_function_funcdef_no);

  /* The LSDA header.  */

  /* Indicate the format of the landing pad start pointer.  An omitted
     field implies @LPStart == @Start.  */
  /* Currently we always put @LPStart == @Start.  This field would
     be most useful in moving the landing pads completely out of
     line to another section, but it could also be used to minimize
     the size of uleb128 landing pad offsets.  */
  lp_format = DW_EH_PE_omit;
  dw2_asm_output_data (1, lp_format, "@LPStart format (%s)",
		       eh_data_format_name (lp_format));

  /* @LPStart pointer would go here.  */

  dw2_asm_output_data (1, tt_format, "@TType format (%s)",
		       eh_data_format_name (tt_format));

#ifndef HAVE_AS_LEB128
  /* Without assembler leb128 support we must size the table by hand.  */
  if (targetm_common.except_unwind_info (&global_options) == UI_SJLJ)
    call_site_len = sjlj_size_of_call_site_table ();
  else
    call_site_len = dw2_size_of_call_site_table (section);
#endif

  /* A pc-relative 4-byte displacement to the @TType data.  */
  if (have_tt_data)
    {
#ifdef HAVE_AS_LEB128
      char ttype_after_disp_label[32];
      ASM_GENERATE_INTERNAL_LABEL (ttype_after_disp_label,
				   section ? "LLSDATTDC" : "LLSDATTD",
				   current_function_funcdef_no);
      dw2_asm_output_delta_uleb128 (ttype_label, ttype_after_disp_label,
				    "@TType base offset");
      ASM_OUTPUT_LABEL (asm_out_file, ttype_after_disp_label);
#else
      /* Ug.  Alignment queers things.  */
      unsigned int before_disp, after_disp, last_disp, disp;

      before_disp = 1 + 1;
      after_disp = (1 + size_of_uleb128 (call_site_len)
		    + call_site_len
		    + vec_safe_length (crtl->eh.action_record_data)
		    + (vec_safe_length (cfun->eh->ttype_data)
		       * tt_format_size));

      /* Iterate: the size of the uleb128 encoding of DISP feeds back
	 into the alignment padding and hence into DISP itself, so loop
	 until it reaches a fixed point.  */
      disp = after_disp;
      do
	{
	  unsigned int disp_size, pad;

	  last_disp = disp;
	  disp_size = size_of_uleb128 (disp);
	  pad = before_disp + disp_size + after_disp;
	  if (pad % tt_format_size)
	    pad = tt_format_size - (pad % tt_format_size);
	  else
	    pad = 0;
	  disp = after_disp + pad;
	}
      while (disp != last_disp);

      dw2_asm_output_data_uleb128 (disp, "@TType base offset");
#endif
    }

  /* Indicate the format of the call-site offsets.  */
#ifdef HAVE_AS_LEB128
  cs_format = DW_EH_PE_uleb128;
#else
  cs_format = DW_EH_PE_udata4;
#endif
  dw2_asm_output_data (1, cs_format, "call-site format (%s)",
		       eh_data_format_name (cs_format));

#ifdef HAVE_AS_LEB128
  ASM_GENERATE_INTERNAL_LABEL (cs_after_size_label,
			       section ? "LLSDACSBC" : "LLSDACSB",
			       current_function_funcdef_no);
  ASM_GENERATE_INTERNAL_LABEL (cs_end_label,
			       section ? "LLSDACSEC" : "LLSDACSE",
			       current_function_funcdef_no);
  dw2_asm_output_delta_uleb128 (cs_end_label, cs_after_size_label,
				"Call-site table length");
  ASM_OUTPUT_LABEL (asm_out_file, cs_after_size_label);
  if (targetm_common.except_unwind_info (&global_options) == UI_SJLJ)
    sjlj_output_call_site_table ();
  else
    dw2_output_call_site_table (cs_format, section);
  ASM_OUTPUT_LABEL (asm_out_file, cs_end_label);
#else
  dw2_asm_output_data_uleb128 (call_site_len, "Call-site table length");
  if (targetm_common.except_unwind_info (&global_options) == UI_SJLJ)
    sjlj_output_call_site_table ();
  else
    dw2_output_call_site_table (cs_format, section);
#endif

  /* ??? Decode and interpret the data for flag_debug_asm.  */
  {
    uchar uc;
    FOR_EACH_VEC_ELT (*crtl->eh.action_record_data, i, uc)
      dw2_asm_output_data (1, uc, i ? NULL : "Action record table");
  }

  if (have_tt_data)
    assemble_align (tt_format_size * BITS_PER_UNIT);

  /* The @TType table is emitted in reverse index order.  */
  i = vec_safe_length (cfun->eh->ttype_data);
  while (i-- > 0)
    {
      tree type = (*cfun->eh->ttype_data)[i];
      output_ttype (type, tt_format, tt_format_size);
    }

#ifdef HAVE_AS_LEB128
  if (have_tt_data)
    ASM_OUTPUT_LABEL (asm_out_file, ttype_label);
#endif

  /* ??? Decode and interpret the data for flag_debug_asm.  */
  if (targetm.arm_eabi_unwinder)
    {
      tree type;
      for (i = 0;
	   vec_safe_iterate (cfun->eh->ehspec_data.arm_eabi, i, &type); ++i)
	output_ttype (type, tt_format, tt_format_size);
    }
  else
    {
      uchar uc;
      for (i = 0;
	   vec_safe_iterate (cfun->eh->ehspec_data.other, i, &uc); ++i)
	dw2_asm_output_data (1, uc,
			     i ? NULL : "Exception specification table");
    }
}
/* Output the exception tables for the current function.  FNNAME is the
   assembler name of the function, used when building per-function
   exception sections.  Emits one LSDA for the normal text section and,
   with hot/cold partitioning, a second one for the cold section.  */

void
output_function_exception_table (const char *fnname)
{
  rtx personality = get_personality_function (current_function_decl);

  /* Not all functions need anything.  */
  if (! crtl->uses_eh_lsda)
    return;

  if (personality)
    {
      assemble_external_libcall (personality);

      if (targetm.asm_out.emit_except_personality)
	targetm.asm_out.emit_except_personality (personality);
    }

  switch_to_exception_section (fnname);

  /* If the target wants a label to begin the table, emit it here.  */
  targetm.asm_out.emit_except_table_label (asm_out_file);

  output_one_function_exception_table (0);
  /* A second table exists only when the function was partitioned.  */
  if (crtl->eh.call_site_record_v[1])
    output_one_function_exception_table (1);

  switch_to_section (current_function_section ());
}
/* Record TABLE as FUN's map from statements to EH region data.  */

void
set_eh_throw_stmt_table (function *fun, hash_map<gimple *, int> *table)
{
  fun->eh->throw_stmt_table = table;
}
/* Return FUN's map from statements to EH region data, as stored by
   set_eh_throw_stmt_table.  */

hash_map<gimple *, int> *
get_eh_throw_stmt_table (struct function *fun)
{
  return fun->eh->throw_stmt_table;
}
/* Determine if the function needs an EH personality function.  Returns
   eh_personality_none when no region needs one, eh_personality_any
   when the generic C personality suffices (cleanups only), and
   eh_personality_lang as soon as any region requires the language's
   own personality.  */

enum eh_personality_kind
function_needs_eh_personality (struct function *fn)
{
  enum eh_personality_kind kind = eh_personality_none;
  eh_region i;

  FOR_ALL_EH_REGION_FN (i, fn)
    {
      switch (i->type)
	{
	case ERT_CLEANUP:
	  /* Can do with any personality including the generic C one.  */
	  kind = eh_personality_any;
	  break;

	case ERT_TRY:
	case ERT_ALLOWED_EXCEPTIONS:
	  /* Always needs a EH personality function.  The generic C
	     personality doesn't handle these even for empty type lists.  */
	  return eh_personality_lang;

	case ERT_MUST_NOT_THROW:
	  /* Always needs a EH personality function.  The language may specify
	     what abort routine that must be used, e.g. std::terminate.  */
	  return eh_personality_lang;
	}
    }

  return kind;
}
/* Dump EH information to OUT.  Walks FUN's EH region tree depth-first,
   printing one line per region with its index, kind, landing pads and
   kind-specific details.  The landing-pad format differs between the
   GIMPLE and RTL phases of compilation.  */

void
dump_eh_tree (FILE * out, struct function *fun)
{
  eh_region i;
  int depth = 0;
  static const char *const type_name[] = {
    "cleanup", "try", "allowed_exceptions", "must_not_throw"
  };

  i = fun->eh->region_tree;
  if (!i)
    return;

  fprintf (out, "Eh tree:\n");
  while (1)
    {
      fprintf (out, "  %*s %i %s", depth * 2, "",
	       i->index, type_name[(int) i->type]);
      if (i->landing_pads)
	{
	  eh_landing_pad lp;

	  fprintf (out, " land:");
	  if (current_ir_type () == IR_GIMPLE)
	    {
	      for (lp = i->landing_pads; lp; lp = lp->next_lp)
		{
		  fprintf (out, "{%i,", lp->index);
		  print_generic_expr (out, lp->post_landing_pad, 0);
		  fputc ('}', out);
		  if (lp->next_lp)
		    fputc (',', out);
		}
	    }
	  else
	    {
	      for (lp = i->landing_pads; lp; lp = lp->next_lp)
		{
		  fprintf (out, "{%i,", lp->index);
		  if (lp->landing_pad)
		    fprintf (out, "%i%s,", INSN_UID (lp->landing_pad),
			     NOTE_P (lp->landing_pad) ? "(del)" : "");
		  else
		    fprintf (out, "(nil),");
		  if (lp->post_landing_pad)
		    {
		      rtx_insn *lab = label_rtx (lp->post_landing_pad);
		      fprintf (out, "%i%s}", INSN_UID (lab),
			       NOTE_P (lab) ? "(del)" : "");
		    }
		  else
		    fprintf (out, "(nil)}");
		  if (lp->next_lp)
		    fputc (',', out);
		}
	    }
	}

      switch (i->type)
	{
	case ERT_CLEANUP:
	case ERT_MUST_NOT_THROW:
	  break;

	case ERT_TRY:
	  {
	    eh_catch c;
	    fprintf (out, " catch:");
	    for (c = i->u.eh_try.first_catch; c; c = c->next_catch)
	      {
		fputc ('{', out);
		if (c->label)
		  {
		    fprintf (out, "lab:");
		    print_generic_expr (out, c->label, 0);
		    fputc (';', out);
		  }
		print_generic_expr (out, c->type_list, 0);
		fputc ('}', out);
		if (c->next_catch)
		  fputc (',', out);
	      }
	  }
	  break;

	case ERT_ALLOWED_EXCEPTIONS:
	  fprintf (out, " filter :%i types:", i->u.allowed.filter);
	  print_generic_expr (out, i->u.allowed.type_list, 0);
	  break;
	}
      fputc ('\n', out);

      /* If there are sub-regions, process them.  */
      if (i->inner)
	i = i->inner, depth++;
      /* If there are peers, process them.  */
      else if (i->next_peer)
	i = i->next_peer;
      /* Otherwise, step back up the tree to the next peer.  */
      else
	{
	  do
	    {
	      i = i->outer;
	      depth--;
	      if (i == NULL)
		return;
	    }
	  while (i->next_peer == NULL);
	  i = i->next_peer;
	}
    }
}
/* Dump the EH tree for FN on stderr.  Convenience wrapper for use from
   a debugger.  */

DEBUG_FUNCTION void
debug_eh_tree (struct function *fn)
{
  dump_eh_tree (stderr, fn);
}
/* Verify invariants on EH datastructures.  Checks that region_array
   and lp_array agree with the region tree, that parent/child links are
   consistent, and that landing pads point back at their regions.
   Dumps the tree and aborts with internal_error on any failure.  */

DEBUG_FUNCTION void
verify_eh_tree (struct function *fun)
{
  eh_region r, outer;
  int nvisited_lp, nvisited_r;
  int count_lp, count_r, depth, i;
  eh_landing_pad lp;
  bool err = false;

  if (!fun->eh->region_tree)
    return;

  /* Count the regions registered in region_array, verifying that each
     is stored at its own index.  Index 0 is never used.  */
  count_r = 0;
  for (i = 1; vec_safe_iterate (fun->eh->region_array, i, &r); ++i)
    if (r)
      {
	if (r->index == i)
	  count_r++;
	else
	  {
	    error ("region_array is corrupted for region %i", r->index);
	    err = true;
	  }
      }

  /* Likewise for the landing pads in lp_array.  */
  count_lp = 0;
  for (i = 1; vec_safe_iterate (fun->eh->lp_array, i, &lp); ++i)
    if (lp)
      {
	if (lp->index == i)
	  count_lp++;
	else
	  {
	    error ("lp_array is corrupted for lp %i", lp->index);
	    err = true;
	  }
      }

  /* Now walk the tree itself and cross-check against the arrays.  */
  depth = nvisited_lp = nvisited_r = 0;
  outer = NULL;
  r = fun->eh->region_tree;
  while (1)
    {
      if ((*fun->eh->region_array)[r->index] != r)
	{
	  error ("region_array is corrupted for region %i", r->index);
	  err = true;
	}
      if (r->outer != outer)
	{
	  error ("outer block of region %i is wrong", r->index);
	  err = true;
	}
      if (depth < 0)
	{
	  error ("negative nesting depth of region %i", r->index);
	  err = true;
	}
      nvisited_r++;

      for (lp = r->landing_pads; lp ; lp = lp->next_lp)
	{
	  if ((*fun->eh->lp_array)[lp->index] != lp)
	    {
	      error ("lp_array is corrupted for lp %i", lp->index);
	      err = true;
	    }
	  if (lp->region != r)
	    {
	      error ("region of lp %i is wrong", lp->index);
	      err = true;
	    }
	  nvisited_lp++;
	}

      /* Depth-first traversal: descend, then peers, then back up.  */
      if (r->inner)
	outer = r, r = r->inner, depth++;
      else if (r->next_peer)
	r = r->next_peer;
      else
	{
	  do
	    {
	      r = r->outer;
	      if (r == NULL)
		goto region_done;
	      depth--;
	      outer = r->outer;
	    }
	  while (r->next_peer == NULL);
	  r = r->next_peer;
	}
    }
 region_done:
  if (depth != 0)
    {
      error ("tree list ends on depth %i", depth);
      err = true;
    }
  if (count_r != nvisited_r)
    {
      error ("region_array does not match region_tree");
      err = true;
    }
  if (count_lp != nvisited_lp)
    {
      error ("lp_array does not match region_tree");
      err = true;
    }

  if (err)
    {
      dump_eh_tree (stderr, fun);
      internal_error ("verify_eh_tree failed");
    }
}
3422 #include "gt-except.h"