1 /* Implements exception handling.
2 Copyright (C) 1989-2015 Free Software Foundation, Inc.
3 Contributed by Mike Stump <mrs@cygnus.com>.
4
5 This file is part of GCC.
6
7 GCC is free software; you can redistribute it and/or modify it under
8 the terms of the GNU General Public License as published by the Free
9 Software Foundation; either version 3, or (at your option) any later
10 version.
11
12 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13 WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
15 for more details.
16
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
20
21
22 /* An exception is an event that can be "thrown" from within a
23 function. This event can then be "caught" by the callers of
24 the function.
25
26 The representation of exceptions changes several times during
27 the compilation process:
28
29 In the beginning, in the front end, we have the GENERIC trees
30 TRY_CATCH_EXPR, TRY_FINALLY_EXPR, WITH_CLEANUP_EXPR,
31 CLEANUP_POINT_EXPR, CATCH_EXPR, and EH_FILTER_EXPR.
32
33 During initial gimplification (gimplify.c) these are lowered
34 to the GIMPLE_TRY, GIMPLE_CATCH, and GIMPLE_EH_FILTER nodes.
35 The WITH_CLEANUP_EXPR and CLEANUP_POINT_EXPR nodes are converted
36 into GIMPLE_TRY_FINALLY nodes; the others are a more direct 1-1
37 conversion.
38
39 During pass_lower_eh (tree-eh.c) we record the nested structure
40 of the TRY nodes in EH_REGION nodes in CFUN->EH->REGION_TREE.
41 We expand the eh_protect_cleanup_actions langhook into MUST_NOT_THROW
42 regions at this time. We can then flatten the statements within
43 the TRY nodes to straight-line code. Statements that had been within
44 TRY nodes that can throw are recorded within CFUN->EH->THROW_STMT_TABLE,
45 so that we may remember what action is supposed to be taken if
46 a given statement does throw. During this lowering process,
47 we create an EH_LANDING_PAD node for each EH_REGION that has
48 some code within the function that needs to be executed if a
49 throw does happen. We also create RESX statements that are
50 used to transfer control from an inner EH_REGION to an outer
51 EH_REGION. We also create EH_DISPATCH statements as placeholders
52 for a runtime type comparison that should be made in order to
53 select the action to perform among different CATCH and EH_FILTER
54 regions.
55
56 During pass_lower_eh_dispatch (tree-eh.c), which is run after
57 all inlining is complete, we are able to run assign_filter_values,
58 which allows us to map the set of types manipulated by all of the
59 CATCH and EH_FILTER regions to a set of integers. This set of integers
60 will be how the exception runtime communicates with the code generated
61 within the function. We then expand the GIMPLE_EH_DISPATCH statements
62 to a switch or conditional branches that use the argument provided by
63 the runtime (__builtin_eh_filter) and the set of integers we computed
64 in assign_filter_values.
65
66 During pass_lower_resx (tree-eh.c), which is run near the end
67 of optimization, we expand RESX statements. If the eh region
68 that is outer to the RESX statement is a MUST_NOT_THROW, then
69 the RESX expands to some form of abort statement. If the eh
70 region that is outer to the RESX statement is within the current
71 function, then the RESX expands to a bookkeeping call
72 (__builtin_eh_copy_values) and a goto. Otherwise, the next
73 handler for the exception must be within a function somewhere
74 up the call chain, so we call back into the exception runtime
75 (__builtin_unwind_resume).
76
77 During pass_expand (cfgexpand.c), we generate REG_EH_REGION notes
78 that create an rtl to eh_region mapping that corresponds to the
79 gimple to eh_region mapping that had been recorded in the
80 THROW_STMT_TABLE.
81
82 Then, via finish_eh_generation, we generate the real landing pads
83 to which the runtime will actually transfer control. These new
84 landing pads perform whatever bookkeeping is needed by the target
85 backend in order to resume execution within the current function.
86 Each of these new landing pads falls through into the post_landing_pad
87 label which had been used within the CFG up to this point. All
88 exception edges within the CFG are redirected to the new landing pads.
89 If the target uses setjmp to implement exceptions, the various extra
90 calls into the runtime to register and unregister the current stack
91 frame are emitted at this time.
92
93 During pass_convert_to_eh_region_ranges (except.c), we transform
94 the REG_EH_REGION notes attached to individual insns into
95 non-overlapping ranges of insns bounded by NOTE_INSN_EH_REGION_BEG
96 and NOTE_INSN_EH_REGION_END. Each insn within such ranges has the
97 same associated action within the exception region tree, meaning
98 that (1) the exception is caught by the same landing pad within the
99 current function, (2) the exception is blocked by the runtime with
100 a MUST_NOT_THROW region, or (3) the exception is not handled at all
101 within the current function.
102
103 Finally, during assembly generation, we call
104 output_function_exception_table (except.c) to emit the tables with
105 which the exception runtime can determine if a given stack frame
106 handles a given exception, and if so what filter value to provide
107 to the function when the non-local control transfer is effected.
108 If the target uses dwarf2 unwinding to implement exceptions, then
109 output_call_frame_info (dwarf2out.c) emits the required unwind data. */
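/* As a purely illustrative example (not itself part of these sources):
   given a C++ fragment such as

	try { foo (); } catch (int) { bar (); }

   the front end emits a TRY_CATCH_EXPR; gimplification lowers it to a
   GIMPLE_TRY/GIMPLE_CATCH pair; pass_lower_eh records an ERT_TRY region
   with a landing pad for the catch clause; assign_filter_values (run by
   pass_lower_eh_dispatch) maps the "int" runtime type to an integer
   filter; and the call-site table emitted by
   output_function_exception_table associates the call to foo with that
   region's landing pad and filter.  */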
110
111
112 #include "config.h"
113 #include "system.h"
114 #include "coretypes.h"
115 #include "backend.h"
116 #include "cfghooks.h"
117 #include "rtl.h"
118 #include "alias.h"
119 #include "tree.h"
120 #include "fold-const.h"
121 #include "stringpool.h"
122 #include "stor-layout.h"
123 #include "flags.h"
124 #include "insn-codes.h"
125 #include "optabs.h"
126 #include "insn-config.h"
127 #include "expmed.h"
128 #include "dojump.h"
129 #include "explow.h"
130 #include "calls.h"
131 #include "emit-rtl.h"
132 #include "varasm.h"
133 #include "stmt.h"
134 #include "expr.h"
135 #include "libfuncs.h"
136 #include "except.h"
137 #include "output.h"
138 #include "dwarf2asm.h"
139 #include "dwarf2out.h"
140 #include "dwarf2.h"
141 #include "toplev.h"
142 #include "intl.h"
143 #include "tm_p.h"
144 #include "target.h"
145 #include "common/common-target.h"
146 #include "langhooks.h"
147 #include "cfgrtl.h"
148 #include "cgraph.h"
149 #include "diagnostic.h"
150 #include "tree-pretty-print.h"
151 #include "tree-pass.h"
152 #include "cfgloop.h"
153 #include "builtins.h"
154 #include "tree-hash-traits.h"
155
156 static GTY(()) int call_site_base;
157
158 static GTY (()) hash_map<tree_hash, tree> *type_to_runtime_map;
159
160 /* Describe the SjLj_Function_Context structure. */
161 static GTY(()) tree sjlj_fc_type_node;
162 static int sjlj_fc_call_site_ofs;
163 static int sjlj_fc_data_ofs;
164 static int sjlj_fc_personality_ofs;
165 static int sjlj_fc_lsda_ofs;
166 static int sjlj_fc_jbuf_ofs;
167 \f
168
169 struct GTY(()) call_site_record_d
170 {
171 rtx landing_pad;
172 int action;
173 };
174
175 /* In the following structure and associated functions,
176 we represent entries in the action table as 1-based indices.
177 Special cases are:
178
179 0: null action record, non-null landing pad; implies cleanups
180 -1: null action record, null landing pad; implies no action
181 -2: no call-site entry; implies must_not_throw
182 -3: we have yet to process outer regions
183
184 Further, no special cases apply to the "next" field of the record.
185 For next, 0 means end of list. */
186
187 struct action_record
188 {
189 int offset;
190 int filter;
191 int next;
192 };
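/* An illustrative (non-normative) example of the encoding above: a
   throw point protected by a cleanup nested inside a try region yields
   a chain of two records -- a cleanup record with filter == 0 whose
   "next" field links to the record for the enclosing catch, whose own
   filter selects the handler and whose next of 0 terminates the
   chain.  */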
193
194 /* Hashtable helpers. */
195
196 struct action_record_hasher : free_ptr_hash <action_record>
197 {
198 static inline hashval_t hash (const action_record *);
199 static inline bool equal (const action_record *, const action_record *);
200 };
201
202 inline hashval_t
203 action_record_hasher::hash (const action_record *entry)
204 {
205 return entry->next * 1009 + entry->filter;
206 }
207
208 inline bool
209 action_record_hasher::equal (const action_record *entry,
210 const action_record *data)
211 {
212 return entry->filter == data->filter && entry->next == data->next;
213 }
214
215 typedef hash_table<action_record_hasher> action_hash_type;
216 \f
217 static bool get_eh_region_and_lp_from_rtx (const_rtx, eh_region *,
218 eh_landing_pad *);
219
220 static void dw2_build_landing_pads (void);
221
222 static int collect_one_action_chain (action_hash_type *, eh_region);
223 static int add_call_site (rtx, int, int);
224
225 static void push_uleb128 (vec<uchar, va_gc> **, unsigned int);
226 static void push_sleb128 (vec<uchar, va_gc> **, int);
227 #ifndef HAVE_AS_LEB128
228 static int dw2_size_of_call_site_table (int);
229 static int sjlj_size_of_call_site_table (void);
230 #endif
231 static void dw2_output_call_site_table (int, int);
232 static void sjlj_output_call_site_table (void);
233
234 \f
235 void
236 init_eh (void)
237 {
238 if (! flag_exceptions)
239 return;
240
241 type_to_runtime_map = hash_map<tree_hash, tree>::create_ggc (31);
242
243 /* Create the SjLj_Function_Context structure. This should match
244 the definition in unwind-sjlj.c. */
245 if (targetm_common.except_unwind_info (&global_options) == UI_SJLJ)
246 {
247 tree f_jbuf, f_per, f_lsda, f_prev, f_cs, f_data, tmp;
248
249 sjlj_fc_type_node = lang_hooks.types.make_type (RECORD_TYPE);
250
251 f_prev = build_decl (BUILTINS_LOCATION,
252 FIELD_DECL, get_identifier ("__prev"),
253 build_pointer_type (sjlj_fc_type_node));
254 DECL_FIELD_CONTEXT (f_prev) = sjlj_fc_type_node;
255
256 f_cs = build_decl (BUILTINS_LOCATION,
257 FIELD_DECL, get_identifier ("__call_site"),
258 integer_type_node);
259 DECL_FIELD_CONTEXT (f_cs) = sjlj_fc_type_node;
260
261 tmp = build_index_type (size_int (4 - 1));
262 tmp = build_array_type (lang_hooks.types.type_for_mode
263 (targetm.unwind_word_mode (), 1),
264 tmp);
265 f_data = build_decl (BUILTINS_LOCATION,
266 FIELD_DECL, get_identifier ("__data"), tmp);
267 DECL_FIELD_CONTEXT (f_data) = sjlj_fc_type_node;
268
269 f_per = build_decl (BUILTINS_LOCATION,
270 FIELD_DECL, get_identifier ("__personality"),
271 ptr_type_node);
272 DECL_FIELD_CONTEXT (f_per) = sjlj_fc_type_node;
273
274 f_lsda = build_decl (BUILTINS_LOCATION,
275 FIELD_DECL, get_identifier ("__lsda"),
276 ptr_type_node);
277 DECL_FIELD_CONTEXT (f_lsda) = sjlj_fc_type_node;
278
279 #ifdef DONT_USE_BUILTIN_SETJMP
280 #ifdef JMP_BUF_SIZE
281 tmp = size_int (JMP_BUF_SIZE - 1);
282 #else
283 /* This should be large enough for most systems; if it is not,
284 JMP_BUF_SIZE should be defined with the proper value. It will
285 also tend to be larger than necessary for most systems; an
286 optimized port will define JMP_BUF_SIZE itself. */
287 tmp = size_int (FIRST_PSEUDO_REGISTER + 2 - 1);
288 #endif
289 #else
290 /* Compute a minimally sized jump buffer. We need room to store at
291 least 3 pointers - stack pointer, frame pointer and return address.
292 Plus for some targets we need room for an extra pointer - in the
293 case of MIPS this is the global pointer. This makes a total of four
294 pointers, but to be safe we actually allocate room for 5.
295
296 If pointers are smaller than words then we allocate enough room for
297 5 words, just in case the backend needs this much room. For more
298 discussion on this issue see:
299 http://gcc.gnu.org/ml/gcc-patches/2014-05/msg00313.html. */
300 if (POINTER_SIZE > BITS_PER_WORD)
301 tmp = size_int (5 - 1);
302 else
303 tmp = size_int ((5 * BITS_PER_WORD / POINTER_SIZE) - 1);
304 #endif
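      /* A worked example of the arithmetic above: with
	 POINTER_SIZE == BITS_PER_WORD == 64, the else branch computes
	 size_int (5 * 64 / 64 - 1) == size_int (4), i.e. room for five
	 pointers; with 32-bit pointers in 64-bit words it computes
	 size_int (9), i.e. ten 32-bit slots == five words.  */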
305
306 tmp = build_index_type (tmp);
307 tmp = build_array_type (ptr_type_node, tmp);
308 f_jbuf = build_decl (BUILTINS_LOCATION,
309 FIELD_DECL, get_identifier ("__jbuf"), tmp);
310 #ifdef DONT_USE_BUILTIN_SETJMP
311 /* We don't know what the alignment requirements of the
312 runtime's jmp_buf are. Overestimate. */
313 DECL_ALIGN (f_jbuf) = BIGGEST_ALIGNMENT;
314 DECL_USER_ALIGN (f_jbuf) = 1;
315 #endif
316 DECL_FIELD_CONTEXT (f_jbuf) = sjlj_fc_type_node;
317
318 TYPE_FIELDS (sjlj_fc_type_node) = f_prev;
319 TREE_CHAIN (f_prev) = f_cs;
320 TREE_CHAIN (f_cs) = f_data;
321 TREE_CHAIN (f_data) = f_per;
322 TREE_CHAIN (f_per) = f_lsda;
323 TREE_CHAIN (f_lsda) = f_jbuf;
324
325 layout_type (sjlj_fc_type_node);
326
327 /* Cache the interesting field offsets so that we have
328 easy access from rtl. */
329 sjlj_fc_call_site_ofs
330 = (tree_to_uhwi (DECL_FIELD_OFFSET (f_cs))
331 + tree_to_uhwi (DECL_FIELD_BIT_OFFSET (f_cs)) / BITS_PER_UNIT);
332 sjlj_fc_data_ofs
333 = (tree_to_uhwi (DECL_FIELD_OFFSET (f_data))
334 + tree_to_uhwi (DECL_FIELD_BIT_OFFSET (f_data)) / BITS_PER_UNIT);
335 sjlj_fc_personality_ofs
336 = (tree_to_uhwi (DECL_FIELD_OFFSET (f_per))
337 + tree_to_uhwi (DECL_FIELD_BIT_OFFSET (f_per)) / BITS_PER_UNIT);
338 sjlj_fc_lsda_ofs
339 = (tree_to_uhwi (DECL_FIELD_OFFSET (f_lsda))
340 + tree_to_uhwi (DECL_FIELD_BIT_OFFSET (f_lsda)) / BITS_PER_UNIT);
341 sjlj_fc_jbuf_ofs
342 = (tree_to_uhwi (DECL_FIELD_OFFSET (f_jbuf))
343 + tree_to_uhwi (DECL_FIELD_BIT_OFFSET (f_jbuf)) / BITS_PER_UNIT);
344 }
345 }
346
347 void
348 init_eh_for_function (void)
349 {
350 cfun->eh = ggc_cleared_alloc<eh_status> ();
351
352 /* Make sure the zeroth entries are used; index 0 then serves as "none". */
353 vec_safe_push (cfun->eh->region_array, (eh_region)0);
354 vec_safe_push (cfun->eh->lp_array, (eh_landing_pad)0);
355 }
356 \f
357 /* Routines to generate the exception tree somewhat directly.
358 These are used from tree-eh.c when processing exception related
359 nodes during tree optimization. */
360
361 static eh_region
362 gen_eh_region (enum eh_region_type type, eh_region outer)
363 {
364 eh_region new_eh;
365
366 /* Insert a new blank region as a leaf in the tree. */
367 new_eh = ggc_cleared_alloc<eh_region_d> ();
368 new_eh->type = type;
369 new_eh->outer = outer;
370 if (outer)
371 {
372 new_eh->next_peer = outer->inner;
373 outer->inner = new_eh;
374 }
375 else
376 {
377 new_eh->next_peer = cfun->eh->region_tree;
378 cfun->eh->region_tree = new_eh;
379 }
380
381 new_eh->index = vec_safe_length (cfun->eh->region_array);
382 vec_safe_push (cfun->eh->region_array, new_eh);
383
384 /* Copy the language's notion of whether to use __cxa_end_cleanup. */
385 if (targetm.arm_eabi_unwinder && lang_hooks.eh_use_cxa_end_cleanup)
386 new_eh->use_cxa_end_cleanup = true;
387
388 return new_eh;
389 }
390
391 eh_region
392 gen_eh_region_cleanup (eh_region outer)
393 {
394 return gen_eh_region (ERT_CLEANUP, outer);
395 }
396
397 eh_region
398 gen_eh_region_try (eh_region outer)
399 {
400 return gen_eh_region (ERT_TRY, outer);
401 }
402
403 eh_catch
404 gen_eh_region_catch (eh_region t, tree type_or_list)
405 {
406 eh_catch c, l;
407 tree type_list, type_node;
408
409 gcc_assert (t->type == ERT_TRY);
410
411 /* Make sure we always end up with a type list, to normalize further
412 processing; then register each type against the runtime types map. */
413 type_list = type_or_list;
414 if (type_or_list)
415 {
416 if (TREE_CODE (type_or_list) != TREE_LIST)
417 type_list = tree_cons (NULL_TREE, type_or_list, NULL_TREE);
418
419 type_node = type_list;
420 for (; type_node; type_node = TREE_CHAIN (type_node))
421 add_type_for_runtime (TREE_VALUE (type_node));
422 }
423
424 c = ggc_cleared_alloc<eh_catch_d> ();
425 c->type_list = type_list;
426 l = t->u.eh_try.last_catch;
427 c->prev_catch = l;
428 if (l)
429 l->next_catch = c;
430 else
431 t->u.eh_try.first_catch = c;
432 t->u.eh_try.last_catch = c;
433
434 return c;
435 }
436
437 eh_region
438 gen_eh_region_allowed (eh_region outer, tree allowed)
439 {
440 eh_region region = gen_eh_region (ERT_ALLOWED_EXCEPTIONS, outer);
441 region->u.allowed.type_list = allowed;
442
443 for (; allowed ; allowed = TREE_CHAIN (allowed))
444 add_type_for_runtime (TREE_VALUE (allowed));
445
446 return region;
447 }
448
449 eh_region
450 gen_eh_region_must_not_throw (eh_region outer)
451 {
452 return gen_eh_region (ERT_MUST_NOT_THROW, outer);
453 }
454
455 eh_landing_pad
456 gen_eh_landing_pad (eh_region region)
457 {
458 eh_landing_pad lp = ggc_cleared_alloc<eh_landing_pad_d> ();
459
460 lp->next_lp = region->landing_pads;
461 lp->region = region;
462 lp->index = vec_safe_length (cfun->eh->lp_array);
463 region->landing_pads = lp;
464
465 vec_safe_push (cfun->eh->lp_array, lp);
466
467 return lp;
468 }
469
470 eh_region
471 get_eh_region_from_number_fn (struct function *ifun, int i)
472 {
473 return (*ifun->eh->region_array)[i];
474 }
475
476 eh_region
477 get_eh_region_from_number (int i)
478 {
479 return get_eh_region_from_number_fn (cfun, i);
480 }
481
482 eh_landing_pad
483 get_eh_landing_pad_from_number_fn (struct function *ifun, int i)
484 {
485 return (*ifun->eh->lp_array)[i];
486 }
487
488 eh_landing_pad
489 get_eh_landing_pad_from_number (int i)
490 {
491 return get_eh_landing_pad_from_number_fn (cfun, i);
492 }
493
494 eh_region
495 get_eh_region_from_lp_number_fn (struct function *ifun, int i)
496 {
497 if (i < 0)
498 return (*ifun->eh->region_array)[-i];
499 else if (i == 0)
500 return NULL;
501 else
502 {
503 eh_landing_pad lp;
504 lp = (*ifun->eh->lp_array)[i];
505 return lp->region;
506 }
507 }
508
509 eh_region
510 get_eh_region_from_lp_number (int i)
511 {
512 return get_eh_region_from_lp_number_fn (cfun, i);
513 }
514 \f
515 /* Returns true if the current function has exception handling regions. */
516
517 bool
518 current_function_has_exception_handlers (void)
519 {
520 return cfun->eh->region_tree != NULL;
521 }
522 \f
523 /* A subroutine of duplicate_eh_regions. Copy the eh_region tree at OLD.
524 Root it at OUTER, and apply LP_OFFSET to the lp numbers. */
525
526 struct duplicate_eh_regions_data
527 {
528 duplicate_eh_regions_map label_map;
529 void *label_map_data;
530 hash_map<void *, void *> *eh_map;
531 };
532
533 static void
534 duplicate_eh_regions_1 (struct duplicate_eh_regions_data *data,
535 eh_region old_r, eh_region outer)
536 {
537 eh_landing_pad old_lp, new_lp;
538 eh_region new_r;
539
540 new_r = gen_eh_region (old_r->type, outer);
541 gcc_assert (!data->eh_map->put (old_r, new_r));
542
543 switch (old_r->type)
544 {
545 case ERT_CLEANUP:
546 break;
547
548 case ERT_TRY:
549 {
550 eh_catch oc, nc;
551 for (oc = old_r->u.eh_try.first_catch; oc ; oc = oc->next_catch)
552 {
553 /* We should be doing all our region duplication before and
554 during inlining, which is before filter lists are created. */
555 gcc_assert (oc->filter_list == NULL);
556 nc = gen_eh_region_catch (new_r, oc->type_list);
557 nc->label = data->label_map (oc->label, data->label_map_data);
558 }
559 }
560 break;
561
562 case ERT_ALLOWED_EXCEPTIONS:
563 new_r->u.allowed.type_list = old_r->u.allowed.type_list;
564 if (old_r->u.allowed.label)
565 new_r->u.allowed.label
566 = data->label_map (old_r->u.allowed.label, data->label_map_data);
567 else
568 new_r->u.allowed.label = NULL_TREE;
569 break;
570
571 case ERT_MUST_NOT_THROW:
572 new_r->u.must_not_throw.failure_loc =
573 LOCATION_LOCUS (old_r->u.must_not_throw.failure_loc);
574 new_r->u.must_not_throw.failure_decl =
575 old_r->u.must_not_throw.failure_decl;
576 break;
577 }
578
579 for (old_lp = old_r->landing_pads; old_lp ; old_lp = old_lp->next_lp)
580 {
581 /* Don't bother copying unused landing pads. */
582 if (old_lp->post_landing_pad == NULL)
583 continue;
584
585 new_lp = gen_eh_landing_pad (new_r);
586 gcc_assert (!data->eh_map->put (old_lp, new_lp));
587
588 new_lp->post_landing_pad
589 = data->label_map (old_lp->post_landing_pad, data->label_map_data);
590 EH_LANDING_PAD_NR (new_lp->post_landing_pad) = new_lp->index;
591 }
592
593 /* Make sure to preserve the original use of __cxa_end_cleanup. */
594 new_r->use_cxa_end_cleanup = old_r->use_cxa_end_cleanup;
595
596 for (old_r = old_r->inner; old_r ; old_r = old_r->next_peer)
597 duplicate_eh_regions_1 (data, old_r, new_r);
598 }
599
600 /* Duplicate the EH regions from IFUN rooted at COPY_REGION into
601 the current function and root the tree below OUTER_REGION.
602 The special case of COPY_REGION of NULL means all regions.
603 Remap labels using MAP/MAP_DATA callback. Return a pointer map
604 that allows the caller to remap uses of both EH regions and
605 EH landing pads. */
606
607 hash_map<void *, void *> *
608 duplicate_eh_regions (struct function *ifun,
609 eh_region copy_region, int outer_lp,
610 duplicate_eh_regions_map map, void *map_data)
611 {
612 struct duplicate_eh_regions_data data;
613 eh_region outer_region;
614
615 if (flag_checking)
616 verify_eh_tree (ifun);
617
618 data.label_map = map;
619 data.label_map_data = map_data;
620 data.eh_map = new hash_map<void *, void *>;
621
622 outer_region = get_eh_region_from_lp_number_fn (cfun, outer_lp);
623
624 /* Copy all the regions in the subtree. */
625 if (copy_region)
626 duplicate_eh_regions_1 (&data, copy_region, outer_region);
627 else
628 {
629 eh_region r;
630 for (r = ifun->eh->region_tree; r ; r = r->next_peer)
631 duplicate_eh_regions_1 (&data, r, outer_region);
632 }
633
634 if (flag_checking)
635 verify_eh_tree (cfun);
636
637 return data.eh_map;
638 }
639
640 /* Return the region that is outer to both REGION_A and REGION_B in IFUN. */
641
642 eh_region
643 eh_region_outermost (struct function *ifun, eh_region region_a,
644 eh_region region_b)
645 {
646 sbitmap b_outer;
647
648 gcc_assert (ifun->eh->region_array);
649 gcc_assert (ifun->eh->region_tree);
650
651 b_outer = sbitmap_alloc (ifun->eh->region_array->length ());
652 bitmap_clear (b_outer);
653
654 do
655 {
656 bitmap_set_bit (b_outer, region_b->index);
657 region_b = region_b->outer;
658 }
659 while (region_b);
660
661 do
662 {
663 if (bitmap_bit_p (b_outer, region_a->index))
664 break;
665 region_a = region_a->outer;
666 }
667 while (region_a);
668
669 sbitmap_free (b_outer);
670 return region_a;
671 }
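/* In effect, eh_region_outermost computes the nearest common ancestor
   of REGION_A and REGION_B in the region tree (or NULL if they share
   none): B's ancestors are marked in a bitmap, and then A walks
   outward until it reaches a marked region.  */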
672 \f
673 void
674 add_type_for_runtime (tree type)
675 {
676 /* If TYPE is NOP_EXPR, it means that it already is a runtime type. */
677 if (TREE_CODE (type) == NOP_EXPR)
678 return;
679
680 bool existed = false;
681 tree *slot = &type_to_runtime_map->get_or_insert (type, &existed);
682 if (!existed)
683 *slot = lang_hooks.eh_runtime_type (type);
684 }
685
686 tree
687 lookup_type_for_runtime (tree type)
688 {
689 /* If TYPE is NOP_EXPR, it means that it already is a runtime type. */
690 if (TREE_CODE (type) == NOP_EXPR)
691 return type;
692
693 /* We should have always inserted the data earlier. */
694 return *type_to_runtime_map->get (type);
695 }
696
697 \f
698 /* Represent an entry in @TTypes for either catch actions
699 or exception filter actions. */
700 struct ttypes_filter {
701 tree t;
702 int filter;
703 };
704
705 /* Helper for ttypes_filter hashing. */
706
707 struct ttypes_filter_hasher : free_ptr_hash <ttypes_filter>
708 {
709 typedef tree_node *compare_type;
710 static inline hashval_t hash (const ttypes_filter *);
711 static inline bool equal (const ttypes_filter *, const tree_node *);
712 };
713
714 /* Compare ENTRY (a ttypes_filter entry in the hash table) with DATA
715 (a tree) for a @TTypes type node we are thinking about adding. */
716
717 inline bool
718 ttypes_filter_hasher::equal (const ttypes_filter *entry, const tree_node *data)
719 {
720 return entry->t == data;
721 }
722
723 inline hashval_t
724 ttypes_filter_hasher::hash (const ttypes_filter *entry)
725 {
726 return TREE_HASH (entry->t);
727 }
728
729 typedef hash_table<ttypes_filter_hasher> ttypes_hash_type;
730
731
732 /* Helper for ehspec hashing. */
733
734 struct ehspec_hasher : free_ptr_hash <ttypes_filter>
735 {
736 static inline hashval_t hash (const ttypes_filter *);
737 static inline bool equal (const ttypes_filter *, const ttypes_filter *);
738 };
739
740 /* Compare ENTRY with DATA (both struct ttypes_filter) for a @TTypes
741 exception specification list we are thinking about adding. */
742 /* ??? Currently we use the type lists in the order given. Someone
743 should put these in some canonical order. */
744
745 inline bool
746 ehspec_hasher::equal (const ttypes_filter *entry, const ttypes_filter *data)
747 {
748 return type_list_equal (entry->t, data->t);
749 }
750
751 /* Hash function for exception specification lists. */
752
753 inline hashval_t
754 ehspec_hasher::hash (const ttypes_filter *entry)
755 {
756 hashval_t h = 0;
757 tree list;
758
759 for (list = entry->t; list ; list = TREE_CHAIN (list))
760 h = (h << 5) + (h >> 27) + TREE_HASH (TREE_VALUE (list));
761 return h;
762 }
763
764 typedef hash_table<ehspec_hasher> ehspec_hash_type;
765
766
767 /* Add TYPE (which may be NULL) to cfun->eh->ttype_data, using TYPES_HASH
768 to speed up the search. Return the filter value to be used. */
769
770 static int
771 add_ttypes_entry (ttypes_hash_type *ttypes_hash, tree type)
772 {
773 struct ttypes_filter **slot, *n;
774
775 slot = ttypes_hash->find_slot_with_hash (type, (hashval_t) TREE_HASH (type),
776 INSERT);
777
778 if ((n = *slot) == NULL)
779 {
780 /* Filter value is a 1 based table index. */
781
782 n = XNEW (struct ttypes_filter);
783 n->t = type;
784 n->filter = vec_safe_length (cfun->eh->ttype_data) + 1;
785 *slot = n;
786
787 vec_safe_push (cfun->eh->ttype_data, type);
788 }
789
790 return n->filter;
791 }
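/* Illustratively: the first distinct type interned through
   add_ttypes_entry receives filter 1, the second filter 2, and so on;
   a filter is simply a 1-based index into cfun->eh->ttype_data.  */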
792
793 /* Add LIST to cfun->eh->ehspec_data, using EHSPEC_HASH and TYPES_HASH
794 to speed up the search. Return the filter value to be used. */
795
796 static int
797 add_ehspec_entry (ehspec_hash_type *ehspec_hash, ttypes_hash_type *ttypes_hash,
798 tree list)
799 {
800 struct ttypes_filter **slot, *n;
801 struct ttypes_filter dummy;
802
803 dummy.t = list;
804 slot = ehspec_hash->find_slot (&dummy, INSERT);
805
806 if ((n = *slot) == NULL)
807 {
808 int len;
809
810 if (targetm.arm_eabi_unwinder)
811 len = vec_safe_length (cfun->eh->ehspec_data.arm_eabi);
812 else
813 len = vec_safe_length (cfun->eh->ehspec_data.other);
814
815 /* Filter value is a -1 based byte index into a uleb128 buffer. */
816
817 n = XNEW (struct ttypes_filter);
818 n->t = list;
819 n->filter = -(len + 1);
820 *slot = n;
821
822 /* Generate a 0 terminated list of filter values. */
823 for (; list ; list = TREE_CHAIN (list))
824 {
825 if (targetm.arm_eabi_unwinder)
826 vec_safe_push (cfun->eh->ehspec_data.arm_eabi, TREE_VALUE (list));
827 else
828 {
829 /* Look up each type in the list and encode its filter
830 value as a uleb128. */
831 push_uleb128 (&cfun->eh->ehspec_data.other,
832 add_ttypes_entry (ttypes_hash, TREE_VALUE (list)));
833 }
834 }
835 if (targetm.arm_eabi_unwinder)
836 vec_safe_push (cfun->eh->ehspec_data.arm_eabi, NULL_TREE);
837 else
838 vec_safe_push (cfun->eh->ehspec_data.other, (uchar)0);
839 }
840
841 return n->filter;
842 }
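/* A concrete (illustrative) example of the encoding above: for an
   exception specification throw (A, B) whose types were interned with
   ttypes filters 3 and 7, the bytes 0x03 0x07 0x00 are appended to
   ehspec_data.other, and the entry's filter value is the negated
   1-based offset of that run within the buffer.  Small values occupy
   one uleb128 byte each; a larger value such as 624485 would encode as
   0xE5 0x8E 0x26.  */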
843
844 /* Generate the action filter values to be used for CATCH and
845 ALLOWED_EXCEPTIONS regions. When using dwarf2 exception regions,
846 we use lots of landing pads, and so every type or list can share
847 the same filter value, which saves table space. */
848
849 void
850 assign_filter_values (void)
851 {
852 int i;
853 eh_region r;
854 eh_catch c;
855
856 vec_alloc (cfun->eh->ttype_data, 16);
857 if (targetm.arm_eabi_unwinder)
858 vec_alloc (cfun->eh->ehspec_data.arm_eabi, 64);
859 else
860 vec_alloc (cfun->eh->ehspec_data.other, 64);
861
862 ehspec_hash_type ehspec (31);
863 ttypes_hash_type ttypes (31);
864
865 for (i = 1; vec_safe_iterate (cfun->eh->region_array, i, &r); ++i)
866 {
867 if (r == NULL)
868 continue;
869
870 switch (r->type)
871 {
872 case ERT_TRY:
873 for (c = r->u.eh_try.first_catch; c ; c = c->next_catch)
874 {
875 /* Whatever type_list is (NULL or a real list), we build a list
876 of filters for the region. */
877 c->filter_list = NULL_TREE;
878
879 if (c->type_list != NULL)
880 {
881 /* Get a filter value for each of the types caught and store
882 them in the region's dedicated list. */
883 tree tp_node = c->type_list;
884
885 for ( ; tp_node; tp_node = TREE_CHAIN (tp_node))
886 {
887 int flt
888 = add_ttypes_entry (&ttypes, TREE_VALUE (tp_node));
889 tree flt_node = build_int_cst (integer_type_node, flt);
890
891 c->filter_list
892 = tree_cons (NULL_TREE, flt_node, c->filter_list);
893 }
894 }
895 else
896 {
897 /* Get a filter value for the NULL list also since it
898 will need an action record anyway. */
899 int flt = add_ttypes_entry (&ttypes, NULL);
900 tree flt_node = build_int_cst (integer_type_node, flt);
901
902 c->filter_list
903 = tree_cons (NULL_TREE, flt_node, NULL);
904 }
905 }
906 break;
907
908 case ERT_ALLOWED_EXCEPTIONS:
909 r->u.allowed.filter
910 = add_ehspec_entry (&ehspec, &ttypes, r->u.allowed.type_list);
911 break;
912
913 default:
914 break;
915 }
916 }
917 }
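/* An illustrative example of the result: for a try region with
   handlers "catch (A)" and "catch (B)", each eh_catch's filter_list
   ends up holding a single INTEGER_CST carrying that handler's ttypes
   filter; a catch-all handler (NULL type_list) likewise gets a filter
   for the NULL entry, since it still needs an action record.  */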
918
919 /* Emit SEQ into a basic block just before INSN (which is assumed to be
920 the first instruction of some existing BB) and return the newly
921 produced block. */
922 static basic_block
923 emit_to_new_bb_before (rtx_insn *seq, rtx insn)
924 {
925 rtx_insn *last;
926 basic_block bb;
927 edge e;
928 edge_iterator ei;
929
930 /* If there happens to be a fallthru edge (possibly created by cleanup_cfg
931 call), we don't want it to go into newly created landing pad or other EH
932 construct. */
933 for (ei = ei_start (BLOCK_FOR_INSN (insn)->preds); (e = ei_safe_edge (ei)); )
934 if (e->flags & EDGE_FALLTHRU)
935 force_nonfallthru (e);
936 else
937 ei_next (&ei);
938 last = emit_insn_before (seq, insn);
939 if (BARRIER_P (last))
940 last = PREV_INSN (last);
941 bb = create_basic_block (seq, last, BLOCK_FOR_INSN (insn)->prev_bb);
942 update_bb_for_insn (bb);
943 bb->flags |= BB_SUPERBLOCK;
944 return bb;
945 }
946 \f
947 /* A subroutine of dw2_build_landing_pads, also used for edge splitting
948 at the rtl level. Emit the code required by the target at a landing
949 pad for the given region. */
950
951 void
952 expand_dw2_landing_pad_for_region (eh_region region)
953 {
954 if (targetm.have_exception_receiver ())
955 emit_insn (targetm.gen_exception_receiver ());
956 else if (targetm.have_nonlocal_goto_receiver ())
957 emit_insn (targetm.gen_nonlocal_goto_receiver ());
958 else
959 { /* Nothing */ }
960
961 if (region->exc_ptr_reg)
962 emit_move_insn (region->exc_ptr_reg,
963 gen_rtx_REG (ptr_mode, EH_RETURN_DATA_REGNO (0)));
964 if (region->filter_reg)
965 emit_move_insn (region->filter_reg,
966 gen_rtx_REG (targetm.eh_return_filter_mode (),
967 EH_RETURN_DATA_REGNO (1)));
968 }
969
970 /* Expand the extra code needed at landing pads for dwarf2 unwinding. */
971
972 static void
973 dw2_build_landing_pads (void)
974 {
975 int i;
976 eh_landing_pad lp;
977 int e_flags = EDGE_FALLTHRU;
978
979 /* If we're going to partition blocks, we need to be able to add
980 new landing pads later, which means that we need to hold on to
981 the post-landing-pad block. Prevent it from being merged away.
982 We'll remove this bit after partitioning. */
983 if (flag_reorder_blocks_and_partition)
984 e_flags |= EDGE_PRESERVE;
985
986 for (i = 1; vec_safe_iterate (cfun->eh->lp_array, i, &lp); ++i)
987 {
988 basic_block bb;
989 rtx_insn *seq;
990 edge e;
991
992 if (lp == NULL || lp->post_landing_pad == NULL)
993 continue;
994
995 start_sequence ();
996
997 lp->landing_pad = gen_label_rtx ();
998 emit_label (lp->landing_pad);
999 LABEL_PRESERVE_P (lp->landing_pad) = 1;
1000
1001 expand_dw2_landing_pad_for_region (lp->region);
1002
1003 seq = get_insns ();
1004 end_sequence ();
1005
1006 bb = emit_to_new_bb_before (seq, label_rtx (lp->post_landing_pad));
1007 e = make_edge (bb, bb->next_bb, e_flags);
1008 e->count = bb->count;
1009 e->probability = REG_BR_PROB_BASE;
1010 if (current_loops)
1011 {
1012 struct loop *loop = bb->next_bb->loop_father;
1013 /* If we created a pre-header block, add the new block to the
1014 outer loop, otherwise to the loop itself. */
1015 if (bb->next_bb == loop->header)
1016 add_bb_to_loop (bb, loop_outer (loop));
1017 else
1018 add_bb_to_loop (bb, loop);
1019 }
1020 }
1021 }
1022
1023 \f
1024 static vec<int> sjlj_lp_call_site_index;
1025
1026 /* Process all active landing pads. Assign each one a compact dispatch
1027 index, and a call-site index. */
1028
1029 static int
1030 sjlj_assign_call_site_values (void)
1031 {
1032 action_hash_type ar_hash (31);
1033 int i, disp_index;
1034 eh_landing_pad lp;
1035
1036 vec_alloc (crtl->eh.action_record_data, 64);
1037
1038 disp_index = 0;
1039 call_site_base = 1;
1040 for (i = 1; vec_safe_iterate (cfun->eh->lp_array, i, &lp); ++i)
1041 if (lp && lp->post_landing_pad)
1042 {
1043 int action, call_site;
1044
1045 /* First: build the action table. */
1046 action = collect_one_action_chain (&ar_hash, lp->region);
1047
1048 /* Next: assign call-site values. In dwarf2 terms, this would be
1049 the region number assigned by convert_to_eh_region_ranges, but
1050 this handles no-action and must-not-throw differently. */
1051 /* Map must-not-throw to otherwise unused call-site index 0. */
1052 if (action == -2)
1053 call_site = 0;
1054 /* Map no-action to otherwise unused call-site index -1. */
1055 else if (action == -1)
1056 call_site = -1;
1057 /* Otherwise, look it up in the table. */
1058 else
1059 call_site = add_call_site (GEN_INT (disp_index), action, 0);
1060 sjlj_lp_call_site_index[i] = call_site;
1061
1062 disp_index++;
1063 }
1064
1065 return disp_index;
1066 }
1067
1068 /* Emit code to record the current call-site index before every
1069 insn that can throw. */
1070
1071 static void
1072 sjlj_mark_call_sites (void)
1073 {
1074 int last_call_site = -2;
1075 rtx_insn *insn;
1076 rtx mem;
1077
1078 for (insn = get_insns (); insn ; insn = NEXT_INSN (insn))
1079 {
1080 eh_landing_pad lp;
1081 eh_region r;
1082 bool nothrow;
1083 int this_call_site;
1084 rtx_insn *before, *p;
1085
1086 /* Reset value tracking at extended basic block boundaries. */
1087 if (LABEL_P (insn))
1088 last_call_site = -2;
1089
1090 /* If the function allocates dynamic stack space, the context must
1091 be updated after every allocation/deallocation accordingly. */
1092 if (NOTE_P (insn) && NOTE_KIND (insn) == NOTE_INSN_UPDATE_SJLJ_CONTEXT)
1093 {
1094 rtx buf_addr;
1095
1096 start_sequence ();
1097 buf_addr = plus_constant (Pmode, XEXP (crtl->eh.sjlj_fc, 0),
1098 sjlj_fc_jbuf_ofs);
1099 expand_builtin_update_setjmp_buf (buf_addr);
1100 p = get_insns ();
1101 end_sequence ();
1102 emit_insn_before (p, insn);
1103 }
1104
1105 if (! INSN_P (insn))
1106 continue;
1107
1108 nothrow = get_eh_region_and_lp_from_rtx (insn, &r, &lp);
1109 if (nothrow)
1110 continue;
1111 if (lp)
1112 this_call_site = sjlj_lp_call_site_index[lp->index];
1113 else if (r == NULL)
1114 {
1115 /* Calls (and trapping insns) without notes are outside any
1116 exception handling region in this function. Mark them as
1117 no action. */
1118 this_call_site = -1;
1119 }
1120 else
1121 {
1122 gcc_assert (r->type == ERT_MUST_NOT_THROW);
1123 this_call_site = 0;
1124 }
1125
1126 if (this_call_site != -1)
1127 crtl->uses_eh_lsda = 1;
1128
1129 if (this_call_site == last_call_site)
1130 continue;
1131
1132 /* Don't separate a call from its argument loads. */
1133 before = insn;
1134 if (CALL_P (insn))
1135 before = find_first_parameter_load (insn, NULL);
1136
1137 start_sequence ();
1138 mem = adjust_address (crtl->eh.sjlj_fc, TYPE_MODE (integer_type_node),
1139 sjlj_fc_call_site_ofs);
1140 emit_move_insn (mem, gen_int_mode (this_call_site, GET_MODE (mem)));
1141 p = get_insns ();
1142 end_sequence ();
1143
1144 emit_insn_before (p, before);
1145 last_call_site = this_call_site;
1146 }
1147 }
1148
1149 /* Construct the SjLj_Function_Context. */
1150
1151 static void
1152 sjlj_emit_function_enter (rtx_code_label *dispatch_label)
1153 {
1154 rtx_insn *fn_begin, *seq;
1155 rtx fc, mem;
1156 bool fn_begin_outside_block;
1157 rtx personality = get_personality_function (current_function_decl);
1158
1159 fc = crtl->eh.sjlj_fc;
1160
1161 start_sequence ();
1162
1163 /* We're storing this libcall's address into memory instead of
1164 calling it directly. Thus, we must call assemble_external_libcall
1165 here, as we cannot depend on emit_library_call to do it for us. */
1166 assemble_external_libcall (personality);
1167 mem = adjust_address (fc, Pmode, sjlj_fc_personality_ofs);
1168 emit_move_insn (mem, personality);
1169
1170 mem = adjust_address (fc, Pmode, sjlj_fc_lsda_ofs);
1171 if (crtl->uses_eh_lsda)
1172 {
1173 char buf[20];
1174 rtx sym;
1175
1176 ASM_GENERATE_INTERNAL_LABEL (buf, "LLSDA", current_function_funcdef_no);
1177 sym = gen_rtx_SYMBOL_REF (Pmode, ggc_strdup (buf));
1178 SYMBOL_REF_FLAGS (sym) = SYMBOL_FLAG_LOCAL;
1179 emit_move_insn (mem, sym);
1180 }
1181 else
1182 emit_move_insn (mem, const0_rtx);
1183
1184 if (dispatch_label)
1185 {
1186 #ifdef DONT_USE_BUILTIN_SETJMP
1187 rtx x;
1188 x = emit_library_call_value (setjmp_libfunc, NULL_RTX, LCT_RETURNS_TWICE,
1189 TYPE_MODE (integer_type_node), 1,
1190 plus_constant (Pmode, XEXP (fc, 0),
1191 sjlj_fc_jbuf_ofs), Pmode);
1192
1193 emit_cmp_and_jump_insns (x, const0_rtx, NE, 0,
1194 TYPE_MODE (integer_type_node), 0,
1195 dispatch_label, REG_BR_PROB_BASE / 100);
1196 #else
1197 expand_builtin_setjmp_setup (plus_constant (Pmode, XEXP (fc, 0),
1198 sjlj_fc_jbuf_ofs),
1199 dispatch_label);
1200 #endif
1201 }
1202
1203 emit_library_call (unwind_sjlj_register_libfunc, LCT_NORMAL, VOIDmode,
1204 1, XEXP (fc, 0), Pmode);
1205
1206 seq = get_insns ();
1207 end_sequence ();
1208
1209 /* ??? Instead of doing this at the beginning of the function,
1210 do this in a block that is at loop level 0 and dominates all
1211 can_throw_internal instructions. */
1212
1213 fn_begin_outside_block = true;
1214 for (fn_begin = get_insns (); ; fn_begin = NEXT_INSN (fn_begin))
1215 if (NOTE_P (fn_begin))
1216 {
1217 if (NOTE_KIND (fn_begin) == NOTE_INSN_FUNCTION_BEG)
1218 break;
1219 else if (NOTE_INSN_BASIC_BLOCK_P (fn_begin))
1220 fn_begin_outside_block = false;
1221 }
1222
1223 if (fn_begin_outside_block)
1224 insert_insn_on_edge (seq, single_succ_edge (ENTRY_BLOCK_PTR_FOR_FN (cfun)));
1225 else
1226 emit_insn_after (seq, fn_begin);
1227 }
1228
1229 /* Call back from expand_function_end to know where we should put
1230 the call to unwind_sjlj_unregister_libfunc if needed. */
1231
1232 void
1233 sjlj_emit_function_exit_after (rtx_insn *after)
1234 {
1235 crtl->eh.sjlj_exit_after = after;
1236 }
1237
1238 static void
1239 sjlj_emit_function_exit (void)
1240 {
1241 rtx_insn *seq, *insn;
1242
1243 start_sequence ();
1244
1245 emit_library_call (unwind_sjlj_unregister_libfunc, LCT_NORMAL, VOIDmode,
1246 1, XEXP (crtl->eh.sjlj_fc, 0), Pmode);
1247
1248 seq = get_insns ();
1249 end_sequence ();
1250
1251 /* ??? Really this can be done in any block at loop level 0 that
1252 post-dominates all can_throw_internal instructions. This is
1253 the last possible moment. */
1254
1255 insn = crtl->eh.sjlj_exit_after;
1256 if (LABEL_P (insn))
1257 insn = NEXT_INSN (insn);
1258
1259 emit_insn_after (seq, insn);
1260 }
1261
1262 static void
1263 sjlj_emit_dispatch_table (rtx_code_label *dispatch_label, int num_dispatch)
1264 {
1265 machine_mode unwind_word_mode = targetm.unwind_word_mode ();
1266 machine_mode filter_mode = targetm.eh_return_filter_mode ();
1267 eh_landing_pad lp;
1268 rtx mem, fc, exc_ptr_reg, filter_reg;
1269 rtx_insn *seq;
1270 basic_block bb;
1271 eh_region r;
1272 edge e;
1273 int i, disp_index;
1274 vec<tree> dispatch_labels = vNULL;
1275
1276 fc = crtl->eh.sjlj_fc;
1277
1278 start_sequence ();
1279
1280 emit_label (dispatch_label);
1281
1282 #ifndef DONT_USE_BUILTIN_SETJMP
1283 expand_builtin_setjmp_receiver (dispatch_label);
1284
1285 /* The caller of expand_builtin_setjmp_receiver is responsible for
1286 making sure that the label doesn't vanish. The only other caller
1287 is the expander for __builtin_setjmp_receiver, which places this
1288 label on the nonlocal_goto_label list. Since we're modeling these
1289 CFG edges more exactly, we can use the forced_labels list instead. */
1290 LABEL_PRESERVE_P (dispatch_label) = 1;
1291 forced_labels
1292 = gen_rtx_INSN_LIST (VOIDmode, dispatch_label, forced_labels);
1293 #endif
1294
1295 /* Load up exc_ptr and filter values from the function context. */
1296 mem = adjust_address (fc, unwind_word_mode, sjlj_fc_data_ofs);
1297 if (unwind_word_mode != ptr_mode)
1298 {
1299 #ifdef POINTERS_EXTEND_UNSIGNED
1300 mem = convert_memory_address (ptr_mode, mem);
1301 #else
1302 mem = convert_to_mode (ptr_mode, mem, 0);
1303 #endif
1304 }
1305 exc_ptr_reg = force_reg (ptr_mode, mem);
1306
1307 mem = adjust_address (fc, unwind_word_mode,
1308 sjlj_fc_data_ofs + GET_MODE_SIZE (unwind_word_mode));
1309 if (unwind_word_mode != filter_mode)
1310 mem = convert_to_mode (filter_mode, mem, 0);
1311 filter_reg = force_reg (filter_mode, mem);
1312
1313 /* Jump to one of the directly reachable regions. */
1314
1315 disp_index = 0;
1316 rtx_code_label *first_reachable_label = NULL;
1317
1318 /* If there's exactly one call site in the function, don't bother
1319 generating a switch statement. */
1320 if (num_dispatch > 1)
1321 dispatch_labels.create (num_dispatch);
1322
1323 for (i = 1; vec_safe_iterate (cfun->eh->lp_array, i, &lp); ++i)
1324 if (lp && lp->post_landing_pad)
1325 {
1326 rtx_insn *seq2;
1327 rtx_code_label *label;
1328
1329 start_sequence ();
1330
1331 lp->landing_pad = dispatch_label;
1332
1333 if (num_dispatch > 1)
1334 {
1335 tree t_label, case_elt, t;
1336
1337 t_label = create_artificial_label (UNKNOWN_LOCATION);
1338 t = build_int_cst (integer_type_node, disp_index);
1339 case_elt = build_case_label (t, NULL, t_label);
1340 dispatch_labels.quick_push (case_elt);
1341 label = jump_target_rtx (t_label);
1342 }
1343 else
1344 label = gen_label_rtx ();
1345
1346 if (disp_index == 0)
1347 first_reachable_label = label;
1348 emit_label (label);
1349
1350 r = lp->region;
1351 if (r->exc_ptr_reg)
1352 emit_move_insn (r->exc_ptr_reg, exc_ptr_reg);
1353 if (r->filter_reg)
1354 emit_move_insn (r->filter_reg, filter_reg);
1355
1356 seq2 = get_insns ();
1357 end_sequence ();
1358
1359 rtx_insn *before = label_rtx (lp->post_landing_pad);
1360 bb = emit_to_new_bb_before (seq2, before);
1361 e = make_edge (bb, bb->next_bb, EDGE_FALLTHRU);
1362 e->count = bb->count;
1363 e->probability = REG_BR_PROB_BASE;
1364 if (current_loops)
1365 {
1366 struct loop *loop = bb->next_bb->loop_father;
1367 /* If we created a pre-header block, add the new block to the
1368 outer loop, otherwise to the loop itself. */
1369 if (bb->next_bb == loop->header)
1370 add_bb_to_loop (bb, loop_outer (loop));
1371 else
1372 add_bb_to_loop (bb, loop);
1373 /* ??? For multiple dispatches we will end up with edges
1374 from the loop tree root into this loop, making it a
1375 multiple-entry loop. Discard all affected loops. */
1376 if (num_dispatch > 1)
1377 {
1378 for (loop = bb->loop_father;
1379 loop_outer (loop); loop = loop_outer (loop))
1380 mark_loop_for_removal (loop);
1381 }
1382 }
1383
1384 disp_index++;
1385 }
1386 gcc_assert (disp_index == num_dispatch);
1387
1388 if (num_dispatch > 1)
1389 {
1390 rtx disp = adjust_address (fc, TYPE_MODE (integer_type_node),
1391 sjlj_fc_call_site_ofs);
1392 expand_sjlj_dispatch_table (disp, dispatch_labels);
1393 }
1394
1395 seq = get_insns ();
1396 end_sequence ();
1397
1398 bb = emit_to_new_bb_before (seq, first_reachable_label);
1399 if (num_dispatch == 1)
1400 {
1401 e = make_edge (bb, bb->next_bb, EDGE_FALLTHRU);
1402 e->count = bb->count;
1403 e->probability = REG_BR_PROB_BASE;
1404 if (current_loops)
1405 {
1406 struct loop *loop = bb->next_bb->loop_father;
1407 /* If we created a pre-header block, add the new block to the
1408 outer loop, otherwise to the loop itself. */
1409 if (bb->next_bb == loop->header)
1410 add_bb_to_loop (bb, loop_outer (loop));
1411 else
1412 add_bb_to_loop (bb, loop);
1413 }
1414 }
1415 else
1416 {
1417 /* We are not wiring up edges here; since the dispatcher call
1418 is at the beginning of the function, simply associate the block
1419 with the outermost (non-)loop. */
1420 if (current_loops)
1421 add_bb_to_loop (bb, current_loops->tree_root);
1422 }
1423 }
1424
1425 static void
1426 sjlj_build_landing_pads (void)
1427 {
1428 int num_dispatch;
1429
1430 num_dispatch = vec_safe_length (cfun->eh->lp_array);
1431 if (num_dispatch == 0)
1432 return;
1433 sjlj_lp_call_site_index.safe_grow_cleared (num_dispatch);
1434
1435 num_dispatch = sjlj_assign_call_site_values ();
1436 if (num_dispatch > 0)
1437 {
1438 rtx_code_label *dispatch_label = gen_label_rtx ();
1439 int align = STACK_SLOT_ALIGNMENT (sjlj_fc_type_node,
1440 TYPE_MODE (sjlj_fc_type_node),
1441 TYPE_ALIGN (sjlj_fc_type_node));
1442 crtl->eh.sjlj_fc
1443 = assign_stack_local (TYPE_MODE (sjlj_fc_type_node),
1444 int_size_in_bytes (sjlj_fc_type_node),
1445 align);
1446
1447 sjlj_mark_call_sites ();
1448 sjlj_emit_function_enter (dispatch_label);
1449 sjlj_emit_dispatch_table (dispatch_label, num_dispatch);
1450 sjlj_emit_function_exit ();
1451 }
1452
1453 /* If we do not have any landing pads, we may still need to register a
1454 personality routine and (empty) LSDA to handle must-not-throw regions. */
1455 else if (function_needs_eh_personality (cfun) != eh_personality_none)
1456 {
1457 int align = STACK_SLOT_ALIGNMENT (sjlj_fc_type_node,
1458 TYPE_MODE (sjlj_fc_type_node),
1459 TYPE_ALIGN (sjlj_fc_type_node));
1460 crtl->eh.sjlj_fc
1461 = assign_stack_local (TYPE_MODE (sjlj_fc_type_node),
1462 int_size_in_bytes (sjlj_fc_type_node),
1463 align);
1464
1465 sjlj_mark_call_sites ();
1466 sjlj_emit_function_enter (NULL);
1467 sjlj_emit_function_exit ();
1468 }
1469
1470 sjlj_lp_call_site_index.release ();
1471 }
1472
1473 /* Update the sjlj function context. This function should be called
1474 whenever we allocate or deallocate dynamic stack space. */
1475
1476 void
1477 update_sjlj_context (void)
1478 {
1479 if (!flag_exceptions)
1480 return;
1481
1482 emit_note (NOTE_INSN_UPDATE_SJLJ_CONTEXT);
1483 }
1484
1485 /* After initial rtl generation, call back to finish generating
1486 exception support code. */
1487
1488 void
1489 finish_eh_generation (void)
1490 {
1491 basic_block bb;
1492
1493 /* Construct the landing pads. */
1494 if (targetm_common.except_unwind_info (&global_options) == UI_SJLJ)
1495 sjlj_build_landing_pads ();
1496 else
1497 dw2_build_landing_pads ();
1498 break_superblocks ();
1499
1500 if (targetm_common.except_unwind_info (&global_options) == UI_SJLJ
1501 /* Kludge for Alpha (see alpha_gp_save_rtx). */
1502 || single_succ_edge (ENTRY_BLOCK_PTR_FOR_FN (cfun))->insns.r)
1503 commit_edge_insertions ();
1504
1505 /* Redirect all EH edges from the post_landing_pad to the landing pad. */
1506 FOR_EACH_BB_FN (bb, cfun)
1507 {
1508 eh_landing_pad lp;
1509 edge_iterator ei;
1510 edge e;
1511
1512 lp = get_eh_landing_pad_from_rtx (BB_END (bb));
1513
1514 FOR_EACH_EDGE (e, ei, bb->succs)
1515 if (e->flags & EDGE_EH)
1516 break;
1517
1518 /* We should not have generated any new throwing insns during this
1519 pass, and we should not have lost any EH edges, so we only need
1520 to handle two cases here:
1521 (1) reachable handler and an existing edge to post-landing-pad,
1522 (2) no reachable handler and no edge. */
1523 gcc_assert ((lp != NULL) == (e != NULL));
1524 if (lp != NULL)
1525 {
1526 gcc_assert (BB_HEAD (e->dest) == label_rtx (lp->post_landing_pad));
1527
1528 redirect_edge_succ (e, BLOCK_FOR_INSN (lp->landing_pad));
1529 e->flags |= (CALL_P (BB_END (bb))
1530 ? EDGE_ABNORMAL | EDGE_ABNORMAL_CALL
1531 : EDGE_ABNORMAL);
1532 }
1533 }
1534 }
1535 \f
1536 /* This section handles removing dead code for flow. */
1537
1538 void
1539 remove_eh_landing_pad (eh_landing_pad lp)
1540 {
1541 eh_landing_pad *pp;
1542
1543 for (pp = &lp->region->landing_pads; *pp != lp; pp = &(*pp)->next_lp)
1544 continue;
1545 *pp = lp->next_lp;
1546
1547 if (lp->post_landing_pad)
1548 EH_LANDING_PAD_NR (lp->post_landing_pad) = 0;
1549 (*cfun->eh->lp_array)[lp->index] = NULL;
1550 }
1551
1552 /* Splice the EH region at PP from the region tree. */
1553
1554 static void
1555 remove_eh_handler_splicer (eh_region *pp)
1556 {
1557 eh_region region = *pp;
1558 eh_landing_pad lp;
1559
1560 for (lp = region->landing_pads; lp ; lp = lp->next_lp)
1561 {
1562 if (lp->post_landing_pad)
1563 EH_LANDING_PAD_NR (lp->post_landing_pad) = 0;
1564 (*cfun->eh->lp_array)[lp->index] = NULL;
1565 }
1566
1567 if (region->inner)
1568 {
1569 eh_region p, outer;
1570 outer = region->outer;
1571
1572 *pp = p = region->inner;
1573 do
1574 {
1575 p->outer = outer;
1576 pp = &p->next_peer;
1577 p = *pp;
1578 }
1579 while (p);
1580 }
1581 *pp = region->next_peer;
1582
1583 (*cfun->eh->region_array)[region->index] = NULL;
1584 }
1585
1586 /* Splice a single EH region REGION from the region tree.
1587
1588 To unlink REGION, we need to find the pointer to it with a relatively
1589 expensive search in REGION's outer region. If you are going to
1590 remove a number of handlers, using remove_unreachable_eh_regions may
1591 be a better option. */
1592
1593 void
1594 remove_eh_handler (eh_region region)
1595 {
1596 eh_region *pp, *pp_start, p, outer;
1597
1598 outer = region->outer;
1599 if (outer)
1600 pp_start = &outer->inner;
1601 else
1602 pp_start = &cfun->eh->region_tree;
1603 for (pp = pp_start, p = *pp; p != region; pp = &p->next_peer, p = *pp)
1604 continue;
1605
1606 remove_eh_handler_splicer (pp);
1607 }
1608
1609 /* Worker for remove_unreachable_eh_regions.
1610 PP is a pointer to the region to start a region tree depth-first
1611 search from. R_REACHABLE is the set of regions that have to be
1612 preserved. */
1613
1614 static void
1615 remove_unreachable_eh_regions_worker (eh_region *pp, sbitmap r_reachable)
1616 {
1617 while (*pp)
1618 {
1619 eh_region region = *pp;
1620 remove_unreachable_eh_regions_worker (&region->inner, r_reachable);
1621 if (!bitmap_bit_p (r_reachable, region->index))
1622 remove_eh_handler_splicer (pp);
1623 else
1624 pp = &region->next_peer;
1625 }
1626 }
1627
1628 /* Splice all EH regions *not* marked in R_REACHABLE from the region tree.
1629 Do this by traversing the EH tree top-down and splice out regions that
1630 are not marked. By removing regions from the leaves, we avoid costly
1631 searches in the region tree. */
1632
1633 void
1634 remove_unreachable_eh_regions (sbitmap r_reachable)
1635 {
1636 remove_unreachable_eh_regions_worker (&cfun->eh->region_tree, r_reachable);
1637 }
1638
1639 /* Invokes CALLBACK for every exception handler landing pad label.
1640 Only used by reload hackery; should not be used by new code. */
1641
1642 void
1643 for_each_eh_label (void (*callback) (rtx))
1644 {
1645 eh_landing_pad lp;
1646 int i;
1647
1648 for (i = 1; vec_safe_iterate (cfun->eh->lp_array, i, &lp); ++i)
1649 {
1650 if (lp)
1651 {
1652 rtx_code_label *lab = lp->landing_pad;
1653 if (lab && LABEL_P (lab))
1654 (*callback) (lab);
1655 }
1656 }
1657 }
1658 \f
1659 /* Create the REG_EH_REGION note for INSN, given its ECF_FLAGS for a
1660 call insn.
1661
1662 At the gimple level, we use LP_NR
1663 > 0 : The statement transfers to landing pad LP_NR
1664 = 0 : The statement is outside any EH region
1665 < 0 : The statement is within MUST_NOT_THROW region -LP_NR.
1666
1667 At the rtl level, we use LP_NR
1668 > 0 : The insn transfers to landing pad LP_NR
1669 = 0 : The insn cannot throw
1670 < 0 : The insn is within MUST_NOT_THROW region -LP_NR
1671 = INT_MIN : The insn cannot throw or execute a nonlocal-goto.
1672 missing note: The insn is outside any EH region.
1673
1674 ??? This difference probably ought to be avoided. We could stand
1675 to record nothrow for arbitrary gimple statements, and so avoid
1676 some moderately complex lookups in stmt_could_throw_p. Perhaps
1677 NOTHROW should be mapped on both sides to INT_MIN. Perhaps the
1678 no-nonlocal-goto property should be recorded elsewhere as a bit
1679 on the call_insn directly. Perhaps we should make more use of
1680 attaching the trees to call_insns (reachable via symbol_ref in
1681 direct call cases) and just pull the data out of the trees. */
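/* For example (illustrative): a call that can reach landing pad 3 gets
   a REG_EH_REGION note of 3; a call inside MUST_NOT_THROW region 2
   gets -2; a nothrow call gets 0; and INT_MIN marks a call that can
   neither throw nor perform a nonlocal goto.  */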
1682
1683 void
1684 make_reg_eh_region_note (rtx_insn *insn, int ecf_flags, int lp_nr)
1685 {
1686 rtx value;
1687 if (ecf_flags & ECF_NOTHROW)
1688 value = const0_rtx;
1689 else if (lp_nr != 0)
1690 value = GEN_INT (lp_nr);
1691 else
1692 return;
1693 add_reg_note (insn, REG_EH_REGION, value);
1694 }
1695
1696 /* Create a REG_EH_REGION note for a CALL_INSN that cannot throw
1697 nor perform a non-local goto. Replace the region note if it
1698 already exists. */
1699
1700 void
1701 make_reg_eh_region_note_nothrow_nononlocal (rtx_insn *insn)
1702 {
1703 rtx note = find_reg_note (insn, REG_EH_REGION, NULL_RTX);
1704 rtx intmin = GEN_INT (INT_MIN);
1705
1706 if (note != 0)
1707 XEXP (note, 0) = intmin;
1708 else
1709 add_reg_note (insn, REG_EH_REGION, intmin);
1710 }
1711
1712 /* Return true if INSN could throw, assuming no REG_EH_REGION note
1713 to the contrary. */
1714
1715 bool
1716 insn_could_throw_p (const_rtx insn)
1717 {
1718 if (!flag_exceptions)
1719 return false;
1720 if (CALL_P (insn))
1721 return true;
1722 if (INSN_P (insn) && cfun->can_throw_non_call_exceptions)
1723 return may_trap_p (PATTERN (insn));
1724 return false;
1725 }
1726
1727 /* Copy a REG_EH_REGION note to each insn that might throw, beginning
1728 at FIRST and ending at LAST. NOTE_OR_INSN is either the source insn
1729 to look for a note, or the note itself. */
1730
1731 void
1732 copy_reg_eh_region_note_forward (rtx note_or_insn, rtx_insn *first, rtx last)
1733 {
1734 rtx_insn *insn;
1735 rtx note = note_or_insn;
1736
1737 if (INSN_P (note_or_insn))
1738 {
1739 note = find_reg_note (note_or_insn, REG_EH_REGION, NULL_RTX);
1740 if (note == NULL)
1741 return;
1742 }
1743 note = XEXP (note, 0);
1744
1745 for (insn = first; insn != last ; insn = NEXT_INSN (insn))
1746 if (!find_reg_note (insn, REG_EH_REGION, NULL_RTX)
1747 && insn_could_throw_p (insn))
1748 add_reg_note (insn, REG_EH_REGION, note);
1749 }
1750
1751 /* Likewise, but iterate backward. */
1752
1753 void
1754 copy_reg_eh_region_note_backward (rtx note_or_insn, rtx_insn *last, rtx first)
1755 {
1756 rtx_insn *insn;
1757 rtx note = note_or_insn;
1758
1759 if (INSN_P (note_or_insn))
1760 {
1761 note = find_reg_note (note_or_insn, REG_EH_REGION, NULL_RTX);
1762 if (note == NULL)
1763 return;
1764 }
1765 note = XEXP (note, 0);
1766
1767 for (insn = last; insn != first; insn = PREV_INSN (insn))
1768 if (insn_could_throw_p (insn))
1769 add_reg_note (insn, REG_EH_REGION, note);
1770 }
1771
1772
1773 /* Extract all EH information from INSN. Return true if the insn
1774 was marked NOTHROW. */
1775
1776 static bool
1777 get_eh_region_and_lp_from_rtx (const_rtx insn, eh_region *pr,
1778 eh_landing_pad *plp)
1779 {
1780 eh_landing_pad lp = NULL;
1781 eh_region r = NULL;
1782 bool ret = false;
1783 rtx note;
1784 int lp_nr;
1785
1786 if (! INSN_P (insn))
1787 goto egress;
1788
1789 if (NONJUMP_INSN_P (insn)
1790 && GET_CODE (PATTERN (insn)) == SEQUENCE)
1791 insn = XVECEXP (PATTERN (insn), 0, 0);
1792
1793 note = find_reg_note (insn, REG_EH_REGION, NULL_RTX);
1794 if (!note)
1795 {
1796 ret = !insn_could_throw_p (insn);
1797 goto egress;
1798 }
1799
1800 lp_nr = INTVAL (XEXP (note, 0));
1801 if (lp_nr == 0 || lp_nr == INT_MIN)
1802 {
1803 ret = true;
1804 goto egress;
1805 }
1806
1807 if (lp_nr < 0)
1808 r = (*cfun->eh->region_array)[-lp_nr];
1809 else
1810 {
1811 lp = (*cfun->eh->lp_array)[lp_nr];
1812 r = lp->region;
1813 }
1814
1815 egress:
1816 *plp = lp;
1817 *pr = r;
1818 return ret;
1819 }
1820
1821 /* Return the landing pad to which INSN may go, or NULL if it does not
1822 have a reachable landing pad within this function. */
1823
1824 eh_landing_pad
1825 get_eh_landing_pad_from_rtx (const_rtx insn)
1826 {
1827 eh_landing_pad lp;
1828 eh_region r;
1829
1830 get_eh_region_and_lp_from_rtx (insn, &r, &lp);
1831 return lp;
1832 }
1833
1834 /* Return the region to which INSN may go, or NULL if it does not
1835 have a reachable region within this function. */
1836
1837 eh_region
1838 get_eh_region_from_rtx (const_rtx insn)
1839 {
1840 eh_landing_pad lp;
1841 eh_region r;
1842
1843 get_eh_region_and_lp_from_rtx (insn, &r, &lp);
1844 return r;
1845 }
1846
1847 /* Return true if INSN throws and is caught by something in this function. */
1848
1849 bool
1850 can_throw_internal (const_rtx insn)
1851 {
1852 return get_eh_landing_pad_from_rtx (insn) != NULL;
1853 }
1854
1855 /* Return true if INSN throws and escapes from the current function. */
1856
1857 bool
1858 can_throw_external (const_rtx insn)
1859 {
1860 eh_landing_pad lp;
1861 eh_region r;
1862 bool nothrow;
1863
1864 if (! INSN_P (insn))
1865 return false;
1866
1867 if (NONJUMP_INSN_P (insn)
1868 && GET_CODE (PATTERN (insn)) == SEQUENCE)
1869 {
1870 rtx_sequence *seq = as_a <rtx_sequence *> (PATTERN (insn));
1871 int i, n = seq->len ();
1872
1873 for (i = 0; i < n; i++)
1874 if (can_throw_external (seq->element (i)))
1875 return true;
1876
1877 return false;
1878 }
1879
1880 nothrow = get_eh_region_and_lp_from_rtx (insn, &r, &lp);
1881
1882 /* If we can't throw, we obviously can't throw external. */
1883 if (nothrow)
1884 return false;
1885
1886 /* If we have an internal landing pad, then we're not external. */
1887 if (lp != NULL)
1888 return false;
1889
1890 /* If we're not within an EH region, then we are external. */
1891 if (r == NULL)
1892 return true;
1893
1894 /* The only thing that ought to be left is MUST_NOT_THROW regions,
1895 which don't always have landing pads. */
1896 gcc_assert (r->type == ERT_MUST_NOT_THROW);
1897 return false;
1898 }
1899
1900 /* Return true if INSN cannot throw at all. */
1901
1902 bool
1903 insn_nothrow_p (const_rtx insn)
1904 {
1905 eh_landing_pad lp;
1906 eh_region r;
1907
1908 if (! INSN_P (insn))
1909 return true;
1910
1911 if (NONJUMP_INSN_P (insn)
1912 && GET_CODE (PATTERN (insn)) == SEQUENCE)
1913 {
1914 rtx_sequence *seq = as_a <rtx_sequence *> (PATTERN (insn));
1915 int i, n = seq->len ();
1916
1917 for (i = 0; i < n; i++)
1918 if (!insn_nothrow_p (seq->element (i)))
1919 return false;
1920
1921 return true;
1922 }
1923
1924 return get_eh_region_and_lp_from_rtx (insn, &r, &lp);
1925 }
1926
1927 /* Return true if INSN can perform a non-local goto. */
1928 /* ??? This test is here in this file because it (ab)uses REG_EH_REGION. */
1929
1930 bool
1931 can_nonlocal_goto (const rtx_insn *insn)
1932 {
1933 if (nonlocal_goto_handler_labels && CALL_P (insn))
1934 {
1935 rtx note = find_reg_note (insn, REG_EH_REGION, NULL_RTX);
1936 if (!note || INTVAL (XEXP (note, 0)) != INT_MIN)
1937 return true;
1938 }
1939 return false;
1940 }
1941 \f
1942 /* Set TREE_NOTHROW and crtl->all_throwers_are_sibcalls. */
1943
1944 static unsigned int
1945 set_nothrow_function_flags (void)
1946 {
1947 rtx_insn *insn;
1948
1949 crtl->nothrow = 1;
1950
1951 /* Assume crtl->all_throwers_are_sibcalls until we encounter
1952 something that can throw an exception. We specifically exempt
1953 CALL_INSNs that are SIBLING_CALL_P, as these are really jumps,
1954 and can't throw. Most CALL_INSNs are not SIBLING_CALL_P, so this
1955 is optimistic. */
1956
1957 crtl->all_throwers_are_sibcalls = 1;
1958
1959 /* If we don't know that this implementation of the function will
1960 actually be used, then we must not set TREE_NOTHROW, since
1961 callers must not assume that this function does not throw. */
1962 if (TREE_NOTHROW (current_function_decl))
1963 return 0;
1964
1965 if (! flag_exceptions)
1966 return 0;
1967
1968 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
1969 if (can_throw_external (insn))
1970 {
1971 crtl->nothrow = 0;
1972
1973 if (!CALL_P (insn) || !SIBLING_CALL_P (insn))
1974 {
1975 crtl->all_throwers_are_sibcalls = 0;
1976 return 0;
1977 }
1978 }
1979
1980 if (crtl->nothrow
1981 && (cgraph_node::get (current_function_decl)->get_availability ()
1982 >= AVAIL_AVAILABLE))
1983 {
1984 struct cgraph_node *node = cgraph_node::get (current_function_decl);
1985 struct cgraph_edge *e;
1986 for (e = node->callers; e; e = e->next_caller)
1987 e->can_throw_external = false;
1988 node->set_nothrow_flag (true);
1989
1990 if (dump_file)
1991 fprintf (dump_file, "Marking function nothrow: %s\n\n",
1992 current_function_name ());
1993 }
1994 return 0;
1995 }
1996
1997 namespace {
1998
1999 const pass_data pass_data_set_nothrow_function_flags =
2000 {
2001 RTL_PASS, /* type */
2002 "nothrow", /* name */
2003 OPTGROUP_NONE, /* optinfo_flags */
2004 TV_NONE, /* tv_id */
2005 0, /* properties_required */
2006 0, /* properties_provided */
2007 0, /* properties_destroyed */
2008 0, /* todo_flags_start */
2009 0, /* todo_flags_finish */
2010 };
2011
2012 class pass_set_nothrow_function_flags : public rtl_opt_pass
2013 {
2014 public:
2015 pass_set_nothrow_function_flags (gcc::context *ctxt)
2016 : rtl_opt_pass (pass_data_set_nothrow_function_flags, ctxt)
2017 {}
2018
2019 /* opt_pass methods: */
2020 virtual unsigned int execute (function *)
2021 {
2022 return set_nothrow_function_flags ();
2023 }
2024
2025 }; // class pass_set_nothrow_function_flags
2026
2027 } // anon namespace
2028
2029 rtl_opt_pass *
2030 make_pass_set_nothrow_function_flags (gcc::context *ctxt)
2031 {
2032 return new pass_set_nothrow_function_flags (ctxt);
2033 }
2034
2035 \f
2036 /* Various hooks for unwind library. */
2037
2038 /* Expand the EH support builtin functions:
2039 __builtin_eh_pointer and __builtin_eh_filter. */
2040
2041 static eh_region
2042 expand_builtin_eh_common (tree region_nr_t)
2043 {
2044 HOST_WIDE_INT region_nr;
2045 eh_region region;
2046
2047 gcc_assert (tree_fits_shwi_p (region_nr_t));
2048 region_nr = tree_to_shwi (region_nr_t);
2049
2050 region = (*cfun->eh->region_array)[region_nr];
2051
2052 /* ??? We shouldn't have been able to delete an eh region without
2053 deleting all the code that depended on it. */
2054 gcc_assert (region != NULL);
2055
2056 return region;
2057 }
2058
2059 /* Expand to the exc_ptr value from the given eh region. */
2060
2061 rtx
2062 expand_builtin_eh_pointer (tree exp)
2063 {
2064 eh_region region
2065 = expand_builtin_eh_common (CALL_EXPR_ARG (exp, 0));
2066 if (region->exc_ptr_reg == NULL)
2067 region->exc_ptr_reg = gen_reg_rtx (ptr_mode);
2068 return region->exc_ptr_reg;
2069 }
2070
2071 /* Expand to the filter value from the given eh region. */
2072
2073 rtx
2074 expand_builtin_eh_filter (tree exp)
2075 {
2076 eh_region region
2077 = expand_builtin_eh_common (CALL_EXPR_ARG (exp, 0));
2078 if (region->filter_reg == NULL)
2079 region->filter_reg = gen_reg_rtx (targetm.eh_return_filter_mode ());
2080 return region->filter_reg;
2081 }
2082
2083 /* Copy the exc_ptr and filter values from one landing pad's registers
2084 to another. This is used to inline the resx statement. */
2085
2086 rtx
2087 expand_builtin_eh_copy_values (tree exp)
2088 {
2089 eh_region dst
2090 = expand_builtin_eh_common (CALL_EXPR_ARG (exp, 0));
2091 eh_region src
2092 = expand_builtin_eh_common (CALL_EXPR_ARG (exp, 1));
2093 machine_mode fmode = targetm.eh_return_filter_mode ();
2094
2095 if (dst->exc_ptr_reg == NULL)
2096 dst->exc_ptr_reg = gen_reg_rtx (ptr_mode);
2097 if (src->exc_ptr_reg == NULL)
2098 src->exc_ptr_reg = gen_reg_rtx (ptr_mode);
2099
2100 if (dst->filter_reg == NULL)
2101 dst->filter_reg = gen_reg_rtx (fmode);
2102 if (src->filter_reg == NULL)
2103 src->filter_reg = gen_reg_rtx (fmode);
2104
2105 emit_move_insn (dst->exc_ptr_reg, src->exc_ptr_reg);
2106 emit_move_insn (dst->filter_reg, src->filter_reg);
2107
2108 return const0_rtx;
2109 }
2110
2111 /* Do any necessary initialization to access arbitrary stack frames.
2112 On the SPARC, this means flushing the register windows. */
2113
2114 void
2115 expand_builtin_unwind_init (void)
2116 {
2117 /* Set this so all the registers get saved in our frame; we need to be
2118 able to copy the saved values for any registers from frames we unwind. */
2119 crtl->saves_all_registers = 1;
2120
2121 SETUP_FRAME_ADDRESSES ();
2122 }
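/* For example, user code reaches the expander above through the
   __builtin_unwind_init builtin.  A sketch only; the surrounding
   function is hypothetical.  */
#if 0
void
capture_all_registers_example (void)
{
  /* Force every call-saved register to be saved in this frame so a
     later stack walk can retrieve their values.  */
  __builtin_unwind_init ();
  /* ... code that unwinds or inspects outer frames ... */
}
#endif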
2123
2124 /* Map a non-negative number to an eh return data register number; expands
2125 to -1 if no return data register is associated with the input number.
2126 At least the inputs 0 and 1 must be mapped; the target may provide more. */
2127
2128 rtx
2129 expand_builtin_eh_return_data_regno (tree exp)
2130 {
2131 tree which = CALL_EXPR_ARG (exp, 0);
2132 unsigned HOST_WIDE_INT iwhich;
2133
2134 if (TREE_CODE (which) != INTEGER_CST)
2135 {
2136 error ("argument of %<__builtin_eh_return_regno%> must be constant");
2137 return constm1_rtx;
2138 }
2139
2140 iwhich = tree_to_uhwi (which);
2141 iwhich = EH_RETURN_DATA_REGNO (iwhich);
2142 if (iwhich == INVALID_REGNUM)
2143 return constm1_rtx;
2144
2145 #ifdef DWARF_FRAME_REGNUM
2146 iwhich = DWARF_FRAME_REGNUM (iwhich);
2147 #else
2148 iwhich = DBX_REGISTER_NUMBER (iwhich);
2149 #endif
2150
2151 return GEN_INT (iwhich);
2152 }
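/* For example, a language personality routine uses the corresponding
   builtin to learn where the unwinder must deposit the exception
   pointer and filter value.  A sketch in the style of such a routine;
   only the _Unwind_* calls and the builtin are real interfaces here.  */
#if 0
#include <unwind.h>

static void
install_handler_data_example (struct _Unwind_Context *context,
			      struct _Unwind_Exception *ue_header,
			      int filter, _Unwind_Ptr landing_pad)
{
  _Unwind_SetGR (context, __builtin_eh_return_data_regno (0),
		 (_Unwind_Ptr) ue_header);
  _Unwind_SetGR (context, __builtin_eh_return_data_regno (1), filter);
  _Unwind_SetIP (context, landing_pad);
}
#endif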
2153
2154 /* Given a value extracted from the return address register or stack slot,
2155 return the actual address encoded in that value. */
2156
2157 rtx
2158 expand_builtin_extract_return_addr (tree addr_tree)
2159 {
2160 rtx addr = expand_expr (addr_tree, NULL_RTX, Pmode, EXPAND_NORMAL);
2161
2162 if (GET_MODE (addr) != Pmode
2163 && GET_MODE (addr) != VOIDmode)
2164 {
2165 #ifdef POINTERS_EXTEND_UNSIGNED
2166 addr = convert_memory_address (Pmode, addr);
2167 #else
2168 addr = convert_to_mode (Pmode, addr, 0);
2169 #endif
2170 }
2171
2172 /* First mask out any unwanted bits. */
2173 rtx mask = MASK_RETURN_ADDR;
2174 if (mask)
2175 expand_and (Pmode, addr, mask, addr);
2176
2177 /* Then adjust to find the real return address. */
2178 if (RETURN_ADDR_OFFSET)
2179 addr = plus_constant (Pmode, addr, RETURN_ADDR_OFFSET);
2180
2181 return addr;
2182 }
2183
2184 /* Given an actual address in addr_tree, do any necessary encoding
2185 and return the value to be stored in the return address register or
2186 stack slot so the epilogue will return to that address. */
2187
2188 rtx
2189 expand_builtin_frob_return_addr (tree addr_tree)
2190 {
2191 rtx addr = expand_expr (addr_tree, NULL_RTX, ptr_mode, EXPAND_NORMAL);
2192
2193 addr = convert_memory_address (Pmode, addr);
2194
2195 if (RETURN_ADDR_OFFSET)
2196 {
2197 addr = force_reg (Pmode, addr);
2198 addr = plus_constant (Pmode, addr, -RETURN_ADDR_OFFSET);
2199 }
2200
2201 return addr;
2202 }
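/* The two expanders above implement a builtin pair that must
   round-trip.  For example (a sketch, not compiled):  */
#if 0
void *
canonical_return_address_example (void)
{
  /* Decode the raw value found in the return address register or
     slot; __builtin_frob_return_addr would perform the inverse
     encoding.  */
  return __builtin_extract_return_addr (__builtin_return_address (0));
}
#endif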
2203
2204 /* Set up the epilogue with the magic bits we'll need to return to the
2205 exception handler. */
2206
2207 void
2208 expand_builtin_eh_return (tree stackadj_tree ATTRIBUTE_UNUSED,
2209 tree handler_tree)
2210 {
2211 rtx tmp;
2212
2213 #ifdef EH_RETURN_STACKADJ_RTX
2214 tmp = expand_expr (stackadj_tree, crtl->eh.ehr_stackadj,
2215 VOIDmode, EXPAND_NORMAL);
2216 tmp = convert_memory_address (Pmode, tmp);
2217 if (!crtl->eh.ehr_stackadj)
2218 crtl->eh.ehr_stackadj = copy_addr_to_reg (tmp);
2219 else if (tmp != crtl->eh.ehr_stackadj)
2220 emit_move_insn (crtl->eh.ehr_stackadj, tmp);
2221 #endif
2222
2223 tmp = expand_expr (handler_tree, crtl->eh.ehr_handler,
2224 VOIDmode, EXPAND_NORMAL);
2225 tmp = convert_memory_address (Pmode, tmp);
2226 if (!crtl->eh.ehr_handler)
2227 crtl->eh.ehr_handler = copy_addr_to_reg (tmp);
2228 else if (tmp != crtl->eh.ehr_handler)
2229 emit_move_insn (crtl->eh.ehr_handler, tmp);
2230
2231 if (!crtl->eh.ehr_label)
2232 crtl->eh.ehr_label = gen_label_rtx ();
2233 emit_jump (crtl->eh.ehr_label);
2234 }
2235
2236 /* Expand __builtin_eh_return. This exit path from the function loads up
2237 the eh return data registers, adjusts the stack, and branches to a
2238 given PC other than the normal return address. */
2239
2240 void
2241 expand_eh_return (void)
2242 {
2243 rtx_code_label *around_label;
2244
2245 if (! crtl->eh.ehr_label)
2246 return;
2247
2248 crtl->calls_eh_return = 1;
2249
2250 #ifdef EH_RETURN_STACKADJ_RTX
2251 emit_move_insn (EH_RETURN_STACKADJ_RTX, const0_rtx);
2252 #endif
2253
2254 around_label = gen_label_rtx ();
2255 emit_jump (around_label);
2256
2257 emit_label (crtl->eh.ehr_label);
2258 clobber_return_register ();
2259
2260 #ifdef EH_RETURN_STACKADJ_RTX
2261 emit_move_insn (EH_RETURN_STACKADJ_RTX, crtl->eh.ehr_stackadj);
2262 #endif
2263
2264 if (targetm.have_eh_return ())
2265 emit_insn (targetm.gen_eh_return (crtl->eh.ehr_handler));
2266 else
2267 {
2268 #ifdef EH_RETURN_HANDLER_RTX
2269 emit_move_insn (EH_RETURN_HANDLER_RTX, crtl->eh.ehr_handler);
2270 #else
2271 error ("__builtin_eh_return not supported on this target");
2272 #endif
2273 }
2274
2275 emit_label (around_label);
2276 }
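/* For example, the unwinder ends its work with the builtin that the
   two functions above implement, roughly as libgcc does when
   installing a handler context.  A sketch only; OFFSET and HANDLER
   are hypothetical parameters.  */
#if 0
static void
install_context_example (long offset, void *handler)
{
  /* The epilogue of this function will apply OFFSET to the stack
     pointer and return to HANDLER instead of the normal return
     address.  */
  __builtin_eh_return (offset, handler);
}
#endif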
2277
2278 /* Convert a ptr_mode address ADDR_TREE to a Pmode address controlled by
2279 POINTERS_EXTEND_UNSIGNED and return it. */
2280
2281 rtx
2282 expand_builtin_extend_pointer (tree addr_tree)
2283 {
2284 rtx addr = expand_expr (addr_tree, NULL_RTX, ptr_mode, EXPAND_NORMAL);
2285 int extend;
2286
2287 #ifdef POINTERS_EXTEND_UNSIGNED
2288 extend = POINTERS_EXTEND_UNSIGNED;
2289 #else
2290 /* The previous EH code did an unsigned extend by default, so we do
2291 the same for consistency. */
2292 extend = 1;
2293 #endif
2294
2295 return convert_modes (targetm.unwind_word_mode (), ptr_mode, addr, extend);
2296 }
2297 \f
2298 static int
2299 add_action_record (action_hash_type *ar_hash, int filter, int next)
2300 {
2301 struct action_record **slot, *new_ar, tmp;
2302
2303 tmp.filter = filter;
2304 tmp.next = next;
2305 slot = ar_hash->find_slot (&tmp, INSERT);
2306
2307 if ((new_ar = *slot) == NULL)
2308 {
2309 new_ar = XNEW (struct action_record);
2310 new_ar->offset = crtl->eh.action_record_data->length () + 1;
2311 new_ar->filter = filter;
2312 new_ar->next = next;
2313 *slot = new_ar;
2314
2315 /* The filter value goes in untouched. The link to the next
2316 record is a "self-relative" byte offset, or zero to indicate
2317 that there is no next record. So convert the absolute 1-based
2318 indices we've been carrying around into a displacement. */
2319
2320 push_sleb128 (&crtl->eh.action_record_data, filter);
2321 if (next)
2322 next -= crtl->eh.action_record_data->length () + 1;
2323 push_sleb128 (&crtl->eh.action_record_data, next);
2324 }
2325
2326 return new_ar->offset;
2327 }
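/* A worked example of the encoding above, starting from an empty
   table: add_action_record (h, 1, 0) emits the bytes { 0x01, 0x00 }
   and returns offset 1; a following add_action_record (h, 2, 1)
   emits { 0x02, 0x7d } (sleb128 for filter 2 and displacement -3)
   and returns offset 3.  A consumer positioned on that displacement
   byte, at offset 4, computes 4 + (-3) = 1 and so reaches the first
   record.  */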
2328
2329 static int
2330 collect_one_action_chain (action_hash_type *ar_hash, eh_region region)
2331 {
2332 int next;
2333
2334 /* If we've reached the top of the region chain, then we have
2335 no actions, and require no landing pad. */
2336 if (region == NULL)
2337 return -1;
2338
2339 switch (region->type)
2340 {
2341 case ERT_CLEANUP:
2342 {
2343 eh_region r;
2344 /* A cleanup adds a zero filter to the beginning of the chain, but
2345 there are special cases to look out for. If there are *only*
2346 cleanups along a path, then it compresses to a zero action.
2347 Further, if there are multiple cleanups along a path, we only
2348 need to represent one of them, as that is enough to trigger
2349 entry to the landing pad at runtime. */
2350 next = collect_one_action_chain (ar_hash, region->outer);
2351 if (next <= 0)
2352 return 0;
2353 for (r = region->outer; r ; r = r->outer)
2354 if (r->type == ERT_CLEANUP)
2355 return next;
2356 return add_action_record (ar_hash, 0, next);
2357 }
2358
2359 case ERT_TRY:
2360 {
2361 eh_catch c;
2362
2363 /* Process the associated catch regions in reverse order.
2364 If there's a catch-all handler, then we don't need to
2365 search outer regions. Use a magic -3 value to record
2366 that we haven't done the outer search. */
2367 next = -3;
2368 for (c = region->u.eh_try.last_catch; c ; c = c->prev_catch)
2369 {
2370 if (c->type_list == NULL)
2371 {
2372 /* Retrieve the filter from the head of the filter list
2373 where we have stored it (see assign_filter_values). */
2374 int filter = TREE_INT_CST_LOW (TREE_VALUE (c->filter_list));
2375 next = add_action_record (ar_hash, filter, 0);
2376 }
2377 else
2378 {
2379 /* Once the outer search is done, trigger an action record for
2380 each filter we have. */
2381 tree flt_node;
2382
2383 if (next == -3)
2384 {
2385 next = collect_one_action_chain (ar_hash, region->outer);
2386
2387 /* If there is no next action, terminate the chain. */
2388 if (next == -1)
2389 next = 0;
2390 /* If all outer actions are cleanups or must_not_throw,
2391 we'll have no action record for it, since we had wanted
2392 to encode these states in the call-site record directly.
2393 Add a cleanup action to the chain to catch these. */
2394 else if (next <= 0)
2395 next = add_action_record (ar_hash, 0, 0);
2396 }
2397
2398 flt_node = c->filter_list;
2399 for (; flt_node; flt_node = TREE_CHAIN (flt_node))
2400 {
2401 int filter = TREE_INT_CST_LOW (TREE_VALUE (flt_node));
2402 next = add_action_record (ar_hash, filter, next);
2403 }
2404 }
2405 }
2406 return next;
2407 }
2408
2409 case ERT_ALLOWED_EXCEPTIONS:
2410 /* An exception specification adds its filter to the
2411 beginning of the chain. */
2412 next = collect_one_action_chain (ar_hash, region->outer);
2413
2414 /* If there is no next action, terminate the chain. */
2415 if (next == -1)
2416 next = 0;
2417 /* If all outer actions are cleanups or must_not_throw,
2418 we'll have no action record for it, since we had wanted
2419 to encode these states in the call-site record directly.
2420 Add a cleanup action to the chain to catch these. */
2421 else if (next <= 0)
2422 next = add_action_record (ar_hash, 0, 0);
2423
2424 return add_action_record (ar_hash, region->u.allowed.filter, next);
2425
2426 case ERT_MUST_NOT_THROW:
2427 /* A must-not-throw region with no inner handlers or cleanups
2428 requires no call-site entry. Note that this differs from
2429 the no handler or cleanup case in that we do require an lsda
2430 to be generated. Return a magic -2 value to record this. */
2431 return -2;
2432 }
2433
2434 gcc_unreachable ();
2435 }
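/* Example: for an insn inside a cleanup that is itself inside a TRY
   whose single catch was assigned filter 1, with nothing outer, the
   recursion above produces add_action_record (h, 1, 0) for the catch
   (offset 1) and then add_action_record (h, 0, 1) for the cleanup
   (offset 3): a zero-filter cleanup record whose next link leads to
   the filter-1 record.  */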
2436
2437 static int
2438 add_call_site (rtx landing_pad, int action, int section)
2439 {
2440 call_site_record record;
2441
2442 record = ggc_alloc<call_site_record_d> ();
2443 record->landing_pad = landing_pad;
2444 record->action = action;
2445
2446 vec_safe_push (crtl->eh.call_site_record_v[section], record);
2447
2448 return call_site_base + crtl->eh.call_site_record_v[section]->length () - 1;
2449 }
2450
2451 static rtx_note *
2452 emit_note_eh_region_end (rtx_insn *insn)
2453 {
2454 rtx_insn *next = NEXT_INSN (insn);
2455
2456 /* Make sure we do not split a call and its corresponding
2457 CALL_ARG_LOCATION note. */
2458 if (next && NOTE_P (next)
2459 && NOTE_KIND (next) == NOTE_INSN_CALL_ARG_LOCATION)
2460 insn = next;
2461
2462 return emit_note_after (NOTE_INSN_EH_REGION_END, insn);
2463 }
2464
2465 /* Turn REG_EH_REGION notes back into NOTE_INSN_EH_REGION notes.
2466 The new note numbers will not refer to region numbers, but
2467 instead to call site entries. */
2468
2469 static unsigned int
2470 convert_to_eh_region_ranges (void)
2471 {
2472 rtx insn;
2473 rtx_insn *iter;
2474 rtx_note *note;
2475 action_hash_type ar_hash (31);
2476 int last_action = -3;
2477 rtx_insn *last_action_insn = NULL;
2478 rtx last_landing_pad = NULL_RTX;
2479 rtx_insn *first_no_action_insn = NULL;
2480 int call_site = 0;
2481 int cur_sec = 0;
2482 rtx_insn *section_switch_note = NULL;
2483 rtx_insn *first_no_action_insn_before_switch = NULL;
2484 rtx_insn *last_no_action_insn_before_switch = NULL;
2485 int saved_call_site_base = call_site_base;
2486
2487 vec_alloc (crtl->eh.action_record_data, 64);
2488
2489 for (iter = get_insns (); iter ; iter = NEXT_INSN (iter))
2490 if (INSN_P (iter))
2491 {
2492 eh_landing_pad lp;
2493 eh_region region;
2494 bool nothrow;
2495 int this_action;
2496 rtx_code_label *this_landing_pad;
2497
2498 insn = iter;
2499 if (NONJUMP_INSN_P (insn)
2500 && GET_CODE (PATTERN (insn)) == SEQUENCE)
2501 insn = XVECEXP (PATTERN (insn), 0, 0);
2502
2503 nothrow = get_eh_region_and_lp_from_rtx (insn, &region, &lp);
2504 if (nothrow)
2505 continue;
2506 if (region)
2507 this_action = collect_one_action_chain (&ar_hash, region);
2508 else
2509 this_action = -1;
2510
2511 /* The existence of catch handlers or must-not-throw regions
2512 implies that an lsda is needed (even if empty). */
2513 if (this_action != -1)
2514 crtl->uses_eh_lsda = 1;
2515
2516 /* Delay creation of region notes for no-action regions
2517 until we're sure that an lsda will be required. */
2518 else if (last_action == -3)
2519 {
2520 first_no_action_insn = iter;
2521 last_action = -1;
2522 }
2523
2524 if (this_action >= 0)
2525 this_landing_pad = lp->landing_pad;
2526 else
2527 this_landing_pad = NULL;
2528
2529 /* Differing actions or landing pads implies a change in call-site
2530 info, which implies some EH_REGION note should be emitted. */
2531 if (last_action != this_action
2532 || last_landing_pad != this_landing_pad)
2533 {
2534 /* If there is a queued no-action region in the other section
2535 with hot/cold partitioning, emit it now. */
2536 if (first_no_action_insn_before_switch)
2537 {
2538 gcc_assert (this_action != -1
2539 && last_action == (first_no_action_insn
2540 ? -1 : -3));
2541 call_site = add_call_site (NULL_RTX, 0, 0);
2542 note = emit_note_before (NOTE_INSN_EH_REGION_BEG,
2543 first_no_action_insn_before_switch);
2544 NOTE_EH_HANDLER (note) = call_site;
2545 note
2546 = emit_note_eh_region_end (last_no_action_insn_before_switch);
2547 NOTE_EH_HANDLER (note) = call_site;
2548 gcc_assert (last_action != -3
2549 || (last_action_insn
2550 == last_no_action_insn_before_switch));
2551 first_no_action_insn_before_switch = NULL;
2552 last_no_action_insn_before_switch = NULL;
2553 call_site_base++;
2554 }
2555 /* If we'd not seen a previous action (-3) or the previous
2556 action was must-not-throw (-2), then we do not need an
2557 end note. */
2558 if (last_action >= -1)
2559 {
2560 /* If we delayed the creation of the begin, do it now. */
2561 if (first_no_action_insn)
2562 {
2563 call_site = add_call_site (NULL_RTX, 0, cur_sec);
2564 note = emit_note_before (NOTE_INSN_EH_REGION_BEG,
2565 first_no_action_insn);
2566 NOTE_EH_HANDLER (note) = call_site;
2567 first_no_action_insn = NULL;
2568 }
2569
2570 note = emit_note_eh_region_end (last_action_insn);
2571 NOTE_EH_HANDLER (note) = call_site;
2572 }
2573
2574 /* If the new action is must-not-throw, then no region notes
2575 are created. */
2576 if (this_action >= -1)
2577 {
2578 call_site = add_call_site (this_landing_pad,
2579 this_action < 0 ? 0 : this_action,
2580 cur_sec);
2581 note = emit_note_before (NOTE_INSN_EH_REGION_BEG, iter);
2582 NOTE_EH_HANDLER (note) = call_site;
2583 }
2584
2585 last_action = this_action;
2586 last_landing_pad = this_landing_pad;
2587 }
2588 last_action_insn = iter;
2589 }
2590 else if (NOTE_P (iter)
2591 && NOTE_KIND (iter) == NOTE_INSN_SWITCH_TEXT_SECTIONS)
2592 {
2593 gcc_assert (section_switch_note == NULL_RTX);
2594 gcc_assert (flag_reorder_blocks_and_partition);
2595 section_switch_note = iter;
2596 if (first_no_action_insn)
2597 {
2598 first_no_action_insn_before_switch = first_no_action_insn;
2599 last_no_action_insn_before_switch = last_action_insn;
2600 first_no_action_insn = NULL;
2601 gcc_assert (last_action == -1);
2602 last_action = -3;
2603 }
2604 /* Force closing of current EH region before section switch and
2605 opening a new one afterwards. */
2606 else if (last_action != -3)
2607 last_landing_pad = pc_rtx;
2608 if (crtl->eh.call_site_record_v[cur_sec])
2609 call_site_base += crtl->eh.call_site_record_v[cur_sec]->length ();
2610 cur_sec++;
2611 gcc_assert (crtl->eh.call_site_record_v[cur_sec] == NULL);
2612 vec_alloc (crtl->eh.call_site_record_v[cur_sec], 10);
2613 }
2614
2615 if (last_action >= -1 && ! first_no_action_insn)
2616 {
2617 note = emit_note_eh_region_end (last_action_insn);
2618 NOTE_EH_HANDLER (note) = call_site;
2619 }
2620
2621 call_site_base = saved_call_site_base;
2622
2623 return 0;
2624 }
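/* The net effect of the pass above, sketched on a fragment of the
   insn stream (the call-site number 3 is hypothetical): a run of
   insns carrying the same action chain and landing pad, e.g.

	(call_insn ... (expr_list:REG_EH_REGION (const_int 7) ...))

   ends up bracketed as

	NOTE_INSN_EH_REGION_BEG 3
	(call_insn ... (expr_list:REG_EH_REGION (const_int 7) ...))
	NOTE_INSN_EH_REGION_END 3

   where 3 is the index of the call-site record holding the landing
   pad and action for the run.  */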
2625
2626 namespace {
2627
2628 const pass_data pass_data_convert_to_eh_region_ranges =
2629 {
2630 RTL_PASS, /* type */
2631 "eh_ranges", /* name */
2632 OPTGROUP_NONE, /* optinfo_flags */
2633 TV_NONE, /* tv_id */
2634 0, /* properties_required */
2635 0, /* properties_provided */
2636 0, /* properties_destroyed */
2637 0, /* todo_flags_start */
2638 0, /* todo_flags_finish */
2639 };
2640
2641 class pass_convert_to_eh_region_ranges : public rtl_opt_pass
2642 {
2643 public:
2644 pass_convert_to_eh_region_ranges (gcc::context *ctxt)
2645 : rtl_opt_pass (pass_data_convert_to_eh_region_ranges, ctxt)
2646 {}
2647
2648 /* opt_pass methods: */
2649 virtual bool gate (function *);
2650 virtual unsigned int execute (function *)
2651 {
2652 return convert_to_eh_region_ranges ();
2653 }
2654
2655 }; // class pass_convert_to_eh_region_ranges
2656
2657 bool
2658 pass_convert_to_eh_region_ranges::gate (function *)
2659 {
2660 /* Nothing to do for SJLJ exceptions or if no regions created. */
2661 if (cfun->eh->region_tree == NULL)
2662 return false;
2663 if (targetm_common.except_unwind_info (&global_options) == UI_SJLJ)
2664 return false;
2665 return true;
2666 }
2667
2668 } // anon namespace
2669
2670 rtl_opt_pass *
2671 make_pass_convert_to_eh_region_ranges (gcc::context *ctxt)
2672 {
2673 return new pass_convert_to_eh_region_ranges (ctxt);
2674 }
2675 \f
2676 static void
2677 push_uleb128 (vec<uchar, va_gc> **data_area, unsigned int value)
2678 {
2679 do
2680 {
2681 unsigned char byte = value & 0x7f;
2682 value >>= 7;
2683 if (value)
2684 byte |= 0x80;
2685 vec_safe_push (*data_area, byte);
2686 }
2687 while (value);
2688 }
2689
2690 static void
2691 push_sleb128 (vec<uchar, va_gc> **data_area, int value)
2692 {
2693 unsigned char byte;
2694 int more;
2695
2696 do
2697 {
2698 byte = value & 0x7f;
2699 value >>= 7;
2700 more = ! ((value == 0 && (byte & 0x40) == 0)
2701 || (value == -1 && (byte & 0x40) != 0));
2702 if (more)
2703 byte |= 0x80;
2704 vec_safe_push (*data_area, byte);
2705 }
2706 while (more);
2707 }
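/* Worked examples of the two encoders above (a sketch, not compiled;
   the byte values follow the DWARF LEB128 definition):  */
#if 0
  vec<uchar, va_gc> *v = NULL;
  push_uleb128 (&v, 624485);	/* Appends 0xe5 0x8e 0x26.  */
  push_sleb128 (&v, -2);	/* Appends the single byte 0x7e, since
				   the sign bit 0x40 is already set.  */
#endif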
2708
2709 \f
2710 #ifndef HAVE_AS_LEB128
2711 static int
2712 dw2_size_of_call_site_table (int section)
2713 {
2714 int n = vec_safe_length (crtl->eh.call_site_record_v[section]);
2715 int size = n * (4 + 4 + 4);
2716 int i;
2717
2718 for (i = 0; i < n; ++i)
2719 {
2720 struct call_site_record_d *cs =
2721 (*crtl->eh.call_site_record_v[section])[i];
2722 size += size_of_uleb128 (cs->action);
2723 }
2724
2725 return size;
2726 }
2727
2728 static int
2729 sjlj_size_of_call_site_table (void)
2730 {
2731 int n = vec_safe_length (crtl->eh.call_site_record_v[0]);
2732 int size = 0;
2733 int i;
2734
2735 for (i = 0; i < n; ++i)
2736 {
2737 struct call_site_record_d *cs =
2738 (*crtl->eh.call_site_record_v[0])[i];
2739 size += size_of_uleb128 (INTVAL (cs->landing_pad));
2740 size += size_of_uleb128 (cs->action);
2741 }
2742
2743 return size;
2744 }
2745 #endif
2746
2747 static void
2748 dw2_output_call_site_table (int cs_format, int section)
2749 {
2750 int n = vec_safe_length (crtl->eh.call_site_record_v[section]);
2751 int i;
2752 const char *begin;
2753
2754 if (section == 0)
2755 begin = current_function_func_begin_label;
2756 else if (first_function_block_is_cold)
2757 begin = crtl->subsections.hot_section_label;
2758 else
2759 begin = crtl->subsections.cold_section_label;
2760
2761 for (i = 0; i < n; ++i)
2762 {
2763 struct call_site_record_d *cs = (*crtl->eh.call_site_record_v[section])[i];
2764 char reg_start_lab[32];
2765 char reg_end_lab[32];
2766 char landing_pad_lab[32];
2767
2768 ASM_GENERATE_INTERNAL_LABEL (reg_start_lab, "LEHB", call_site_base + i);
2769 ASM_GENERATE_INTERNAL_LABEL (reg_end_lab, "LEHE", call_site_base + i);
2770
2771 if (cs->landing_pad)
2772 ASM_GENERATE_INTERNAL_LABEL (landing_pad_lab, "L",
2773 CODE_LABEL_NUMBER (cs->landing_pad));
2774
2775 /* ??? Perhaps use insn length scaling if the assembler supports
2776 generic arithmetic. */
2777 /* ??? Perhaps use attr_length to choose data1 or data2 instead of
2778 data4 if the function is small enough. */
2779 if (cs_format == DW_EH_PE_uleb128)
2780 {
2781 dw2_asm_output_delta_uleb128 (reg_start_lab, begin,
2782 "region %d start", i);
2783 dw2_asm_output_delta_uleb128 (reg_end_lab, reg_start_lab,
2784 "length");
2785 if (cs->landing_pad)
2786 dw2_asm_output_delta_uleb128 (landing_pad_lab, begin,
2787 "landing pad");
2788 else
2789 dw2_asm_output_data_uleb128 (0, "landing pad");
2790 }
2791 else
2792 {
2793 dw2_asm_output_delta (4, reg_start_lab, begin,
2794 "region %d start", i);
2795 dw2_asm_output_delta (4, reg_end_lab, reg_start_lab, "length");
2796 if (cs->landing_pad)
2797 dw2_asm_output_delta (4, landing_pad_lab, begin,
2798 "landing pad");
2799 else
2800 dw2_asm_output_data (4, 0, "landing pad");
2801 }
2802 dw2_asm_output_data_uleb128 (cs->action, "action");
2803 }
2804
2805 call_site_base += n;
2806 }
2807
2808 static void
2809 sjlj_output_call_site_table (void)
2810 {
2811 int n = vec_safe_length (crtl->eh.call_site_record_v[0]);
2812 int i;
2813
2814 for (i = 0; i < n; ++i)
2815 {
2816 struct call_site_record_d *cs = (*crtl->eh.call_site_record_v[0])[i];
2817
2818 dw2_asm_output_data_uleb128 (INTVAL (cs->landing_pad),
2819 "region %d landing pad", i);
2820 dw2_asm_output_data_uleb128 (cs->action, "action");
2821 }
2822
2823 call_site_base += n;
2824 }
2825
2826 /* Switch to the section that should be used for exception tables. */
2827
2828 static void
2829 switch_to_exception_section (const char * ARG_UNUSED (fnname))
2830 {
2831 section *s;
2832
2833 if (exception_section)
2834 s = exception_section;
2835 else
2836 {
2837 int flags;
2838
2839 if (EH_TABLES_CAN_BE_READ_ONLY)
2840 {
2841 int tt_format =
2842 ASM_PREFERRED_EH_DATA_FORMAT (/*code=*/0, /*global=*/1);
2843 flags = ((! flag_pic
2844 || ((tt_format & 0x70) != DW_EH_PE_absptr
2845 && (tt_format & 0x70) != DW_EH_PE_aligned))
2846 ? 0 : SECTION_WRITE);
2847 }
2848 else
2849 flags = SECTION_WRITE;
2850
2851 /* Compute the section and cache it into exception_section,
2852 unless it depends on the function name. */
2853 if (targetm_common.have_named_sections)
2854 {
2855 #ifdef HAVE_LD_EH_GC_SECTIONS
2856 if (flag_function_sections
2857 || (DECL_COMDAT_GROUP (current_function_decl) && HAVE_COMDAT_GROUP))
2858 {
2859 char *section_name = XNEWVEC (char, strlen (fnname) + 32);
2860 /* The EH table must match the code section, so only mark
2861 it linkonce if we have COMDAT groups to tie them together. */
2862 if (DECL_COMDAT_GROUP (current_function_decl) && HAVE_COMDAT_GROUP)
2863 flags |= SECTION_LINKONCE;
2864 sprintf (section_name, ".gcc_except_table.%s", fnname);
2865 s = get_section (section_name, flags, current_function_decl);
2866 free (section_name);
2867 }
2868 else
2869 #endif
2870 exception_section
2871 = s = get_section (".gcc_except_table", flags, NULL);
2872 }
2873 else
2874 exception_section
2875 = s = flags == SECTION_WRITE ? data_section : readonly_data_section;
2876 }
2877
2878 switch_to_section (s);
2879 }
2880
2881
2882 /* Output a reference from an exception table to the type_info object TYPE.
2883 TT_FORMAT and TT_FORMAT_SIZE describe the DWARF encoding method used for
2884 the value. */
2885
2886 static void
2887 output_ttype (tree type, int tt_format, int tt_format_size)
2888 {
2889 rtx value;
2890 bool is_public = true;
2891
2892 if (type == NULL_TREE)
2893 value = const0_rtx;
2894 else
2895 {
2896 /* FIXME lto. pass_ipa_free_lang_data changes all types to
2897 runtime types so TYPE should already be a runtime type
2898 reference. When pass_ipa_free_lang_data is made a default
2899 pass, we can then remove the call to lookup_type_for_runtime
2900 below. */
2901 if (TYPE_P (type))
2902 type = lookup_type_for_runtime (type);
2903
2904 value = expand_expr (type, NULL_RTX, VOIDmode, EXPAND_INITIALIZER);
2905
2906 /* Let cgraph know that the rtti decl is used. Not all of the
2907 paths below go through assemble_integer, which would take
2908 care of this for us. */
2909 STRIP_NOPS (type);
2910 if (TREE_CODE (type) == ADDR_EXPR)
2911 {
2912 type = TREE_OPERAND (type, 0);
2913 if (TREE_CODE (type) == VAR_DECL)
2914 is_public = TREE_PUBLIC (type);
2915 }
2916 else
2917 gcc_assert (TREE_CODE (type) == INTEGER_CST);
2918 }
2919
2920 /* Allow the target to override the type table entry format. */
2921 if (targetm.asm_out.ttype (value))
2922 return;
2923
2924 if (tt_format == DW_EH_PE_absptr || tt_format == DW_EH_PE_aligned)
2925 assemble_integer (value, tt_format_size,
2926 tt_format_size * BITS_PER_UNIT, 1);
2927 else
2928 dw2_asm_output_encoded_addr_rtx (tt_format, value, is_public, NULL);
2929 }
2930
2931 static void
2932 output_one_function_exception_table (int section)
2933 {
2934 int tt_format, cs_format, lp_format, i;
2935 #ifdef HAVE_AS_LEB128
2936 char ttype_label[32];
2937 char cs_after_size_label[32];
2938 char cs_end_label[32];
2939 #else
2940 int call_site_len;
2941 #endif
2942 int have_tt_data;
2943 int tt_format_size = 0;
2944
2945 have_tt_data = (vec_safe_length (cfun->eh->ttype_data)
2946 || (targetm.arm_eabi_unwinder
2947 ? vec_safe_length (cfun->eh->ehspec_data.arm_eabi)
2948 : vec_safe_length (cfun->eh->ehspec_data.other)));
2949
2950 /* Indicate the format of the @TType entries. */
2951 if (! have_tt_data)
2952 tt_format = DW_EH_PE_omit;
2953 else
2954 {
2955 tt_format = ASM_PREFERRED_EH_DATA_FORMAT (/*code=*/0, /*global=*/1);
2956 #ifdef HAVE_AS_LEB128
2957 ASM_GENERATE_INTERNAL_LABEL (ttype_label,
2958 section ? "LLSDATTC" : "LLSDATT",
2959 current_function_funcdef_no);
2960 #endif
2961 tt_format_size = size_of_encoded_value (tt_format);
2962
2963 assemble_align (tt_format_size * BITS_PER_UNIT);
2964 }
2965
2966 targetm.asm_out.internal_label (asm_out_file, section ? "LLSDAC" : "LLSDA",
2967 current_function_funcdef_no);
2968
2969 /* The LSDA header. */
2970
2971 /* Indicate the format of the landing pad start pointer. An omitted
2972 field implies @LPStart == @Start. */
2973 /* Currently we always put @LPStart == @Start. This field would
2974 be most useful in moving the landing pads completely out of
2975 line to another section, but it could also be used to minimize
2976 the size of uleb128 landing pad offsets. */
2977 lp_format = DW_EH_PE_omit;
2978 dw2_asm_output_data (1, lp_format, "@LPStart format (%s)",
2979 eh_data_format_name (lp_format));
2980
2981 /* @LPStart pointer would go here. */
2982
2983 dw2_asm_output_data (1, tt_format, "@TType format (%s)",
2984 eh_data_format_name (tt_format));
2985
2986 #ifndef HAVE_AS_LEB128
2987 if (targetm_common.except_unwind_info (&global_options) == UI_SJLJ)
2988 call_site_len = sjlj_size_of_call_site_table ();
2989 else
2990 call_site_len = dw2_size_of_call_site_table (section);
2991 #endif
2992
2993 /* A pc-relative 4-byte displacement to the @TType data. */
2994 if (have_tt_data)
2995 {
2996 #ifdef HAVE_AS_LEB128
2997 char ttype_after_disp_label[32];
2998 ASM_GENERATE_INTERNAL_LABEL (ttype_after_disp_label,
2999 section ? "LLSDATTDC" : "LLSDATTD",
3000 current_function_funcdef_no);
3001 dw2_asm_output_delta_uleb128 (ttype_label, ttype_after_disp_label,
3002 "@TType base offset");
3003 ASM_OUTPUT_LABEL (asm_out_file, ttype_after_disp_label);
3004 #else
3005 /* Ugh. Alignment complicates the displacement computation. */
3006 unsigned int before_disp, after_disp, last_disp, disp;
3007
3008 before_disp = 1 + 1;
3009 after_disp = (1 + size_of_uleb128 (call_site_len)
3010 + call_site_len
3011 + vec_safe_length (crtl->eh.action_record_data)
3012 + (vec_safe_length (cfun->eh->ttype_data)
3013 * tt_format_size));
3014
3015 disp = after_disp;
3016 do
3017 {
3018 unsigned int disp_size, pad;
3019
3020 last_disp = disp;
3021 disp_size = size_of_uleb128 (disp);
3022 pad = before_disp + disp_size + after_disp;
3023 if (pad % tt_format_size)
3024 pad = tt_format_size - (pad % tt_format_size);
3025 else
3026 pad = 0;
3027 disp = after_disp + pad;
3028 }
3029 while (disp != last_disp);
3030
3031 dw2_asm_output_data_uleb128 (disp, "@TType base offset");
3032 #endif
3033 }
3034
3035 /* Indicate the format of the call-site offsets. */
3036 #ifdef HAVE_AS_LEB128
3037 cs_format = DW_EH_PE_uleb128;
3038 #else
3039 cs_format = DW_EH_PE_udata4;
3040 #endif
3041 dw2_asm_output_data (1, cs_format, "call-site format (%s)",
3042 eh_data_format_name (cs_format));
3043
3044 #ifdef HAVE_AS_LEB128
3045 ASM_GENERATE_INTERNAL_LABEL (cs_after_size_label,
3046 section ? "LLSDACSBC" : "LLSDACSB",
3047 current_function_funcdef_no);
3048 ASM_GENERATE_INTERNAL_LABEL (cs_end_label,
3049 section ? "LLSDACSEC" : "LLSDACSE",
3050 current_function_funcdef_no);
3051 dw2_asm_output_delta_uleb128 (cs_end_label, cs_after_size_label,
3052 "Call-site table length");
3053 ASM_OUTPUT_LABEL (asm_out_file, cs_after_size_label);
3054 if (targetm_common.except_unwind_info (&global_options) == UI_SJLJ)
3055 sjlj_output_call_site_table ();
3056 else
3057 dw2_output_call_site_table (cs_format, section);
3058 ASM_OUTPUT_LABEL (asm_out_file, cs_end_label);
3059 #else
3060 dw2_asm_output_data_uleb128 (call_site_len, "Call-site table length");
3061 if (targetm_common.except_unwind_info (&global_options) == UI_SJLJ)
3062 sjlj_output_call_site_table ();
3063 else
3064 dw2_output_call_site_table (cs_format, section);
3065 #endif
3066
3067 /* ??? Decode and interpret the data for flag_debug_asm. */
3068 {
3069 uchar uc;
3070 FOR_EACH_VEC_ELT (*crtl->eh.action_record_data, i, uc)
3071 dw2_asm_output_data (1, uc, i ? NULL : "Action record table");
3072 }
3073
3074 if (have_tt_data)
3075 assemble_align (tt_format_size * BITS_PER_UNIT);
3076
3077 i = vec_safe_length (cfun->eh->ttype_data);
3078 while (i-- > 0)
3079 {
3080 tree type = (*cfun->eh->ttype_data)[i];
3081 output_ttype (type, tt_format, tt_format_size);
3082 }
3083
3084 #ifdef HAVE_AS_LEB128
3085 if (have_tt_data)
3086 ASM_OUTPUT_LABEL (asm_out_file, ttype_label);
3087 #endif
3088
3089 /* ??? Decode and interpret the data for flag_debug_asm. */
3090 if (targetm.arm_eabi_unwinder)
3091 {
3092 tree type;
3093 for (i = 0;
3094 vec_safe_iterate (cfun->eh->ehspec_data.arm_eabi, i, &type); ++i)
3095 output_ttype (type, tt_format, tt_format_size);
3096 }
3097 else
3098 {
3099 uchar uc;
3100 for (i = 0;
3101 vec_safe_iterate (cfun->eh->ehspec_data.other, i, &uc); ++i)
3102 dw2_asm_output_data (1, uc,
3103 i ? NULL : "Exception specification table");
3104 }
3105 }
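/* For orientation, the LSDA emitted by the function above looks
   roughly like this with an leb128-capable assembler (labels carry
   the funcdef number; the exact encodings vary by target):

	.LLSDA42:
		.byte	0xff			# @LPStart format (omit)
		.byte	0x9b			# @TType format
		.uleb128 .LLSDATT42-.LLSDATTD42	# @TType base offset
	.LLSDATTD42:
		.byte	0x1			# call-site format (uleb128)
		.uleb128 .LLSDACSE42-.LLSDACSB42 # Call-site table length
	.LLSDACSB42:
		... call-site records ...
	.LLSDACSE42:
		... action record table ...
		... type table, ending at ...
	.LLSDATT42:
   */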
3106
3107 void
3108 output_function_exception_table (const char *fnname)
3109 {
3110 rtx personality = get_personality_function (current_function_decl);
3111
3112 /* Not all functions need an exception table. */
3113 if (! crtl->uses_eh_lsda)
3114 return;
3115
3116 if (personality)
3117 {
3118 assemble_external_libcall (personality);
3119
3120 if (targetm.asm_out.emit_except_personality)
3121 targetm.asm_out.emit_except_personality (personality);
3122 }
3123
3124 switch_to_exception_section (fnname);
3125
3126 /* If the target wants a label to begin the table, emit it here. */
3127 targetm.asm_out.emit_except_table_label (asm_out_file);
3128
3129 output_one_function_exception_table (0);
3130 if (crtl->eh.call_site_record_v[1])
3131 output_one_function_exception_table (1);
3132
3133 switch_to_section (current_function_section ());
3134 }
3135
3136 void
3137 set_eh_throw_stmt_table (function *fun, hash_map<gimple *, int> *table)
3138 {
3139 fun->eh->throw_stmt_table = table;
3140 }
3141
3142 hash_map<gimple *, int> *
3143 get_eh_throw_stmt_table (struct function *fun)
3144 {
3145 return fun->eh->throw_stmt_table;
3146 }
3147 \f
3148 /* Determine if the function needs an EH personality function. */
3149
3150 enum eh_personality_kind
3151 function_needs_eh_personality (struct function *fn)
3152 {
3153 enum eh_personality_kind kind = eh_personality_none;
3154 eh_region i;
3155
3156 FOR_ALL_EH_REGION_FN (i, fn)
3157 {
3158 switch (i->type)
3159 {
3160 case ERT_CLEANUP:
3161 /* Can do with any personality, including the generic C one. */
3162 kind = eh_personality_any;
3163 break;
3164
3165 case ERT_TRY:
3166 case ERT_ALLOWED_EXCEPTIONS:
3167 /* Always needs an EH personality function. The generic C
3168 personality doesn't handle these even for empty type lists. */
3169 return eh_personality_lang;
3170
3171 case ERT_MUST_NOT_THROW:
3172 /* Always needs an EH personality function. The language may specify
3173 which abort routine must be used, e.g. std::terminate. */
3174 return eh_personality_lang;
3175 }
3176 }
3177
3178 return kind;
3179 }
3180 \f
3181 /* Dump EH information to OUT. */
3182
3183 void
3184 dump_eh_tree (FILE * out, struct function *fun)
3185 {
3186 eh_region i;
3187 int depth = 0;
3188 static const char *const type_name[] = {
3189 "cleanup", "try", "allowed_exceptions", "must_not_throw"
3190 };
3191
3192 i = fun->eh->region_tree;
3193 if (!i)
3194 return;
3195
3196 fprintf (out, "Eh tree:\n");
3197 while (1)
3198 {
3199 fprintf (out, " %*s %i %s", depth * 2, "",
3200 i->index, type_name[(int) i->type]);
3201
3202 if (i->landing_pads)
3203 {
3204 eh_landing_pad lp;
3205
3206 fprintf (out, " land:");
3207 if (current_ir_type () == IR_GIMPLE)
3208 {
3209 for (lp = i->landing_pads; lp ; lp = lp->next_lp)
3210 {
3211 fprintf (out, "{%i,", lp->index);
3212 print_generic_expr (out, lp->post_landing_pad, 0);
3213 fputc ('}', out);
3214 if (lp->next_lp)
3215 fputc (',', out);
3216 }
3217 }
3218 else
3219 {
3220 for (lp = i->landing_pads; lp ; lp = lp->next_lp)
3221 {
3222 fprintf (out, "{%i,", lp->index);
3223 if (lp->landing_pad)
3224 fprintf (out, "%i%s,", INSN_UID (lp->landing_pad),
3225 NOTE_P (lp->landing_pad) ? "(del)" : "");
3226 else
3227 fprintf (out, "(nil),");
3228 if (lp->post_landing_pad)
3229 {
3230 rtx_insn *lab = label_rtx (lp->post_landing_pad);
3231 fprintf (out, "%i%s}", INSN_UID (lab),
3232 NOTE_P (lab) ? "(del)" : "");
3233 }
3234 else
3235 fprintf (out, "(nil)}");
3236 if (lp->next_lp)
3237 fputc (',', out);
3238 }
3239 }
3240 }
3241
3242 switch (i->type)
3243 {
3244 case ERT_CLEANUP:
3245 case ERT_MUST_NOT_THROW:
3246 break;
3247
3248 case ERT_TRY:
3249 {
3250 eh_catch c;
3251 fprintf (out, " catch:");
3252 for (c = i->u.eh_try.first_catch; c; c = c->next_catch)
3253 {
3254 fputc ('{', out);
3255 if (c->label)
3256 {
3257 fprintf (out, "lab:");
3258 print_generic_expr (out, c->label, 0);
3259 fputc (';', out);
3260 }
3261 print_generic_expr (out, c->type_list, 0);
3262 fputc ('}', out);
3263 if (c->next_catch)
3264 fputc (',', out);
3265 }
3266 }
3267 break;
3268
3269 case ERT_ALLOWED_EXCEPTIONS:
3270 fprintf (out, " filter :%i types:", i->u.allowed.filter);
3271 print_generic_expr (out, i->u.allowed.type_list, 0);
3272 break;
3273 }
3274 fputc ('\n', out);
3275
3276 /* If there are sub-regions, process them. */
3277 if (i->inner)
3278 i = i->inner, depth++;
3279 /* If there are peers, process them. */
3280 else if (i->next_peer)
3281 i = i->next_peer;
3282 /* Otherwise, step back up the tree to the next peer. */
3283 else
3284 {
3285 do
3286 {
3287 i = i->outer;
3288 depth--;
3289 if (i == NULL)
3290 return;
3291 }
3292 while (i->next_peer == NULL);
3293 i = i->next_peer;
3294 }
3295 }
3296 }
3297
3298 /* Dump the EH tree for FN on stderr. */
3299
3300 DEBUG_FUNCTION void
3301 debug_eh_tree (struct function *fn)
3302 {
3303 dump_eh_tree (stderr, fn);
3304 }
3305
3306 /* Verify invariants on EH data structures. */
3307
3308 DEBUG_FUNCTION void
3309 verify_eh_tree (struct function *fun)
3310 {
3311 eh_region r, outer;
3312 int nvisited_lp, nvisited_r;
3313 int count_lp, count_r, depth, i;
3314 eh_landing_pad lp;
3315 bool err = false;
3316
3317 if (!fun->eh->region_tree)
3318 return;
3319
3320 count_r = 0;
3321 for (i = 1; vec_safe_iterate (fun->eh->region_array, i, &r); ++i)
3322 if (r)
3323 {
3324 if (r->index == i)
3325 count_r++;
3326 else
3327 {
3328 error ("region_array is corrupted for region %i", r->index);
3329 err = true;
3330 }
3331 }
3332
3333 count_lp = 0;
3334 for (i = 1; vec_safe_iterate (fun->eh->lp_array, i, &lp); ++i)
3335 if (lp)
3336 {
3337 if (lp->index == i)
3338 count_lp++;
3339 else
3340 {
3341 error ("lp_array is corrupted for lp %i", lp->index);
3342 err = true;
3343 }
3344 }
3345
3346 depth = nvisited_lp = nvisited_r = 0;
3347 outer = NULL;
3348 r = fun->eh->region_tree;
3349 while (1)
3350 {
3351 if ((*fun->eh->region_array)[r->index] != r)
3352 {
3353 error ("region_array is corrupted for region %i", r->index);
3354 err = true;
3355 }
3356 if (r->outer != outer)
3357 {
3358 error ("outer block of region %i is wrong", r->index);
3359 err = true;
3360 }
3361 if (depth < 0)
3362 {
3363 error ("negative nesting depth of region %i", r->index);
3364 err = true;
3365 }
3366 nvisited_r++;
3367
3368 for (lp = r->landing_pads; lp ; lp = lp->next_lp)
3369 {
3370 if ((*fun->eh->lp_array)[lp->index] != lp)
3371 {
3372 error ("lp_array is corrupted for lp %i", lp->index);
3373 err = true;
3374 }
3375 if (lp->region != r)
3376 {
3377 error ("region of lp %i is wrong", lp->index);
3378 err = true;
3379 }
3380 nvisited_lp++;
3381 }
3382
3383 if (r->inner)
3384 outer = r, r = r->inner, depth++;
3385 else if (r->next_peer)
3386 r = r->next_peer;
3387 else
3388 {
3389 do
3390 {
3391 r = r->outer;
3392 if (r == NULL)
3393 goto region_done;
3394 depth--;
3395 outer = r->outer;
3396 }
3397 while (r->next_peer == NULL);
3398 r = r->next_peer;
3399 }
3400 }
3401 region_done:
3402 if (depth != 0)
3403 {
3404 error ("tree list ends on depth %i", depth);
3405 err = true;
3406 }
3407 if (count_r != nvisited_r)
3408 {
3409 error ("region_array does not match region_tree");
3410 err = true;
3411 }
3412 if (count_lp != nvisited_lp)
3413 {
3414 error ("lp_array does not match region_tree");
3415 err = true;
3416 }
3417
3418 if (err)
3419 {
3420 dump_eh_tree (stderr, fun);
3421 internal_error ("verify_eh_tree failed");
3422 }
3423 }
3424 \f
3425 #include "gt-except.h"