/* Implements exception handling.
   Copyright (C) 1989-2015 Free Software Foundation, Inc.
   Contributed by Mike Stump <mrs@cygnus.com>.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */


/* An exception is an event that can be "thrown" from within a
   function.  This event can then be "caught" by the callers of
   the function.

   The representation of exceptions changes several times during
   the compilation process:

   In the beginning, in the front end, we have the GENERIC trees
   TRY_CATCH_EXPR, TRY_FINALLY_EXPR, WITH_CLEANUP_EXPR,
   CLEANUP_POINT_EXPR, CATCH_EXPR, and EH_FILTER_EXPR.

   During initial gimplification (gimplify.c) these are lowered
   to the GIMPLE_TRY, GIMPLE_CATCH, and GIMPLE_EH_FILTER nodes.
   The WITH_CLEANUP_EXPR and CLEANUP_POINT_EXPR nodes are converted
   into GIMPLE_TRY_FINALLY nodes; the others are a more direct 1-1
   conversion.

   During pass_lower_eh (tree-eh.c) we record the nested structure
   of the TRY nodes in EH_REGION nodes in CFUN->EH->REGION_TREE.
   We expand the eh_protect_cleanup_actions langhook into MUST_NOT_THROW
   regions at this time.  We can then flatten the statements within
   the TRY nodes to straight-line code.  Statements that had been within
   TRY nodes that can throw are recorded within CFUN->EH->THROW_STMT_TABLE,
   so that we may remember what action is supposed to be taken if
   a given statement does throw.  During this lowering process,
   we create an EH_LANDING_PAD node for each EH_REGION that has
   some code within the function that needs to be executed if a
   throw does happen.  We also create RESX statements that are
   used to transfer control from an inner EH_REGION to an outer
   EH_REGION.  We also create EH_DISPATCH statements as placeholders
   for a runtime type comparison that should be made in order to
   select the action to perform among different CATCH and EH_FILTER
   regions.

   During pass_lower_eh_dispatch (tree-eh.c), which is run after
   all inlining is complete, we are able to run assign_filter_values,
   which allows us to map the set of types manipulated by all of the
   CATCH and EH_FILTER regions to a set of integers.  This set of integers
   will be how the exception runtime communicates with the code generated
   within the function.  We then expand the GIMPLE_EH_DISPATCH statements
   to a switch or conditional branches that use the argument provided by
   the runtime (__builtin_eh_filter) and the set of integers we computed
   in assign_filter_values.

   During pass_lower_resx (tree-eh.c), which is run near the end
   of optimization, we expand RESX statements.  If the eh region
   that is outer to the RESX statement is a MUST_NOT_THROW, then
   the RESX expands to some form of abort statement.  If the eh
   region that is outer to the RESX statement is within the current
   function, then the RESX expands to a bookkeeping call
   (__builtin_eh_copy_values) and a goto.  Otherwise, the next
   handler for the exception must be within a function somewhere
   up the call chain, so we call back into the exception runtime
   (__builtin_unwind_resume).

   During pass_expand (cfgexpand.c), we generate REG_EH_REGION notes
   that create an rtl to eh_region mapping that corresponds to the
   gimple to eh_region mapping that had been recorded in the
   THROW_STMT_TABLE.

   Then, via finish_eh_generation, we generate the real landing pads
   to which the runtime will actually transfer control.  These new
   landing pads perform whatever bookkeeping is needed by the target
   backend in order to resume execution within the current function.
   Each of these new landing pads falls through into the post_landing_pad
   label which had been used within the CFG up to this point.  All
   exception edges within the CFG are redirected to the new landing pads.
   If the target uses setjmp to implement exceptions, the various extra
   calls into the runtime to register and unregister the current stack
   frame are emitted at this time.

   During pass_convert_to_eh_region_ranges (except.c), we transform
   the REG_EH_REGION notes attached to individual insns into
   non-overlapping ranges of insns bounded by NOTE_INSN_EH_REGION_BEG
   and NOTE_INSN_EH_REGION_END.  Each insn within such ranges has the
   same associated action within the exception region tree, meaning
   that (1) the exception is caught by the same landing pad within the
   current function, (2) the exception is blocked by the runtime with
   a MUST_NOT_THROW region, or (3) the exception is not handled at all
   within the current function.

   Finally, during assembly generation, we call
   output_function_exception_table (except.c) to emit the tables with
   which the exception runtime can determine if a given stack frame
   handles a given exception, and if so what filter value to provide
   to the function when the non-local control transfer is effected.
   If the target uses dwarf2 unwinding to implement exceptions, then
   output_call_frame_info (dwarf2out.c) emits the required unwind data.  */

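/* As an illustrative sketch of the pipeline above (not authoritative;
   exact IL dumps vary by front end and dump format), consider:

	void f () { try { g (); } catch (int) { h (); } }

   The C++ front end emits a TRY_CATCH_EXPR whose handler is a
   CATCH_EXPR for int; gimplification lowers that to a GIMPLE_TRY with
   a GIMPLE_CATCH handler.  pass_lower_eh then records an ERT_TRY
   region, flattens the body to straight-line code, enters the call to
   g () in THROW_STMT_TABLE, and materializes a landing pad plus
   dispatch code along the lines of

	g ();			[associated with landing pad 1]
	...
	eh_dispatch 1;		[placeholder runtime type test]
	<catch int>: h ();	[the handler body]
	resx 1;			[transfer to the outer region]

   After inlining, pass_lower_eh_dispatch assigns int a filter value
   and expands the eh_dispatch into a comparison against the value the
   runtime supplies via __builtin_eh_filter.  */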

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "backend.h"
#include "target.h"
#include "rtl.h"
#include "tree.h"
#include "cfghooks.h"
#include "tree-pass.h"
#include "tm_p.h"
#include "stringpool.h"
#include "expmed.h"
#include "optabs.h"
#include "emit-rtl.h"
#include "cgraph.h"
#include "diagnostic.h"
#include "alias.h"
#include "fold-const.h"
#include "stor-layout.h"
#include "flags.h"
#include "dojump.h"
#include "explow.h"
#include "calls.h"
#include "varasm.h"
#include "stmt.h"
#include "expr.h"
#include "libfuncs.h"
#include "except.h"
#include "output.h"
#include "dwarf2asm.h"
#include "dwarf2out.h"
#include "toplev.h"
#include "intl.h"
#include "common/common-target.h"
#include "langhooks.h"
#include "cfgrtl.h"
#include "tree-pretty-print.h"
#include "cfgloop.h"
#include "builtins.h"
#include "tree-hash-traits.h"

static GTY(()) int call_site_base;

static GTY (()) hash_map<tree_hash, tree> *type_to_runtime_map;

/* Describe the SjLj_Function_Context structure.  */
static GTY(()) tree sjlj_fc_type_node;
static int sjlj_fc_call_site_ofs;
static int sjlj_fc_data_ofs;
static int sjlj_fc_personality_ofs;
static int sjlj_fc_lsda_ofs;
static int sjlj_fc_jbuf_ofs;
\f

struct GTY(()) call_site_record_d
{
  rtx landing_pad;
  int action;
};

/* In the following structure and associated functions,
   we represent entries in the action table as 1-based indices.
   Special cases are:

	 0:	null action record, non-null landing pad; implies cleanups
	-1:	null action record, null landing pad; implies no action
	-2:	no call-site entry; implies must_not_throw
	-3:	we have yet to process outer regions

   Further, no special cases apply to the "next" field of the record.
   For next, 0 means end of list.  */

struct action_record
{
  int offset;
  int filter;
  int next;
};

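/* Illustrative sketch (hypothetical indices, not taken from a real
   dump): for a landing pad whose innermost region is a cleanup nested
   inside a try that catches one type with ttypes filter F, the chain
   might be entered as

     index 1: { filter = F, next = 0 }	  outermost action; end of list
     index 2: { filter = 0, next = 1 }	  cleanup, chaining to index 1

   with the call site referring to index 2; a filter of 0 within a
   record conventionally denotes a cleanup action.  */
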
/* Hashtable helpers.  */

struct action_record_hasher : free_ptr_hash <action_record>
{
  static inline hashval_t hash (const action_record *);
  static inline bool equal (const action_record *, const action_record *);
};

inline hashval_t
action_record_hasher::hash (const action_record *entry)
{
  return entry->next * 1009 + entry->filter;
}

inline bool
action_record_hasher::equal (const action_record *entry,
			     const action_record *data)
{
  return entry->filter == data->filter && entry->next == data->next;
}

typedef hash_table<action_record_hasher> action_hash_type;
\f
static bool get_eh_region_and_lp_from_rtx (const_rtx, eh_region *,
					   eh_landing_pad *);

static void dw2_build_landing_pads (void);

static int collect_one_action_chain (action_hash_type *, eh_region);
static int add_call_site (rtx, int, int);

static void push_uleb128 (vec<uchar, va_gc> **, unsigned int);
static void push_sleb128 (vec<uchar, va_gc> **, int);
#ifndef HAVE_AS_LEB128
static int dw2_size_of_call_site_table (int);
static int sjlj_size_of_call_site_table (void);
#endif
static void dw2_output_call_site_table (int, int);
static void sjlj_output_call_site_table (void);

\f
void
init_eh (void)
{
  if (! flag_exceptions)
    return;

  type_to_runtime_map = hash_map<tree_hash, tree>::create_ggc (31);

  /* Create the SjLj_Function_Context structure.  This should match
     the definition in unwind-sjlj.c.  */
  if (targetm_common.except_unwind_info (&global_options) == UI_SJLJ)
    {
      tree f_jbuf, f_per, f_lsda, f_prev, f_cs, f_data, tmp;

      sjlj_fc_type_node = lang_hooks.types.make_type (RECORD_TYPE);

      f_prev = build_decl (BUILTINS_LOCATION,
			   FIELD_DECL, get_identifier ("__prev"),
			   build_pointer_type (sjlj_fc_type_node));
      DECL_FIELD_CONTEXT (f_prev) = sjlj_fc_type_node;

      f_cs = build_decl (BUILTINS_LOCATION,
			 FIELD_DECL, get_identifier ("__call_site"),
			 integer_type_node);
      DECL_FIELD_CONTEXT (f_cs) = sjlj_fc_type_node;

      tmp = build_index_type (size_int (4 - 1));
      tmp = build_array_type (lang_hooks.types.type_for_mode
				(targetm.unwind_word_mode (), 1),
			      tmp);
      f_data = build_decl (BUILTINS_LOCATION,
			   FIELD_DECL, get_identifier ("__data"), tmp);
      DECL_FIELD_CONTEXT (f_data) = sjlj_fc_type_node;

      f_per = build_decl (BUILTINS_LOCATION,
			  FIELD_DECL, get_identifier ("__personality"),
			  ptr_type_node);
      DECL_FIELD_CONTEXT (f_per) = sjlj_fc_type_node;

      f_lsda = build_decl (BUILTINS_LOCATION,
			   FIELD_DECL, get_identifier ("__lsda"),
			   ptr_type_node);
      DECL_FIELD_CONTEXT (f_lsda) = sjlj_fc_type_node;

#ifdef DONT_USE_BUILTIN_SETJMP
#ifdef JMP_BUF_SIZE
      tmp = size_int (JMP_BUF_SIZE - 1);
#else
      /* Should be large enough for most systems; if it is not,
	 JMP_BUF_SIZE should be defined with the proper value.  It will
	 also tend to be larger than necessary for most systems; a more
	 optimal port will define JMP_BUF_SIZE.  */
      tmp = size_int (FIRST_PSEUDO_REGISTER + 2 - 1);
#endif
#else
      /* Compute a minimally sized jump buffer.  We need room to store at
	 least 3 pointers - stack pointer, frame pointer and return address.
	 Plus for some targets we need room for an extra pointer - in the
	 case of MIPS this is the global pointer.  This makes a total of four
	 pointers, but to be safe we actually allocate room for 5.

	 If pointers are smaller than words then we allocate enough room for
	 5 words, just in case the backend needs this much room.  For more
	 discussion on this issue see:
	 http://gcc.gnu.org/ml/gcc-patches/2014-05/msg00313.html.  */
      if (POINTER_SIZE > BITS_PER_WORD)
	tmp = size_int (5 - 1);
      else
	tmp = size_int ((5 * BITS_PER_WORD / POINTER_SIZE) - 1);
#endif

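      /* For instance (purely illustrative): in the
	 !DONT_USE_BUILTIN_SETJMP case with 32-bit pointers and 64-bit
	 words, the else arm above sizes the buffer for
	 5 * 64 / 32 = 10 pointer slots, i.e. 5 words' worth of room.  */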
      tmp = build_index_type (tmp);
      tmp = build_array_type (ptr_type_node, tmp);
      f_jbuf = build_decl (BUILTINS_LOCATION,
			   FIELD_DECL, get_identifier ("__jbuf"), tmp);
#ifdef DONT_USE_BUILTIN_SETJMP
      /* We don't know what alignment requirements the runtime's
	 jmp_buf has.  Overestimate.  */
      DECL_ALIGN (f_jbuf) = BIGGEST_ALIGNMENT;
      DECL_USER_ALIGN (f_jbuf) = 1;
#endif
      DECL_FIELD_CONTEXT (f_jbuf) = sjlj_fc_type_node;

      TYPE_FIELDS (sjlj_fc_type_node) = f_prev;
      TREE_CHAIN (f_prev) = f_cs;
      TREE_CHAIN (f_cs) = f_data;
      TREE_CHAIN (f_data) = f_per;
      TREE_CHAIN (f_per) = f_lsda;
      TREE_CHAIN (f_lsda) = f_jbuf;

      layout_type (sjlj_fc_type_node);

      /* Cache the interesting field offsets so that we have
	 easy access from rtl.  */
      sjlj_fc_call_site_ofs
	= (tree_to_uhwi (DECL_FIELD_OFFSET (f_cs))
	   + tree_to_uhwi (DECL_FIELD_BIT_OFFSET (f_cs)) / BITS_PER_UNIT);
      sjlj_fc_data_ofs
	= (tree_to_uhwi (DECL_FIELD_OFFSET (f_data))
	   + tree_to_uhwi (DECL_FIELD_BIT_OFFSET (f_data)) / BITS_PER_UNIT);
      sjlj_fc_personality_ofs
	= (tree_to_uhwi (DECL_FIELD_OFFSET (f_per))
	   + tree_to_uhwi (DECL_FIELD_BIT_OFFSET (f_per)) / BITS_PER_UNIT);
      sjlj_fc_lsda_ofs
	= (tree_to_uhwi (DECL_FIELD_OFFSET (f_lsda))
	   + tree_to_uhwi (DECL_FIELD_BIT_OFFSET (f_lsda)) / BITS_PER_UNIT);
      sjlj_fc_jbuf_ofs
	= (tree_to_uhwi (DECL_FIELD_OFFSET (f_jbuf))
	   + tree_to_uhwi (DECL_FIELD_BIT_OFFSET (f_jbuf)) / BITS_PER_UNIT);
    }
}

void
init_eh_for_function (void)
{
  cfun->eh = ggc_cleared_alloc<eh_status> ();

  /* Make sure zero'th entries are used.  */
  vec_safe_push (cfun->eh->region_array, (eh_region)0);
  vec_safe_push (cfun->eh->lp_array, (eh_landing_pad)0);
}
\f
/* Routines to generate the exception tree somewhat directly.
   These are used from tree-eh.c when processing exception related
   nodes during tree optimization.  */

static eh_region
gen_eh_region (enum eh_region_type type, eh_region outer)
{
  eh_region new_eh;

  /* Insert a new blank region as a leaf in the tree.  */
  new_eh = ggc_cleared_alloc<eh_region_d> ();
  new_eh->type = type;
  new_eh->outer = outer;
  if (outer)
    {
      new_eh->next_peer = outer->inner;
      outer->inner = new_eh;
    }
  else
    {
      new_eh->next_peer = cfun->eh->region_tree;
      cfun->eh->region_tree = new_eh;
    }

  new_eh->index = vec_safe_length (cfun->eh->region_array);
  vec_safe_push (cfun->eh->region_array, new_eh);

  /* Copy the language's notion of whether to use __cxa_end_cleanup.  */
  if (targetm.arm_eabi_unwinder && lang_hooks.eh_use_cxa_end_cleanup)
    new_eh->use_cxa_end_cleanup = true;

  return new_eh;
}

eh_region
gen_eh_region_cleanup (eh_region outer)
{
  return gen_eh_region (ERT_CLEANUP, outer);
}

eh_region
gen_eh_region_try (eh_region outer)
{
  return gen_eh_region (ERT_TRY, outer);
}

eh_catch
gen_eh_region_catch (eh_region t, tree type_or_list)
{
  eh_catch c, l;
  tree type_list, type_node;

  gcc_assert (t->type == ERT_TRY);

  /* Ensure we always end up with a type list, to normalize further
     processing; then register each type against the runtime types map.  */
  type_list = type_or_list;
  if (type_or_list)
    {
      if (TREE_CODE (type_or_list) != TREE_LIST)
	type_list = tree_cons (NULL_TREE, type_or_list, NULL_TREE);

      type_node = type_list;
      for (; type_node; type_node = TREE_CHAIN (type_node))
	add_type_for_runtime (TREE_VALUE (type_node));
    }

  c = ggc_cleared_alloc<eh_catch_d> ();
  c->type_list = type_list;
  l = t->u.eh_try.last_catch;
  c->prev_catch = l;
  if (l)
    l->next_catch = c;
  else
    t->u.eh_try.first_catch = c;
  t->u.eh_try.last_catch = c;

  return c;
}

eh_region
gen_eh_region_allowed (eh_region outer, tree allowed)
{
  eh_region region = gen_eh_region (ERT_ALLOWED_EXCEPTIONS, outer);
  region->u.allowed.type_list = allowed;

  for (; allowed ; allowed = TREE_CHAIN (allowed))
    add_type_for_runtime (TREE_VALUE (allowed));

  return region;
}

eh_region
gen_eh_region_must_not_throw (eh_region outer)
{
  return gen_eh_region (ERT_MUST_NOT_THROW, outer);
}

eh_landing_pad
gen_eh_landing_pad (eh_region region)
{
  eh_landing_pad lp = ggc_cleared_alloc<eh_landing_pad_d> ();

  lp->next_lp = region->landing_pads;
  lp->region = region;
  lp->index = vec_safe_length (cfun->eh->lp_array);
  region->landing_pads = lp;

  vec_safe_push (cfun->eh->lp_array, lp);

  return lp;
}

eh_region
get_eh_region_from_number_fn (struct function *ifun, int i)
{
  return (*ifun->eh->region_array)[i];
}

eh_region
get_eh_region_from_number (int i)
{
  return get_eh_region_from_number_fn (cfun, i);
}

eh_landing_pad
get_eh_landing_pad_from_number_fn (struct function *ifun, int i)
{
  return (*ifun->eh->lp_array)[i];
}

eh_landing_pad
get_eh_landing_pad_from_number (int i)
{
  return get_eh_landing_pad_from_number_fn (cfun, i);
}

eh_region
get_eh_region_from_lp_number_fn (struct function *ifun, int i)
{
  if (i < 0)
    return (*ifun->eh->region_array)[-i];
  else if (i == 0)
    return NULL;
  else
    {
      eh_landing_pad lp;
      lp = (*ifun->eh->lp_array)[i];
      return lp->region;
    }
}

eh_region
get_eh_region_from_lp_number (int i)
{
  return get_eh_region_from_lp_number_fn (cfun, i);
}
\f
/* Returns true if the current function has exception handling regions.  */

bool
current_function_has_exception_handlers (void)
{
  return cfun->eh->region_tree != NULL;
}
\f
/* A subroutine of duplicate_eh_regions.  Copy the eh_region tree rooted
   at OLD_R into the current function as a child of OUTER, remapping
   labels via DATA->LABEL_MAP.  */

struct duplicate_eh_regions_data
{
  duplicate_eh_regions_map label_map;
  void *label_map_data;
  hash_map<void *, void *> *eh_map;
};

static void
duplicate_eh_regions_1 (struct duplicate_eh_regions_data *data,
			eh_region old_r, eh_region outer)
{
  eh_landing_pad old_lp, new_lp;
  eh_region new_r;

  new_r = gen_eh_region (old_r->type, outer);
  gcc_assert (!data->eh_map->put (old_r, new_r));

  switch (old_r->type)
    {
    case ERT_CLEANUP:
      break;

    case ERT_TRY:
      {
	eh_catch oc, nc;
	for (oc = old_r->u.eh_try.first_catch; oc ; oc = oc->next_catch)
	  {
	    /* We should be doing all our region duplication before and
	       during inlining, which is before filter lists are created.  */
	    gcc_assert (oc->filter_list == NULL);
	    nc = gen_eh_region_catch (new_r, oc->type_list);
	    nc->label = data->label_map (oc->label, data->label_map_data);
	  }
      }
      break;

    case ERT_ALLOWED_EXCEPTIONS:
      new_r->u.allowed.type_list = old_r->u.allowed.type_list;
      if (old_r->u.allowed.label)
	new_r->u.allowed.label
	  = data->label_map (old_r->u.allowed.label, data->label_map_data);
      else
	new_r->u.allowed.label = NULL_TREE;
      break;

    case ERT_MUST_NOT_THROW:
      new_r->u.must_not_throw.failure_loc =
	LOCATION_LOCUS (old_r->u.must_not_throw.failure_loc);
      new_r->u.must_not_throw.failure_decl =
	old_r->u.must_not_throw.failure_decl;
      break;
    }

  for (old_lp = old_r->landing_pads; old_lp ; old_lp = old_lp->next_lp)
    {
      /* Don't bother copying unused landing pads.  */
      if (old_lp->post_landing_pad == NULL)
	continue;

      new_lp = gen_eh_landing_pad (new_r);
      gcc_assert (!data->eh_map->put (old_lp, new_lp));

      new_lp->post_landing_pad
	= data->label_map (old_lp->post_landing_pad, data->label_map_data);
      EH_LANDING_PAD_NR (new_lp->post_landing_pad) = new_lp->index;
    }

  /* Make sure to preserve the original use of __cxa_end_cleanup.  */
  new_r->use_cxa_end_cleanup = old_r->use_cxa_end_cleanup;

  for (old_r = old_r->inner; old_r ; old_r = old_r->next_peer)
    duplicate_eh_regions_1 (data, old_r, new_r);
}

/* Duplicate the EH regions from IFUN rooted at COPY_REGION into
   the current function and root the tree below OUTER_REGION.
   The special case of COPY_REGION of NULL means all regions.
   Remap labels using MAP/MAP_DATA callback.  Return a pointer map
   that allows the caller to remap uses of both EH regions and
   EH landing pads.  */

hash_map<void *, void *> *
duplicate_eh_regions (struct function *ifun,
		      eh_region copy_region, int outer_lp,
		      duplicate_eh_regions_map map, void *map_data)
{
  struct duplicate_eh_regions_data data;
  eh_region outer_region;

  if (flag_checking)
    verify_eh_tree (ifun);

  data.label_map = map;
  data.label_map_data = map_data;
  data.eh_map = new hash_map<void *, void *>;

  outer_region = get_eh_region_from_lp_number_fn (cfun, outer_lp);

  /* Copy all the regions in the subtree.  */
  if (copy_region)
    duplicate_eh_regions_1 (&data, copy_region, outer_region);
  else
    {
      eh_region r;
      for (r = ifun->eh->region_tree; r ; r = r->next_peer)
	duplicate_eh_regions_1 (&data, r, outer_region);
    }

  if (flag_checking)
    verify_eh_tree (cfun);

  return data.eh_map;
}

/* Return the region that is outer to both REGION_A and REGION_B in IFUN.  */

eh_region
eh_region_outermost (struct function *ifun, eh_region region_a,
		     eh_region region_b)
{
  sbitmap b_outer;

  gcc_assert (ifun->eh->region_array);
  gcc_assert (ifun->eh->region_tree);

  b_outer = sbitmap_alloc (ifun->eh->region_array->length ());
  bitmap_clear (b_outer);

  do
    {
      bitmap_set_bit (b_outer, region_b->index);
      region_b = region_b->outer;
    }
  while (region_b);

  do
    {
      if (bitmap_bit_p (b_outer, region_a->index))
	break;
      region_a = region_a->outer;
    }
  while (region_a);

  sbitmap_free (b_outer);
  return region_a;
}
\f
void
add_type_for_runtime (tree type)
{
  /* If TYPE is NOP_EXPR, it means that it already is a runtime type.  */
  if (TREE_CODE (type) == NOP_EXPR)
    return;

  bool existed = false;
  tree *slot = &type_to_runtime_map->get_or_insert (type, &existed);
  if (!existed)
    *slot = lang_hooks.eh_runtime_type (type);
}

tree
lookup_type_for_runtime (tree type)
{
  /* If TYPE is NOP_EXPR, it means that it already is a runtime type.  */
  if (TREE_CODE (type) == NOP_EXPR)
    return type;

  /* We should have always inserted the data earlier.  */
  return *type_to_runtime_map->get (type);
}

\f
/* Represent an entry in @TTypes for either catch actions
   or exception filter actions.  */
struct ttypes_filter {
  tree t;
  int filter;
};

/* Helper for ttypes_filter hashing.  */

struct ttypes_filter_hasher : free_ptr_hash <ttypes_filter>
{
  typedef tree_node *compare_type;
  static inline hashval_t hash (const ttypes_filter *);
  static inline bool equal (const ttypes_filter *, const tree_node *);
};

/* Compare ENTRY (a ttypes_filter entry in the hash table) with DATA
   (a tree) for a @TTypes type node we are thinking about adding.  */

inline bool
ttypes_filter_hasher::equal (const ttypes_filter *entry, const tree_node *data)
{
  return entry->t == data;
}

inline hashval_t
ttypes_filter_hasher::hash (const ttypes_filter *entry)
{
  return TREE_HASH (entry->t);
}

typedef hash_table<ttypes_filter_hasher> ttypes_hash_type;


/* Helper for ehspec hashing.  */

struct ehspec_hasher : free_ptr_hash <ttypes_filter>
{
  static inline hashval_t hash (const ttypes_filter *);
  static inline bool equal (const ttypes_filter *, const ttypes_filter *);
};

/* Compare ENTRY with DATA (both struct ttypes_filter) for a @TTypes
   exception specification list we are thinking about adding.  */
/* ??? Currently we use the type lists in the order given.  Someone
   should put these in some canonical order.  */

inline bool
ehspec_hasher::equal (const ttypes_filter *entry, const ttypes_filter *data)
{
  return type_list_equal (entry->t, data->t);
}

/* Hash function for exception specification lists.  */

inline hashval_t
ehspec_hasher::hash (const ttypes_filter *entry)
{
  hashval_t h = 0;
  tree list;

  for (list = entry->t; list ; list = TREE_CHAIN (list))
    h = (h << 5) + (h >> 27) + TREE_HASH (TREE_VALUE (list));
  return h;
}

typedef hash_table<ehspec_hasher> ehspec_hash_type;


/* Add TYPE (which may be NULL) to cfun->eh->ttype_data, using TTYPES_HASH
   to speed up the search.  Return the filter value to be used.  */

static int
add_ttypes_entry (ttypes_hash_type *ttypes_hash, tree type)
{
  struct ttypes_filter **slot, *n;

  slot = ttypes_hash->find_slot_with_hash (type, (hashval_t) TREE_HASH (type),
					   INSERT);

  if ((n = *slot) == NULL)
    {
      /* Filter value is a 1 based table index.  */

      n = XNEW (struct ttypes_filter);
      n->t = type;
      n->filter = vec_safe_length (cfun->eh->ttype_data) + 1;
      *slot = n;

      vec_safe_push (cfun->eh->ttype_data, type);
    }

  return n->filter;
}

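/* Usage sketch (hypothetical types A and B): starting from an empty
   table, add_ttypes_entry (&ttypes, A) returns filter 1, a following
   call with B returns 2, and a repeated call with A returns 1 again
   without growing cfun->eh->ttype_data.  A NULL type (a catch-all)
   gets a table entry and a filter value like any other.  */
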
/* Add LIST to cfun->eh->ehspec_data, using EHSPEC_HASH and TTYPES_HASH
   to speed up the search.  Return the filter value to be used.  */

static int
add_ehspec_entry (ehspec_hash_type *ehspec_hash, ttypes_hash_type *ttypes_hash,
		  tree list)
{
  struct ttypes_filter **slot, *n;
  struct ttypes_filter dummy;

  dummy.t = list;
  slot = ehspec_hash->find_slot (&dummy, INSERT);

  if ((n = *slot) == NULL)
    {
      int len;

      if (targetm.arm_eabi_unwinder)
	len = vec_safe_length (cfun->eh->ehspec_data.arm_eabi);
      else
	len = vec_safe_length (cfun->eh->ehspec_data.other);

      /* Filter value is a -1 based byte index into a uleb128 buffer.  */

      n = XNEW (struct ttypes_filter);
      n->t = list;
      n->filter = -(len + 1);
      *slot = n;

      /* Generate a 0 terminated list of filter values.  */
      for (; list ; list = TREE_CHAIN (list))
	{
	  if (targetm.arm_eabi_unwinder)
	    vec_safe_push (cfun->eh->ehspec_data.arm_eabi, TREE_VALUE (list));
	  else
	    {
	      /* Look up each type in the list and encode its filter
		 value as a uleb128.  */
	      push_uleb128 (&cfun->eh->ehspec_data.other,
			    add_ttypes_entry (ttypes_hash, TREE_VALUE (list)));
	    }
	}
      if (targetm.arm_eabi_unwinder)
	vec_safe_push (cfun->eh->ehspec_data.arm_eabi, NULL_TREE);
      else
	vec_safe_push (cfun->eh->ehspec_data.other, (uchar)0);
    }

  return n->filter;
}

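/* Encoding sketch (illustrative; assumes each filter value fits in a
   single uleb128 byte): for a specification list (A, B) added to an
   empty non-EABI buffer, the bytes pushed are
   { filter (A), filter (B), 0 } and the returned filter is -1; a
   second, distinct list would then start at byte index 3 and return
   -(3 + 1) = -4.  */
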
/* Generate the action filter values to be used for CATCH and
   ALLOWED_EXCEPTIONS regions.  When using dwarf2 exception regions,
   we use lots of landing pads, and so every type or list can share
   the same filter value, which saves table space.  */

void
assign_filter_values (void)
{
  int i;
  eh_region r;
  eh_catch c;

  vec_alloc (cfun->eh->ttype_data, 16);
  if (targetm.arm_eabi_unwinder)
    vec_alloc (cfun->eh->ehspec_data.arm_eabi, 64);
  else
    vec_alloc (cfun->eh->ehspec_data.other, 64);

  ehspec_hash_type ehspec (31);
  ttypes_hash_type ttypes (31);

  for (i = 1; vec_safe_iterate (cfun->eh->region_array, i, &r); ++i)
    {
      if (r == NULL)
	continue;

      switch (r->type)
	{
	case ERT_TRY:
	  for (c = r->u.eh_try.first_catch; c ; c = c->next_catch)
	    {
	      /* Whatever type_list is (NULL or true list), we build a list
		 of filters for the region.  */
	      c->filter_list = NULL_TREE;

	      if (c->type_list != NULL)
		{
		  /* Get a filter value for each of the types caught and store
		     them in the region's dedicated list.  */
		  tree tp_node = c->type_list;

		  for ( ; tp_node; tp_node = TREE_CHAIN (tp_node))
		    {
		      int flt
			= add_ttypes_entry (&ttypes, TREE_VALUE (tp_node));
		      tree flt_node = build_int_cst (integer_type_node, flt);

		      c->filter_list
			= tree_cons (NULL_TREE, flt_node, c->filter_list);
		    }
		}
	      else
		{
		  /* Get a filter value for the NULL list also since it
		     will need an action record anyway.  */
		  int flt = add_ttypes_entry (&ttypes, NULL);
		  tree flt_node = build_int_cst (integer_type_node, flt);

		  c->filter_list
		    = tree_cons (NULL_TREE, flt_node, NULL);
		}
	    }
	  break;

	case ERT_ALLOWED_EXCEPTIONS:
	  r->u.allowed.filter
	    = add_ehspec_entry (&ehspec, &ttypes, r->u.allowed.type_list);
	  break;

	default:
	  break;
	}
    }
}

/* Emit SEQ into the basic block just before INSN (which is assumed to
   be the first instruction of some existing BB) and return the newly
   produced block.  */
static basic_block
emit_to_new_bb_before (rtx_insn *seq, rtx insn)
{
  rtx_insn *last;
  basic_block bb;
  edge e;
  edge_iterator ei;

  /* If there happens to be a fallthru edge (possibly created by a
     cleanup_cfg call), we don't want it to go into the newly created
     landing pad or other EH construct.  */
  for (ei = ei_start (BLOCK_FOR_INSN (insn)->preds); (e = ei_safe_edge (ei)); )
    if (e->flags & EDGE_FALLTHRU)
      force_nonfallthru (e);
    else
      ei_next (&ei);
  last = emit_insn_before (seq, insn);
  if (BARRIER_P (last))
    last = PREV_INSN (last);
  bb = create_basic_block (seq, last, BLOCK_FOR_INSN (insn)->prev_bb);
  update_bb_for_insn (bb);
  bb->flags |= BB_SUPERBLOCK;
  return bb;
}
\f
/* A subroutine of dw2_build_landing_pads, also used for edge splitting
   at the rtl level.  Emit the code required by the target at a landing
   pad for the given region.  */

void
expand_dw2_landing_pad_for_region (eh_region region)
{
  if (targetm.have_exception_receiver ())
    emit_insn (targetm.gen_exception_receiver ());
  else if (targetm.have_nonlocal_goto_receiver ())
    emit_insn (targetm.gen_nonlocal_goto_receiver ());
  else
    { /* Nothing */ }

  if (region->exc_ptr_reg)
    emit_move_insn (region->exc_ptr_reg,
		    gen_rtx_REG (ptr_mode, EH_RETURN_DATA_REGNO (0)));
  if (region->filter_reg)
    emit_move_insn (region->filter_reg,
		    gen_rtx_REG (targetm.eh_return_filter_mode (),
				 EH_RETURN_DATA_REGNO (1)));
}

/* Expand the extra code needed at landing pads for dwarf2 unwinding.  */

static void
dw2_build_landing_pads (void)
{
  int i;
  eh_landing_pad lp;
  int e_flags = EDGE_FALLTHRU;

  /* If we're going to partition blocks, we need to be able to add
     new landing pads later, which means that we need to hold on to
     the post-landing-pad block.  Prevent it from being merged away.
     We'll remove this bit after partitioning.  */
  if (flag_reorder_blocks_and_partition)
    e_flags |= EDGE_PRESERVE;

  for (i = 1; vec_safe_iterate (cfun->eh->lp_array, i, &lp); ++i)
    {
      basic_block bb;
      rtx_insn *seq;
      edge e;

      if (lp == NULL || lp->post_landing_pad == NULL)
	continue;

      start_sequence ();

      lp->landing_pad = gen_label_rtx ();
      emit_label (lp->landing_pad);
      LABEL_PRESERVE_P (lp->landing_pad) = 1;

      expand_dw2_landing_pad_for_region (lp->region);

      seq = get_insns ();
      end_sequence ();

      bb = emit_to_new_bb_before (seq, label_rtx (lp->post_landing_pad));
      e = make_edge (bb, bb->next_bb, e_flags);
      e->count = bb->count;
      e->probability = REG_BR_PROB_BASE;
      if (current_loops)
	{
	  struct loop *loop = bb->next_bb->loop_father;
	  /* If we created a pre-header block, add the new block to the
	     outer loop, otherwise to the loop itself.  */
	  if (bb->next_bb == loop->header)
	    add_bb_to_loop (bb, loop_outer (loop));
	  else
	    add_bb_to_loop (bb, loop);
	}
    }
}

\f
static vec<int> sjlj_lp_call_site_index;

/* Process all active landing pads.  Assign each one a compact dispatch
   index, and a call-site index.  */

static int
sjlj_assign_call_site_values (void)
{
  action_hash_type ar_hash (31);
  int i, disp_index;
  eh_landing_pad lp;

  vec_alloc (crtl->eh.action_record_data, 64);

  disp_index = 0;
  call_site_base = 1;
  for (i = 1; vec_safe_iterate (cfun->eh->lp_array, i, &lp); ++i)
    if (lp && lp->post_landing_pad)
      {
	int action, call_site;

	/* First: build the action table.  */
	action = collect_one_action_chain (&ar_hash, lp->region);

	/* Next: assign call-site values.  In dwarf2 terms, this would be
	   the region number assigned by convert_to_eh_region_ranges, but
	   this handles no-action and must-not-throw differently.  */
	/* Map must-not-throw to otherwise unused call-site index 0.  */
	if (action == -2)
	  call_site = 0;
	/* Map no-action to otherwise unused call-site index -1.  */
	else if (action == -1)
	  call_site = -1;
	/* Otherwise, look it up in the table.  */
	else
	  call_site = add_call_site (GEN_INT (disp_index), action, 0);
	sjlj_lp_call_site_index[i] = call_site;

	disp_index++;
      }

  return disp_index;
}

/* Emit code to record the current call-site index before every
   insn that can throw.  */

static void
sjlj_mark_call_sites (void)
{
  int last_call_site = -2;
  rtx_insn *insn;
  rtx mem;

  for (insn = get_insns (); insn ; insn = NEXT_INSN (insn))
    {
      eh_landing_pad lp;
      eh_region r;
      bool nothrow;
      int this_call_site;
      rtx_insn *before, *p;

      /* Reset value tracking at extended basic block boundaries.  */
      if (LABEL_P (insn))
	last_call_site = -2;

      /* If the function allocates dynamic stack space, the context must
	 be updated after every allocation/deallocation accordingly.  */
      if (NOTE_P (insn) && NOTE_KIND (insn) == NOTE_INSN_UPDATE_SJLJ_CONTEXT)
	{
	  rtx buf_addr;

	  start_sequence ();
	  buf_addr = plus_constant (Pmode, XEXP (crtl->eh.sjlj_fc, 0),
				    sjlj_fc_jbuf_ofs);
	  expand_builtin_update_setjmp_buf (buf_addr);
	  p = get_insns ();
	  end_sequence ();
	  emit_insn_before (p, insn);
	}

      if (! INSN_P (insn))
	continue;

      nothrow = get_eh_region_and_lp_from_rtx (insn, &r, &lp);
      if (nothrow)
	continue;
      if (lp)
	this_call_site = sjlj_lp_call_site_index[lp->index];
      else if (r == NULL)
	{
	  /* Calls (and trapping insns) without notes are outside any
	     exception handling region in this function.  Mark them as
	     no action.  */
	  this_call_site = -1;
	}
      else
	{
	  gcc_assert (r->type == ERT_MUST_NOT_THROW);
	  this_call_site = 0;
	}

      if (this_call_site != -1)
	crtl->uses_eh_lsda = 1;

      if (this_call_site == last_call_site)
	continue;

      /* Don't separate a call from its argument loads.  */
      before = insn;
      if (CALL_P (insn))
	before = find_first_parameter_load (insn, NULL);

      start_sequence ();
      mem = adjust_address (crtl->eh.sjlj_fc, TYPE_MODE (integer_type_node),
			    sjlj_fc_call_site_ofs);
      emit_move_insn (mem, gen_int_mode (this_call_site, GET_MODE (mem)));
      p = get_insns ();
      end_sequence ();

      emit_insn_before (p, before);
      last_call_site = this_call_site;
    }
}

/* Construct the SjLj_Function_Context.  */

static void
sjlj_emit_function_enter (rtx_code_label *dispatch_label)
{
  rtx_insn *fn_begin, *seq;
  rtx fc, mem;
  bool fn_begin_outside_block;
  rtx personality = get_personality_function (current_function_decl);

  fc = crtl->eh.sjlj_fc;

  start_sequence ();

  /* We're storing this libcall's address into memory instead of
     calling it directly.  Thus, we must call assemble_external_libcall
     here, as we cannot depend on emit_library_call to do it for us.  */
  assemble_external_libcall (personality);
  mem = adjust_address (fc, Pmode, sjlj_fc_personality_ofs);
  emit_move_insn (mem, personality);

  mem = adjust_address (fc, Pmode, sjlj_fc_lsda_ofs);
  if (crtl->uses_eh_lsda)
    {
      char buf[20];
      rtx sym;

      ASM_GENERATE_INTERNAL_LABEL (buf, "LLSDA", current_function_funcdef_no);
      sym = gen_rtx_SYMBOL_REF (Pmode, ggc_strdup (buf));
      SYMBOL_REF_FLAGS (sym) = SYMBOL_FLAG_LOCAL;
      emit_move_insn (mem, sym);
    }
  else
    emit_move_insn (mem, const0_rtx);

  if (dispatch_label)
    {
#ifdef DONT_USE_BUILTIN_SETJMP
      rtx x;
      x = emit_library_call_value (setjmp_libfunc, NULL_RTX, LCT_RETURNS_TWICE,
				   TYPE_MODE (integer_type_node), 1,
				   plus_constant (Pmode, XEXP (fc, 0),
						  sjlj_fc_jbuf_ofs), Pmode);

      emit_cmp_and_jump_insns (x, const0_rtx, NE, 0,
			       TYPE_MODE (integer_type_node), 0,
			       dispatch_label, REG_BR_PROB_BASE / 100);
#else
      expand_builtin_setjmp_setup (plus_constant (Pmode, XEXP (fc, 0),
						  sjlj_fc_jbuf_ofs),
				   dispatch_label);
#endif
    }

  emit_library_call (unwind_sjlj_register_libfunc, LCT_NORMAL, VOIDmode,
		     1, XEXP (fc, 0), Pmode);

  seq = get_insns ();
  end_sequence ();

  /* ??? Instead of doing this at the beginning of the function,
     do this in a block that is at loop level 0 and dominates all
     can_throw_internal instructions.  */

  fn_begin_outside_block = true;
  for (fn_begin = get_insns (); ; fn_begin = NEXT_INSN (fn_begin))
    if (NOTE_P (fn_begin))
      {
	if (NOTE_KIND (fn_begin) == NOTE_INSN_FUNCTION_BEG)
	  break;
	else if (NOTE_INSN_BASIC_BLOCK_P (fn_begin))
	  fn_begin_outside_block = false;
      }

  if (fn_begin_outside_block)
    insert_insn_on_edge (seq, single_succ_edge (ENTRY_BLOCK_PTR_FOR_FN (cfun)));
  else
    emit_insn_after (seq, fn_begin);
}

/* Call back from expand_function_end to know where we should put
   the call to unwind_sjlj_unregister_libfunc if needed.  */

void
sjlj_emit_function_exit_after (rtx_insn *after)
{
  crtl->eh.sjlj_exit_after = after;
}

static void
sjlj_emit_function_exit (void)
{
  rtx_insn *seq, *insn;

  start_sequence ();

  emit_library_call (unwind_sjlj_unregister_libfunc, LCT_NORMAL, VOIDmode,
		     1, XEXP (crtl->eh.sjlj_fc, 0), Pmode);

  seq = get_insns ();
  end_sequence ();

  /* ??? Really this can be done in any block at loop level 0 that
     post-dominates all can_throw_internal instructions.  This is
     the last possible moment.  */

  insn = crtl->eh.sjlj_exit_after;
  if (LABEL_P (insn))
    insn = NEXT_INSN (insn);

  emit_insn_after (seq, insn);
}

static void
sjlj_emit_dispatch_table (rtx_code_label *dispatch_label, int num_dispatch)
{
  machine_mode unwind_word_mode = targetm.unwind_word_mode ();
  machine_mode filter_mode = targetm.eh_return_filter_mode ();
  eh_landing_pad lp;
  rtx mem, fc, exc_ptr_reg, filter_reg;
  rtx_insn *seq;
  basic_block bb;
  eh_region r;
  edge e;
  int i, disp_index;
  vec<tree> dispatch_labels = vNULL;

  fc = crtl->eh.sjlj_fc;

  start_sequence ();

  emit_label (dispatch_label);

#ifndef DONT_USE_BUILTIN_SETJMP
  expand_builtin_setjmp_receiver (dispatch_label);

  /* The caller of expand_builtin_setjmp_receiver is responsible for
     making sure that the label doesn't vanish.  The only other caller
     is the expander for __builtin_setjmp_receiver, which places this
     label on the nonlocal_goto_label list.  Since we're modeling these
     CFG edges more exactly, we can use the forced_labels list instead.  */
  LABEL_PRESERVE_P (dispatch_label) = 1;
  forced_labels
    = gen_rtx_INSN_LIST (VOIDmode, dispatch_label, forced_labels);
#endif

  /* Load up exc_ptr and filter values from the function context.  */
  mem = adjust_address (fc, unwind_word_mode, sjlj_fc_data_ofs);
  if (unwind_word_mode != ptr_mode)
    {
#ifdef POINTERS_EXTEND_UNSIGNED
      mem = convert_memory_address (ptr_mode, mem);
#else
      mem = convert_to_mode (ptr_mode, mem, 0);
#endif
    }
  exc_ptr_reg = force_reg (ptr_mode, mem);

  mem = adjust_address (fc, unwind_word_mode,
			sjlj_fc_data_ofs + GET_MODE_SIZE (unwind_word_mode));
  if (unwind_word_mode != filter_mode)
    mem = convert_to_mode (filter_mode, mem, 0);
  filter_reg = force_reg (filter_mode, mem);

  /* Jump to one of the directly reachable regions.  */

  disp_index = 0;
  rtx_code_label *first_reachable_label = NULL;

  /* If there's exactly one call site in the function, don't bother
     generating a switch statement.  */
  if (num_dispatch > 1)
    dispatch_labels.create (num_dispatch);

  for (i = 1; vec_safe_iterate (cfun->eh->lp_array, i, &lp); ++i)
    if (lp && lp->post_landing_pad)
      {
	rtx_insn *seq2;
	rtx_code_label *label;

	start_sequence ();

	lp->landing_pad = dispatch_label;

	if (num_dispatch > 1)
	  {
	    tree t_label, case_elt, t;

	    t_label = create_artificial_label (UNKNOWN_LOCATION);
	    t = build_int_cst (integer_type_node, disp_index);
	    case_elt = build_case_label (t, NULL, t_label);
	    dispatch_labels.quick_push (case_elt);
	    label = jump_target_rtx (t_label);
	  }
	else
	  label = gen_label_rtx ();

	if (disp_index == 0)
	  first_reachable_label = label;
	emit_label (label);

	r = lp->region;
	if (r->exc_ptr_reg)
	  emit_move_insn (r->exc_ptr_reg, exc_ptr_reg);
	if (r->filter_reg)
	  emit_move_insn (r->filter_reg, filter_reg);

	seq2 = get_insns ();
	end_sequence ();

	rtx_insn *before = label_rtx (lp->post_landing_pad);
	bb = emit_to_new_bb_before (seq2, before);
	e = make_edge (bb, bb->next_bb, EDGE_FALLTHRU);
	e->count = bb->count;
	e->probability = REG_BR_PROB_BASE;
	if (current_loops)
	  {
	    struct loop *loop = bb->next_bb->loop_father;
	    /* If we created a pre-header block, add the new block to the
	       outer loop, otherwise to the loop itself.  */
	    if (bb->next_bb == loop->header)
	      add_bb_to_loop (bb, loop_outer (loop));
	    else
	      add_bb_to_loop (bb, loop);
	    /* ??? For multiple dispatches we will end up with edges
	       from the loop tree root into this loop, making it a
	       multiple-entry loop.  Discard all affected loops.  */
	    if (num_dispatch > 1)
	      {
		for (loop = bb->loop_father;
		     loop_outer (loop); loop = loop_outer (loop))
		  mark_loop_for_removal (loop);
	      }
	  }

	disp_index++;
      }
  gcc_assert (disp_index == num_dispatch);

  if (num_dispatch > 1)
    {
      rtx disp = adjust_address (fc, TYPE_MODE (integer_type_node),
				 sjlj_fc_call_site_ofs);
      expand_sjlj_dispatch_table (disp, dispatch_labels);
    }

  seq = get_insns ();
  end_sequence ();

  bb = emit_to_new_bb_before (seq, first_reachable_label);
  if (num_dispatch == 1)
    {
      e = make_edge (bb, bb->next_bb, EDGE_FALLTHRU);
      e->count = bb->count;
      e->probability = REG_BR_PROB_BASE;
      if (current_loops)
	{
	  struct loop *loop = bb->next_bb->loop_father;
	  /* If we created a pre-header block, add the new block to the
	     outer loop, otherwise to the loop itself.  */
	  if (bb->next_bb == loop->header)
	    add_bb_to_loop (bb, loop_outer (loop));
	  else
	    add_bb_to_loop (bb, loop);
	}
    }
  else
    {
      /* We are not wiring up edges here, but as the dispatcher call
	 is at the beginning of the function, simply associate the
	 block with the outermost (non-)loop.  */
      if (current_loops)
	add_bb_to_loop (bb, current_loops->tree_root);
    }
}

static void
sjlj_build_landing_pads (void)
{
  int num_dispatch;

  num_dispatch = vec_safe_length (cfun->eh->lp_array);
  if (num_dispatch == 0)
    return;
  sjlj_lp_call_site_index.safe_grow_cleared (num_dispatch);

  num_dispatch = sjlj_assign_call_site_values ();
  if (num_dispatch > 0)
    {
      rtx_code_label *dispatch_label = gen_label_rtx ();
      int align = STACK_SLOT_ALIGNMENT (sjlj_fc_type_node,
					TYPE_MODE (sjlj_fc_type_node),
					TYPE_ALIGN (sjlj_fc_type_node));
      crtl->eh.sjlj_fc
	= assign_stack_local (TYPE_MODE (sjlj_fc_type_node),
			      int_size_in_bytes (sjlj_fc_type_node),
			      align);

      sjlj_mark_call_sites ();
      sjlj_emit_function_enter (dispatch_label);
      sjlj_emit_dispatch_table (dispatch_label, num_dispatch);
      sjlj_emit_function_exit ();
    }

  /* If we do not have any landing pads, we may still need to register a
     personality routine and (empty) LSDA to handle must-not-throw regions.  */
  else if (function_needs_eh_personality (cfun) != eh_personality_none)
    {
      int align = STACK_SLOT_ALIGNMENT (sjlj_fc_type_node,
					TYPE_MODE (sjlj_fc_type_node),
					TYPE_ALIGN (sjlj_fc_type_node));
      crtl->eh.sjlj_fc
	= assign_stack_local (TYPE_MODE (sjlj_fc_type_node),
			      int_size_in_bytes (sjlj_fc_type_node),
			      align);

      sjlj_mark_call_sites ();
      sjlj_emit_function_enter (NULL);
      sjlj_emit_function_exit ();
    }

  sjlj_lp_call_site_index.release ();
}

/* Update the sjlj function context.  This function should be called
   whenever we allocate or deallocate dynamic stack space.  */

void
update_sjlj_context (void)
{
  if (!flag_exceptions)
    return;

  emit_note (NOTE_INSN_UPDATE_SJLJ_CONTEXT);
}

/* After initial rtl generation, call back to finish generating
   exception support code.  */

void
finish_eh_generation (void)
{
  basic_block bb;

  /* Construct the landing pads.  */
  if (targetm_common.except_unwind_info (&global_options) == UI_SJLJ)
    sjlj_build_landing_pads ();
  else
    dw2_build_landing_pads ();
  break_superblocks ();

  if (targetm_common.except_unwind_info (&global_options) == UI_SJLJ
      /* Kludge for Alpha (see alpha_gp_save_rtx).  */
      || single_succ_edge (ENTRY_BLOCK_PTR_FOR_FN (cfun))->insns.r)
    commit_edge_insertions ();

  /* Redirect all EH edges from the post_landing_pad to the landing pad.  */
  FOR_EACH_BB_FN (bb, cfun)
    {
      eh_landing_pad lp;
      edge_iterator ei;
      edge e;

      lp = get_eh_landing_pad_from_rtx (BB_END (bb));

      FOR_EACH_EDGE (e, ei, bb->succs)
	if (e->flags & EDGE_EH)
	  break;

      /* We should not have generated any new throwing insns during this
	 pass, and we should not have lost any EH edges, so we only need
	 to handle two cases here:
	 (1) reachable handler and an existing edge to post-landing-pad,
	 (2) no reachable handler and no edge.  */
      gcc_assert ((lp != NULL) == (e != NULL));
      if (lp != NULL)
	{
	  gcc_assert (BB_HEAD (e->dest) == label_rtx (lp->post_landing_pad));

	  redirect_edge_succ (e, BLOCK_FOR_INSN (lp->landing_pad));
	  e->flags |= (CALL_P (BB_END (bb))
		       ? EDGE_ABNORMAL | EDGE_ABNORMAL_CALL
		       : EDGE_ABNORMAL);
	}
    }
}
\f
/* This section handles removing dead code for flow.  */

void
remove_eh_landing_pad (eh_landing_pad lp)
{
  eh_landing_pad *pp;

  for (pp = &lp->region->landing_pads; *pp != lp; pp = &(*pp)->next_lp)
    continue;
  *pp = lp->next_lp;

  if (lp->post_landing_pad)
    EH_LANDING_PAD_NR (lp->post_landing_pad) = 0;
  (*cfun->eh->lp_array)[lp->index] = NULL;
}

/* Splice the EH region at PP from the region tree.  */

static void
remove_eh_handler_splicer (eh_region *pp)
{
  eh_region region = *pp;
  eh_landing_pad lp;

  for (lp = region->landing_pads; lp ; lp = lp->next_lp)
    {
      if (lp->post_landing_pad)
	EH_LANDING_PAD_NR (lp->post_landing_pad) = 0;
      (*cfun->eh->lp_array)[lp->index] = NULL;
    }

  if (region->inner)
    {
      eh_region p, outer;
      outer = region->outer;

      *pp = p = region->inner;
      do
	{
	  p->outer = outer;
	  pp = &p->next_peer;
	  p = *pp;
	}
      while (p);
    }
  *pp = region->next_peer;

  (*cfun->eh->region_array)[region->index] = NULL;
}

/* Splice a single EH region REGION from the region tree.

   To unlink REGION, we need to find the pointer to it with a relatively
   expensive search in REGION's outer region.  If you are going to
   remove a number of handlers, using remove_unreachable_eh_regions may
   be a better option.  */

void
remove_eh_handler (eh_region region)
{
  eh_region *pp, *pp_start, p, outer;

  outer = region->outer;
  if (outer)
    pp_start = &outer->inner;
  else
    pp_start = &cfun->eh->region_tree;
  for (pp = pp_start, p = *pp; p != region; pp = &p->next_peer, p = *pp)
    continue;

  remove_eh_handler_splicer (pp);
}

/* Worker for remove_unreachable_eh_regions.
   PP is a pointer to the region to start a region tree depth-first
   search from.  R_REACHABLE is the set of regions that have to be
   preserved.  */

static void
remove_unreachable_eh_regions_worker (eh_region *pp, sbitmap r_reachable)
{
  while (*pp)
    {
      eh_region region = *pp;
      remove_unreachable_eh_regions_worker (&region->inner, r_reachable);
      if (!bitmap_bit_p (r_reachable, region->index))
	remove_eh_handler_splicer (pp);
      else
	pp = &region->next_peer;
    }
}

/* Splice all EH regions *not* marked in R_REACHABLE from the region tree.
   Do this by traversing the EH tree top-down and splice out regions that
   are not marked.  By removing regions from the leaves, we avoid costly
   searches in the region tree.  */

void
remove_unreachable_eh_regions (sbitmap r_reachable)
{
  remove_unreachable_eh_regions_worker (&cfun->eh->region_tree, r_reachable);
}

/* Invokes CALLBACK for every exception handler landing pad label.
   Only used by reload hackery; should not be used by new code.  */

void
for_each_eh_label (void (*callback) (rtx))
{
  eh_landing_pad lp;
  int i;

  for (i = 1; vec_safe_iterate (cfun->eh->lp_array, i, &lp); ++i)
    {
      if (lp)
	{
	  rtx_code_label *lab = lp->landing_pad;
	  if (lab && LABEL_P (lab))
	    (*callback) (lab);
	}
    }
}
\f
/* Create the REG_EH_REGION note for INSN, given its ECF_FLAGS for a
   call insn.

   At the gimple level, we use LP_NR
       > 0 : The statement transfers to landing pad LP_NR
       = 0 : The statement is outside any EH region
       < 0 : The statement is within MUST_NOT_THROW region -LP_NR.

   At the rtl level, we use LP_NR
       > 0 : The insn transfers to landing pad LP_NR
       = 0 : The insn cannot throw
       < 0 : The insn is within MUST_NOT_THROW region -LP_NR
       = INT_MIN : The insn cannot throw or execute a nonlocal-goto.
       missing note: The insn is outside any EH region.

  ??? This difference probably ought to be avoided.  We could stand
  to record nothrow for arbitrary gimple statements, and so avoid
  some moderately complex lookups in stmt_could_throw_p.  Perhaps
  NOTHROW should be mapped on both sides to INT_MIN.  Perhaps the
  no-nonlocal-goto property should be recorded elsewhere as a bit
  on the call_insn directly.  Perhaps we should make more use of
  attaching the trees to call_insns (reachable via symbol_ref in
  direct call cases) and just pull the data out of the trees.  */

void
make_reg_eh_region_note (rtx_insn *insn, int ecf_flags, int lp_nr)
{
  rtx value;
  if (ecf_flags & ECF_NOTHROW)
    value = const0_rtx;
  else if (lp_nr != 0)
    value = GEN_INT (lp_nr);
  else
    return;
  add_reg_note (insn, REG_EH_REGION, value);
}

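/* Usage sketch (illustrative values): a call known not to throw gets
   note value const0_rtx; make_reg_eh_region_note (insn, 0, 2) marks an
   insn that may transfer to landing pad 2; and an insn within
   MUST_NOT_THROW region 3 would carry the value -3, following the
   encoding described above.  */
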
/* Create a REG_EH_REGION note for a CALL_INSN that cannot throw
   nor perform a non-local goto.  Replace the region note if it
   already exists.  */

void
make_reg_eh_region_note_nothrow_nononlocal (rtx_insn *insn)
{
  rtx note = find_reg_note (insn, REG_EH_REGION, NULL_RTX);
  rtx intmin = GEN_INT (INT_MIN);

  if (note != 0)
    XEXP (note, 0) = intmin;
  else
    add_reg_note (insn, REG_EH_REGION, intmin);
}

/* Return true if INSN could throw, assuming no REG_EH_REGION note
   to the contrary.  */

bool
insn_could_throw_p (const_rtx insn)
{
  if (!flag_exceptions)
    return false;
  if (CALL_P (insn))
    return true;
  if (INSN_P (insn) && cfun->can_throw_non_call_exceptions)
    return may_trap_p (PATTERN (insn));
  return false;
}

/* Copy a REG_EH_REGION note to each insn that might throw beginning
   at FIRST and ending at LAST.  NOTE_OR_INSN is either the source insn
   to look for a note, or the note itself.  */

void
copy_reg_eh_region_note_forward (rtx note_or_insn, rtx_insn *first, rtx last)
{
  rtx_insn *insn;
  rtx note = note_or_insn;

  if (INSN_P (note_or_insn))
    {
      note = find_reg_note (note_or_insn, REG_EH_REGION, NULL_RTX);
      if (note == NULL)
	return;
    }
  note = XEXP (note, 0);

  for (insn = first; insn != last ; insn = NEXT_INSN (insn))
    if (!find_reg_note (insn, REG_EH_REGION, NULL_RTX)
	&& insn_could_throw_p (insn))
      add_reg_note (insn, REG_EH_REGION, note);
}

/* Likewise, but iterate backward.  */

void
copy_reg_eh_region_note_backward (rtx note_or_insn, rtx_insn *last, rtx first)
{
  rtx_insn *insn;
  rtx note = note_or_insn;

  if (INSN_P (note_or_insn))
    {
      note = find_reg_note (note_or_insn, REG_EH_REGION, NULL_RTX);
      if (note == NULL)
	return;
    }
  note = XEXP (note, 0);

  for (insn = last; insn != first; insn = PREV_INSN (insn))
    if (insn_could_throw_p (insn))
      add_reg_note (insn, REG_EH_REGION, note);
}


/* Extract all EH information from INSN.  Return true if the insn
   was marked NOTHROW.  */

static bool
get_eh_region_and_lp_from_rtx (const_rtx insn, eh_region *pr,
			       eh_landing_pad *plp)
{
  eh_landing_pad lp = NULL;
  eh_region r = NULL;
  bool ret = false;
  rtx note;
  int lp_nr;

  if (! INSN_P (insn))
    goto egress;

  if (NONJUMP_INSN_P (insn)
      && GET_CODE (PATTERN (insn)) == SEQUENCE)
    insn = XVECEXP (PATTERN (insn), 0, 0);

  note = find_reg_note (insn, REG_EH_REGION, NULL_RTX);
  if (!note)
    {
      ret = !insn_could_throw_p (insn);
      goto egress;
    }

  lp_nr = INTVAL (XEXP (note, 0));
  if (lp_nr == 0 || lp_nr == INT_MIN)
    {
      ret = true;
      goto egress;
    }

  if (lp_nr < 0)
    r = (*cfun->eh->region_array)[-lp_nr];
  else
    {
      lp = (*cfun->eh->lp_array)[lp_nr];
      r = lp->region;
    }

 egress:
  *plp = lp;
  *pr = r;
  return ret;
}

/* Return the landing pad to which INSN may go, or NULL if it does not
   have a reachable landing pad within this function.  */

eh_landing_pad
get_eh_landing_pad_from_rtx (const_rtx insn)
{
  eh_landing_pad lp;
  eh_region r;

  get_eh_region_and_lp_from_rtx (insn, &r, &lp);
  return lp;
}

/* Return the region to which INSN may go, or NULL if it does not
   have a reachable region within this function.  */

eh_region
get_eh_region_from_rtx (const_rtx insn)
{
  eh_landing_pad lp;
  eh_region r;

  get_eh_region_and_lp_from_rtx (insn, &r, &lp);
  return r;
}

/* Return true if INSN throws and is caught by something in this function.  */

bool
can_throw_internal (const_rtx insn)
{
  return get_eh_landing_pad_from_rtx (insn) != NULL;
}

/* Return true if INSN throws and escapes from the current function.  */

bool
can_throw_external (const_rtx insn)
{
  eh_landing_pad lp;
  eh_region r;
  bool nothrow;

  if (! INSN_P (insn))
    return false;

  if (NONJUMP_INSN_P (insn)
      && GET_CODE (PATTERN (insn)) == SEQUENCE)
    {
      rtx_sequence *seq = as_a <rtx_sequence *> (PATTERN (insn));
      int i, n = seq->len ();

      for (i = 0; i < n; i++)
	if (can_throw_external (seq->element (i)))
	  return true;

      return false;
    }

  nothrow = get_eh_region_and_lp_from_rtx (insn, &r, &lp);

  /* If we can't throw, we obviously can't throw external.  */
  if (nothrow)
    return false;

  /* If we have an internal landing pad, then we're not external.  */
  if (lp != NULL)
    return false;

  /* If we're not within an EH region, then we are external.  */
  if (r == NULL)
    return true;

  /* The only thing that ought to be left is MUST_NOT_THROW regions,
     which don't always have landing pads.  */
  gcc_assert (r->type == ERT_MUST_NOT_THROW);
  return false;
}

/* Return true if INSN cannot throw at all.  */

bool
insn_nothrow_p (const_rtx insn)
{
  eh_landing_pad lp;
  eh_region r;

  if (! INSN_P (insn))
    return true;

  if (NONJUMP_INSN_P (insn)
      && GET_CODE (PATTERN (insn)) == SEQUENCE)
    {
      rtx_sequence *seq = as_a <rtx_sequence *> (PATTERN (insn));
      int i, n = seq->len ();

      for (i = 0; i < n; i++)
	if (!insn_nothrow_p (seq->element (i)))
	  return false;

      return true;
    }

  return get_eh_region_and_lp_from_rtx (insn, &r, &lp);
}

/* Return true if INSN can perform a non-local goto.  */
/* ??? This test is here in this file because it (ab)uses REG_EH_REGION.  */

bool
can_nonlocal_goto (const rtx_insn *insn)
{
  if (nonlocal_goto_handler_labels && CALL_P (insn))
    {
      rtx note = find_reg_note (insn, REG_EH_REGION, NULL_RTX);
      if (!note || INTVAL (XEXP (note, 0)) != INT_MIN)
	return true;
    }
  return false;
}
\f
/* Set TREE_NOTHROW and crtl->all_throwers_are_sibcalls.  */

static unsigned int
set_nothrow_function_flags (void)
{
  rtx_insn *insn;

  crtl->nothrow = 1;

  /* Assume crtl->all_throwers_are_sibcalls until we encounter
     something that can throw an exception.  We specifically exempt
     CALL_INSNs that are SIBLING_CALL_P, as these are really jumps,
     and can't throw.  Most CALL_INSNs are not SIBLING_CALL_P, so this
     is optimistic.  */

  crtl->all_throwers_are_sibcalls = 1;

  /* If we don't know that this implementation of the function will
     actually be used, then we must not set TREE_NOTHROW, since
     callers must not assume that this function does not throw.  */
  if (TREE_NOTHROW (current_function_decl))
    return 0;

  if (! flag_exceptions)
    return 0;

  for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
    if (can_throw_external (insn))
      {
	crtl->nothrow = 0;

	if (!CALL_P (insn) || !SIBLING_CALL_P (insn))
	  {
	    crtl->all_throwers_are_sibcalls = 0;
	    return 0;
	  }
      }

  if (crtl->nothrow
      && (cgraph_node::get (current_function_decl)->get_availability ()
	  >= AVAIL_AVAILABLE))
    {
      struct cgraph_node *node = cgraph_node::get (current_function_decl);
      struct cgraph_edge *e;
      for (e = node->callers; e; e = e->next_caller)
	e->can_throw_external = false;
      node->set_nothrow_flag (true);

      if (dump_file)
	fprintf (dump_file, "Marking function nothrow: %s\n\n",
		 current_function_name ());
    }
  return 0;
}

namespace {

const pass_data pass_data_set_nothrow_function_flags =
{
  RTL_PASS, /* type */
  "nothrow", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
2001 TV_NONE, /* tv_id */
2002 0, /* properties_required */
2003 0, /* properties_provided */
2004 0, /* properties_destroyed */
2005 0, /* todo_flags_start */
2006 0, /* todo_flags_finish */
2007 };
2008
2009 class pass_set_nothrow_function_flags : public rtl_opt_pass
2010 {
2011 public:
2012 pass_set_nothrow_function_flags (gcc::context *ctxt)
2013 : rtl_opt_pass (pass_data_set_nothrow_function_flags, ctxt)
2014 {}
2015
2016 /* opt_pass methods: */
2017 virtual unsigned int execute (function *)
2018 {
2019 return set_nothrow_function_flags ();
2020 }
2021
2022 }; // class pass_set_nothrow_function_flags
2023
2024 } // anon namespace
2025
2026 rtl_opt_pass *
2027 make_pass_set_nothrow_function_flags (gcc::context *ctxt)
2028 {
2029 return new pass_set_nothrow_function_flags (ctxt);
2030 }
2031
2032 \f
2033 /* Various hooks for unwind library. */
2034
2035 /* Expand the EH support builtin functions:
2036 __builtin_eh_pointer and __builtin_eh_filter. */
2037
2038 static eh_region
2039 expand_builtin_eh_common (tree region_nr_t)
2040 {
2041 HOST_WIDE_INT region_nr;
2042 eh_region region;
2043
2044 gcc_assert (tree_fits_shwi_p (region_nr_t));
2045 region_nr = tree_to_shwi (region_nr_t);
2046
2047 region = (*cfun->eh->region_array)[region_nr];
2048
2049 /* ??? We shouldn't have been able to delete an eh region without
2050 deleting all the code that depended on it. */
2051 gcc_assert (region != NULL);
2052
2053 return region;
2054 }
2055
2056 /* Expand to the exc_ptr value from the given eh region. */
2057
2058 rtx
2059 expand_builtin_eh_pointer (tree exp)
2060 {
2061 eh_region region
2062 = expand_builtin_eh_common (CALL_EXPR_ARG (exp, 0));
2063 if (region->exc_ptr_reg == NULL)
2064 region->exc_ptr_reg = gen_reg_rtx (ptr_mode);
2065 return region->exc_ptr_reg;
2066 }
2067
2068 /* Expand to the filter value from the given eh region. */
2069
2070 rtx
2071 expand_builtin_eh_filter (tree exp)
2072 {
2073 eh_region region
2074 = expand_builtin_eh_common (CALL_EXPR_ARG (exp, 0));
2075 if (region->filter_reg == NULL)
2076 region->filter_reg = gen_reg_rtx (targetm.eh_return_filter_mode ());
2077 return region->filter_reg;
2078 }
2079
2080 /* Copy the exc_ptr and filter values from one landing pad's registers
2081 to another. This is used to inline the resx statement. */
2082
2083 rtx
2084 expand_builtin_eh_copy_values (tree exp)
2085 {
2086 eh_region dst
2087 = expand_builtin_eh_common (CALL_EXPR_ARG (exp, 0));
2088 eh_region src
2089 = expand_builtin_eh_common (CALL_EXPR_ARG (exp, 1));
2090 machine_mode fmode = targetm.eh_return_filter_mode ();
2091
2092 if (dst->exc_ptr_reg == NULL)
2093 dst->exc_ptr_reg = gen_reg_rtx (ptr_mode);
2094 if (src->exc_ptr_reg == NULL)
2095 src->exc_ptr_reg = gen_reg_rtx (ptr_mode);
2096
2097 if (dst->filter_reg == NULL)
2098 dst->filter_reg = gen_reg_rtx (fmode);
2099 if (src->filter_reg == NULL)
2100 src->filter_reg = gen_reg_rtx (fmode);
2101
2102 emit_move_insn (dst->exc_ptr_reg, src->exc_ptr_reg);
2103 emit_move_insn (dst->filter_reg, src->filter_reg);
2104
2105 return const0_rtx;
2106 }
2107
2108 /* Do any necessary initialization to access arbitrary stack frames.
2109 On the SPARC, this means flushing the register windows. */
2110
2111 void
2112 expand_builtin_unwind_init (void)
2113 {
2114 /* Set this so all the registers get saved in our frame; we need to be
2115 able to copy the saved values for any registers from frames we unwind. */
2116 crtl->saves_all_registers = 1;
2117
2118 SETUP_FRAME_ADDRESSES ();
2119 }
2120
2121 /* Map a non-negative number to an eh return data register number; expands
2122 to -1 if no return data register is associated with the input number.
2123 At least the inputs 0 and 1 must be mapped; the target may provide more. */
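/* For example, the libgcc personality routines hand the exception
object and the selected filter to the landing pad via
_Unwind_SetGR (context, __builtin_eh_return_data_regno (0), ...) and
__builtin_eh_return_data_regno (1) before installing the context.  */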
2124
2125 rtx
2126 expand_builtin_eh_return_data_regno (tree exp)
2127 {
2128 tree which = CALL_EXPR_ARG (exp, 0);
2129 unsigned HOST_WIDE_INT iwhich;
2130
2131 if (TREE_CODE (which) != INTEGER_CST)
2132 {
2133 error ("argument of %<__builtin_eh_return_data_regno%> must be constant");
2134 return constm1_rtx;
2135 }
2136
2137 iwhich = tree_to_uhwi (which);
2138 iwhich = EH_RETURN_DATA_REGNO (iwhich);
2139 if (iwhich == INVALID_REGNUM)
2140 return constm1_rtx;
2141
2142 #ifdef DWARF_FRAME_REGNUM
2143 iwhich = DWARF_FRAME_REGNUM (iwhich);
2144 #else
2145 iwhich = DBX_REGISTER_NUMBER (iwhich);
2146 #endif
2147
2148 return GEN_INT (iwhich);
2149 }
2150
2151 /* Given a value extracted from the return address register or stack slot,
2152 return the actual address encoded in that value. */
2153
2154 rtx
2155 expand_builtin_extract_return_addr (tree addr_tree)
2156 {
2157 rtx addr = expand_expr (addr_tree, NULL_RTX, Pmode, EXPAND_NORMAL);
2158
2159 if (GET_MODE (addr) != Pmode
2160 && GET_MODE (addr) != VOIDmode)
2161 {
2162 #ifdef POINTERS_EXTEND_UNSIGNED
2163 addr = convert_memory_address (Pmode, addr);
2164 #else
2165 addr = convert_to_mode (Pmode, addr, 0);
2166 #endif
2167 }
2168
2169 /* First mask out any unwanted bits. */
2170 rtx mask = MASK_RETURN_ADDR;
2171 if (mask)
2172 expand_and (Pmode, addr, mask, addr);
2173
2174 /* Then adjust to find the real return address. */
2175 if (RETURN_ADDR_OFFSET)
2176 addr = plus_constant (Pmode, addr, RETURN_ADDR_OFFSET);
2177
2178 return addr;
2179 }
2180
2181 /* Given an actual address in addr_tree, do any necessary encoding
2182 and return the value to be stored in the return address register or
2183 stack slot so the epilogue will return to that address. */
2184
2185 rtx
2186 expand_builtin_frob_return_addr (tree addr_tree)
2187 {
2188 rtx addr = expand_expr (addr_tree, NULL_RTX, ptr_mode, EXPAND_NORMAL);
2189
2190 addr = convert_memory_address (Pmode, addr);
2191
2192 if (RETURN_ADDR_OFFSET)
2193 {
2194 addr = force_reg (Pmode, addr);
2195 addr = plus_constant (Pmode, addr, -RETURN_ADDR_OFFSET);
2196 }
2197
2198 return addr;
2199 }
2200
2201 /* Set up the epilogue with the magic bits we'll need to return to the
2202 exception handler. */
2203
2204 void
2205 expand_builtin_eh_return (tree stackadj_tree ATTRIBUTE_UNUSED,
2206 tree handler_tree)
2207 {
2208 rtx tmp;
2209
2210 #ifdef EH_RETURN_STACKADJ_RTX
2211 tmp = expand_expr (stackadj_tree, crtl->eh.ehr_stackadj,
2212 VOIDmode, EXPAND_NORMAL);
2213 tmp = convert_memory_address (Pmode, tmp);
2214 if (!crtl->eh.ehr_stackadj)
2215 crtl->eh.ehr_stackadj = copy_addr_to_reg (tmp);
2216 else if (tmp != crtl->eh.ehr_stackadj)
2217 emit_move_insn (crtl->eh.ehr_stackadj, tmp);
2218 #endif
2219
2220 tmp = expand_expr (handler_tree, crtl->eh.ehr_handler,
2221 VOIDmode, EXPAND_NORMAL);
2222 tmp = convert_memory_address (Pmode, tmp);
2223 if (!crtl->eh.ehr_handler)
2224 crtl->eh.ehr_handler = copy_addr_to_reg (tmp);
2225 else if (tmp != crtl->eh.ehr_handler)
2226 emit_move_insn (crtl->eh.ehr_handler, tmp);
2227
2228 if (!crtl->eh.ehr_label)
2229 crtl->eh.ehr_label = gen_label_rtx ();
2230 emit_jump (crtl->eh.ehr_label);
2231 }
2232
2233 /* Expand __builtin_eh_return. This exit path from the function loads up
2234 the eh return data registers, adjusts the stack, and branches to a
2235 given PC other than the normal return address. */
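/* In practice this path is exercised by libgcc's DWARF-2 unwinder,
whose uw_install_context calls __builtin_eh_return with the computed
stack adjustment and handler address.  */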
2236
2237 void
2238 expand_eh_return (void)
2239 {
2240 rtx_code_label *around_label;
2241
2242 if (! crtl->eh.ehr_label)
2243 return;
2244
2245 crtl->calls_eh_return = 1;
2246
2247 #ifdef EH_RETURN_STACKADJ_RTX
2248 emit_move_insn (EH_RETURN_STACKADJ_RTX, const0_rtx);
2249 #endif
2250
2251 around_label = gen_label_rtx ();
2252 emit_jump (around_label);
2253
2254 emit_label (crtl->eh.ehr_label);
2255 clobber_return_register ();
2256
2257 #ifdef EH_RETURN_STACKADJ_RTX
2258 emit_move_insn (EH_RETURN_STACKADJ_RTX, crtl->eh.ehr_stackadj);
2259 #endif
2260
2261 if (targetm.have_eh_return ())
2262 emit_insn (targetm.gen_eh_return (crtl->eh.ehr_handler));
2263 else
2264 {
2265 #ifdef EH_RETURN_HANDLER_RTX
2266 emit_move_insn (EH_RETURN_HANDLER_RTX, crtl->eh.ehr_handler);
2267 #else
2268 error ("%<__builtin_eh_return%> not supported on this target");
2269 #endif
2270 }
2271
2272 emit_label (around_label);
2273 }
2274
2275 /* Convert a ptr_mode address ADDR_TREE to a Pmode address controlled by
2276 POINTERS_EXTEND_UNSIGNED and return it. */
2277
2278 rtx
2279 expand_builtin_extend_pointer (tree addr_tree)
2280 {
2281 rtx addr = expand_expr (addr_tree, NULL_RTX, ptr_mode, EXPAND_NORMAL);
2282 int extend;
2283
2284 #ifdef POINTERS_EXTEND_UNSIGNED
2285 extend = POINTERS_EXTEND_UNSIGNED;
2286 #else
2287 /* The previous EH code did an unsigned extend by default, so we do this also
2288 for consistency. */
2289 extend = 1;
2290 #endif
2291
2292 return convert_modes (targetm.unwind_word_mode (), ptr_mode, addr, extend);
2293 }
2294 \f
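/* Enter a (FILTER, NEXT) pair into the action record table, reusing
an existing identical record if AR_HASH already has one.  Return the
1-based byte offset of the record in crtl->eh.action_record_data;
this offset is what the call-site records below encode as their
"action".  */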
2295 static int
2296 add_action_record (action_hash_type *ar_hash, int filter, int next)
2297 {
2298 struct action_record **slot, *new_ar, tmp;
2299
2300 tmp.filter = filter;
2301 tmp.next = next;
2302 slot = ar_hash->find_slot (&tmp, INSERT);
2303
2304 if ((new_ar = *slot) == NULL)
2305 {
2306 new_ar = XNEW (struct action_record);
2307 new_ar->offset = crtl->eh.action_record_data->length () + 1;
2308 new_ar->filter = filter;
2309 new_ar->next = next;
2310 *slot = new_ar;
2311
2312 /* The filter value goes in untouched. The link to the next
2313 record is a "self-relative" byte offset, or zero to indicate
2314 that there is no next record. So convert the absolute 1-based
2315 indices we've been carrying around into a displacement. */
2316
2317 push_sleb128 (&crtl->eh.action_record_data, filter);
2318 if (next)
2319 next -= crtl->eh.action_record_data->length () + 1;
2320 push_sleb128 (&crtl->eh.action_record_data, next);
2321 }
2322
2323 return new_ar->offset;
2324 }
2325
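/* Compute the chain of action records for REGION by walking outward
through its enclosing regions.  The return value encodes what the
call site needs: -1, no actions and no landing pad required; -2, a
must-not-throw region that needs an LSDA but no call-site entry; 0,
a cleanup-only chain (encoded as call-site action 0); positive, the
offset of the first action record of the chain.  */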
2326 static int
2327 collect_one_action_chain (action_hash_type *ar_hash, eh_region region)
2328 {
2329 int next;
2330
2331 /* If we've reached the top of the region chain, then we have
2332 no actions, and require no landing pad. */
2333 if (region == NULL)
2334 return -1;
2335
2336 switch (region->type)
2337 {
2338 case ERT_CLEANUP:
2339 {
2340 eh_region r;
2341 /* A cleanup adds a zero filter to the beginning of the chain, but
2342 there are special cases to look out for. If there are *only*
2343 cleanups along a path, then it compresses to a zero action.
2344 Further, if there are multiple cleanups along a path, we only
2345 need to represent one of them, as that is enough to trigger
2346 entry to the landing pad at runtime. */
2347 next = collect_one_action_chain (ar_hash, region->outer);
2348 if (next <= 0)
2349 return 0;
2350 for (r = region->outer; r ; r = r->outer)
2351 if (r->type == ERT_CLEANUP)
2352 return next;
2353 return add_action_record (ar_hash, 0, next);
2354 }
2355
2356 case ERT_TRY:
2357 {
2358 eh_catch c;
2359
2360 /* Process the associated catch regions in reverse order.
2361 If there's a catch-all handler, then we don't need to
2362 search outer regions. Use a magic -3 value to record
2363 that we haven't done the outer search. */
2364 next = -3;
2365 for (c = region->u.eh_try.last_catch; c ; c = c->prev_catch)
2366 {
2367 if (c->type_list == NULL)
2368 {
2369 /* Retrieve the filter from the head of the filter list
2370 where we have stored it (see assign_filter_values). */
2371 int filter = TREE_INT_CST_LOW (TREE_VALUE (c->filter_list));
2372 next = add_action_record (ar_hash, filter, 0);
2373 }
2374 else
2375 {
2376 /* Once the outer search is done, trigger an action record for
2377 each filter we have. */
2378 tree flt_node;
2379
2380 if (next == -3)
2381 {
2382 next = collect_one_action_chain (ar_hash, region->outer);
2383
2384 /* If there is no next action, terminate the chain. */
2385 if (next == -1)
2386 next = 0;
2387 /* If all outer actions are cleanups or must_not_throw,
2388 we'll have no action record for it, since we had wanted
2389 to encode these states in the call-site record directly.
2390 Add a cleanup action to the chain to catch these. */
2391 else if (next <= 0)
2392 next = add_action_record (ar_hash, 0, 0);
2393 }
2394
2395 flt_node = c->filter_list;
2396 for (; flt_node; flt_node = TREE_CHAIN (flt_node))
2397 {
2398 int filter = TREE_INT_CST_LOW (TREE_VALUE (flt_node));
2399 next = add_action_record (ar_hash, filter, next);
2400 }
2401 }
2402 }
2403 return next;
2404 }
2405
2406 case ERT_ALLOWED_EXCEPTIONS:
2407 /* An exception specification adds its filter to the
2408 beginning of the chain. */
2409 next = collect_one_action_chain (ar_hash, region->outer);
2410
2411 /* If there is no next action, terminate the chain. */
2412 if (next == -1)
2413 next = 0;
2414 /* If all outer actions are cleanups or must_not_throw,
2415 we'll have no action record for it, since we had wanted
2416 to encode these states in the call-site record directly.
2417 Add a cleanup action to the chain to catch these. */
2418 else if (next <= 0)
2419 next = add_action_record (ar_hash, 0, 0);
2420
2421 return add_action_record (ar_hash, region->u.allowed.filter, next);
2422
2423 case ERT_MUST_NOT_THROW:
2424 /* A must-not-throw region with no inner handlers or cleanups
2425 requires no call-site entry. Note that this differs from
2426 the no handler or cleanup case in that we do require an lsda
2427 to be generated. Return a magic -2 value to record this. */
2428 return -2;
2429 }
2430
2431 gcc_unreachable ();
2432 }
2433
2434 static int
2435 add_call_site (rtx landing_pad, int action, int section)
2436 {
2437 call_site_record record;
2438
2439 record = ggc_alloc<call_site_record_d> ();
2440 record->landing_pad = landing_pad;
2441 record->action = action;
2442
2443 vec_safe_push (crtl->eh.call_site_record_v[section], record);
2444
2445 return call_site_base + crtl->eh.call_site_record_v[section]->length () - 1;
2446 }
2447
2448 static rtx_note *
2449 emit_note_eh_region_end (rtx_insn *insn)
2450 {
2451 rtx_insn *next = NEXT_INSN (insn);
2452
2453 /* Make sure we do not split a call and its corresponding
2454 CALL_ARG_LOCATION note. */
2455 if (next && NOTE_P (next)
2456 && NOTE_KIND (next) == NOTE_INSN_CALL_ARG_LOCATION)
2457 insn = next;
2458
2459 return emit_note_after (NOTE_INSN_EH_REGION_END, insn);
2460 }
2461
2462 /* Turn REG_EH_REGION notes back into NOTE_INSN_EH_REGION notes.
2463 The new note numbers will not refer to region numbers, but
2464 instead to call site entries. */
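/* Runs of consecutive insns that share the same action chain and
landing pad are merged into a single call-site region.  LAST_ACTION
uses the same encoding as collect_one_action_chain, with -3 meaning
no insn with EH information has been seen yet.  */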
2465
2466 static unsigned int
2467 convert_to_eh_region_ranges (void)
2468 {
2469 rtx insn;
2470 rtx_insn *iter;
2471 rtx_note *note;
2472 action_hash_type ar_hash (31);
2473 int last_action = -3;
2474 rtx_insn *last_action_insn = NULL;
2475 rtx last_landing_pad = NULL_RTX;
2476 rtx_insn *first_no_action_insn = NULL;
2477 int call_site = 0;
2478 int cur_sec = 0;
2479 rtx_insn *section_switch_note = NULL;
2480 rtx_insn *first_no_action_insn_before_switch = NULL;
2481 rtx_insn *last_no_action_insn_before_switch = NULL;
2482 int saved_call_site_base = call_site_base;
2483
2484 vec_alloc (crtl->eh.action_record_data, 64);
2485
2486 for (iter = get_insns (); iter ; iter = NEXT_INSN (iter))
2487 if (INSN_P (iter))
2488 {
2489 eh_landing_pad lp;
2490 eh_region region;
2491 bool nothrow;
2492 int this_action;
2493 rtx_code_label *this_landing_pad;
2494
2495 insn = iter;
2496 if (NONJUMP_INSN_P (insn)
2497 && GET_CODE (PATTERN (insn)) == SEQUENCE)
2498 insn = XVECEXP (PATTERN (insn), 0, 0);
2499
2500 nothrow = get_eh_region_and_lp_from_rtx (insn, &region, &lp);
2501 if (nothrow)
2502 continue;
2503 if (region)
2504 this_action = collect_one_action_chain (&ar_hash, region);
2505 else
2506 this_action = -1;
2507
2508 /* Existence of catch handlers or must-not-throw regions
2509 implies that an lsda is needed (even if empty). */
2510 if (this_action != -1)
2511 crtl->uses_eh_lsda = 1;
2512
2513 /* Delay creation of region notes for no-action regions
2514 until we're sure that an lsda will be required. */
2515 else if (last_action == -3)
2516 {
2517 first_no_action_insn = iter;
2518 last_action = -1;
2519 }
2520
2521 if (this_action >= 0)
2522 this_landing_pad = lp->landing_pad;
2523 else
2524 this_landing_pad = NULL;
2525
2526 /* Differing actions or landing pads imply a change in call-site
2527 info, which implies some EH_REGION note should be emitted. */
2528 if (last_action != this_action
2529 || last_landing_pad != this_landing_pad)
2530 {
2531 /* If there is a queued no-action region in the other section
2532 with hot/cold partitioning, emit it now. */
2533 if (first_no_action_insn_before_switch)
2534 {
2535 gcc_assert (this_action != -1
2536 && last_action == (first_no_action_insn
2537 ? -1 : -3));
2538 call_site = add_call_site (NULL_RTX, 0, 0);
2539 note = emit_note_before (NOTE_INSN_EH_REGION_BEG,
2540 first_no_action_insn_before_switch);
2541 NOTE_EH_HANDLER (note) = call_site;
2542 note
2543 = emit_note_eh_region_end (last_no_action_insn_before_switch);
2544 NOTE_EH_HANDLER (note) = call_site;
2545 gcc_assert (last_action != -3
2546 || (last_action_insn
2547 == last_no_action_insn_before_switch));
2548 first_no_action_insn_before_switch = NULL;
2549 last_no_action_insn_before_switch = NULL;
2550 call_site_base++;
2551 }
2552 /* If we'd not seen a previous action (-3) or the previous
2553 action was must-not-throw (-2), then we do not need an
2554 end note. */
2555 if (last_action >= -1)
2556 {
2557 /* If we delayed the creation of the begin, do it now. */
2558 if (first_no_action_insn)
2559 {
2560 call_site = add_call_site (NULL_RTX, 0, cur_sec);
2561 note = emit_note_before (NOTE_INSN_EH_REGION_BEG,
2562 first_no_action_insn);
2563 NOTE_EH_HANDLER (note) = call_site;
2564 first_no_action_insn = NULL;
2565 }
2566
2567 note = emit_note_eh_region_end (last_action_insn);
2568 NOTE_EH_HANDLER (note) = call_site;
2569 }
2570
2571 /* If the new action is must-not-throw, then no region notes
2572 are created. */
2573 if (this_action >= -1)
2574 {
2575 call_site = add_call_site (this_landing_pad,
2576 this_action < 0 ? 0 : this_action,
2577 cur_sec);
2578 note = emit_note_before (NOTE_INSN_EH_REGION_BEG, iter);
2579 NOTE_EH_HANDLER (note) = call_site;
2580 }
2581
2582 last_action = this_action;
2583 last_landing_pad = this_landing_pad;
2584 }
2585 last_action_insn = iter;
2586 }
2587 else if (NOTE_P (iter)
2588 && NOTE_KIND (iter) == NOTE_INSN_SWITCH_TEXT_SECTIONS)
2589 {
2590 gcc_assert (section_switch_note == NULL_RTX);
2591 gcc_assert (flag_reorder_blocks_and_partition);
2592 section_switch_note = iter;
2593 if (first_no_action_insn)
2594 {
2595 first_no_action_insn_before_switch = first_no_action_insn;
2596 last_no_action_insn_before_switch = last_action_insn;
2597 first_no_action_insn = NULL;
2598 gcc_assert (last_action == -1);
2599 last_action = -3;
2600 }
2601 /* Force closing of current EH region before section switch and
2602 opening a new one afterwards. */
2603 else if (last_action != -3)
2604 last_landing_pad = pc_rtx;
2605 if (crtl->eh.call_site_record_v[cur_sec])
2606 call_site_base += crtl->eh.call_site_record_v[cur_sec]->length ();
2607 cur_sec++;
2608 gcc_assert (crtl->eh.call_site_record_v[cur_sec] == NULL);
2609 vec_alloc (crtl->eh.call_site_record_v[cur_sec], 10);
2610 }
2611
2612 if (last_action >= -1 && ! first_no_action_insn)
2613 {
2614 note = emit_note_eh_region_end (last_action_insn);
2615 NOTE_EH_HANDLER (note) = call_site;
2616 }
2617
2618 call_site_base = saved_call_site_base;
2619
2620 return 0;
2621 }
2622
2623 namespace {
2624
2625 const pass_data pass_data_convert_to_eh_region_ranges =
2626 {
2627 RTL_PASS, /* type */
2628 "eh_ranges", /* name */
2629 OPTGROUP_NONE, /* optinfo_flags */
2630 TV_NONE, /* tv_id */
2631 0, /* properties_required */
2632 0, /* properties_provided */
2633 0, /* properties_destroyed */
2634 0, /* todo_flags_start */
2635 0, /* todo_flags_finish */
2636 };
2637
2638 class pass_convert_to_eh_region_ranges : public rtl_opt_pass
2639 {
2640 public:
2641 pass_convert_to_eh_region_ranges (gcc::context *ctxt)
2642 : rtl_opt_pass (pass_data_convert_to_eh_region_ranges, ctxt)
2643 {}
2644
2645 /* opt_pass methods: */
2646 virtual bool gate (function *);
2647 virtual unsigned int execute (function *)
2648 {
2649 return convert_to_eh_region_ranges ();
2650 }
2651
2652 }; // class pass_convert_to_eh_region_ranges
2653
2654 bool
2655 pass_convert_to_eh_region_ranges::gate (function *)
2656 {
2657 /* Nothing to do for SJLJ exceptions or if no regions created. */
2658 if (cfun->eh->region_tree == NULL)
2659 return false;
2660 if (targetm_common.except_unwind_info (&global_options) == UI_SJLJ)
2661 return false;
2662 return true;
2663 }
2664
2665 } // anon namespace
2666
2667 rtl_opt_pass *
2668 make_pass_convert_to_eh_region_ranges (gcc::context *ctxt)
2669 {
2670 return new pass_convert_to_eh_region_ranges (ctxt);
2671 }
2672 \f
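/* Append VALUE to *DATA_AREA as an unsigned LEB128: seven data bits
per byte, least significant group first, with the high bit set on
every byte but the last.  E.g. 624485 is emitted as 0xe5 0x8e 0x26.  */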
2673 static void
2674 push_uleb128 (vec<uchar, va_gc> **data_area, unsigned int value)
2675 {
2676 do
2677 {
2678 unsigned char byte = value & 0x7f;
2679 value >>= 7;
2680 if (value)
2681 byte |= 0x80;
2682 vec_safe_push (*data_area, byte);
2683 }
2684 while (value);
2685 }
2686
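/* Likewise for signed LEB128.  Emission stops once the remaining
bits are all copies of the sign bit, hence the test on bit 0x40 of
the last byte; e.g. -123456 is emitted as 0xc0 0xbb 0x78.  Note the
right shift of a negative VALUE assumes arithmetic-shift semantics.  */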
2687 static void
2688 push_sleb128 (vec<uchar, va_gc> **data_area, int value)
2689 {
2690 unsigned char byte;
2691 int more;
2692
2693 do
2694 {
2695 byte = value & 0x7f;
2696 value >>= 7;
2697 more = ! ((value == 0 && (byte & 0x40) == 0)
2698 || (value == -1 && (byte & 0x40) != 0));
2699 if (more)
2700 byte |= 0x80;
2701 vec_safe_push (*data_area, byte);
2702 }
2703 while (more);
2704 }
2705
2706 \f
2707 #ifndef HAVE_AS_LEB128
2708 static int
2709 dw2_size_of_call_site_table (int section)
2710 {
2711 int n = vec_safe_length (crtl->eh.call_site_record_v[section]);
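/* Each DWARF-2 call-site record contributes three udata4 fields
(region start, length, landing pad) plus a uleb128 action, whose
size is added in the loop below.  */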
2712 int size = n * (4 + 4 + 4);
2713 int i;
2714
2715 for (i = 0; i < n; ++i)
2716 {
2717 struct call_site_record_d *cs =
2718 (*crtl->eh.call_site_record_v[section])[i];
2719 size += size_of_uleb128 (cs->action);
2720 }
2721
2722 return size;
2723 }
2724
2725 static int
2726 sjlj_size_of_call_site_table (void)
2727 {
2728 int n = vec_safe_length (crtl->eh.call_site_record_v[0]);
2729 int size = 0;
2730 int i;
2731
2732 for (i = 0; i < n; ++i)
2733 {
2734 struct call_site_record_d *cs =
2735 (*crtl->eh.call_site_record_v[0])[i];
2736 size += size_of_uleb128 (INTVAL (cs->landing_pad));
2737 size += size_of_uleb128 (cs->action);
2738 }
2739
2740 return size;
2741 }
2742 #endif
2743
2744 static void
2745 dw2_output_call_site_table (int cs_format, int section)
2746 {
2747 int n = vec_safe_length (crtl->eh.call_site_record_v[section]);
2748 int i;
2749 const char *begin;
2750
2751 if (section == 0)
2752 begin = current_function_func_begin_label;
2753 else if (first_function_block_is_cold)
2754 begin = crtl->subsections.hot_section_label;
2755 else
2756 begin = crtl->subsections.cold_section_label;
2757
2758 for (i = 0; i < n; ++i)
2759 {
2760 struct call_site_record_d *cs = (*crtl->eh.call_site_record_v[section])[i];
2761 char reg_start_lab[32];
2762 char reg_end_lab[32];
2763 char landing_pad_lab[32];
2764
2765 ASM_GENERATE_INTERNAL_LABEL (reg_start_lab, "LEHB", call_site_base + i);
2766 ASM_GENERATE_INTERNAL_LABEL (reg_end_lab, "LEHE", call_site_base + i);
2767
2768 if (cs->landing_pad)
2769 ASM_GENERATE_INTERNAL_LABEL (landing_pad_lab, "L",
2770 CODE_LABEL_NUMBER (cs->landing_pad));
2771
2772 /* ??? Perhaps use insn length scaling if the assembler supports
2773 generic arithmetic. */
2774 /* ??? Perhaps use attr_length to choose data1 or data2 instead of
2775 data4 if the function is small enough. */
2776 if (cs_format == DW_EH_PE_uleb128)
2777 {
2778 dw2_asm_output_delta_uleb128 (reg_start_lab, begin,
2779 "region %d start", i);
2780 dw2_asm_output_delta_uleb128 (reg_end_lab, reg_start_lab,
2781 "length");
2782 if (cs->landing_pad)
2783 dw2_asm_output_delta_uleb128 (landing_pad_lab, begin,
2784 "landing pad");
2785 else
2786 dw2_asm_output_data_uleb128 (0, "landing pad");
2787 }
2788 else
2789 {
2790 dw2_asm_output_delta (4, reg_start_lab, begin,
2791 "region %d start", i);
2792 dw2_asm_output_delta (4, reg_end_lab, reg_start_lab, "length");
2793 if (cs->landing_pad)
2794 dw2_asm_output_delta (4, landing_pad_lab, begin,
2795 "landing pad");
2796 else
2797 dw2_asm_output_data (4, 0, "landing pad");
2798 }
2799 dw2_asm_output_data_uleb128 (cs->action, "action");
2800 }
2801
2802 call_site_base += n;
2803 }
2804
2805 static void
2806 sjlj_output_call_site_table (void)
2807 {
2808 int n = vec_safe_length (crtl->eh.call_site_record_v[0]);
2809 int i;
2810
2811 for (i = 0; i < n; ++i)
2812 {
2813 struct call_site_record_d *cs = (*crtl->eh.call_site_record_v[0])[i];
2814
2815 dw2_asm_output_data_uleb128 (INTVAL (cs->landing_pad),
2816 "region %d landing pad", i);
2817 dw2_asm_output_data_uleb128 (cs->action, "action");
2818 }
2819
2820 call_site_base += n;
2821 }
2822
2823 /* Switch to the section that should be used for exception tables. */
2824
2825 static void
2826 switch_to_exception_section (const char * ARG_UNUSED (fnname))
2827 {
2828 section *s;
2829
2830 if (exception_section)
2831 s = exception_section;
2832 else
2833 {
2834 int flags;
2835
2836 if (EH_TABLES_CAN_BE_READ_ONLY)
2837 {
2838 int tt_format =
2839 ASM_PREFERRED_EH_DATA_FORMAT (/*code=*/0, /*global=*/1);
2840 flags = ((! flag_pic
2841 || ((tt_format & 0x70) != DW_EH_PE_absptr
2842 && (tt_format & 0x70) != DW_EH_PE_aligned))
2843 ? 0 : SECTION_WRITE);
2844 }
2845 else
2846 flags = SECTION_WRITE;
2847
2848 /* Compute the section and cache it into exception_section,
2849 unless it depends on the function name. */
2850 if (targetm_common.have_named_sections)
2851 {
2852 #ifdef HAVE_LD_EH_GC_SECTIONS
2853 if (flag_function_sections
2854 || (DECL_COMDAT_GROUP (current_function_decl) && HAVE_COMDAT_GROUP))
2855 {
2856 char *section_name = XNEWVEC (char, strlen (fnname) + 32);
2857 /* The EH table must match the code section, so only mark
2858 it linkonce if we have COMDAT groups to tie them together. */
2859 if (DECL_COMDAT_GROUP (current_function_decl) && HAVE_COMDAT_GROUP)
2860 flags |= SECTION_LINKONCE;
2861 sprintf (section_name, ".gcc_except_table.%s", fnname);
2862 s = get_section (section_name, flags, current_function_decl);
2863 free (section_name);
2864 }
2865 else
2866 #endif
2867 exception_section
2868 = s = get_section (".gcc_except_table", flags, NULL);
2869 }
2870 else
2871 exception_section
2872 = s = flags == SECTION_WRITE ? data_section : readonly_data_section;
2873 }
2874
2875 switch_to_section (s);
2876 }
2877
2878
2879 /* Output a reference from an exception table to the type_info object TYPE.
2880 TT_FORMAT and TT_FORMAT_SIZE describe the DWARF encoding method used for
2881 the value. */
2882
2883 static void
2884 output_ttype (tree type, int tt_format, int tt_format_size)
2885 {
2886 rtx value;
2887 bool is_public = true;
2888
2889 if (type == NULL_TREE)
2890 value = const0_rtx;
2891 else
2892 {
2893 /* FIXME lto. pass_ipa_free_lang_data changes all types to
2894 runtime types so TYPE should already be a runtime type
2895 reference. When pass_ipa_free_lang_data is made a default
2896 pass, we can then remove the call to lookup_type_for_runtime
2897 below. */
2898 if (TYPE_P (type))
2899 type = lookup_type_for_runtime (type);
2900
2901 value = expand_expr (type, NULL_RTX, VOIDmode, EXPAND_INITIALIZER);
2902
2903 /* Let cgraph know that the rtti decl is used. Not all of the
2904 paths below go through assemble_integer, which would take
2905 care of this for us. */
2906 STRIP_NOPS (type);
2907 if (TREE_CODE (type) == ADDR_EXPR)
2908 {
2909 type = TREE_OPERAND (type, 0);
2910 if (TREE_CODE (type) == VAR_DECL)
2911 is_public = TREE_PUBLIC (type);
2912 }
2913 else
2914 gcc_assert (TREE_CODE (type) == INTEGER_CST);
2915 }
2916
2917 /* Allow the target to override the type table entry format. */
2918 if (targetm.asm_out.ttype (value))
2919 return;
2920
2921 if (tt_format == DW_EH_PE_absptr || tt_format == DW_EH_PE_aligned)
2922 assemble_integer (value, tt_format_size,
2923 tt_format_size * BITS_PER_UNIT, 1);
2924 else
2925 dw2_asm_output_encoded_addr_rtx (tt_format, value, is_public, NULL);
2926 }
2927
2928 static void
2929 output_one_function_exception_table (int section)
2930 {
2931 int tt_format, cs_format, lp_format, i;
2932 #ifdef HAVE_AS_LEB128
2933 char ttype_label[32];
2934 char cs_after_size_label[32];
2935 char cs_end_label[32];
2936 #else
2937 int call_site_len;
2938 #endif
2939 int have_tt_data;
2940 int tt_format_size = 0;
2941
2942 have_tt_data = (vec_safe_length (cfun->eh->ttype_data)
2943 || (targetm.arm_eabi_unwinder
2944 ? vec_safe_length (cfun->eh->ehspec_data.arm_eabi)
2945 : vec_safe_length (cfun->eh->ehspec_data.other)));
2946
2947 /* Indicate the format of the @TType entries. */
2948 if (! have_tt_data)
2949 tt_format = DW_EH_PE_omit;
2950 else
2951 {
2952 tt_format = ASM_PREFERRED_EH_DATA_FORMAT (/*code=*/0, /*global=*/1);
2953 #ifdef HAVE_AS_LEB128
2954 ASM_GENERATE_INTERNAL_LABEL (ttype_label,
2955 section ? "LLSDATTC" : "LLSDATT",
2956 current_function_funcdef_no);
2957 #endif
2958 tt_format_size = size_of_encoded_value (tt_format);
2959
2960 assemble_align (tt_format_size * BITS_PER_UNIT);
2961 }
2962
2963 targetm.asm_out.internal_label (asm_out_file, section ? "LLSDAC" : "LLSDA",
2964 current_function_funcdef_no);
2965
2966 /* The LSDA header. */
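/* The layout emitted below is: the @LPStart format byte (pointer
omitted here), the @TType format byte, a uleb128 displacement to the
end of the @TType table (type entries are indexed backward from
there), the call-site format byte and table length, the call-site
table, the action record table, the @TType table, and finally the
exception-specification table.  */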
2967
2968 /* Indicate the format of the landing pad start pointer. An omitted
2969 field implies @LPStart == @Start. */
2970 /* Currently we always put @LPStart == @Start. This field would
2971 be most useful in moving the landing pads completely out of
2972 line to another section, but it could also be used to minimize
2973 the size of uleb128 landing pad offsets. */
2974 lp_format = DW_EH_PE_omit;
2975 dw2_asm_output_data (1, lp_format, "@LPStart format (%s)",
2976 eh_data_format_name (lp_format));
2977
2978 /* @LPStart pointer would go here. */
2979
2980 dw2_asm_output_data (1, tt_format, "@TType format (%s)",
2981 eh_data_format_name (tt_format));
2982
2983 #ifndef HAVE_AS_LEB128
2984 if (targetm_common.except_unwind_info (&global_options) == UI_SJLJ)
2985 call_site_len = sjlj_size_of_call_site_table ();
2986 else
2987 call_site_len = dw2_size_of_call_site_table (section);
2988 #endif
2989
2990 /* A pc-relative 4-byte displacement to the @TType data. */
2991 if (have_tt_data)
2992 {
2993 #ifdef HAVE_AS_LEB128
2994 char ttype_after_disp_label[32];
2995 ASM_GENERATE_INTERNAL_LABEL (ttype_after_disp_label,
2996 section ? "LLSDATTDC" : "LLSDATTD",
2997 current_function_funcdef_no);
2998 dw2_asm_output_delta_uleb128 (ttype_label, ttype_after_disp_label,
2999 "@TType base offset");
3000 ASM_OUTPUT_LABEL (asm_out_file, ttype_after_disp_label);
3001 #else
3002 /* Ug. Alignment queers things: the size of the uleb128
displacement emitted just below affects the padding needed to align
the @TType table, which in turn affects the displacement, so iterate
until the value reaches a fixed point. */
3003 unsigned int before_disp, after_disp, last_disp, disp;
3004
3005 before_disp = 1 + 1;
3006 after_disp = (1 + size_of_uleb128 (call_site_len)
3007 + call_site_len
3008 + vec_safe_length (crtl->eh.action_record_data)
3009 + (vec_safe_length (cfun->eh->ttype_data)
3010 * tt_format_size));
3011
3012 disp = after_disp;
3013 do
3014 {
3015 unsigned int disp_size, pad;
3016
3017 last_disp = disp;
3018 disp_size = size_of_uleb128 (disp);
3019 pad = before_disp + disp_size + after_disp;
3020 if (pad % tt_format_size)
3021 pad = tt_format_size - (pad % tt_format_size);
3022 else
3023 pad = 0;
3024 disp = after_disp + pad;
3025 }
3026 while (disp != last_disp);
3027
3028 dw2_asm_output_data_uleb128 (disp, "@TType base offset");
3029 #endif
3030 }
3031
3032 /* Indicate the format of the call-site offsets. */
3033 #ifdef HAVE_AS_LEB128
3034 cs_format = DW_EH_PE_uleb128;
3035 #else
3036 cs_format = DW_EH_PE_udata4;
3037 #endif
3038 dw2_asm_output_data (1, cs_format, "call-site format (%s)",
3039 eh_data_format_name (cs_format));
3040
3041 #ifdef HAVE_AS_LEB128
3042 ASM_GENERATE_INTERNAL_LABEL (cs_after_size_label,
3043 section ? "LLSDACSBC" : "LLSDACSB",
3044 current_function_funcdef_no);
3045 ASM_GENERATE_INTERNAL_LABEL (cs_end_label,
3046 section ? "LLSDACSEC" : "LLSDACSE",
3047 current_function_funcdef_no);
3048 dw2_asm_output_delta_uleb128 (cs_end_label, cs_after_size_label,
3049 "Call-site table length");
3050 ASM_OUTPUT_LABEL (asm_out_file, cs_after_size_label);
3051 if (targetm_common.except_unwind_info (&global_options) == UI_SJLJ)
3052 sjlj_output_call_site_table ();
3053 else
3054 dw2_output_call_site_table (cs_format, section);
3055 ASM_OUTPUT_LABEL (asm_out_file, cs_end_label);
3056 #else
3057 dw2_asm_output_data_uleb128 (call_site_len, "Call-site table length");
3058 if (targetm_common.except_unwind_info (&global_options) == UI_SJLJ)
3059 sjlj_output_call_site_table ();
3060 else
3061 dw2_output_call_site_table (cs_format, section);
3062 #endif
3063
3064 /* ??? Decode and interpret the data for flag_debug_asm. */
3065 {
3066 uchar uc;
3067 FOR_EACH_VEC_ELT (*crtl->eh.action_record_data, i, uc)
3068 dw2_asm_output_data (1, uc, i ? NULL : "Action record table");
3069 }
3070
3071 if (have_tt_data)
3072 assemble_align (tt_format_size * BITS_PER_UNIT);
3073
3074 i = vec_safe_length (cfun->eh->ttype_data);
3075 while (i-- > 0)
3076 {
3077 tree type = (*cfun->eh->ttype_data)[i];
3078 output_ttype (type, tt_format, tt_format_size);
3079 }
3080
3081 #ifdef HAVE_AS_LEB128
3082 if (have_tt_data)
3083 ASM_OUTPUT_LABEL (asm_out_file, ttype_label);
3084 #endif
3085
3086 /* ??? Decode and interpret the data for flag_debug_asm. */
3087 if (targetm.arm_eabi_unwinder)
3088 {
3089 tree type;
3090 for (i = 0;
3091 vec_safe_iterate (cfun->eh->ehspec_data.arm_eabi, i, &type); ++i)
3092 output_ttype (type, tt_format, tt_format_size);
3093 }
3094 else
3095 {
3096 uchar uc;
3097 for (i = 0;
3098 vec_safe_iterate (cfun->eh->ehspec_data.other, i, &uc); ++i)
3099 dw2_asm_output_data (1, uc,
3100 i ? NULL : "Exception specification table");
3101 }
3102 }
3103
3104 void
3105 output_function_exception_table (const char *fnname)
3106 {
3107 rtx personality = get_personality_function (current_function_decl);
3108
3109 /* Not all functions need anything. */
3110 if (! crtl->uses_eh_lsda)
3111 return;
3112
3113 if (personality)
3114 {
3115 assemble_external_libcall (personality);
3116
3117 if (targetm.asm_out.emit_except_personality)
3118 targetm.asm_out.emit_except_personality (personality);
3119 }
3120
3121 switch_to_exception_section (fnname);
3122
3123 /* If the target wants a label to begin the table, emit it here. */
3124 targetm.asm_out.emit_except_table_label (asm_out_file);
3125
3126 output_one_function_exception_table (0);
3127 if (crtl->eh.call_site_record_v[1])
3128 output_one_function_exception_table (1);
3129
3130 switch_to_section (current_function_section ());
3131 }
3132
3133 void
3134 set_eh_throw_stmt_table (function *fun, hash_map<gimple *, int> *table)
3135 {
3136 fun->eh->throw_stmt_table = table;
3137 }
3138
3139 hash_map<gimple *, int> *
3140 get_eh_throw_stmt_table (struct function *fun)
3141 {
3142 return fun->eh->throw_stmt_table;
3143 }
3144 \f
3145 /* Determine if the function needs an EH personality function. */
3146
3147 enum eh_personality_kind
3148 function_needs_eh_personality (struct function *fn)
3149 {
3150 enum eh_personality_kind kind = eh_personality_none;
3151 eh_region i;
3152
3153 FOR_ALL_EH_REGION_FN (i, fn)
3154 {
3155 switch (i->type)
3156 {
3157 case ERT_CLEANUP:
3158 /* Can do with any personality including the generic C one. */
3159 kind = eh_personality_any;
3160 break;
3161
3162 case ERT_TRY:
3163 case ERT_ALLOWED_EXCEPTIONS:
3164 /* Always needs an EH personality function. The generic C
3165 personality doesn't handle these even for empty type lists. */
3166 return eh_personality_lang;
3167
3168 case ERT_MUST_NOT_THROW:
3169 /* Always needs an EH personality function. The language may specify
3170 which abort routine must be used, e.g. std::terminate. */
3171 return eh_personality_lang;
3172 }
3173 }
3174
3175 return kind;
3176 }
3177 \f
3178 /* Dump EH information to OUT. */
3179
3180 void
3181 dump_eh_tree (FILE * out, struct function *fun)
3182 {
3183 eh_region i;
3184 int depth = 0;
3185 static const char *const type_name[] = {
3186 "cleanup", "try", "allowed_exceptions", "must_not_throw"
3187 };
3188
3189 i = fun->eh->region_tree;
3190 if (!i)
3191 return;
3192
3193 fprintf (out, "Eh tree:\n");
3194 while (1)
3195 {
3196 fprintf (out, " %*s %i %s", depth * 2, "",
3197 i->index, type_name[(int) i->type]);
3198
3199 if (i->landing_pads)
3200 {
3201 eh_landing_pad lp;
3202
3203 fprintf (out, " land:");
3204 if (current_ir_type () == IR_GIMPLE)
3205 {
3206 for (lp = i->landing_pads; lp ; lp = lp->next_lp)
3207 {
3208 fprintf (out, "{%i,", lp->index);
3209 print_generic_expr (out, lp->post_landing_pad, 0);
3210 fputc ('}', out);
3211 if (lp->next_lp)
3212 fputc (',', out);
3213 }
3214 }
3215 else
3216 {
3217 for (lp = i->landing_pads; lp ; lp = lp->next_lp)
3218 {
3219 fprintf (out, "{%i,", lp->index);
3220 if (lp->landing_pad)
3221 fprintf (out, "%i%s,", INSN_UID (lp->landing_pad),
3222 NOTE_P (lp->landing_pad) ? "(del)" : "");
3223 else
3224 fprintf (out, "(nil),");
3225 if (lp->post_landing_pad)
3226 {
3227 rtx_insn *lab = label_rtx (lp->post_landing_pad);
3228 fprintf (out, "%i%s}", INSN_UID (lab),
3229 NOTE_P (lab) ? "(del)" : "");
3230 }
3231 else
3232 fprintf (out, "(nil)}");
3233 if (lp->next_lp)
3234 fputc (',', out);
3235 }
3236 }
3237 }
3238
3239 switch (i->type)
3240 {
3241 case ERT_CLEANUP:
3242 case ERT_MUST_NOT_THROW:
3243 break;
3244
3245 case ERT_TRY:
3246 {
3247 eh_catch c;
3248 fprintf (out, " catch:");
3249 for (c = i->u.eh_try.first_catch; c; c = c->next_catch)
3250 {
3251 fputc ('{', out);
3252 if (c->label)
3253 {
3254 fprintf (out, "lab:");
3255 print_generic_expr (out, c->label, 0);
3256 fputc (';', out);
3257 }
3258 print_generic_expr (out, c->type_list, 0);
3259 fputc ('}', out);
3260 if (c->next_catch)
3261 fputc (',', out);
3262 }
3263 }
3264 break;
3265
3266 case ERT_ALLOWED_EXCEPTIONS:
3267 fprintf (out, " filter :%i types:", i->u.allowed.filter);
3268 print_generic_expr (out, i->u.allowed.type_list, 0);
3269 break;
3270 }
3271 fputc ('\n', out);
3272
3273 /* If there are sub-regions, process them. */
3274 if (i->inner)
3275 i = i->inner, depth++;
3276 /* If there are peers, process them. */
3277 else if (i->next_peer)
3278 i = i->next_peer;
3279 /* Otherwise, step back up the tree to the next peer. */
3280 else
3281 {
3282 do
3283 {
3284 i = i->outer;
3285 depth--;
3286 if (i == NULL)
3287 return;
3288 }
3289 while (i->next_peer == NULL);
3290 i = i->next_peer;
3291 }
3292 }
3293 }
3294
3295 /* Dump the EH tree for FN on stderr. */
3296
3297 DEBUG_FUNCTION void
3298 debug_eh_tree (struct function *fn)
3299 {
3300 dump_eh_tree (stderr, fn);
3301 }
3302
3303 /* Verify invariants on EH data structures. */
3304
3305 DEBUG_FUNCTION void
3306 verify_eh_tree (struct function *fun)
3307 {
3308 eh_region r, outer;
3309 int nvisited_lp, nvisited_r;
3310 int count_lp, count_r, depth, i;
3311 eh_landing_pad lp;
3312 bool err = false;
3313
3314 if (!fun->eh->region_tree)
3315 return;
3316
3317 count_r = 0;
3318 for (i = 1; vec_safe_iterate (fun->eh->region_array, i, &r); ++i)
3319 if (r)
3320 {
3321 if (r->index == i)
3322 count_r++;
3323 else
3324 {
3325 error ("region_array is corrupted for region %i", r->index);
3326 err = true;
3327 }
3328 }
3329
3330 count_lp = 0;
3331 for (i = 1; vec_safe_iterate (fun->eh->lp_array, i, &lp); ++i)
3332 if (lp)
3333 {
3334 if (lp->index == i)
3335 count_lp++;
3336 else
3337 {
3338 error ("lp_array is corrupted for lp %i", lp->index);
3339 err = true;
3340 }
3341 }
3342
3343 depth = nvisited_lp = nvisited_r = 0;
3344 outer = NULL;
3345 r = fun->eh->region_tree;
3346 while (1)
3347 {
3348 if ((*fun->eh->region_array)[r->index] != r)
3349 {
3350 error ("region_array is corrupted for region %i", r->index);
3351 err = true;
3352 }
3353 if (r->outer != outer)
3354 {
3355 error ("outer block of region %i is wrong", r->index);
3356 err = true;
3357 }
3358 if (depth < 0)
3359 {
3360 error ("negative nesting depth of region %i", r->index);
3361 err = true;
3362 }
3363 nvisited_r++;
3364
3365 for (lp = r->landing_pads; lp ; lp = lp->next_lp)
3366 {
3367 if ((*fun->eh->lp_array)[lp->index] != lp)
3368 {
3369 error ("lp_array is corrupted for lp %i", lp->index);
3370 err = true;
3371 }
3372 if (lp->region != r)
3373 {
3374 error ("region of lp %i is wrong", lp->index);
3375 err = true;
3376 }
3377 nvisited_lp++;
3378 }
3379
3380 if (r->inner)
3381 outer = r, r = r->inner, depth++;
3382 else if (r->next_peer)
3383 r = r->next_peer;
3384 else
3385 {
3386 do
3387 {
3388 r = r->outer;
3389 if (r == NULL)
3390 goto region_done;
3391 depth--;
3392 outer = r->outer;
3393 }
3394 while (r->next_peer == NULL);
3395 r = r->next_peer;
3396 }
3397 }
3398 region_done:
3399 if (depth != 0)
3400 {
3401 error ("tree list ends on depth %i", depth);
3402 err = true;
3403 }
3404 if (count_r != nvisited_r)
3405 {
3406 error ("region_array does not match region_tree");
3407 err = true;
3408 }
3409 if (count_lp != nvisited_lp)
3410 {
3411 error ("lp_array does not match region_tree");
3412 err = true;
3413 }
3414
3415 if (err)
3416 {
3417 dump_eh_tree (stderr, fun);
3418 internal_error ("verify_eh_tree failed");
3419 }
3420 }
3421 \f
3422 #include "gt-except.h"