/* Implements exception handling.
   Copyright (C) 1989-2015 Free Software Foundation, Inc.
   Contributed by Mike Stump <mrs@cygnus.com>.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

/* An exception is an event that can be "thrown" from within a
   function.  This event can then be "caught" by the callers of
   the function.

   The representation of exceptions changes several times during
   the compilation process:

   In the beginning, in the front end, we have the GENERIC trees
   TRY_CATCH_EXPR, TRY_FINALLY_EXPR, WITH_CLEANUP_EXPR,
   CLEANUP_POINT_EXPR, CATCH_EXPR, and EH_FILTER_EXPR.

   During initial gimplification (gimplify.c) these are lowered
   to the GIMPLE_TRY, GIMPLE_CATCH, and GIMPLE_EH_FILTER nodes.
   The WITH_CLEANUP_EXPR and CLEANUP_POINT_EXPR nodes are converted
   into GIMPLE_TRY_FINALLY nodes; the others are a more direct 1-1
   conversion.

   During pass_lower_eh (tree-eh.c) we record the nested structure
   of the TRY nodes in EH_REGION nodes in CFUN->EH->REGION_TREE.
   We expand the eh_protect_cleanup_actions langhook into MUST_NOT_THROW
   regions at this time.  We can then flatten the statements within
   the TRY nodes to straight-line code.  Statements that had been within
   TRY nodes that can throw are recorded within CFUN->EH->THROW_STMT_TABLE,
   so that we may remember what action is supposed to be taken if
   a given statement does throw.  During this lowering process,
   we create an EH_LANDING_PAD node for each EH_REGION that has
   some code within the function that needs to be executed if a
   throw does happen.  We also create RESX statements that are
   used to transfer control from an inner EH_REGION to an outer
   EH_REGION.  We also create EH_DISPATCH statements as placeholders
   for a runtime type comparison that should be made in order to
   select the action to perform among different CATCH and EH_FILTER
   regions.

   During pass_lower_eh_dispatch (tree-eh.c), which is run after
   all inlining is complete, we are able to run assign_filter_values,
   which allows us to map the set of types manipulated by all of the
   CATCH and EH_FILTER regions to a set of integers.  This set of integers
   will be how the exception runtime communicates with the code generated
   within the function.  We then expand the GIMPLE_EH_DISPATCH statements
   to a switch or conditional branches that use the argument provided by
   the runtime (__builtin_eh_filter) and the set of integers we computed
   in assign_filter_values.

   During pass_lower_resx (tree-eh.c), which is run near the end
   of optimization, we expand RESX statements.  If the eh region
   that is outer to the RESX statement is a MUST_NOT_THROW, then
   the RESX expands to some form of abort statement.  If the eh
   region that is outer to the RESX statement is within the current
   function, then the RESX expands to a bookkeeping call
   (__builtin_eh_copy_values) and a goto.  Otherwise, the next
   handler for the exception must be within a function somewhere
   up the call chain, so we call back into the exception runtime
   (__builtin_unwind_resume).

   During pass_expand (cfgexpand.c), we generate REG_EH_REGION notes
   that create an rtl to eh_region mapping that corresponds to the
   gimple to eh_region mapping that had been recorded in the
   THROW_STMT_TABLE.

   Then, via finish_eh_generation, we generate the real landing pads
   to which the runtime will actually transfer control.  These new
   landing pads perform whatever bookkeeping is needed by the target
   backend in order to resume execution within the current function.
   Each of these new landing pads falls through into the post_landing_pad
   label which had been used within the CFG up to this point.  All
   exception edges within the CFG are redirected to the new landing pads.
   If the target uses setjmp to implement exceptions, the various extra
   calls into the runtime to register and unregister the current stack
   frame are emitted at this time.

   During pass_convert_to_eh_region_ranges (except.c), we transform
   the REG_EH_REGION notes attached to individual insns into
   non-overlapping ranges of insns bounded by NOTE_INSN_EH_REGION_BEG
   and NOTE_INSN_EH_REGION_END.  Each insn within such ranges has the
   same associated action within the exception region tree, meaning
   that (1) the exception is caught by the same landing pad within the
   current function, (2) the exception is blocked by the runtime with
   a MUST_NOT_THROW region, or (3) the exception is not handled at all
   within the current function.

   Finally, during assembly generation, we call
   output_function_exception_table (except.c) to emit the tables with
   which the exception runtime can determine if a given stack frame
   handles a given exception, and if so what filter value to provide
   to the function when the non-local control transfer is effected.
   If the target uses dwarf2 unwinding to implement exceptions, then
   output_call_frame_info (dwarf2out.c) emits the required unwind data.  */
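
/* As a purely illustrative sketch (not part of the implementation),
   a C++ fragment such as

       try { foo (); } catch (E &) { bar (); }

   begins life as a TRY_CATCH_EXPR in GENERIC, is lowered to a
   GIMPLE_TRY with a GIMPLE_CATCH handler during gimplification,
   becomes an ERT_TRY region with one eh_catch during pass_lower_eh,
   has a filter value assigned for E during pass_lower_eh_dispatch,
   and ultimately contributes one call-site entry and one action
   record to the tables emitted by output_function_exception_table.  */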


#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "rtl.h"
#include "alias.h"
#include "symtab.h"
#include "tree.h"
#include "fold-const.h"
#include "stringpool.h"
#include "stor-layout.h"
#include "flags.h"
#include "hard-reg-set.h"
#include "function.h"
#include "insn-codes.h"
#include "optabs.h"
#include "insn-config.h"
#include "expmed.h"
#include "dojump.h"
#include "explow.h"
#include "calls.h"
#include "emit-rtl.h"
#include "varasm.h"
#include "stmt.h"
#include "expr.h"
#include "libfuncs.h"
#include "except.h"
#include "output.h"
#include "dwarf2asm.h"
#include "dwarf2out.h"
#include "dwarf2.h"
#include "toplev.h"
#include "intl.h"
#include "tm_p.h"
#include "target.h"
#include "common/common-target.h"
#include "langhooks.h"
#include "predict.h"
#include "dominance.h"
#include "cfg.h"
#include "cfgrtl.h"
#include "basic-block.h"
#include "plugin-api.h"
#include "ipa-ref.h"
#include "cgraph.h"
#include "diagnostic.h"
#include "tree-pretty-print.h"
#include "tree-pass.h"
#include "cfgloop.h"
#include "builtins.h"

static GTY(()) int call_site_base;

struct tree_hash_traits : default_hashmap_traits
{
  static hashval_t hash (tree t) { return TREE_HASH (t); }
};

static GTY (()) hash_map<tree, tree, tree_hash_traits> *type_to_runtime_map;

/* Describe the SjLj_Function_Context structure.  */
static GTY(()) tree sjlj_fc_type_node;
static int sjlj_fc_call_site_ofs;
static int sjlj_fc_data_ofs;
static int sjlj_fc_personality_ofs;
static int sjlj_fc_lsda_ofs;
static int sjlj_fc_jbuf_ofs;
\f

struct GTY(()) call_site_record_d
{
  rtx landing_pad;
  int action;
};

/* In the following structure and associated functions,
   we represent entries in the action table as 1-based indices.
   Special cases are:

     0: null action record, non-null landing pad; implies cleanups
    -1: null action record, null landing pad; implies no action
    -2: no call-site entry; implies must_not_throw
    -3: we have yet to process outer regions

   Further, no special cases apply to the "next" field of the record.
   For next, 0 means end of list.  */

struct action_record
{
  int offset;
  int filter;
  int next;
};
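
/* For illustration only: under the encoding above, a throwing call
   inside a catch of some type T, itself nested inside a cleanup,
   might be represented by an action record whose filter names T's
   @TTypes slot and whose "next" field chains to a second record with
   filter 0 (a cleanup), whose own "next" of 0 terminates the chain.
   The actual chains are computed by collect_one_action_chain.  */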

/* Hashtable helpers.  */

struct action_record_hasher : free_ptr_hash <action_record>
{
  static inline hashval_t hash (const action_record *);
  static inline bool equal (const action_record *, const action_record *);
};

inline hashval_t
action_record_hasher::hash (const action_record *entry)
{
  return entry->next * 1009 + entry->filter;
}

inline bool
action_record_hasher::equal (const action_record *entry,
			     const action_record *data)
{
  return entry->filter == data->filter && entry->next == data->next;
}

typedef hash_table<action_record_hasher> action_hash_type;
\f
static bool get_eh_region_and_lp_from_rtx (const_rtx, eh_region *,
					   eh_landing_pad *);

static void dw2_build_landing_pads (void);

static int collect_one_action_chain (action_hash_type *, eh_region);
static int add_call_site (rtx, int, int);

static void push_uleb128 (vec<uchar, va_gc> **, unsigned int);
static void push_sleb128 (vec<uchar, va_gc> **, int);
#ifndef HAVE_AS_LEB128
static int dw2_size_of_call_site_table (int);
static int sjlj_size_of_call_site_table (void);
#endif
static void dw2_output_call_site_table (int, int);
static void sjlj_output_call_site_table (void);

\f
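/* One-time initialization of the exception handling machinery: create
   the map from language types to runtime type descriptors and, when
   targeting SjLj unwinding, lay out the function context record.  */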
void
init_eh (void)
{
  if (! flag_exceptions)
    return;

  type_to_runtime_map
    = hash_map<tree, tree, tree_hash_traits>::create_ggc (31);

  /* Create the SjLj_Function_Context structure.  This should match
     the definition in unwind-sjlj.c.  */
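  /* For orientation, the runtime's counterpart looks roughly like the
     sketch below (see unwind-sjlj.c for the authoritative definition);
     the layout built here mirrors it field for field:

	 struct SjLj_Function_Context
	 {
	   struct SjLj_Function_Context *prev;
	   int call_site;
	   unwind-word data[4];
	   void *personality;
	   void *lsda;
	   ... target-specific jump buffer ...
	 };  */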
  if (targetm_common.except_unwind_info (&global_options) == UI_SJLJ)
    {
      tree f_jbuf, f_per, f_lsda, f_prev, f_cs, f_data, tmp;

      sjlj_fc_type_node = lang_hooks.types.make_type (RECORD_TYPE);

      f_prev = build_decl (BUILTINS_LOCATION,
			   FIELD_DECL, get_identifier ("__prev"),
			   build_pointer_type (sjlj_fc_type_node));
      DECL_FIELD_CONTEXT (f_prev) = sjlj_fc_type_node;

      f_cs = build_decl (BUILTINS_LOCATION,
			 FIELD_DECL, get_identifier ("__call_site"),
			 integer_type_node);
      DECL_FIELD_CONTEXT (f_cs) = sjlj_fc_type_node;

      tmp = build_index_type (size_int (4 - 1));
      tmp = build_array_type (lang_hooks.types.type_for_mode
			      (targetm.unwind_word_mode (), 1),
			      tmp);
      f_data = build_decl (BUILTINS_LOCATION,
			   FIELD_DECL, get_identifier ("__data"), tmp);
      DECL_FIELD_CONTEXT (f_data) = sjlj_fc_type_node;

      f_per = build_decl (BUILTINS_LOCATION,
			  FIELD_DECL, get_identifier ("__personality"),
			  ptr_type_node);
      DECL_FIELD_CONTEXT (f_per) = sjlj_fc_type_node;

      f_lsda = build_decl (BUILTINS_LOCATION,
			   FIELD_DECL, get_identifier ("__lsda"),
			   ptr_type_node);
      DECL_FIELD_CONTEXT (f_lsda) = sjlj_fc_type_node;

#ifdef DONT_USE_BUILTIN_SETJMP
#ifdef JMP_BUF_SIZE
      tmp = size_int (JMP_BUF_SIZE - 1);
#else
      /* Should be large enough for most systems.  If it is not,
	 JMP_BUF_SIZE should be defined with the proper value.  This
	 guess will also tend to be larger than necessary for most
	 systems; a more optimal port will define JMP_BUF_SIZE.  */
      tmp = size_int (FIRST_PSEUDO_REGISTER + 2 - 1);
#endif
#else
      /* Compute a minimally sized jump buffer.  We need room to store at
	 least 3 pointers - stack pointer, frame pointer and return address.
	 Plus for some targets we need room for an extra pointer - in the
	 case of MIPS this is the global pointer.  This makes a total of four
	 pointers, but to be safe we actually allocate room for 5.

	 If pointers are smaller than words then we allocate enough room for
	 5 words, just in case the backend needs this much room.  For more
	 discussion on this issue see:
	 http://gcc.gnu.org/ml/gcc-patches/2014-05/msg00313.html.  */
      if (POINTER_SIZE > BITS_PER_WORD)
	tmp = size_int (5 - 1);
      else
	tmp = size_int ((5 * BITS_PER_WORD / POINTER_SIZE) - 1);
#endif

      tmp = build_index_type (tmp);
      tmp = build_array_type (ptr_type_node, tmp);
      f_jbuf = build_decl (BUILTINS_LOCATION,
			   FIELD_DECL, get_identifier ("__jbuf"), tmp);
#ifdef DONT_USE_BUILTIN_SETJMP
      /* We don't know what alignment requirements the runtime's
	 jmp_buf has.  Overestimate.  */
      DECL_ALIGN (f_jbuf) = BIGGEST_ALIGNMENT;
      DECL_USER_ALIGN (f_jbuf) = 1;
#endif
      DECL_FIELD_CONTEXT (f_jbuf) = sjlj_fc_type_node;

      TYPE_FIELDS (sjlj_fc_type_node) = f_prev;
      TREE_CHAIN (f_prev) = f_cs;
      TREE_CHAIN (f_cs) = f_data;
      TREE_CHAIN (f_data) = f_per;
      TREE_CHAIN (f_per) = f_lsda;
      TREE_CHAIN (f_lsda) = f_jbuf;

      layout_type (sjlj_fc_type_node);

      /* Cache the interesting field offsets so that we have
	 easy access from rtl.  */
      sjlj_fc_call_site_ofs
	= (tree_to_uhwi (DECL_FIELD_OFFSET (f_cs))
	   + tree_to_uhwi (DECL_FIELD_BIT_OFFSET (f_cs)) / BITS_PER_UNIT);
      sjlj_fc_data_ofs
	= (tree_to_uhwi (DECL_FIELD_OFFSET (f_data))
	   + tree_to_uhwi (DECL_FIELD_BIT_OFFSET (f_data)) / BITS_PER_UNIT);
      sjlj_fc_personality_ofs
	= (tree_to_uhwi (DECL_FIELD_OFFSET (f_per))
	   + tree_to_uhwi (DECL_FIELD_BIT_OFFSET (f_per)) / BITS_PER_UNIT);
      sjlj_fc_lsda_ofs
	= (tree_to_uhwi (DECL_FIELD_OFFSET (f_lsda))
	   + tree_to_uhwi (DECL_FIELD_BIT_OFFSET (f_lsda)) / BITS_PER_UNIT);
      sjlj_fc_jbuf_ofs
	= (tree_to_uhwi (DECL_FIELD_OFFSET (f_jbuf))
	   + tree_to_uhwi (DECL_FIELD_BIT_OFFSET (f_jbuf)) / BITS_PER_UNIT);
    }
}

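/* Allocate and initialize the per-function EH data structures.  */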
void
init_eh_for_function (void)
{
  cfun->eh = ggc_cleared_alloc<eh_status> ();

  /* Make sure the zeroth entries are used.  */
  vec_safe_push (cfun->eh->region_array, (eh_region)0);
  vec_safe_push (cfun->eh->lp_array, (eh_landing_pad)0);
}
\f
/* Routines to generate the exception tree somewhat directly.
   These are used from tree-eh.c when processing exception related
   nodes during tree optimization.  */

static eh_region
gen_eh_region (enum eh_region_type type, eh_region outer)
{
  eh_region new_eh;

  /* Insert a new blank region as a leaf in the tree.  */
  new_eh = ggc_cleared_alloc<eh_region_d> ();
  new_eh->type = type;
  new_eh->outer = outer;
  if (outer)
    {
      new_eh->next_peer = outer->inner;
      outer->inner = new_eh;
    }
  else
    {
      new_eh->next_peer = cfun->eh->region_tree;
      cfun->eh->region_tree = new_eh;
    }

  new_eh->index = vec_safe_length (cfun->eh->region_array);
  vec_safe_push (cfun->eh->region_array, new_eh);

  /* Copy the language's notion of whether to use __cxa_end_cleanup.  */
  if (targetm.arm_eabi_unwinder && lang_hooks.eh_use_cxa_end_cleanup)
    new_eh->use_cxa_end_cleanup = true;

  return new_eh;
}

eh_region
gen_eh_region_cleanup (eh_region outer)
{
  return gen_eh_region (ERT_CLEANUP, outer);
}

eh_region
gen_eh_region_try (eh_region outer)
{
  return gen_eh_region (ERT_TRY, outer);
}

eh_catch
gen_eh_region_catch (eh_region t, tree type_or_list)
{
  eh_catch c, l;
  tree type_list, type_node;

  gcc_assert (t->type == ERT_TRY);

  /* Make sure we always end up with a type list, so that further
     processing is normalized; then register each type against the
     runtime types map.  */
  type_list = type_or_list;
  if (type_or_list)
    {
      if (TREE_CODE (type_or_list) != TREE_LIST)
	type_list = tree_cons (NULL_TREE, type_or_list, NULL_TREE);

      type_node = type_list;
      for (; type_node; type_node = TREE_CHAIN (type_node))
	add_type_for_runtime (TREE_VALUE (type_node));
    }

  c = ggc_cleared_alloc<eh_catch_d> ();
  c->type_list = type_list;
  l = t->u.eh_try.last_catch;
  c->prev_catch = l;
  if (l)
    l->next_catch = c;
  else
    t->u.eh_try.first_catch = c;
  t->u.eh_try.last_catch = c;

  return c;
}

eh_region
gen_eh_region_allowed (eh_region outer, tree allowed)
{
  eh_region region = gen_eh_region (ERT_ALLOWED_EXCEPTIONS, outer);
  region->u.allowed.type_list = allowed;

  for (; allowed ; allowed = TREE_CHAIN (allowed))
    add_type_for_runtime (TREE_VALUE (allowed));

  return region;
}

eh_region
gen_eh_region_must_not_throw (eh_region outer)
{
  return gen_eh_region (ERT_MUST_NOT_THROW, outer);
}

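/* Create a new landing pad for REGION, giving it the next index in
   CFUN->EH->LP_ARRAY.  */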
eh_landing_pad
gen_eh_landing_pad (eh_region region)
{
  eh_landing_pad lp = ggc_cleared_alloc<eh_landing_pad_d> ();

  lp->next_lp = region->landing_pads;
  lp->region = region;
  lp->index = vec_safe_length (cfun->eh->lp_array);
  region->landing_pads = lp;

  vec_safe_push (cfun->eh->lp_array, lp);

  return lp;
}

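/* Accessors mapping region and landing pad indices back to the
   corresponding structures.  The LP-number variants additionally
   decode negative numbers, which name MUST_NOT_THROW regions
   directly.  */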
eh_region
get_eh_region_from_number_fn (struct function *ifun, int i)
{
  return (*ifun->eh->region_array)[i];
}

eh_region
get_eh_region_from_number (int i)
{
  return get_eh_region_from_number_fn (cfun, i);
}

eh_landing_pad
get_eh_landing_pad_from_number_fn (struct function *ifun, int i)
{
  return (*ifun->eh->lp_array)[i];
}

eh_landing_pad
get_eh_landing_pad_from_number (int i)
{
  return get_eh_landing_pad_from_number_fn (cfun, i);
}

eh_region
get_eh_region_from_lp_number_fn (struct function *ifun, int i)
{
  if (i < 0)
    return (*ifun->eh->region_array)[-i];
  else if (i == 0)
    return NULL;
  else
    {
      eh_landing_pad lp;
      lp = (*ifun->eh->lp_array)[i];
      return lp->region;
    }
}

eh_region
get_eh_region_from_lp_number (int i)
{
  return get_eh_region_from_lp_number_fn (cfun, i);
}
\f
/* Returns true if the current function has exception handling regions.  */

bool
current_function_has_exception_handlers (void)
{
  return cfun->eh->region_tree != NULL;
}
\f
/* A subroutine of duplicate_eh_regions.  Copy the eh_region tree at OLD.
   Root it at OUTER, and apply LP_OFFSET to the lp numbers.  */

struct duplicate_eh_regions_data
{
  duplicate_eh_regions_map label_map;
  void *label_map_data;
  hash_map<void *, void *> *eh_map;
};

static void
duplicate_eh_regions_1 (struct duplicate_eh_regions_data *data,
			eh_region old_r, eh_region outer)
{
  eh_landing_pad old_lp, new_lp;
  eh_region new_r;

  new_r = gen_eh_region (old_r->type, outer);
  gcc_assert (!data->eh_map->put (old_r, new_r));

  switch (old_r->type)
    {
    case ERT_CLEANUP:
      break;

    case ERT_TRY:
      {
	eh_catch oc, nc;
	for (oc = old_r->u.eh_try.first_catch; oc ; oc = oc->next_catch)
	  {
	    /* We should be doing all our region duplication before and
	       during inlining, which is before filter lists are created.  */
	    gcc_assert (oc->filter_list == NULL);
	    nc = gen_eh_region_catch (new_r, oc->type_list);
	    nc->label = data->label_map (oc->label, data->label_map_data);
	  }
      }
      break;

    case ERT_ALLOWED_EXCEPTIONS:
      new_r->u.allowed.type_list = old_r->u.allowed.type_list;
      if (old_r->u.allowed.label)
	new_r->u.allowed.label
	  = data->label_map (old_r->u.allowed.label, data->label_map_data);
      else
	new_r->u.allowed.label = NULL_TREE;
      break;

    case ERT_MUST_NOT_THROW:
      new_r->u.must_not_throw.failure_loc =
	LOCATION_LOCUS (old_r->u.must_not_throw.failure_loc);
      new_r->u.must_not_throw.failure_decl =
	old_r->u.must_not_throw.failure_decl;
      break;
    }

  for (old_lp = old_r->landing_pads; old_lp ; old_lp = old_lp->next_lp)
    {
      /* Don't bother copying unused landing pads.  */
      if (old_lp->post_landing_pad == NULL)
	continue;

      new_lp = gen_eh_landing_pad (new_r);
      gcc_assert (!data->eh_map->put (old_lp, new_lp));

      new_lp->post_landing_pad
	= data->label_map (old_lp->post_landing_pad, data->label_map_data);
      EH_LANDING_PAD_NR (new_lp->post_landing_pad) = new_lp->index;
    }

  /* Make sure to preserve the original use of __cxa_end_cleanup.  */
  new_r->use_cxa_end_cleanup = old_r->use_cxa_end_cleanup;

  for (old_r = old_r->inner; old_r ; old_r = old_r->next_peer)
    duplicate_eh_regions_1 (data, old_r, new_r);
}

/* Duplicate the EH regions from IFUN rooted at COPY_REGION into
   the current function and root the tree below OUTER_REGION.
   The special case of COPY_REGION of NULL means all regions.
   Remap labels using MAP/MAP_DATA callback.  Return a pointer map
   that allows the caller to remap uses of both EH regions and
   EH landing pads.  */

hash_map<void *, void *> *
duplicate_eh_regions (struct function *ifun,
		      eh_region copy_region, int outer_lp,
		      duplicate_eh_regions_map map, void *map_data)
{
  struct duplicate_eh_regions_data data;
  eh_region outer_region;

#ifdef ENABLE_CHECKING
  verify_eh_tree (ifun);
#endif

  data.label_map = map;
  data.label_map_data = map_data;
  data.eh_map = new hash_map<void *, void *>;

  outer_region = get_eh_region_from_lp_number_fn (cfun, outer_lp);

  /* Copy all the regions in the subtree.  */
  if (copy_region)
    duplicate_eh_regions_1 (&data, copy_region, outer_region);
  else
    {
      eh_region r;
      for (r = ifun->eh->region_tree; r ; r = r->next_peer)
	duplicate_eh_regions_1 (&data, r, outer_region);
    }

#ifdef ENABLE_CHECKING
  verify_eh_tree (cfun);
#endif

  return data.eh_map;
}

/* Return the region that is outer to both REGION_A and REGION_B in IFUN.  */

eh_region
eh_region_outermost (struct function *ifun, eh_region region_a,
		     eh_region region_b)
{
  sbitmap b_outer;

  gcc_assert (ifun->eh->region_array);
  gcc_assert (ifun->eh->region_tree);

  b_outer = sbitmap_alloc (ifun->eh->region_array->length ());
  bitmap_clear (b_outer);

  do
    {
      bitmap_set_bit (b_outer, region_b->index);
      region_b = region_b->outer;
    }
  while (region_b);

  do
    {
      if (bitmap_bit_p (b_outer, region_a->index))
	break;
      region_a = region_a->outer;
    }
  while (region_a);

  sbitmap_free (b_outer);
  return region_a;
}
\f
void
add_type_for_runtime (tree type)
{
  /* If TYPE is NOP_EXPR, it means that it already is a runtime type.  */
  if (TREE_CODE (type) == NOP_EXPR)
    return;

  bool existed = false;
  tree *slot = &type_to_runtime_map->get_or_insert (type, &existed);
  if (!existed)
    *slot = lang_hooks.eh_runtime_type (type);
}

tree
lookup_type_for_runtime (tree type)
{
  /* If TYPE is NOP_EXPR, it means that it already is a runtime type.  */
  if (TREE_CODE (type) == NOP_EXPR)
    return type;

  /* We should have always inserted the data earlier.  */
  return *type_to_runtime_map->get (type);
}

\f
/* Represent an entry in @TTypes for either catch actions
   or exception filter actions.  */
struct ttypes_filter {
  tree t;
  int filter;
};

/* Helper for ttypes_filter hashing.  */

struct ttypes_filter_hasher : free_ptr_hash <ttypes_filter>
{
  typedef tree_node *compare_type;
  static inline hashval_t hash (const ttypes_filter *);
  static inline bool equal (const ttypes_filter *, const tree_node *);
};

/* Compare ENTRY (a ttypes_filter entry in the hash table) with DATA
   (a tree) for a @TTypes type node we are thinking about adding.  */

inline bool
ttypes_filter_hasher::equal (const ttypes_filter *entry, const tree_node *data)
{
  return entry->t == data;
}

inline hashval_t
ttypes_filter_hasher::hash (const ttypes_filter *entry)
{
  return TREE_HASH (entry->t);
}

typedef hash_table<ttypes_filter_hasher> ttypes_hash_type;


/* Helper for ehspec hashing.  */

struct ehspec_hasher : free_ptr_hash <ttypes_filter>
{
  static inline hashval_t hash (const ttypes_filter *);
  static inline bool equal (const ttypes_filter *, const ttypes_filter *);
};

/* Compare ENTRY with DATA (both struct ttypes_filter) for a @TTypes
   exception specification list we are thinking about adding.  */
/* ??? Currently we use the type lists in the order given.  Someone
   should put these in some canonical order.  */

inline bool
ehspec_hasher::equal (const ttypes_filter *entry, const ttypes_filter *data)
{
  return type_list_equal (entry->t, data->t);
}

/* Hash function for exception specification lists.  */

inline hashval_t
ehspec_hasher::hash (const ttypes_filter *entry)
{
  hashval_t h = 0;
  tree list;

  for (list = entry->t; list ; list = TREE_CHAIN (list))
    h = (h << 5) + (h >> 27) + TREE_HASH (TREE_VALUE (list));
  return h;
}

typedef hash_table<ehspec_hasher> ehspec_hash_type;


/* Add TYPE (which may be NULL) to cfun->eh->ttype_data, using TYPES_HASH
   to speed up the search.  Return the filter value to be used.  */

static int
add_ttypes_entry (ttypes_hash_type *ttypes_hash, tree type)
{
  struct ttypes_filter **slot, *n;

  slot = ttypes_hash->find_slot_with_hash (type, (hashval_t) TREE_HASH (type),
					   INSERT);

  if ((n = *slot) == NULL)
    {
      /* Filter value is a 1 based table index.  */

      n = XNEW (struct ttypes_filter);
      n->t = type;
      n->filter = vec_safe_length (cfun->eh->ttype_data) + 1;
      *slot = n;

      vec_safe_push (cfun->eh->ttype_data, type);
    }

  return n->filter;
}

/* Add LIST to cfun->eh->ehspec_data, using EHSPEC_HASH and TYPES_HASH
   to speed up the search.  Return the filter value to be used.  */

static int
add_ehspec_entry (ehspec_hash_type *ehspec_hash, ttypes_hash_type *ttypes_hash,
		  tree list)
{
  struct ttypes_filter **slot, *n;
  struct ttypes_filter dummy;

  dummy.t = list;
  slot = ehspec_hash->find_slot (&dummy, INSERT);

  if ((n = *slot) == NULL)
    {
      int len;

      if (targetm.arm_eabi_unwinder)
	len = vec_safe_length (cfun->eh->ehspec_data.arm_eabi);
      else
	len = vec_safe_length (cfun->eh->ehspec_data.other);

      /* Filter value is a -1 based byte index into a uleb128 buffer.  */

      n = XNEW (struct ttypes_filter);
      n->t = list;
      n->filter = -(len + 1);
      *slot = n;

      /* Generate a 0 terminated list of filter values.  */
      for (; list ; list = TREE_CHAIN (list))
	{
	  if (targetm.arm_eabi_unwinder)
	    vec_safe_push (cfun->eh->ehspec_data.arm_eabi, TREE_VALUE (list));
	  else
	    {
	      /* Look up each type in the list and encode its filter
		 value as a uleb128.  */
	      push_uleb128 (&cfun->eh->ehspec_data.other,
			    add_ttypes_entry (ttypes_hash, TREE_VALUE (list)));
	    }
	}
      if (targetm.arm_eabi_unwinder)
	vec_safe_push (cfun->eh->ehspec_data.arm_eabi, NULL_TREE);
      else
	vec_safe_push (cfun->eh->ehspec_data.other, (uchar)0);
    }

  return n->filter;
}

/* Generate the action filter values to be used for CATCH and
   ALLOWED_EXCEPTIONS regions.  When using dwarf2 exception regions,
   we use lots of landing pads, and so every type or list can share
   the same filter value, which saves table space.  */

void
assign_filter_values (void)
{
  int i;
  eh_region r;
  eh_catch c;

  vec_alloc (cfun->eh->ttype_data, 16);
  if (targetm.arm_eabi_unwinder)
    vec_alloc (cfun->eh->ehspec_data.arm_eabi, 64);
  else
    vec_alloc (cfun->eh->ehspec_data.other, 64);

  ehspec_hash_type ehspec (31);
  ttypes_hash_type ttypes (31);

  for (i = 1; vec_safe_iterate (cfun->eh->region_array, i, &r); ++i)
    {
      if (r == NULL)
	continue;

      switch (r->type)
	{
	case ERT_TRY:
	  for (c = r->u.eh_try.first_catch; c ; c = c->next_catch)
	    {
	      /* Whatever type_list is (NULL or true list), we build a list
		 of filters for the region.  */
	      c->filter_list = NULL_TREE;

	      if (c->type_list != NULL)
		{
		  /* Get a filter value for each of the types caught and store
		     them in the region's dedicated list.  */
		  tree tp_node = c->type_list;

		  for ( ; tp_node; tp_node = TREE_CHAIN (tp_node))
		    {
		      int flt
			= add_ttypes_entry (&ttypes, TREE_VALUE (tp_node));
		      tree flt_node = build_int_cst (integer_type_node, flt);

		      c->filter_list
			= tree_cons (NULL_TREE, flt_node, c->filter_list);
		    }
		}
	      else
		{
		  /* Get a filter value for the NULL list also since it
		     will need an action record anyway.  */
		  int flt = add_ttypes_entry (&ttypes, NULL);
		  tree flt_node = build_int_cst (integer_type_node, flt);

		  c->filter_list
		    = tree_cons (NULL_TREE, flt_node, NULL);
		}
	    }
	  break;

	case ERT_ALLOWED_EXCEPTIONS:
	  r->u.allowed.filter
	    = add_ehspec_entry (&ehspec, &ttypes, r->u.allowed.type_list);
	  break;

	default:
	  break;
	}
    }
}

/* Emit SEQ into the basic block just before INSN (which is assumed to
   be the first instruction of some existing BB) and return the newly
   produced block.  */
static basic_block
emit_to_new_bb_before (rtx_insn *seq, rtx insn)
{
  rtx_insn *last;
  basic_block bb;
  edge e;
  edge_iterator ei;

  /* If there happens to be a fallthru edge (possibly created by a
     cleanup_cfg call), we don't want it to go into the newly created
     landing pad or other EH construct.  */
  for (ei = ei_start (BLOCK_FOR_INSN (insn)->preds); (e = ei_safe_edge (ei)); )
    if (e->flags & EDGE_FALLTHRU)
      force_nonfallthru (e);
    else
      ei_next (&ei);
  last = emit_insn_before (seq, insn);
  if (BARRIER_P (last))
    last = PREV_INSN (last);
  bb = create_basic_block (seq, last, BLOCK_FOR_INSN (insn)->prev_bb);
  update_bb_for_insn (bb);
  bb->flags |= BB_SUPERBLOCK;
  return bb;
}
\f
/* A subroutine of dw2_build_landing_pads, also used for edge splitting
   at the rtl level.  Emit the code required by the target at a landing
   pad for the given region.  */

void
expand_dw2_landing_pad_for_region (eh_region region)
{
#ifdef HAVE_exception_receiver
  if (HAVE_exception_receiver)
    emit_insn (gen_exception_receiver ());
  else
#endif
#ifdef HAVE_nonlocal_goto_receiver
  if (HAVE_nonlocal_goto_receiver)
    emit_insn (gen_nonlocal_goto_receiver ());
  else
#endif
    { /* Nothing */ }

  if (region->exc_ptr_reg)
    emit_move_insn (region->exc_ptr_reg,
		    gen_rtx_REG (ptr_mode, EH_RETURN_DATA_REGNO (0)));
  if (region->filter_reg)
    emit_move_insn (region->filter_reg,
		    gen_rtx_REG (targetm.eh_return_filter_mode (),
				 EH_RETURN_DATA_REGNO (1)));
}

/* Expand the extra code needed at landing pads for dwarf2 unwinding.  */

static void
dw2_build_landing_pads (void)
{
  int i;
  eh_landing_pad lp;
  int e_flags = EDGE_FALLTHRU;

  /* If we're going to partition blocks, we need to be able to add
     new landing pads later, which means that we need to hold on to
     the post-landing-pad block.  Prevent it from being merged away.
     We'll remove this bit after partitioning.  */
  if (flag_reorder_blocks_and_partition)
    e_flags |= EDGE_PRESERVE;

  for (i = 1; vec_safe_iterate (cfun->eh->lp_array, i, &lp); ++i)
    {
      basic_block bb;
      rtx_insn *seq;
      edge e;

      if (lp == NULL || lp->post_landing_pad == NULL)
	continue;

      start_sequence ();

      lp->landing_pad = gen_label_rtx ();
      emit_label (lp->landing_pad);
      LABEL_PRESERVE_P (lp->landing_pad) = 1;

      expand_dw2_landing_pad_for_region (lp->region);

      seq = get_insns ();
      end_sequence ();

      bb = emit_to_new_bb_before (seq, label_rtx (lp->post_landing_pad));
      e = make_edge (bb, bb->next_bb, e_flags);
      e->count = bb->count;
      e->probability = REG_BR_PROB_BASE;
      if (current_loops)
	{
	  struct loop *loop = bb->next_bb->loop_father;
	  /* If we created a pre-header block, add the new block to the
	     outer loop, otherwise to the loop itself.  */
	  if (bb->next_bb == loop->header)
	    add_bb_to_loop (bb, loop_outer (loop));
	  else
	    add_bb_to_loop (bb, loop);
	}
    }
}

\f
static vec<int> sjlj_lp_call_site_index;

/* Process all active landing pads.  Assign each one a compact dispatch
   index, and a call-site index.  */

static int
sjlj_assign_call_site_values (void)
{
  action_hash_type ar_hash (31);
  int i, disp_index;
  eh_landing_pad lp;

  vec_alloc (crtl->eh.action_record_data, 64);

  disp_index = 0;
  call_site_base = 1;
  for (i = 1; vec_safe_iterate (cfun->eh->lp_array, i, &lp); ++i)
    if (lp && lp->post_landing_pad)
      {
	int action, call_site;

	/* First: build the action table.  */
	action = collect_one_action_chain (&ar_hash, lp->region);

	/* Next: assign call-site values.  In dwarf2 terms, this would be
	   the region number assigned by convert_to_eh_region_ranges, but
	   we handle no-action and must-not-throw differently.  */
	/* Map must-not-throw to otherwise unused call-site index 0.  */
	if (action == -2)
	  call_site = 0;
	/* Map no-action to otherwise unused call-site index -1.  */
	else if (action == -1)
	  call_site = -1;
	/* Otherwise, look it up in the table.  */
	else
	  call_site = add_call_site (GEN_INT (disp_index), action, 0);
	sjlj_lp_call_site_index[i] = call_site;

	disp_index++;
      }

  return disp_index;
}

/* Emit code to record the current call-site index before every
   insn that can throw.  */

static void
sjlj_mark_call_sites (void)
{
  int last_call_site = -2;
  rtx_insn *insn;
  rtx mem;

  for (insn = get_insns (); insn ; insn = NEXT_INSN (insn))
    {
      eh_landing_pad lp;
      eh_region r;
      bool nothrow;
      int this_call_site;
      rtx_insn *before, *p;

      /* Reset value tracking at extended basic block boundaries.  */
      if (LABEL_P (insn))
	last_call_site = -2;

      /* If the function allocates dynamic stack space, the context must
	 be updated after every allocation/deallocation accordingly.  */
      if (NOTE_P (insn) && NOTE_KIND (insn) == NOTE_INSN_UPDATE_SJLJ_CONTEXT)
	{
	  rtx buf_addr;

	  start_sequence ();
	  buf_addr = plus_constant (Pmode, XEXP (crtl->eh.sjlj_fc, 0),
				    sjlj_fc_jbuf_ofs);
	  expand_builtin_update_setjmp_buf (buf_addr);
	  p = get_insns ();
	  end_sequence ();
	  emit_insn_before (p, insn);
	}

      if (! INSN_P (insn))
	continue;

      nothrow = get_eh_region_and_lp_from_rtx (insn, &r, &lp);
      if (nothrow)
	continue;
      if (lp)
	this_call_site = sjlj_lp_call_site_index[lp->index];
      else if (r == NULL)
	{
	  /* Calls (and trapping insns) without notes are outside any
	     exception handling region in this function.  Mark them as
	     no action.  */
	  this_call_site = -1;
	}
      else
	{
	  gcc_assert (r->type == ERT_MUST_NOT_THROW);
	  this_call_site = 0;
	}

      if (this_call_site != -1)
	crtl->uses_eh_lsda = 1;

      if (this_call_site == last_call_site)
	continue;

      /* Don't separate a call from its argument loads.  */
      before = insn;
      if (CALL_P (insn))
	before = find_first_parameter_load (insn, NULL);

      start_sequence ();
      mem = adjust_address (crtl->eh.sjlj_fc, TYPE_MODE (integer_type_node),
			    sjlj_fc_call_site_ofs);
      emit_move_insn (mem, gen_int_mode (this_call_site, GET_MODE (mem)));
      p = get_insns ();
      end_sequence ();

      emit_insn_before (p, before);
      last_call_site = this_call_site;
    }
}

/* Construct the SjLj_Function_Context.  */

static void
sjlj_emit_function_enter (rtx_code_label *dispatch_label)
{
  rtx_insn *fn_begin, *seq;
  rtx fc, mem;
  bool fn_begin_outside_block;
  rtx personality = get_personality_function (current_function_decl);

  fc = crtl->eh.sjlj_fc;

  start_sequence ();

  /* We're storing this libcall's address into memory instead of
     calling it directly.  Thus, we must call assemble_external_libcall
     here, as we cannot depend on emit_library_call to do it for us.  */
  assemble_external_libcall (personality);
  mem = adjust_address (fc, Pmode, sjlj_fc_personality_ofs);
  emit_move_insn (mem, personality);

  mem = adjust_address (fc, Pmode, sjlj_fc_lsda_ofs);
  if (crtl->uses_eh_lsda)
    {
      char buf[20];
      rtx sym;

      ASM_GENERATE_INTERNAL_LABEL (buf, "LLSDA", current_function_funcdef_no);
      sym = gen_rtx_SYMBOL_REF (Pmode, ggc_strdup (buf));
      SYMBOL_REF_FLAGS (sym) = SYMBOL_FLAG_LOCAL;
      emit_move_insn (mem, sym);
    }
  else
    emit_move_insn (mem, const0_rtx);

  if (dispatch_label)
    {
#ifdef DONT_USE_BUILTIN_SETJMP
      rtx x;
      x = emit_library_call_value (setjmp_libfunc, NULL_RTX, LCT_RETURNS_TWICE,
				   TYPE_MODE (integer_type_node), 1,
				   plus_constant (Pmode, XEXP (fc, 0),
						  sjlj_fc_jbuf_ofs), Pmode);

      emit_cmp_and_jump_insns (x, const0_rtx, NE, 0,
			       TYPE_MODE (integer_type_node), 0,
			       dispatch_label, REG_BR_PROB_BASE / 100);
#else
      expand_builtin_setjmp_setup (plus_constant (Pmode, XEXP (fc, 0),
						  sjlj_fc_jbuf_ofs),
				   dispatch_label);
#endif
    }

  emit_library_call (unwind_sjlj_register_libfunc, LCT_NORMAL, VOIDmode,
		     1, XEXP (fc, 0), Pmode);

  seq = get_insns ();
  end_sequence ();

  /* ??? Instead of doing this at the beginning of the function,
     do this in a block that is at loop level 0 and dominates all
     can_throw_internal instructions.  */

  fn_begin_outside_block = true;
  for (fn_begin = get_insns (); ; fn_begin = NEXT_INSN (fn_begin))
    if (NOTE_P (fn_begin))
      {
	if (NOTE_KIND (fn_begin) == NOTE_INSN_FUNCTION_BEG)
	  break;
	else if (NOTE_INSN_BASIC_BLOCK_P (fn_begin))
	  fn_begin_outside_block = false;
      }

  if (fn_begin_outside_block)
    insert_insn_on_edge (seq, single_succ_edge (ENTRY_BLOCK_PTR_FOR_FN (cfun)));
  else
    emit_insn_after (seq, fn_begin);
}

/* Call back from expand_function_end to know where we should put
   the call to unwind_sjlj_unregister_libfunc if needed.  */

void
sjlj_emit_function_exit_after (rtx_insn *after)
{
  crtl->eh.sjlj_exit_after = after;
}

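/* Emit the call to unwind_sjlj_unregister_libfunc at the exit point
   recorded by sjlj_emit_function_exit_after.  */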
static void
sjlj_emit_function_exit (void)
{
  rtx_insn *seq, *insn;

  start_sequence ();

  emit_library_call (unwind_sjlj_unregister_libfunc, LCT_NORMAL, VOIDmode,
		     1, XEXP (crtl->eh.sjlj_fc, 0), Pmode);

  seq = get_insns ();
  end_sequence ();

  /* ??? Really this can be done in any block at loop level 0 that
     post-dominates all can_throw_internal instructions.  This is
     the last possible moment.  */

  insn = crtl->eh.sjlj_exit_after;
  if (LABEL_P (insn))
    insn = NEXT_INSN (insn);

  emit_insn_after (seq, insn);
}

static void
sjlj_emit_dispatch_table (rtx_code_label *dispatch_label, int num_dispatch)
{
  machine_mode unwind_word_mode = targetm.unwind_word_mode ();
  machine_mode filter_mode = targetm.eh_return_filter_mode ();
  eh_landing_pad lp;
  rtx mem, fc, exc_ptr_reg, filter_reg;
  rtx_insn *seq;
  basic_block bb;
  eh_region r;
  edge e;
  int i, disp_index;
  vec<tree> dispatch_labels = vNULL;

  fc = crtl->eh.sjlj_fc;

  start_sequence ();

  emit_label (dispatch_label);

#ifndef DONT_USE_BUILTIN_SETJMP
  expand_builtin_setjmp_receiver (dispatch_label);

  /* The caller of expand_builtin_setjmp_receiver is responsible for
     making sure that the label doesn't vanish.  The only other caller
     is the expander for __builtin_setjmp_receiver, which places this
     label on the nonlocal_goto_label list.  Since we're modeling these
     CFG edges more exactly, we can use the forced_labels list instead.  */
  LABEL_PRESERVE_P (dispatch_label) = 1;
  forced_labels
    = gen_rtx_INSN_LIST (VOIDmode, dispatch_label, forced_labels);
#endif

  /* Load up exc_ptr and filter values from the function context.  */
  mem = adjust_address (fc, unwind_word_mode, sjlj_fc_data_ofs);
  if (unwind_word_mode != ptr_mode)
    {
#ifdef POINTERS_EXTEND_UNSIGNED
      mem = convert_memory_address (ptr_mode, mem);
#else
      mem = convert_to_mode (ptr_mode, mem, 0);
#endif
    }
  exc_ptr_reg = force_reg (ptr_mode, mem);

  mem = adjust_address (fc, unwind_word_mode,
			sjlj_fc_data_ofs + GET_MODE_SIZE (unwind_word_mode));
  if (unwind_word_mode != filter_mode)
    mem = convert_to_mode (filter_mode, mem, 0);
  filter_reg = force_reg (filter_mode, mem);

  /* Jump to one of the directly reachable regions.  */

  disp_index = 0;
  rtx_code_label *first_reachable_label = NULL;

  /* If there's exactly one call site in the function, don't bother
     generating a switch statement.  */
  if (num_dispatch > 1)
    dispatch_labels.create (num_dispatch);

  for (i = 1; vec_safe_iterate (cfun->eh->lp_array, i, &lp); ++i)
    if (lp && lp->post_landing_pad)
      {
	rtx_insn *seq2;
	rtx_code_label *label;

	start_sequence ();

	lp->landing_pad = dispatch_label;

	if (num_dispatch > 1)
	  {
	    tree t_label, case_elt, t;

	    t_label = create_artificial_label (UNKNOWN_LOCATION);
	    t = build_int_cst (integer_type_node, disp_index);
	    case_elt = build_case_label (t, NULL, t_label);
	    dispatch_labels.quick_push (case_elt);
	    label = jump_target_rtx (t_label);
	  }
	else
	  label = gen_label_rtx ();

	if (disp_index == 0)
	  first_reachable_label = label;
	emit_label (label);

	r = lp->region;
	if (r->exc_ptr_reg)
	  emit_move_insn (r->exc_ptr_reg, exc_ptr_reg);
	if (r->filter_reg)
	  emit_move_insn (r->filter_reg, filter_reg);

	seq2 = get_insns ();
	end_sequence ();

	rtx_insn *before = label_rtx (lp->post_landing_pad);
	bb = emit_to_new_bb_before (seq2, before);
	e = make_edge (bb, bb->next_bb, EDGE_FALLTHRU);
	e->count = bb->count;
	e->probability = REG_BR_PROB_BASE;
	if (current_loops)
	  {
	    struct loop *loop = bb->next_bb->loop_father;
	    /* If we created a pre-header block, add the new block to the
	       outer loop, otherwise to the loop itself.  */
	    if (bb->next_bb == loop->header)
	      add_bb_to_loop (bb, loop_outer (loop));
	    else
	      add_bb_to_loop (bb, loop);
	    /* ??? For multiple dispatches we will end up with edges
	       from the loop tree root into this loop, making it a
	       multiple-entry loop.  Discard all affected loops.  */
	    if (num_dispatch > 1)
	      {
		for (loop = bb->loop_father;
		     loop_outer (loop); loop = loop_outer (loop))
		  mark_loop_for_removal (loop);
	      }
	  }

	disp_index++;
      }
  gcc_assert (disp_index == num_dispatch);

  if (num_dispatch > 1)
    {
      rtx disp = adjust_address (fc, TYPE_MODE (integer_type_node),
				 sjlj_fc_call_site_ofs);
      expand_sjlj_dispatch_table (disp, dispatch_labels);
    }

  seq = get_insns ();
  end_sequence ();

  bb = emit_to_new_bb_before (seq, first_reachable_label);
  if (num_dispatch == 1)
    {
      e = make_edge (bb, bb->next_bb, EDGE_FALLTHRU);
      e->count = bb->count;
      e->probability = REG_BR_PROB_BASE;
      if (current_loops)
	{
	  struct loop *loop = bb->next_bb->loop_father;
	  /* If we created a pre-header block, add the new block to the
	     outer loop, otherwise to the loop itself.  */
	  if (bb->next_bb == loop->header)
	    add_bb_to_loop (bb, loop_outer (loop));
	  else
	    add_bb_to_loop (bb, loop);
	}
    }
  else
    {
      /* We are not wiring up edges here, but as the dispatcher call
	 is at function begin simply associate the block with the
	 outermost (non-)loop.  */
      if (current_loops)
	add_bb_to_loop (bb, current_loops->tree_root);
    }
}

static void
sjlj_build_landing_pads (void)
{
  int num_dispatch;

  num_dispatch = vec_safe_length (cfun->eh->lp_array);
  if (num_dispatch == 0)
    return;
  sjlj_lp_call_site_index.safe_grow_cleared (num_dispatch);

  num_dispatch = sjlj_assign_call_site_values ();
  if (num_dispatch > 0)
    {
      rtx_code_label *dispatch_label = gen_label_rtx ();
      int align = STACK_SLOT_ALIGNMENT (sjlj_fc_type_node,
					TYPE_MODE (sjlj_fc_type_node),
					TYPE_ALIGN (sjlj_fc_type_node));
      crtl->eh.sjlj_fc
	= assign_stack_local (TYPE_MODE (sjlj_fc_type_node),
			      int_size_in_bytes (sjlj_fc_type_node),
			      align);

      sjlj_mark_call_sites ();
      sjlj_emit_function_enter (dispatch_label);
      sjlj_emit_dispatch_table (dispatch_label, num_dispatch);
      sjlj_emit_function_exit ();
    }

  /* If we do not have any landing pads, we may still need to register a
     personality routine and (empty) LSDA to handle must-not-throw regions.  */
  else if (function_needs_eh_personality (cfun) != eh_personality_none)
    {
      int align = STACK_SLOT_ALIGNMENT (sjlj_fc_type_node,
					TYPE_MODE (sjlj_fc_type_node),
					TYPE_ALIGN (sjlj_fc_type_node));
      crtl->eh.sjlj_fc
	= assign_stack_local (TYPE_MODE (sjlj_fc_type_node),
			      int_size_in_bytes (sjlj_fc_type_node),
			      align);

      sjlj_mark_call_sites ();
      sjlj_emit_function_enter (NULL);
      sjlj_emit_function_exit ();
    }

  sjlj_lp_call_site_index.release ();
}

/* Update the sjlj function context.  This function should be called
   whenever we allocate or deallocate dynamic stack space.  */

void
update_sjlj_context (void)
{
  if (!flag_exceptions)
    return;

  emit_note (NOTE_INSN_UPDATE_SJLJ_CONTEXT);
}

/* After initial rtl generation, call back to finish generating
   exception support code.  */

void
finish_eh_generation (void)
{
  basic_block bb;

  /* Construct the landing pads.  */
  if (targetm_common.except_unwind_info (&global_options) == UI_SJLJ)
    sjlj_build_landing_pads ();
  else
    dw2_build_landing_pads ();
  break_superblocks ();

  if (targetm_common.except_unwind_info (&global_options) == UI_SJLJ
      /* Kludge for Alpha (see alpha_gp_save_rtx).  */
      || single_succ_edge (ENTRY_BLOCK_PTR_FOR_FN (cfun))->insns.r)
    commit_edge_insertions ();

  /* Redirect all EH edges from the post_landing_pad to the landing pad.  */
  FOR_EACH_BB_FN (bb, cfun)
    {
      eh_landing_pad lp;
      edge_iterator ei;
      edge e;

      lp = get_eh_landing_pad_from_rtx (BB_END (bb));

      FOR_EACH_EDGE (e, ei, bb->succs)
	if (e->flags & EDGE_EH)
	  break;

      /* We should not have generated any new throwing insns during this
	 pass, and we should not have lost any EH edges, so we only need
	 to handle two cases here:
	 (1) reachable handler and an existing edge to post-landing-pad,
	 (2) no reachable handler and no edge.  */
      gcc_assert ((lp != NULL) == (e != NULL));
      if (lp != NULL)
	{
	  gcc_assert (BB_HEAD (e->dest) == label_rtx (lp->post_landing_pad));

	  redirect_edge_succ (e, BLOCK_FOR_INSN (lp->landing_pad));
	  e->flags |= (CALL_P (BB_END (bb))
		       ? EDGE_ABNORMAL | EDGE_ABNORMAL_CALL
		       : EDGE_ABNORMAL);
	}
    }
}
\f
/* This section handles removing dead code for flow.  */

void
remove_eh_landing_pad (eh_landing_pad lp)
{
  eh_landing_pad *pp;

  for (pp = &lp->region->landing_pads; *pp != lp; pp = &(*pp)->next_lp)
    continue;
  *pp = lp->next_lp;

  if (lp->post_landing_pad)
    EH_LANDING_PAD_NR (lp->post_landing_pad) = 0;
  (*cfun->eh->lp_array)[lp->index] = NULL;
}

/* Splice the EH region at PP from the region tree.  */

static void
remove_eh_handler_splicer (eh_region *pp)
{
  eh_region region = *pp;
  eh_landing_pad lp;

  for (lp = region->landing_pads; lp ; lp = lp->next_lp)
    {
      if (lp->post_landing_pad)
	EH_LANDING_PAD_NR (lp->post_landing_pad) = 0;
      (*cfun->eh->lp_array)[lp->index] = NULL;
    }

  if (region->inner)
    {
      eh_region p, outer;
      outer = region->outer;

      *pp = p = region->inner;
      do
	{
	  p->outer = outer;
	  pp = &p->next_peer;
	  p = *pp;
	}
      while (p);
    }
  *pp = region->next_peer;

  (*cfun->eh->region_array)[region->index] = NULL;
}

/* Splice a single EH region REGION from the region tree.

   To unlink REGION, we need to find the pointer to it with a relatively
   expensive search in REGION's outer region.  If you are going to
   remove a number of handlers, using remove_unreachable_eh_regions may
   be a better option.  */

void
remove_eh_handler (eh_region region)
{
  eh_region *pp, *pp_start, p, outer;

  outer = region->outer;
  if (outer)
    pp_start = &outer->inner;
  else
    pp_start = &cfun->eh->region_tree;
  for (pp = pp_start, p = *pp; p != region; pp = &p->next_peer, p = *pp)
    continue;

  remove_eh_handler_splicer (pp);
}

/* Worker for remove_unreachable_eh_regions.
   PP is a pointer to the region to start a region tree depth-first
   search from.  R_REACHABLE is the set of regions that have to be
   preserved.  */

static void
remove_unreachable_eh_regions_worker (eh_region *pp, sbitmap r_reachable)
{
  while (*pp)
    {
      eh_region region = *pp;
      remove_unreachable_eh_regions_worker (&region->inner, r_reachable);
      if (!bitmap_bit_p (r_reachable, region->index))
	remove_eh_handler_splicer (pp);
      else
	pp = &region->next_peer;
    }
}

/* Splice all EH regions *not* marked in R_REACHABLE from the region tree.
   Do this by traversing the EH tree top-down and splicing out regions
   that are not marked.  By removing regions from the leaves, we avoid
   costly searches in the region tree.  */

void
remove_unreachable_eh_regions (sbitmap r_reachable)
{
  remove_unreachable_eh_regions_worker (&cfun->eh->region_tree, r_reachable);
}

/* Invokes CALLBACK for every exception handler landing pad label.
   Only used by reload hackery; should not be used by new code.  */

void
for_each_eh_label (void (*callback) (rtx))
{
  eh_landing_pad lp;
  int i;

  for (i = 1; vec_safe_iterate (cfun->eh->lp_array, i, &lp); ++i)
    {
      if (lp)
	{
	  rtx_code_label *lab = lp->landing_pad;
	  if (lab && LABEL_P (lab))
	    (*callback) (lab);
	}
    }
}
\f
/* Create the REG_EH_REGION note for INSN, given its ECF_FLAGS for a
   call insn.

   At the gimple level, we use LP_NR
       > 0 : The statement transfers to landing pad LP_NR
       = 0 : The statement is outside any EH region
       < 0 : The statement is within MUST_NOT_THROW region -LP_NR.

   At the rtl level, we use LP_NR
       > 0 : The insn transfers to landing pad LP_NR
       = 0 : The insn cannot throw
       < 0 : The insn is within MUST_NOT_THROW region -LP_NR
       = INT_MIN : The insn cannot throw or execute a nonlocal-goto.
       missing note: The insn is outside any EH region.

   ??? This difference probably ought to be avoided.  We could stand
   to record nothrow for arbitrary gimple statements, and so avoid
   some moderately complex lookups in stmt_could_throw_p.  Perhaps
   NOTHROW should be mapped on both sides to INT_MIN.  Perhaps the
   no-nonlocal-goto property should be recorded elsewhere as a bit
   on the call_insn directly.  Perhaps we should make more use of
   attaching the trees to call_insns (reachable via symbol_ref in
   direct call cases) and just pull the data out of the trees.  */

void
make_reg_eh_region_note (rtx_insn *insn, int ecf_flags, int lp_nr)
{
  rtx value;
  if (ecf_flags & ECF_NOTHROW)
    value = const0_rtx;
  else if (lp_nr != 0)
    value = GEN_INT (lp_nr);
  else
    return;
  add_reg_note (insn, REG_EH_REGION, value);
}
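
/* As an informal illustration of the encoding above: a call known to
   unwind to landing pad 3 gets a REG_EH_REGION note with value 3; a
   call inside MUST_NOT_THROW region 2 gets value -2; and a call whose
   ECF_NOTHROW flag is set gets value 0, meaning it cannot throw.  */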

/* Create a REG_EH_REGION note for a CALL_INSN that can neither throw
   nor perform a non-local goto.  Replace the region note if it
   already exists.  */

void
make_reg_eh_region_note_nothrow_nononlocal (rtx_insn *insn)
{
  rtx note = find_reg_note (insn, REG_EH_REGION, NULL_RTX);
  rtx intmin = GEN_INT (INT_MIN);

  if (note != 0)
    XEXP (note, 0) = intmin;
  else
    add_reg_note (insn, REG_EH_REGION, intmin);
}

/* Return true if INSN could throw, assuming no REG_EH_REGION note
   to the contrary.  */

bool
insn_could_throw_p (const_rtx insn)
{
  if (!flag_exceptions)
    return false;
  if (CALL_P (insn))
    return true;
  if (INSN_P (insn) && cfun->can_throw_non_call_exceptions)
    return may_trap_p (PATTERN (insn));
  return false;
}

/* Copy a REG_EH_REGION note to each insn that might throw beginning
   at FIRST and ending at LAST.  NOTE_OR_INSN is either the source insn
   to look for a note, or the note itself.  */

void
copy_reg_eh_region_note_forward (rtx note_or_insn, rtx_insn *first, rtx last)
{
  rtx_insn *insn;
  rtx note = note_or_insn;

  if (INSN_P (note_or_insn))
    {
      note = find_reg_note (note_or_insn, REG_EH_REGION, NULL_RTX);
      if (note == NULL)
	return;
    }
  note = XEXP (note, 0);

  for (insn = first; insn != last ; insn = NEXT_INSN (insn))
    if (!find_reg_note (insn, REG_EH_REGION, NULL_RTX)
	&& insn_could_throw_p (insn))
      add_reg_note (insn, REG_EH_REGION, note);
}

/* Likewise, but iterate backward.  */

void
copy_reg_eh_region_note_backward (rtx note_or_insn, rtx_insn *last, rtx first)
{
  rtx_insn *insn;
  rtx note = note_or_insn;

  if (INSN_P (note_or_insn))
    {
      note = find_reg_note (note_or_insn, REG_EH_REGION, NULL_RTX);
      if (note == NULL)
	return;
    }
  note = XEXP (note, 0);

  for (insn = last; insn != first; insn = PREV_INSN (insn))
    if (insn_could_throw_p (insn))
      add_reg_note (insn, REG_EH_REGION, note);
}


/* Extract all EH information from INSN.  Return true if the insn
   was marked NOTHROW.  */

static bool
get_eh_region_and_lp_from_rtx (const_rtx insn, eh_region *pr,
			       eh_landing_pad *plp)
{
  eh_landing_pad lp = NULL;
  eh_region r = NULL;
  bool ret = false;
  rtx note;
  int lp_nr;

  if (! INSN_P (insn))
    goto egress;

  if (NONJUMP_INSN_P (insn)
      && GET_CODE (PATTERN (insn)) == SEQUENCE)
    insn = XVECEXP (PATTERN (insn), 0, 0);

  note = find_reg_note (insn, REG_EH_REGION, NULL_RTX);
  if (!note)
    {
      ret = !insn_could_throw_p (insn);
      goto egress;
    }

  lp_nr = INTVAL (XEXP (note, 0));
  if (lp_nr == 0 || lp_nr == INT_MIN)
    {
      ret = true;
      goto egress;
    }

  if (lp_nr < 0)
    r = (*cfun->eh->region_array)[-lp_nr];
  else
    {
      lp = (*cfun->eh->lp_array)[lp_nr];
      r = lp->region;
    }

 egress:
  *plp = lp;
  *pr = r;
  return ret;
}

/* Return the landing pad to which INSN may go, or NULL if it does not
   have a reachable landing pad within this function.  */

eh_landing_pad
get_eh_landing_pad_from_rtx (const_rtx insn)
{
  eh_landing_pad lp;
  eh_region r;

  get_eh_region_and_lp_from_rtx (insn, &r, &lp);
  return lp;
}

/* Return the region to which INSN may go, or NULL if it does not
   have a reachable region within this function.  */

eh_region
get_eh_region_from_rtx (const_rtx insn)
{
  eh_landing_pad lp;
  eh_region r;

  get_eh_region_and_lp_from_rtx (insn, &r, &lp);
  return r;
}

/* Return true if INSN throws and is caught by something in this function.  */

bool
can_throw_internal (const_rtx insn)
{
  return get_eh_landing_pad_from_rtx (insn) != NULL;
}

/* Return true if INSN throws and escapes from the current function.  */

bool
can_throw_external (const_rtx insn)
{
  eh_landing_pad lp;
  eh_region r;
  bool nothrow;

  if (! INSN_P (insn))
    return false;

  if (NONJUMP_INSN_P (insn)
      && GET_CODE (PATTERN (insn)) == SEQUENCE)
    {
      rtx_sequence *seq = as_a <rtx_sequence *> (PATTERN (insn));
      int i, n = seq->len ();

      for (i = 0; i < n; i++)
	if (can_throw_external (seq->element (i)))
	  return true;

      return false;
    }

  nothrow = get_eh_region_and_lp_from_rtx (insn, &r, &lp);

  /* If we can't throw, we obviously can't throw external.  */
  if (nothrow)
    return false;

  /* If we have an internal landing pad, then we're not external.  */
  if (lp != NULL)
    return false;

  /* If we're not within an EH region, then we are external.  */
  if (r == NULL)
    return true;

  /* The only thing that ought to be left is MUST_NOT_THROW regions,
     which don't always have landing pads.  */
  gcc_assert (r->type == ERT_MUST_NOT_THROW);
  return false;
}
1919
1920 /* Return true if INSN cannot throw at all. */
1921
1922 bool
1923 insn_nothrow_p (const_rtx insn)
1924 {
1925 eh_landing_pad lp;
1926 eh_region r;
1927
1928 if (! INSN_P (insn))
1929 return true;
1930
1931 if (NONJUMP_INSN_P (insn)
1932 && GET_CODE (PATTERN (insn)) == SEQUENCE)
1933 {
1934 rtx_sequence *seq = as_a <rtx_sequence *> (PATTERN (insn));
1935 int i, n = seq->len ();
1936
1937 for (i = 0; i < n; i++)
1938 if (!insn_nothrow_p (seq->element (i)))
1939 return false;
1940
1941 return true;
1942 }
1943
1944 return get_eh_region_and_lp_from_rtx (insn, &r, &lp);
1945 }
1946
1947 /* Return true if INSN can perform a non-local goto. */
1948 /* ??? This test is here in this file because it (ab)uses REG_EH_REGION. */
1949
1950 bool
1951 can_nonlocal_goto (const rtx_insn *insn)
1952 {
1953 if (nonlocal_goto_handler_labels && CALL_P (insn))
1954 {
1955 rtx note = find_reg_note (insn, REG_EH_REGION, NULL_RTX);
1956 if (!note || INTVAL (XEXP (note, 0)) != INT_MIN)
1957 return true;
1958 }
1959 return false;
1960 }
1961 \f
1962 /* Set TREE_NOTHROW and crtl->all_throwers_are_sibcalls. */
1963
1964 static unsigned int
1965 set_nothrow_function_flags (void)
1966 {
1967 rtx_insn *insn;
1968
1969 crtl->nothrow = 1;
1970
1971 /* Assume crtl->all_throwers_are_sibcalls until we encounter
1972 something that can throw an exception. We specifically exempt
1973 CALL_INSNs that are SIBLING_CALL_P, as these are really jumps,
1974 and can't throw. Most CALL_INSNs are not SIBLING_CALL_P, so this
1975 is optimistic. */
1976
1977 crtl->all_throwers_are_sibcalls = 1;
1978
1979 /* If we don't know that this implementation of the function will
1980 actually be used, then we must not set TREE_NOTHROW, since
1981 callers must not assume that this function does not throw. */
1982 if (TREE_NOTHROW (current_function_decl))
1983 return 0;
1984
1985 if (! flag_exceptions)
1986 return 0;
1987
1988 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
1989 if (can_throw_external (insn))
1990 {
1991 crtl->nothrow = 0;
1992
1993 if (!CALL_P (insn) || !SIBLING_CALL_P (insn))
1994 {
1995 crtl->all_throwers_are_sibcalls = 0;
1996 return 0;
1997 }
1998 }
1999
2000 if (crtl->nothrow
2001 && (cgraph_node::get (current_function_decl)->get_availability ()
2002 >= AVAIL_AVAILABLE))
2003 {
2004 struct cgraph_node *node = cgraph_node::get (current_function_decl);
2005 struct cgraph_edge *e;
2006 for (e = node->callers; e; e = e->next_caller)
2007 e->can_throw_external = false;
2008 node->set_nothrow_flag (true);
2009
2010 if (dump_file)
2011 fprintf (dump_file, "Marking function nothrow: %s\n\n",
2012 current_function_name ());
2013 }
2014 return 0;
2015 }
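/* A hedged example of the effect: given -fexceptions, a function like

     static int add_example (int a, int b) { return a + b; }

   contains no calls and no trapping insns, so the loop above never
   sees can_throw_external return true; crtl->nothrow remains set and
   the decl is marked nothrow, which lets callers prune the EH edges
   around calls to it.  */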
2016
2017 namespace {
2018
2019 const pass_data pass_data_set_nothrow_function_flags =
2020 {
2021 RTL_PASS, /* type */
2022 "nothrow", /* name */
2023 OPTGROUP_NONE, /* optinfo_flags */
2024 TV_NONE, /* tv_id */
2025 0, /* properties_required */
2026 0, /* properties_provided */
2027 0, /* properties_destroyed */
2028 0, /* todo_flags_start */
2029 0, /* todo_flags_finish */
2030 };
2031
2032 class pass_set_nothrow_function_flags : public rtl_opt_pass
2033 {
2034 public:
2035 pass_set_nothrow_function_flags (gcc::context *ctxt)
2036 : rtl_opt_pass (pass_data_set_nothrow_function_flags, ctxt)
2037 {}
2038
2039 /* opt_pass methods: */
2040 virtual unsigned int execute (function *)
2041 {
2042 return set_nothrow_function_flags ();
2043 }
2044
2045 }; // class pass_set_nothrow_function_flags
2046
2047 } // anon namespace
2048
2049 rtl_opt_pass *
2050 make_pass_set_nothrow_function_flags (gcc::context *ctxt)
2051 {
2052 return new pass_set_nothrow_function_flags (ctxt);
2053 }
2054
2055 \f
2056 /* Various hooks for unwind library. */
2057
2058 /* Expand the EH support builtin functions:
2059 __builtin_eh_pointer and __builtin_eh_filter. */
2060
2061 static eh_region
2062 expand_builtin_eh_common (tree region_nr_t)
2063 {
2064 HOST_WIDE_INT region_nr;
2065 eh_region region;
2066
2067 gcc_assert (tree_fits_shwi_p (region_nr_t));
2068 region_nr = tree_to_shwi (region_nr_t);
2069
2070 region = (*cfun->eh->region_array)[region_nr];
2071
2072 /* ??? We shouldn't have been able to delete an eh region without
2073 deleting all the code that depended on it. */
2074 gcc_assert (region != NULL);
2075
2076 return region;
2077 }
2078
2079 /* Expand to the exc_ptr value from the given eh region. */
2080
2081 rtx
2082 expand_builtin_eh_pointer (tree exp)
2083 {
2084 eh_region region
2085 = expand_builtin_eh_common (CALL_EXPR_ARG (exp, 0));
2086 if (region->exc_ptr_reg == NULL)
2087 region->exc_ptr_reg = gen_reg_rtx (ptr_mode);
2088 return region->exc_ptr_reg;
2089 }
2090
2091 /* Expand to the filter value from the given eh region. */
2092
2093 rtx
2094 expand_builtin_eh_filter (tree exp)
2095 {
2096 eh_region region
2097 = expand_builtin_eh_common (CALL_EXPR_ARG (exp, 0));
2098 if (region->filter_reg == NULL)
2099 region->filter_reg = gen_reg_rtx (targetm.eh_return_filter_mode ());
2100 return region->filter_reg;
2101 }
2102
2103 /* Copy the exc_ptr and filter values from one landing pad's registers
2104 to another. This is used to inline the resx statement. */
2105
2106 rtx
2107 expand_builtin_eh_copy_values (tree exp)
2108 {
2109 eh_region dst
2110 = expand_builtin_eh_common (CALL_EXPR_ARG (exp, 0));
2111 eh_region src
2112 = expand_builtin_eh_common (CALL_EXPR_ARG (exp, 1));
2113 machine_mode fmode = targetm.eh_return_filter_mode ();
2114
2115 if (dst->exc_ptr_reg == NULL)
2116 dst->exc_ptr_reg = gen_reg_rtx (ptr_mode);
2117 if (src->exc_ptr_reg == NULL)
2118 src->exc_ptr_reg = gen_reg_rtx (ptr_mode);
2119
2120 if (dst->filter_reg == NULL)
2121 dst->filter_reg = gen_reg_rtx (fmode);
2122 if (src->filter_reg == NULL)
2123 src->filter_reg = gen_reg_rtx (fmode);
2124
2125 emit_move_insn (dst->exc_ptr_reg, src->exc_ptr_reg);
2126 emit_move_insn (dst->filter_reg, src->filter_reg);
2127
2128 return const0_rtx;
2129 }
2130
2131 /* Do any necessary initialization to access arbitrary stack frames.
2132 On the SPARC, this means flushing the register windows. */
2133
2134 void
2135 expand_builtin_unwind_init (void)
2136 {
2137 /* Set this so all the registers get saved in our frame; we need to be
2138 able to copy the saved values for any registers from frames we unwind. */
2139 crtl->saves_all_registers = 1;
2140
2141 #ifdef SETUP_FRAME_ADDRESSES
2142 SETUP_FRAME_ADDRESSES ();
2143 #endif
2144 }
2145
2146 /* Map a non-negative number to an eh return data register number; expands
2147 to -1 if no return data register is associated with the input number.
2148 At least the inputs 0 and 1 must be mapped; the target may provide more. */
2149
2150 rtx
2151 expand_builtin_eh_return_data_regno (tree exp)
2152 {
2153 tree which = CALL_EXPR_ARG (exp, 0);
2154 unsigned HOST_WIDE_INT iwhich;
2155
2156 if (TREE_CODE (which) != INTEGER_CST)
2157 {
2158 error ("argument of %<__builtin_eh_return_regno%> must be constant");
2159 return constm1_rtx;
2160 }
2161
2162 iwhich = tree_to_uhwi (which);
2163 iwhich = EH_RETURN_DATA_REGNO (iwhich);
2164 if (iwhich == INVALID_REGNUM)
2165 return constm1_rtx;
2166
2167 #ifdef DWARF_FRAME_REGNUM
2168 iwhich = DWARF_FRAME_REGNUM (iwhich);
2169 #else
2170 iwhich = DBX_REGISTER_NUMBER (iwhich);
2171 #endif
2172
2173 return GEN_INT (iwhich);
2174 }
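/* A hedged sketch of the typical consumer: a language personality
   routine installing the exception object and filter value before
   resuming in a landing pad, along the lines of what the C++
   personality does:

     _Unwind_SetGR (context, __builtin_eh_return_data_regno (0),
                    (_Unwind_Ptr) exception_object);
     _Unwind_SetGR (context, __builtin_eh_return_data_regno (1),
                    handler_switch_value);
     _Unwind_SetIP (context, landing_pad);
     return _URC_INSTALL_CONTEXT;  */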
2175
2176 /* Given a value extracted from the return address register or stack slot,
2177 return the actual address encoded in that value. */
2178
2179 rtx
2180 expand_builtin_extract_return_addr (tree addr_tree)
2181 {
2182 rtx addr = expand_expr (addr_tree, NULL_RTX, Pmode, EXPAND_NORMAL);
2183
2184 if (GET_MODE (addr) != Pmode
2185 && GET_MODE (addr) != VOIDmode)
2186 {
2187 #ifdef POINTERS_EXTEND_UNSIGNED
2188 addr = convert_memory_address (Pmode, addr);
2189 #else
2190 addr = convert_to_mode (Pmode, addr, 0);
2191 #endif
2192 }
2193
2194 /* First mask out any unwanted bits. */
2195 rtx mask = MASK_RETURN_ADDR;
2196 if (mask)
2197 expand_and (Pmode, addr, mask, addr);
2198
2199 /* Then adjust to find the real return address. */
2200 if (RETURN_ADDR_OFFSET)
2201 addr = plus_constant (Pmode, addr, RETURN_ADDR_OFFSET);
2202
2203 return addr;
2204 }
2205
2206 /* Given an actual address in addr_tree, do any necessary encoding
2207 and return the value to be stored in the return address register or
2208 stack slot so the epilogue will return to that address. */
2209
2210 rtx
2211 expand_builtin_frob_return_addr (tree addr_tree)
2212 {
2213 rtx addr = expand_expr (addr_tree, NULL_RTX, ptr_mode, EXPAND_NORMAL);
2214
2215 addr = convert_memory_address (Pmode, addr);
2216
2217 if (RETURN_ADDR_OFFSET)
2218 {
2219 addr = force_reg (Pmode, addr);
2220 addr = plus_constant (Pmode, addr, -RETURN_ADDR_OFFSET);
2221 }
2222
2223 return addr;
2224 }
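/* Note that this is the inverse of expand_builtin_extract_return_addr
   above: extract masks with MASK_RETURN_ADDR and adds
   RETURN_ADDR_OFFSET, while frob subtracts RETURN_ADDR_OFFSET again,
   so extracting a frobbed address yields the original address back.  */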
2225
2226 /* Set up the epilogue with the magic bits we'll need to return to the
2227 exception handler. */
2228
2229 void
2230 expand_builtin_eh_return (tree stackadj_tree ATTRIBUTE_UNUSED,
2231 tree handler_tree)
2232 {
2233 rtx tmp;
2234
2235 #ifdef EH_RETURN_STACKADJ_RTX
2236 tmp = expand_expr (stackadj_tree, crtl->eh.ehr_stackadj,
2237 VOIDmode, EXPAND_NORMAL);
2238 tmp = convert_memory_address (Pmode, tmp);
2239 if (!crtl->eh.ehr_stackadj)
2240 crtl->eh.ehr_stackadj = copy_to_reg (tmp);
2241 else if (tmp != crtl->eh.ehr_stackadj)
2242 emit_move_insn (crtl->eh.ehr_stackadj, tmp);
2243 #endif
2244
2245 tmp = expand_expr (handler_tree, crtl->eh.ehr_handler,
2246 VOIDmode, EXPAND_NORMAL);
2247 tmp = convert_memory_address (Pmode, tmp);
2248 if (!crtl->eh.ehr_handler)
2249 crtl->eh.ehr_handler = copy_to_reg (tmp);
2250 else if (tmp != crtl->eh.ehr_handler)
2251 emit_move_insn (crtl->eh.ehr_handler, tmp);
2252
2253 if (!crtl->eh.ehr_label)
2254 crtl->eh.ehr_label = gen_label_rtx ();
2255 emit_jump (crtl->eh.ehr_label);
2256 }
2257
2258 /* Expand __builtin_eh_return. This exit path from the function loads up
2259 the eh return data registers, adjusts the stack, and branches to a
2260 given PC other than the normal return address. */
2261
2262 void
2263 expand_eh_return (void)
2264 {
2265 rtx_code_label *around_label;
2266
2267 if (! crtl->eh.ehr_label)
2268 return;
2269
2270 crtl->calls_eh_return = 1;
2271
2272 #ifdef EH_RETURN_STACKADJ_RTX
2273 emit_move_insn (EH_RETURN_STACKADJ_RTX, const0_rtx);
2274 #endif
2275
2276 around_label = gen_label_rtx ();
2277 emit_jump (around_label);
2278
2279 emit_label (crtl->eh.ehr_label);
2280 clobber_return_register ();
2281
2282 #ifdef EH_RETURN_STACKADJ_RTX
2283 emit_move_insn (EH_RETURN_STACKADJ_RTX, crtl->eh.ehr_stackadj);
2284 #endif
2285
2286 #ifdef HAVE_eh_return
2287 if (HAVE_eh_return)
2288 emit_insn (gen_eh_return (crtl->eh.ehr_handler));
2289 else
2290 #endif
2291 {
2292 #ifdef EH_RETURN_HANDLER_RTX
2293 emit_move_insn (EH_RETURN_HANDLER_RTX, crtl->eh.ehr_handler);
2294 #else
2295 error ("__builtin_eh_return not supported on this target");
2296 #endif
2297 }
2298
2299 emit_label (around_label);
2300 }
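/* The insns emitted above have roughly this shape (a sketch; the
   exact sequence depends on which target macros are defined):

       jump .Laround
     .Lehr:                                  ; crtl->eh.ehr_label
       (return value register clobbered)
       EH_RETURN_STACKADJ_RTX = ehr_stackadj ; if defined
       eh_return (ehr_handler)     ; or EH_RETURN_HANDLER_RTX = ehr_handler
     .Laround:
       ;; normal epilogue follows  */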
2301
2302 /* Convert a ptr_mode address ADDR_TREE to the unwind word mode, using the
2303 extension controlled by POINTERS_EXTEND_UNSIGNED, and return it. */
2304
2305 rtx
2306 expand_builtin_extend_pointer (tree addr_tree)
2307 {
2308 rtx addr = expand_expr (addr_tree, NULL_RTX, ptr_mode, EXPAND_NORMAL);
2309 int extend;
2310
2311 #ifdef POINTERS_EXTEND_UNSIGNED
2312 extend = POINTERS_EXTEND_UNSIGNED;
2313 #else
2314 /* The previous EH code did an unsigned extend by default, so we do the same
2315 here for consistency. */
2316 extend = 1;
2317 #endif
2318
2319 return convert_modes (targetm.unwind_word_mode (), ptr_mode, addr, extend);
2320 }
2321 \f
2322 static int
2323 add_action_record (action_hash_type *ar_hash, int filter, int next)
2324 {
2325 struct action_record **slot, *new_ar, tmp;
2326
2327 tmp.filter = filter;
2328 tmp.next = next;
2329 slot = ar_hash->find_slot (&tmp, INSERT);
2330
2331 if ((new_ar = *slot) == NULL)
2332 {
2333 new_ar = XNEW (struct action_record);
2334 new_ar->offset = crtl->eh.action_record_data->length () + 1;
2335 new_ar->filter = filter;
2336 new_ar->next = next;
2337 *slot = new_ar;
2338
2339 /* The filter value goes in untouched. The link to the next
2340 record is a "self-relative" byte offset, or zero to indicate
2341 that there is no next record. So convert the absolute 1-based
2342 indices we've been carrying around into a displacement. */
2343
2344 push_sleb128 (&crtl->eh.action_record_data, filter);
2345 if (next)
2346 next -= crtl->eh.action_record_data->length () + 1;
2347 push_sleb128 (&crtl->eh.action_record_data, next);
2348 }
2349
2350 return new_ar->offset;
2351 }
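/* A worked example of the encoding above, using hypothetical filter
   values and starting from an empty table:

     add_action_record (h, 1, 0) pushes sleb128 1 and sleb128 0,
     returning offset 1;
     add_action_record (h, 2, 1) pushes sleb128 2 and then the
     self-relative displacement 1 - (3 + 1) = -3, returning offset 3.

   The table bytes are then 0x01 0x00 0x02 0x7d, 0x7d being the
   one-byte sleb128 encoding of -3.  */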
2352
2353 static int
2354 collect_one_action_chain (action_hash_type *ar_hash, eh_region region)
2355 {
2356 int next;
2357
2358 /* If we've reached the top of the region chain, then we have
2359 no actions, and require no landing pad. */
2360 if (region == NULL)
2361 return -1;
2362
2363 switch (region->type)
2364 {
2365 case ERT_CLEANUP:
2366 {
2367 eh_region r;
2368 /* A cleanup adds a zero filter to the beginning of the chain, but
2369 there are special cases to look out for. If there are *only*
2370 cleanups along a path, then it compresses to a zero action.
2371 Further, if there are multiple cleanups along a path, we only
2372 need to represent one of them, as that is enough to trigger
2373 entry to the landing pad at runtime. */
2374 next = collect_one_action_chain (ar_hash, region->outer);
2375 if (next <= 0)
2376 return 0;
2377 for (r = region->outer; r ; r = r->outer)
2378 if (r->type == ERT_CLEANUP)
2379 return next;
2380 return add_action_record (ar_hash, 0, next);
2381 }
2382
2383 case ERT_TRY:
2384 {
2385 eh_catch c;
2386
2387 /* Process the associated catch regions in reverse order.
2388 If there's a catch-all handler, then we don't need to
2389 search outer regions. Use a magic -3 value to record
2390 that we haven't done the outer search. */
2391 next = -3;
2392 for (c = region->u.eh_try.last_catch; c ; c = c->prev_catch)
2393 {
2394 if (c->type_list == NULL)
2395 {
2396 /* Retrieve the filter from the head of the filter list
2397 where we have stored it (see assign_filter_values). */
2398 int filter = TREE_INT_CST_LOW (TREE_VALUE (c->filter_list));
2399 next = add_action_record (ar_hash, filter, 0);
2400 }
2401 else
2402 {
2403 /* Once the outer search is done, trigger an action record for
2404 each filter we have. */
2405 tree flt_node;
2406
2407 if (next == -3)
2408 {
2409 next = collect_one_action_chain (ar_hash, region->outer);
2410
2411 /* If there is no next action, terminate the chain. */
2412 if (next == -1)
2413 next = 0;
2414 /* If all outer actions are cleanups or must_not_throw,
2415 we'll have no action record for them, since we want
2416 to encode these states in the call-site record directly.
2417 Add a cleanup action to the chain to catch these. */
2418 else if (next <= 0)
2419 next = add_action_record (ar_hash, 0, 0);
2420 }
2421
2422 flt_node = c->filter_list;
2423 for (; flt_node; flt_node = TREE_CHAIN (flt_node))
2424 {
2425 int filter = TREE_INT_CST_LOW (TREE_VALUE (flt_node));
2426 next = add_action_record (ar_hash, filter, next);
2427 }
2428 }
2429 }
2430 return next;
2431 }
2432
2433 case ERT_ALLOWED_EXCEPTIONS:
2434 /* An exception specification adds its filter to the
2435 beginning of the chain. */
2436 next = collect_one_action_chain (ar_hash, region->outer);
2437
2438 /* If there is no next action, terminate the chain. */
2439 if (next == -1)
2440 next = 0;
2441 /* If all outer actions are cleanups or must_not_throw,
2442 we'll have no action record for them, since we want
2443 to encode these states in the call-site record directly.
2444 Add a cleanup action to the chain to catch these. */
2445 else if (next <= 0)
2446 next = add_action_record (ar_hash, 0, 0);
2447
2448 return add_action_record (ar_hash, region->u.allowed.filter, next);
2449
2450 case ERT_MUST_NOT_THROW:
2451 /* A must-not-throw region with no inner handlers or cleanups
2452 requires no call-site entry. Note that this differs from
2453 the no handler or cleanup case in that we do require an lsda
2454 to be generated. Return a magic -2 value to record this. */
2455 return -2;
2456 }
2457
2458 gcc_unreachable ();
2459 }
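/* For reference, the values collect_one_action_chain returns are:

     -3  internal sentinel: outer search not yet done for an ERT_TRY
     -2  must-not-throw: no call-site entry, but an lsda is required
     -1  no actions at all: no landing pad and no call-site entry
      0  cleanup only: enter the landing pad with a zero action
     >0  1-based offset of the chain's first action record.  */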
2460
2461 static int
2462 add_call_site (rtx landing_pad, int action, int section)
2463 {
2464 call_site_record record;
2465
2466 record = ggc_alloc<call_site_record_d> ();
2467 record->landing_pad = landing_pad;
2468 record->action = action;
2469
2470 vec_safe_push (crtl->eh.call_site_record_v[section], record);
2471
2472 return call_site_base + crtl->eh.call_site_record_v[section]->length () - 1;
2473 }
2474
2475 static rtx_note *
2476 emit_note_eh_region_end (rtx_insn *insn)
2477 {
2478 rtx_insn *next = NEXT_INSN (insn);
2479
2480 /* Make sure we do not split a call and its corresponding
2481 CALL_ARG_LOCATION note. */
2482 if (next && NOTE_P (next)
2483 && NOTE_KIND (next) == NOTE_INSN_CALL_ARG_LOCATION)
2484 insn = next;
2485
2486 return emit_note_after (NOTE_INSN_EH_REGION_END, insn);
2487 }
2488
2489 /* Turn REG_EH_REGION notes back into NOTE_INSN_EH_REGION notes.
2490 The new note numbers will not refer to region numbers, but
2491 instead to call site entries. */
2492
2493 static unsigned int
2494 convert_to_eh_region_ranges (void)
2495 {
2496 rtx insn;
2497 rtx_insn *iter;
2498 rtx_note *note;
2499 action_hash_type ar_hash (31);
2500 int last_action = -3;
2501 rtx_insn *last_action_insn = NULL;
2502 rtx last_landing_pad = NULL_RTX;
2503 rtx_insn *first_no_action_insn = NULL;
2504 int call_site = 0;
2505 int cur_sec = 0;
2506 rtx_insn *section_switch_note = NULL;
2507 rtx_insn *first_no_action_insn_before_switch = NULL;
2508 rtx_insn *last_no_action_insn_before_switch = NULL;
2509 int saved_call_site_base = call_site_base;
2510
2511 vec_alloc (crtl->eh.action_record_data, 64);
2512
2513 for (iter = get_insns (); iter ; iter = NEXT_INSN (iter))
2514 if (INSN_P (iter))
2515 {
2516 eh_landing_pad lp;
2517 eh_region region;
2518 bool nothrow;
2519 int this_action;
2520 rtx_code_label *this_landing_pad;
2521
2522 insn = iter;
2523 if (NONJUMP_INSN_P (insn)
2524 && GET_CODE (PATTERN (insn)) == SEQUENCE)
2525 insn = XVECEXP (PATTERN (insn), 0, 0);
2526
2527 nothrow = get_eh_region_and_lp_from_rtx (insn, &region, &lp);
2528 if (nothrow)
2529 continue;
2530 if (region)
2531 this_action = collect_one_action_chain (&ar_hash, region);
2532 else
2533 this_action = -1;
2534
2535 /* The existence of catch handlers or must-not-throw regions
2536 implies that an lsda is needed (even if empty). */
2537 if (this_action != -1)
2538 crtl->uses_eh_lsda = 1;
2539
2540 /* Delay creation of region notes for no-action regions
2541 until we're sure that an lsda will be required. */
2542 else if (last_action == -3)
2543 {
2544 first_no_action_insn = iter;
2545 last_action = -1;
2546 }
2547
2548 if (this_action >= 0)
2549 this_landing_pad = lp->landing_pad;
2550 else
2551 this_landing_pad = NULL;
2552
2553 /* Differing actions or landing pads imply a change in call-site
2554 info, which implies some EH_REGION note should be emitted. */
2555 if (last_action != this_action
2556 || last_landing_pad != this_landing_pad)
2557 {
2558 /* If there is a queued no-action region in the other section
2559 with hot/cold partitioning, emit it now. */
2560 if (first_no_action_insn_before_switch)
2561 {
2562 gcc_assert (this_action != -1
2563 && last_action == (first_no_action_insn
2564 ? -1 : -3));
2565 call_site = add_call_site (NULL_RTX, 0, 0);
2566 note = emit_note_before (NOTE_INSN_EH_REGION_BEG,
2567 first_no_action_insn_before_switch);
2568 NOTE_EH_HANDLER (note) = call_site;
2569 note
2570 = emit_note_eh_region_end (last_no_action_insn_before_switch);
2571 NOTE_EH_HANDLER (note) = call_site;
2572 gcc_assert (last_action != -3
2573 || (last_action_insn
2574 == last_no_action_insn_before_switch));
2575 first_no_action_insn_before_switch = NULL;
2576 last_no_action_insn_before_switch = NULL;
2577 call_site_base++;
2578 }
2579 /* If we'd not seen a previous action (-3) or the previous
2580 action was must-not-throw (-2), then we do not need an
2581 end note. */
2582 if (last_action >= -1)
2583 {
2584 /* If we delayed the creation of the begin, do it now. */
2585 if (first_no_action_insn)
2586 {
2587 call_site = add_call_site (NULL_RTX, 0, cur_sec);
2588 note = emit_note_before (NOTE_INSN_EH_REGION_BEG,
2589 first_no_action_insn);
2590 NOTE_EH_HANDLER (note) = call_site;
2591 first_no_action_insn = NULL;
2592 }
2593
2594 note = emit_note_eh_region_end (last_action_insn);
2595 NOTE_EH_HANDLER (note) = call_site;
2596 }
2597
2598 /* If the new action is must-not-throw, then no region notes
2599 are created. */
2600 if (this_action >= -1)
2601 {
2602 call_site = add_call_site (this_landing_pad,
2603 this_action < 0 ? 0 : this_action,
2604 cur_sec);
2605 note = emit_note_before (NOTE_INSN_EH_REGION_BEG, iter);
2606 NOTE_EH_HANDLER (note) = call_site;
2607 }
2608
2609 last_action = this_action;
2610 last_landing_pad = this_landing_pad;
2611 }
2612 last_action_insn = iter;
2613 }
2614 else if (NOTE_P (iter)
2615 && NOTE_KIND (iter) == NOTE_INSN_SWITCH_TEXT_SECTIONS)
2616 {
2617 gcc_assert (section_switch_note == NULL_RTX);
2618 gcc_assert (flag_reorder_blocks_and_partition);
2619 section_switch_note = iter;
2620 if (first_no_action_insn)
2621 {
2622 first_no_action_insn_before_switch = first_no_action_insn;
2623 last_no_action_insn_before_switch = last_action_insn;
2624 first_no_action_insn = NULL;
2625 gcc_assert (last_action == -1);
2626 last_action = -3;
2627 }
2628 /* Force closing of current EH region before section switch and
2629 opening a new one afterwards. */
2630 else if (last_action != -3)
2631 last_landing_pad = pc_rtx;
2632 if (crtl->eh.call_site_record_v[cur_sec])
2633 call_site_base += crtl->eh.call_site_record_v[cur_sec]->length ();
2634 cur_sec++;
2635 gcc_assert (crtl->eh.call_site_record_v[cur_sec] == NULL);
2636 vec_alloc (crtl->eh.call_site_record_v[cur_sec], 10);
2637 }
2638
2639 if (last_action >= -1 && ! first_no_action_insn)
2640 {
2641 note = emit_note_eh_region_end (last_action_insn);
2642 NOTE_EH_HANDLER (note) = call_site;
2643 }
2644
2645 call_site_base = saved_call_site_base;
2646
2647 return 0;
2648 }
2649
2650 namespace {
2651
2652 const pass_data pass_data_convert_to_eh_region_ranges =
2653 {
2654 RTL_PASS, /* type */
2655 "eh_ranges", /* name */
2656 OPTGROUP_NONE, /* optinfo_flags */
2657 TV_NONE, /* tv_id */
2658 0, /* properties_required */
2659 0, /* properties_provided */
2660 0, /* properties_destroyed */
2661 0, /* todo_flags_start */
2662 0, /* todo_flags_finish */
2663 };
2664
2665 class pass_convert_to_eh_region_ranges : public rtl_opt_pass
2666 {
2667 public:
2668 pass_convert_to_eh_region_ranges (gcc::context *ctxt)
2669 : rtl_opt_pass (pass_data_convert_to_eh_region_ranges, ctxt)
2670 {}
2671
2672 /* opt_pass methods: */
2673 virtual bool gate (function *);
2674 virtual unsigned int execute (function *)
2675 {
2676 return convert_to_eh_region_ranges ();
2677 }
2678
2679 }; // class pass_convert_to_eh_region_ranges
2680
2681 bool
2682 pass_convert_to_eh_region_ranges::gate (function *)
2683 {
2684 /* Nothing to do for SJLJ exceptions or if no regions created. */
2685 if (cfun->eh->region_tree == NULL)
2686 return false;
2687 if (targetm_common.except_unwind_info (&global_options) == UI_SJLJ)
2688 return false;
2689 return true;
2690 }
2691
2692 } // anon namespace
2693
2694 rtl_opt_pass *
2695 make_pass_convert_to_eh_region_ranges (gcc::context *ctxt)
2696 {
2697 return new pass_convert_to_eh_region_ranges (ctxt);
2698 }
2699 \f
2700 static void
2701 push_uleb128 (vec<uchar, va_gc> **data_area, unsigned int value)
2702 {
2703 do
2704 {
2705 unsigned char byte = value & 0x7f;
2706 value >>= 7;
2707 if (value)
2708 byte |= 0x80;
2709 vec_safe_push (*data_area, byte);
2710 }
2711 while (value);
2712 }
2713
2714 static void
2715 push_sleb128 (vec<uchar, va_gc> **data_area, int value)
2716 {
2717 unsigned char byte;
2718 int more;
2719
2720 do
2721 {
2722 byte = value & 0x7f;
2723 value >>= 7;
2724 more = ! ((value == 0 && (byte & 0x40) == 0)
2725 || (value == -1 && (byte & 0x40) != 0));
2726 if (more)
2727 byte |= 0x80;
2728 vec_safe_push (*data_area, byte);
2729 }
2730 while (more);
2731 }
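/* A self-contained sketch of the matching decoders (illustrative
   only, not part of GCC; the unwinder has its own readers).  For
   brevity the sketch assumes well-formed input whose value fits in
   an int.  As worked examples, push_uleb128 encodes 624485 as
   0xe5 0x8e 0x26 and push_sleb128 encodes -123456 as 0xc0 0xbb 0x78.  */

static unsigned int
example_read_uleb128 (const unsigned char *p, unsigned int *value)
{
  unsigned int result = 0, shift = 0, len = 0;
  unsigned char byte;

  do
    {
      byte = p[len++];
      result |= (unsigned int) (byte & 0x7f) << shift;
      shift += 7;
    }
  while (byte & 0x80);

  *value = result;
  return len;			/* Bytes consumed.  */
}

static unsigned int
example_read_sleb128 (const unsigned char *p, int *value)
{
  int result = 0;
  unsigned int shift = 0, len = 0;
  unsigned char byte;

  do
    {
      byte = p[len++];
      result |= (int) (byte & 0x7f) << shift;
      shift += 7;
    }
  while (byte & 0x80);

  /* Sign extend when the sign bit of the last byte is set.  */
  if (shift < 8 * sizeof (int) && (byte & 0x40))
    result |= - (1 << shift);

  *value = result;
  return len;
}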
2732
2733 \f
2734 #ifndef HAVE_AS_LEB128
2735 static int
2736 dw2_size_of_call_site_table (int section)
2737 {
2738 int n = vec_safe_length (crtl->eh.call_site_record_v[section]);
2739 int size = n * (4 + 4 + 4);
2740 int i;
2741
2742 for (i = 0; i < n; ++i)
2743 {
2744 struct call_site_record_d *cs =
2745 (*crtl->eh.call_site_record_v[section])[i];
2746 size += size_of_uleb128 (cs->action);
2747 }
2748
2749 return size;
2750 }
2751
2752 static int
2753 sjlj_size_of_call_site_table (void)
2754 {
2755 int n = vec_safe_length (crtl->eh.call_site_record_v[0]);
2756 int size = 0;
2757 int i;
2758
2759 for (i = 0; i < n; ++i)
2760 {
2761 struct call_site_record_d *cs =
2762 (*crtl->eh.call_site_record_v[0])[i];
2763 size += size_of_uleb128 (INTVAL (cs->landing_pad));
2764 size += size_of_uleb128 (cs->action);
2765 }
2766
2767 return size;
2768 }
2769 #endif
2770
2771 static void
2772 dw2_output_call_site_table (int cs_format, int section)
2773 {
2774 int n = vec_safe_length (crtl->eh.call_site_record_v[section]);
2775 int i;
2776 const char *begin;
2777
2778 if (section == 0)
2779 begin = current_function_func_begin_label;
2780 else if (first_function_block_is_cold)
2781 begin = crtl->subsections.hot_section_label;
2782 else
2783 begin = crtl->subsections.cold_section_label;
2784
2785 for (i = 0; i < n; ++i)
2786 {
2787 struct call_site_record_d *cs = (*crtl->eh.call_site_record_v[section])[i];
2788 char reg_start_lab[32];
2789 char reg_end_lab[32];
2790 char landing_pad_lab[32];
2791
2792 ASM_GENERATE_INTERNAL_LABEL (reg_start_lab, "LEHB", call_site_base + i);
2793 ASM_GENERATE_INTERNAL_LABEL (reg_end_lab, "LEHE", call_site_base + i);
2794
2795 if (cs->landing_pad)
2796 ASM_GENERATE_INTERNAL_LABEL (landing_pad_lab, "L",
2797 CODE_LABEL_NUMBER (cs->landing_pad));
2798
2799 /* ??? Perhaps use insn length scaling if the assembler supports
2800 generic arithmetic. */
2801 /* ??? Perhaps use attr_length to choose data1 or data2 instead of
2802 data4 if the function is small enough. */
2803 if (cs_format == DW_EH_PE_uleb128)
2804 {
2805 dw2_asm_output_delta_uleb128 (reg_start_lab, begin,
2806 "region %d start", i);
2807 dw2_asm_output_delta_uleb128 (reg_end_lab, reg_start_lab,
2808 "length");
2809 if (cs->landing_pad)
2810 dw2_asm_output_delta_uleb128 (landing_pad_lab, begin,
2811 "landing pad");
2812 else
2813 dw2_asm_output_data_uleb128 (0, "landing pad");
2814 }
2815 else
2816 {
2817 dw2_asm_output_delta (4, reg_start_lab, begin,
2818 "region %d start", i);
2819 dw2_asm_output_delta (4, reg_end_lab, reg_start_lab, "length");
2820 if (cs->landing_pad)
2821 dw2_asm_output_delta (4, landing_pad_lab, begin,
2822 "landing pad");
2823 else
2824 dw2_asm_output_data (4, 0, "landing pad");
2825 }
2826 dw2_asm_output_data_uleb128 (cs->action, "action");
2827 }
2828
2829 call_site_base += n;
2830 }
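/* With cs_format == DW_EH_PE_uleb128, one record emitted above comes
   out in the assembly roughly as (a sketch; .LFB0 stands for the
   function begin label, .L5 for a landing pad label):

       .uleb128 .LEHB0-.LFB0	# region 0 start
       .uleb128 .LEHE0-.LEHB0	# length
       .uleb128 .L5-.LFB0	# landing pad (or 0 if none)
       .uleb128 0x3		# action  */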
2831
2832 static void
2833 sjlj_output_call_site_table (void)
2834 {
2835 int n = vec_safe_length (crtl->eh.call_site_record_v[0]);
2836 int i;
2837
2838 for (i = 0; i < n; ++i)
2839 {
2840 struct call_site_record_d *cs = (*crtl->eh.call_site_record_v[0])[i];
2841
2842 dw2_asm_output_data_uleb128 (INTVAL (cs->landing_pad),
2843 "region %d landing pad", i);
2844 dw2_asm_output_data_uleb128 (cs->action, "action");
2845 }
2846
2847 call_site_base += n;
2848 }
2849
2850 /* Switch to the section that should be used for exception tables. */
2851
2852 static void
2853 switch_to_exception_section (const char * ARG_UNUSED (fnname))
2854 {
2855 section *s;
2856
2857 if (exception_section)
2858 s = exception_section;
2859 else
2860 {
2861 /* Compute the section and cache it into exception_section,
2862 unless it depends on the function name. */
2863 if (targetm_common.have_named_sections)
2864 {
2865 int flags;
2866
2867 if (EH_TABLES_CAN_BE_READ_ONLY)
2868 {
2869 int tt_format =
2870 ASM_PREFERRED_EH_DATA_FORMAT (/*code=*/0, /*global=*/1);
2871 flags = ((! flag_pic
2872 || ((tt_format & 0x70) != DW_EH_PE_absptr
2873 && (tt_format & 0x70) != DW_EH_PE_aligned))
2874 ? 0 : SECTION_WRITE);
2875 }
2876 else
2877 flags = SECTION_WRITE;
2878
2879 #ifdef HAVE_LD_EH_GC_SECTIONS
2880 if (flag_function_sections
2881 || (DECL_COMDAT_GROUP (current_function_decl) && HAVE_COMDAT_GROUP))
2882 {
2883 char *section_name = XNEWVEC (char, strlen (fnname) + 32);
2884 /* The EH table must match the code section, so only mark
2885 it linkonce if we have COMDAT groups to tie them together. */
2886 if (DECL_COMDAT_GROUP (current_function_decl) && HAVE_COMDAT_GROUP)
2887 flags |= SECTION_LINKONCE;
2888 sprintf (section_name, ".gcc_except_table.%s", fnname);
2889 s = get_section (section_name, flags, current_function_decl);
2890 free (section_name);
2891 }
2892 else
2893 #endif
2894 exception_section
2895 = s = get_section (".gcc_except_table", flags, NULL);
2896 }
2897 else
2898 exception_section
2899 = s = flag_pic ? data_section : readonly_data_section;
2900 }
2901
2902 switch_to_section (s);
2903 }
2904
2905
2906 /* Output a reference from an exception table to the type_info object TYPE.
2907 TT_FORMAT and TT_FORMAT_SIZE describe the DWARF encoding method used for
2908 the value. */
2909
2910 static void
2911 output_ttype (tree type, int tt_format, int tt_format_size)
2912 {
2913 rtx value;
2914 bool is_public = true;
2915
2916 if (type == NULL_TREE)
2917 value = const0_rtx;
2918 else
2919 {
2920 /* FIXME lto. pass_ipa_free_lang_data changes all types to
2921 runtime types so TYPE should already be a runtime type
2922 reference. When pass_ipa_free_lang_data is made a default
2923 pass, we can then remove the call to lookup_type_for_runtime
2924 below. */
2925 if (TYPE_P (type))
2926 type = lookup_type_for_runtime (type);
2927
2928 value = expand_expr (type, NULL_RTX, VOIDmode, EXPAND_INITIALIZER);
2929
2930 /* Let cgraph know that the rtti decl is used. Not all of the
2931 paths below go through assemble_integer, which would take
2932 care of this for us. */
2933 STRIP_NOPS (type);
2934 if (TREE_CODE (type) == ADDR_EXPR)
2935 {
2936 type = TREE_OPERAND (type, 0);
2937 if (TREE_CODE (type) == VAR_DECL)
2938 is_public = TREE_PUBLIC (type);
2939 }
2940 else
2941 gcc_assert (TREE_CODE (type) == INTEGER_CST);
2942 }
2943
2944 /* Allow the target to override the type table entry format. */
2945 if (targetm.asm_out.ttype (value))
2946 return;
2947
2948 if (tt_format == DW_EH_PE_absptr || tt_format == DW_EH_PE_aligned)
2949 assemble_integer (value, tt_format_size,
2950 tt_format_size * BITS_PER_UNIT, 1);
2951 else
2952 dw2_asm_output_encoded_addr_rtx (tt_format, value, is_public, NULL);
2953 }
2954
2955 static void
2956 output_one_function_exception_table (int section)
2957 {
2958 int tt_format, cs_format, lp_format, i;
2959 #ifdef HAVE_AS_LEB128
2960 char ttype_label[32];
2961 char cs_after_size_label[32];
2962 char cs_end_label[32];
2963 #else
2964 int call_site_len;
2965 #endif
2966 int have_tt_data;
2967 int tt_format_size = 0;
2968
2969 have_tt_data = (vec_safe_length (cfun->eh->ttype_data)
2970 || (targetm.arm_eabi_unwinder
2971 ? vec_safe_length (cfun->eh->ehspec_data.arm_eabi)
2972 : vec_safe_length (cfun->eh->ehspec_data.other)));
2973
2974 /* Indicate the format of the @TType entries. */
2975 if (! have_tt_data)
2976 tt_format = DW_EH_PE_omit;
2977 else
2978 {
2979 tt_format = ASM_PREFERRED_EH_DATA_FORMAT (/*code=*/0, /*global=*/1);
2980 #ifdef HAVE_AS_LEB128
2981 ASM_GENERATE_INTERNAL_LABEL (ttype_label,
2982 section ? "LLSDATTC" : "LLSDATT",
2983 current_function_funcdef_no);
2984 #endif
2985 tt_format_size = size_of_encoded_value (tt_format);
2986
2987 assemble_align (tt_format_size * BITS_PER_UNIT);
2988 }
2989
2990 targetm.asm_out.internal_label (asm_out_file, section ? "LLSDAC" : "LLSDA",
2991 current_function_funcdef_no);
2992
2993 /* The LSDA header. */
2994
2995 /* Indicate the format of the landing pad start pointer. An omitted
2996 field implies @LPStart == @Start. */
2997 /* Currently we always put @LPStart == @Start. This field would
2998 be most useful in moving the landing pads completely out of
2999 line to another section, but it could also be used to minimize
3000 the size of uleb128 landing pad offsets. */
3001 lp_format = DW_EH_PE_omit;
3002 dw2_asm_output_data (1, lp_format, "@LPStart format (%s)",
3003 eh_data_format_name (lp_format));
3004
3005 /* @LPStart pointer would go here. */
3006
3007 dw2_asm_output_data (1, tt_format, "@TType format (%s)",
3008 eh_data_format_name (tt_format));
3009
3010 #ifndef HAVE_AS_LEB128
3011 if (targetm_common.except_unwind_info (&global_options) == UI_SJLJ)
3012 call_site_len = sjlj_size_of_call_site_table ();
3013 else
3014 call_site_len = dw2_size_of_call_site_table (section);
3015 #endif
3016
3017 /* A pc-relative 4-byte displacement to the @TType data. */
3018 if (have_tt_data)
3019 {
3020 #ifdef HAVE_AS_LEB128
3021 char ttype_after_disp_label[32];
3022 ASM_GENERATE_INTERNAL_LABEL (ttype_after_disp_label,
3023 section ? "LLSDATTDC" : "LLSDATTD",
3024 current_function_funcdef_no);
3025 dw2_asm_output_delta_uleb128 (ttype_label, ttype_after_disp_label,
3026 "@TType base offset");
3027 ASM_OUTPUT_LABEL (asm_out_file, ttype_after_disp_label);
3028 #else
3029 /* Ugh. Alignment complicates things. */
3030 unsigned int before_disp, after_disp, last_disp, disp;
3031
3032 before_disp = 1 + 1;
3033 after_disp = (1 + size_of_uleb128 (call_site_len)
3034 + call_site_len
3035 + vec_safe_length (crtl->eh.action_record_data)
3036 + (vec_safe_length (cfun->eh->ttype_data)
3037 * tt_format_size));
3038
3039 disp = after_disp;
3040 do
3041 {
3042 unsigned int disp_size, pad;
3043
3044 last_disp = disp;
3045 disp_size = size_of_uleb128 (disp);
3046 pad = before_disp + disp_size + after_disp;
3047 if (pad % tt_format_size)
3048 pad = tt_format_size - (pad % tt_format_size);
3049 else
3050 pad = 0;
3051 disp = after_disp + pad;
3052 }
3053 while (disp != last_disp);
3054
3055 dw2_asm_output_data_uleb128 (disp, "@TType base offset");
3056 #endif
3057 }
3058
3059 /* Indicate the format of the call-site offsets. */
3060 #ifdef HAVE_AS_LEB128
3061 cs_format = DW_EH_PE_uleb128;
3062 #else
3063 cs_format = DW_EH_PE_udata4;
3064 #endif
3065 dw2_asm_output_data (1, cs_format, "call-site format (%s)",
3066 eh_data_format_name (cs_format));
3067
3068 #ifdef HAVE_AS_LEB128
3069 ASM_GENERATE_INTERNAL_LABEL (cs_after_size_label,
3070 section ? "LLSDACSBC" : "LLSDACSB",
3071 current_function_funcdef_no);
3072 ASM_GENERATE_INTERNAL_LABEL (cs_end_label,
3073 section ? "LLSDACSEC" : "LLSDACSE",
3074 current_function_funcdef_no);
3075 dw2_asm_output_delta_uleb128 (cs_end_label, cs_after_size_label,
3076 "Call-site table length");
3077 ASM_OUTPUT_LABEL (asm_out_file, cs_after_size_label);
3078 if (targetm_common.except_unwind_info (&global_options) == UI_SJLJ)
3079 sjlj_output_call_site_table ();
3080 else
3081 dw2_output_call_site_table (cs_format, section);
3082 ASM_OUTPUT_LABEL (asm_out_file, cs_end_label);
3083 #else
3084 dw2_asm_output_data_uleb128 (call_site_len, "Call-site table length");
3085 if (targetm_common.except_unwind_info (&global_options) == UI_SJLJ)
3086 sjlj_output_call_site_table ();
3087 else
3088 dw2_output_call_site_table (cs_format, section);
3089 #endif
3090
3091 /* ??? Decode and interpret the data for flag_debug_asm. */
3092 {
3093 uchar uc;
3094 FOR_EACH_VEC_ELT (*crtl->eh.action_record_data, i, uc)
3095 dw2_asm_output_data (1, uc, i ? NULL : "Action record table");
3096 }
3097
3098 if (have_tt_data)
3099 assemble_align (tt_format_size * BITS_PER_UNIT);
3100
3101 i = vec_safe_length (cfun->eh->ttype_data);
3102 while (i-- > 0)
3103 {
3104 tree type = (*cfun->eh->ttype_data)[i];
3105 output_ttype (type, tt_format, tt_format_size);
3106 }
3107
3108 #ifdef HAVE_AS_LEB128
3109 if (have_tt_data)
3110 ASM_OUTPUT_LABEL (asm_out_file, ttype_label);
3111 #endif
3112
3113 /* ??? Decode and interpret the data for flag_debug_asm. */
3114 if (targetm.arm_eabi_unwinder)
3115 {
3116 tree type;
3117 for (i = 0;
3118 vec_safe_iterate (cfun->eh->ehspec_data.arm_eabi, i, &type); ++i)
3119 output_ttype (type, tt_format, tt_format_size);
3120 }
3121 else
3122 {
3123 uchar uc;
3124 for (i = 0;
3125 vec_safe_iterate (cfun->eh->ehspec_data.other, i, &uc); ++i)
3126 dw2_asm_output_data (1, uc,
3127 i ? NULL : "Exception specification table");
3128 }
3129 }
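/* Putting the pieces together, the LSDA emitted above is laid out as
   follows (formats as chosen in the code above):

     byte     @LPStart format	(DW_EH_PE_omit: @LPStart == @Start)
     byte     @TType format	(DW_EH_PE_omit when there is no data)
     uleb128  @TType base offset	(only when there is @TType data)
     byte     call-site format
     uleb128  call-site table length
     ...      call-site records
     ...      action record table
     align    to tt_format_size
     ...      @TType entries, emitted in reverse order
     ...      exception specification table.  */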
3130
3131 void
3132 output_function_exception_table (const char *fnname)
3133 {
3134 rtx personality = get_personality_function (current_function_decl);
3135
3136 /* Not all functions need anything. */
3137 if (! crtl->uses_eh_lsda)
3138 return;
3139
3140 if (personality)
3141 {
3142 assemble_external_libcall (personality);
3143
3144 if (targetm.asm_out.emit_except_personality)
3145 targetm.asm_out.emit_except_personality (personality);
3146 }
3147
3148 switch_to_exception_section (fnname);
3149
3150 /* If the target wants a label to begin the table, emit it here. */
3151 targetm.asm_out.emit_except_table_label (asm_out_file);
3152
3153 output_one_function_exception_table (0);
3154 if (crtl->eh.call_site_record_v[1])
3155 output_one_function_exception_table (1);
3156
3157 switch_to_section (current_function_section ());
3158 }
3159
3160 void
3161 set_eh_throw_stmt_table (function *fun, hash_map<gimple, int> *table)
3162 {
3163 fun->eh->throw_stmt_table = table;
3164 }
3165
3166 hash_map<gimple, int> *
3167 get_eh_throw_stmt_table (struct function *fun)
3168 {
3169 return fun->eh->throw_stmt_table;
3170 }
3171 \f
3172 /* Determine if the function needs an EH personality function. */
3173
3174 enum eh_personality_kind
3175 function_needs_eh_personality (struct function *fn)
3176 {
3177 enum eh_personality_kind kind = eh_personality_none;
3178 eh_region i;
3179
3180 FOR_ALL_EH_REGION_FN (i, fn)
3181 {
3182 switch (i->type)
3183 {
3184 case ERT_CLEANUP:
3185 /* Can do with any personality including the generic C one. */
3186 kind = eh_personality_any;
3187 break;
3188
3189 case ERT_TRY:
3190 case ERT_ALLOWED_EXCEPTIONS:
3191 /* Always needs an EH personality function. The generic C
3192 personality doesn't handle these even for empty type lists. */
3193 return eh_personality_lang;
3194
3195 case ERT_MUST_NOT_THROW:
3196 /* Always needs an EH personality function. The language may specify
3197 which abort routine must be used, e.g. std::terminate. */
3198 return eh_personality_lang;
3199 }
3200 }
3201
3202 return kind;
3203 }
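/* For example, a C++ function whose only EH constructs are destructor
   cleanups can be unwound with the generic C personality
   (__gcc_personality_v0), whereas any try/catch or exception
   specification requires the language personality, e.g.
   __gxx_personality_v0 for C++.  */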
3204 \f
3205 /* Dump EH information to OUT. */
3206
3207 void
3208 dump_eh_tree (FILE * out, struct function *fun)
3209 {
3210 eh_region i;
3211 int depth = 0;
3212 static const char *const type_name[] = {
3213 "cleanup", "try", "allowed_exceptions", "must_not_throw"
3214 };
3215
3216 i = fun->eh->region_tree;
3217 if (!i)
3218 return;
3219
3220 fprintf (out, "Eh tree:\n");
3221 while (1)
3222 {
3223 fprintf (out, " %*s %i %s", depth * 2, "",
3224 i->index, type_name[(int) i->type]);
3225
3226 if (i->landing_pads)
3227 {
3228 eh_landing_pad lp;
3229
3230 fprintf (out, " land:");
3231 if (current_ir_type () == IR_GIMPLE)
3232 {
3233 for (lp = i->landing_pads; lp ; lp = lp->next_lp)
3234 {
3235 fprintf (out, "{%i,", lp->index);
3236 print_generic_expr (out, lp->post_landing_pad, 0);
3237 fputc ('}', out);
3238 if (lp->next_lp)
3239 fputc (',', out);
3240 }
3241 }
3242 else
3243 {
3244 for (lp = i->landing_pads; lp ; lp = lp->next_lp)
3245 {
3246 fprintf (out, "{%i,", lp->index);
3247 if (lp->landing_pad)
3248 fprintf (out, "%i%s,", INSN_UID (lp->landing_pad),
3249 NOTE_P (lp->landing_pad) ? "(del)" : "");
3250 else
3251 fprintf (out, "(nil),");
3252 if (lp->post_landing_pad)
3253 {
3254 rtx_insn *lab = label_rtx (lp->post_landing_pad);
3255 fprintf (out, "%i%s}", INSN_UID (lab),
3256 NOTE_P (lab) ? "(del)" : "");
3257 }
3258 else
3259 fprintf (out, "(nil)}");
3260 if (lp->next_lp)
3261 fputc (',', out);
3262 }
3263 }
3264 }
3265
3266 switch (i->type)
3267 {
3268 case ERT_CLEANUP:
3269 case ERT_MUST_NOT_THROW:
3270 break;
3271
3272 case ERT_TRY:
3273 {
3274 eh_catch c;
3275 fprintf (out, " catch:");
3276 for (c = i->u.eh_try.first_catch; c; c = c->next_catch)
3277 {
3278 fputc ('{', out);
3279 if (c->label)
3280 {
3281 fprintf (out, "lab:");
3282 print_generic_expr (out, c->label, 0);
3283 fputc (';', out);
3284 }
3285 print_generic_expr (out, c->type_list, 0);
3286 fputc ('}', out);
3287 if (c->next_catch)
3288 fputc (',', out);
3289 }
3290 }
3291 break;
3292
3293 case ERT_ALLOWED_EXCEPTIONS:
3294 fprintf (out, " filter :%i types:", i->u.allowed.filter);
3295 print_generic_expr (out, i->u.allowed.type_list, 0);
3296 break;
3297 }
3298 fputc ('\n', out);
3299
3300 /* If there are sub-regions, process them. */
3301 if (i->inner)
3302 i = i->inner, depth++;
3303 /* If there are peers, process them. */
3304 else if (i->next_peer)
3305 i = i->next_peer;
3306 /* Otherwise, step back up the tree to the next peer. */
3307 else
3308 {
3309 do
3310 {
3311 i = i->outer;
3312 depth--;
3313 if (i == NULL)
3314 return;
3315 }
3316 while (i->next_peer == NULL);
3317 i = i->next_peer;
3318 }
3319 }
3320 }
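/* A hedged illustration of the GIMPLE-level dump format (indices,
   label names, and types vary):

     Eh tree:
        1 cleanup land:{1,<L3>}
          2 try land:{2,<L4>} catch:{lab:<L5>;struct E}  */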
3321
3322 /* Dump the EH tree for FN on stderr. */
3323
3324 DEBUG_FUNCTION void
3325 debug_eh_tree (struct function *fn)
3326 {
3327 dump_eh_tree (stderr, fn);
3328 }
3329
3330 /* Verify invariants on EH datastructures. */
3331
3332 DEBUG_FUNCTION void
3333 verify_eh_tree (struct function *fun)
3334 {
3335 eh_region r, outer;
3336 int nvisited_lp, nvisited_r;
3337 int count_lp, count_r, depth, i;
3338 eh_landing_pad lp;
3339 bool err = false;
3340
3341 if (!fun->eh->region_tree)
3342 return;
3343
3344 count_r = 0;
3345 for (i = 1; vec_safe_iterate (fun->eh->region_array, i, &r); ++i)
3346 if (r)
3347 {
3348 if (r->index == i)
3349 count_r++;
3350 else
3351 {
3352 error ("region_array is corrupted for region %i", r->index);
3353 err = true;
3354 }
3355 }
3356
3357 count_lp = 0;
3358 for (i = 1; vec_safe_iterate (fun->eh->lp_array, i, &lp); ++i)
3359 if (lp)
3360 {
3361 if (lp->index == i)
3362 count_lp++;
3363 else
3364 {
3365 error ("lp_array is corrupted for lp %i", lp->index);
3366 err = true;
3367 }
3368 }
3369
3370 depth = nvisited_lp = nvisited_r = 0;
3371 outer = NULL;
3372 r = fun->eh->region_tree;
3373 while (1)
3374 {
3375 if ((*fun->eh->region_array)[r->index] != r)
3376 {
3377 error ("region_array is corrupted for region %i", r->index);
3378 err = true;
3379 }
3380 if (r->outer != outer)
3381 {
3382 error ("outer block of region %i is wrong", r->index);
3383 err = true;
3384 }
3385 if (depth < 0)
3386 {
3387 error ("negative nesting depth of region %i", r->index);
3388 err = true;
3389 }
3390 nvisited_r++;
3391
3392 for (lp = r->landing_pads; lp ; lp = lp->next_lp)
3393 {
3394 if ((*fun->eh->lp_array)[lp->index] != lp)
3395 {
3396 error ("lp_array is corrupted for lp %i", lp->index);
3397 err = true;
3398 }
3399 if (lp->region != r)
3400 {
3401 error ("region of lp %i is wrong", lp->index);
3402 err = true;
3403 }
3404 nvisited_lp++;
3405 }
3406
3407 if (r->inner)
3408 outer = r, r = r->inner, depth++;
3409 else if (r->next_peer)
3410 r = r->next_peer;
3411 else
3412 {
3413 do
3414 {
3415 r = r->outer;
3416 if (r == NULL)
3417 goto region_done;
3418 depth--;
3419 outer = r->outer;
3420 }
3421 while (r->next_peer == NULL);
3422 r = r->next_peer;
3423 }
3424 }
3425 region_done:
3426 if (depth != 0)
3427 {
3428 error ("tree list ends on depth %i", depth);
3429 err = true;
3430 }
3431 if (count_r != nvisited_r)
3432 {
3433 error ("region_array does not match region_tree");
3434 err = true;
3435 }
3436 if (count_lp != nvisited_lp)
3437 {
3438 error ("lp_array does not match region_tree");
3439 err = true;
3440 }
3441
3442 if (err)
3443 {
3444 dump_eh_tree (stderr, fun);
3445 internal_error ("verify_eh_tree failed");
3446 }
3447 }
3448 \f
3449 #include "gt-except.h"