/* Implements exception handling.
   Copyright (C) 1989-2015 Free Software Foundation, Inc.
   Contributed by Mike Stump <mrs@cygnus.com>.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */


/* An exception is an event that can be "thrown" from within a
   function.  This event can then be "caught" by the callers of
   the function.

   The representation of exceptions changes several times during
   the compilation process:

   In the beginning, in the front end, we have the GENERIC trees
   TRY_CATCH_EXPR, TRY_FINALLY_EXPR, WITH_CLEANUP_EXPR,
   CLEANUP_POINT_EXPR, CATCH_EXPR, and EH_FILTER_EXPR.

   During initial gimplification (gimplify.c) these are lowered
   to the GIMPLE_TRY, GIMPLE_CATCH, and GIMPLE_EH_FILTER nodes.
   The WITH_CLEANUP_EXPR and CLEANUP_POINT_EXPR nodes are converted
   into GIMPLE_TRY_FINALLY nodes; the others are a more direct 1-1
   conversion.

   During pass_lower_eh (tree-eh.c) we record the nested structure
   of the TRY nodes in EH_REGION nodes in CFUN->EH->REGION_TREE.
   We expand the eh_protect_cleanup_actions langhook into MUST_NOT_THROW
   regions at this time.  We can then flatten the statements within
   the TRY nodes to straight-line code.  Statements that had been within
   TRY nodes that can throw are recorded within CFUN->EH->THROW_STMT_TABLE,
   so that we may remember what action is supposed to be taken if
   a given statement does throw.  During this lowering process,
   we create an EH_LANDING_PAD node for each EH_REGION that has
   some code within the function that needs to be executed if a
   throw does happen.  We also create RESX statements that are
   used to transfer control from an inner EH_REGION to an outer
   EH_REGION.  We also create EH_DISPATCH statements as placeholders
   for a runtime type comparison that should be made in order to
   select the action to perform among different CATCH and EH_FILTER
   regions.

   During pass_lower_eh_dispatch (tree-eh.c), which is run after
   all inlining is complete, we are able to run assign_filter_values,
   which allows us to map the set of types manipulated by all of the
   CATCH and EH_FILTER regions to a set of integers.  This set of integers
   will be how the exception runtime communicates with the code generated
   within the function.  We then expand the GIMPLE_EH_DISPATCH statements
   to a switch or conditional branches that use the argument provided by
   the runtime (__builtin_eh_filter) and the set of integers we computed
   in assign_filter_values.

   During pass_lower_resx (tree-eh.c), which is run near the end
   of optimization, we expand RESX statements.  If the eh region
   that is outer to the RESX statement is a MUST_NOT_THROW, then
   the RESX expands to some form of abort statement.  If the eh
   region that is outer to the RESX statement is within the current
   function, then the RESX expands to a bookkeeping call
   (__builtin_eh_copy_values) and a goto.  Otherwise, the next
   handler for the exception must be within a function somewhere
   up the call chain, so we call back into the exception runtime
   (__builtin_unwind_resume).

   During pass_expand (cfgexpand.c), we generate REG_EH_REGION notes
   that create an rtl to eh_region mapping that corresponds to the
   gimple to eh_region mapping that had been recorded in the
   THROW_STMT_TABLE.

   Then, via finish_eh_generation, we generate the real landing pads
   to which the runtime will actually transfer control.  These new
   landing pads perform whatever bookkeeping is needed by the target
   backend in order to resume execution within the current function.
   Each of these new landing pads falls through into the post_landing_pad
   label which had been used within the CFG up to this point.  All
   exception edges within the CFG are redirected to the new landing pads.
   If the target uses setjmp to implement exceptions, the various extra
   calls into the runtime to register and unregister the current stack
   frame are emitted at this time.

   During pass_convert_to_eh_region_ranges (except.c), we transform
   the REG_EH_REGION notes attached to individual insns into
   non-overlapping ranges of insns bounded by NOTE_INSN_EH_REGION_BEG
   and NOTE_INSN_EH_REGION_END.  Each insn within such ranges has the
   same associated action within the exception region tree, meaning
   that (1) the exception is caught by the same landing pad within the
   current function, (2) the exception is blocked by the runtime with
   a MUST_NOT_THROW region, or (3) the exception is not handled at all
   within the current function.

   Finally, during assembly generation, we call
   output_function_exception_table (except.c) to emit the tables with
   which the exception runtime can determine if a given stack frame
   handles a given exception, and if so what filter value to provide
   to the function when the non-local control transfer is effected.
   If the target uses dwarf2 unwinding to implement exceptions, then
   output_call_frame_info (dwarf2out.c) emits the required unwind data.  */
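
/* An illustrative walk-through (simplified, and not itself part of GCC):
   for a C++ fragment

       try { foo (); } catch (A &a) { bar (); }

   the front end produces a TRY_CATCH_EXPR, gimplification turns that
   into GIMPLE_TRY/GIMPLE_CATCH, and after pass_lower_eh the body is
   straight-line code roughly of the shape

       foo ();          // may throw; THROW_STMT_TABLE maps this
                        // statement to EH region 1
       ...
     <L1>:              // post-landing-pad label for region 1
       eh_dispatch 1;   // later expanded to compare the runtime-provided
                        // filter value and branch to the handler
     <L2>:              // the catch (A) handler
       bar ();

   with a resx statement transferring control to the enclosing region
   when no catch matches.  The exact statements vary by front end and
   target; this sketch is only meant to orient the reader to the data
   structures below.  */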


#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "rtl.h"
#include "hash-set.h"
#include "machmode.h"
#include "vec.h"
#include "double-int.h"
#include "input.h"
#include "alias.h"
#include "symtab.h"
#include "wide-int.h"
#include "inchash.h"
#include "real.h"
#include "tree.h"
#include "fold-const.h"
#include "stringpool.h"
#include "stor-layout.h"
#include "flags.h"
#include "hard-reg-set.h"
#include "function.h"
#include "insn-codes.h"
#include "optabs.h"
#include "hashtab.h"
#include "statistics.h"
#include "fixed-value.h"
#include "insn-config.h"
#include "expmed.h"
#include "dojump.h"
#include "explow.h"
#include "calls.h"
#include "emit-rtl.h"
#include "varasm.h"
#include "stmt.h"
#include "expr.h"
#include "libfuncs.h"
#include "except.h"
#include "output.h"
#include "dwarf2asm.h"
#include "dwarf2out.h"
#include "dwarf2.h"
#include "toplev.h"
#include "hash-table.h"
#include "intl.h"
#include "tm_p.h"
#include "target.h"
#include "common/common-target.h"
#include "langhooks.h"
#include "predict.h"
#include "dominance.h"
#include "cfg.h"
#include "cfgrtl.h"
#include "basic-block.h"
#include "hash-map.h"
#include "is-a.h"
#include "plugin-api.h"
#include "ipa-ref.h"
#include "cgraph.h"
#include "diagnostic.h"
#include "tree-pretty-print.h"
#include "tree-pass.h"
#include "cfgloop.h"
#include "builtins.h"

/* Provide defaults for stuff that may not be defined when using
   sjlj exceptions.  */
#ifndef EH_RETURN_DATA_REGNO
#define EH_RETURN_DATA_REGNO(N) INVALID_REGNUM
#endif

static GTY(()) int call_site_base;

struct tree_hash_traits : default_hashmap_traits
{
  static hashval_t hash (tree t) { return TREE_HASH (t); }
};

static GTY (()) hash_map<tree, tree, tree_hash_traits> *type_to_runtime_map;

/* Describe the SjLj_Function_Context structure.  */
static GTY(()) tree sjlj_fc_type_node;
static int sjlj_fc_call_site_ofs;
static int sjlj_fc_data_ofs;
static int sjlj_fc_personality_ofs;
static int sjlj_fc_lsda_ofs;
static int sjlj_fc_jbuf_ofs;
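
/* For orientation, the record type built in init_eh below mirrors the
   runtime's SjLj_Function_Context.  The authoritative definition lives
   in unwind-sjlj.c; approximately (field types simplified):

       struct SjLj_Function_Context
       {
         struct SjLj_Function_Context *prev;   // __prev: chain of frames
         int call_site;                        // __call_site
         _Unwind_Word data[4];                 // __data: exc_ptr, filter, ...
         _Unwind_Personality_Fn personality;   // __personality
         void *lsda;                           // __lsda
         ... jump buffer ...                   // __jbuf, target dependent
       };

   The sjlj_fc_*_ofs variables above cache the byte offsets of these
   fields so that rtl generation can address them directly.  */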
\f

struct GTY(()) call_site_record_d
{
  rtx landing_pad;
  int action;
};

/* In the following structure and associated functions,
   we represent entries in the action table as 1-based indices.
   Special cases are:

        0:  null action record, non-null landing pad; implies cleanups
       -1:  null action record, null landing pad; implies no action
       -2:  no call-site entry; implies must_not_throw
       -3:  we have yet to process outer regions

   Further, no special cases apply to the "next" field of the record.
   For next, 0 means end of list.  */

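/* A worked example (offsets and filter values invented for
   illustration): a call inside  try { ... } catch (A)  that is further
   wrapped in a cleanup might yield the chain

       { offset = 1, filter = 0, next = 0 }   // cleanup; next == 0 ends it
       { offset = 3, filter = 1, next = 1 }   // catch A (filter 1), chained
                                              // to the cleanup record above

   Because records are hashed on (filter, next) -- see
   action_record_hasher below -- identical chain suffixes are shared
   between call sites rather than re-emitted.  */
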
struct action_record
{
  int offset;
  int filter;
  int next;
};

/* Hashtable helpers.  */

struct action_record_hasher : typed_free_remove <action_record>
{
  typedef action_record *value_type;
  typedef action_record *compare_type;
  static inline hashval_t hash (const action_record *);
  static inline bool equal (const action_record *, const action_record *);
};

inline hashval_t
action_record_hasher::hash (const action_record *entry)
{
  return entry->next * 1009 + entry->filter;
}

inline bool
action_record_hasher::equal (const action_record *entry,
                             const action_record *data)
{
  return entry->filter == data->filter && entry->next == data->next;
}

typedef hash_table<action_record_hasher> action_hash_type;
\f
static bool get_eh_region_and_lp_from_rtx (const_rtx, eh_region *,
                                           eh_landing_pad *);

static void dw2_build_landing_pads (void);

static int collect_one_action_chain (action_hash_type *, eh_region);
static int add_call_site (rtx, int, int);

static void push_uleb128 (vec<uchar, va_gc> **, unsigned int);
static void push_sleb128 (vec<uchar, va_gc> **, int);
#ifndef HAVE_AS_LEB128
static int dw2_size_of_call_site_table (int);
static int sjlj_size_of_call_site_table (void);
#endif
static void dw2_output_call_site_table (int, int);
static void sjlj_output_call_site_table (void);

\f
void
init_eh (void)
{
  if (! flag_exceptions)
    return;

  type_to_runtime_map
    = hash_map<tree, tree, tree_hash_traits>::create_ggc (31);

  /* Create the SjLj_Function_Context structure.  This should match
     the definition in unwind-sjlj.c.  */
  if (targetm_common.except_unwind_info (&global_options) == UI_SJLJ)
    {
      tree f_jbuf, f_per, f_lsda, f_prev, f_cs, f_data, tmp;

      sjlj_fc_type_node = lang_hooks.types.make_type (RECORD_TYPE);

      f_prev = build_decl (BUILTINS_LOCATION,
                           FIELD_DECL, get_identifier ("__prev"),
                           build_pointer_type (sjlj_fc_type_node));
      DECL_FIELD_CONTEXT (f_prev) = sjlj_fc_type_node;

      f_cs = build_decl (BUILTINS_LOCATION,
                         FIELD_DECL, get_identifier ("__call_site"),
                         integer_type_node);
      DECL_FIELD_CONTEXT (f_cs) = sjlj_fc_type_node;

      tmp = build_index_type (size_int (4 - 1));
      tmp = build_array_type (lang_hooks.types.type_for_mode
                              (targetm.unwind_word_mode (), 1),
                              tmp);
      f_data = build_decl (BUILTINS_LOCATION,
                           FIELD_DECL, get_identifier ("__data"), tmp);
      DECL_FIELD_CONTEXT (f_data) = sjlj_fc_type_node;

      f_per = build_decl (BUILTINS_LOCATION,
                          FIELD_DECL, get_identifier ("__personality"),
                          ptr_type_node);
      DECL_FIELD_CONTEXT (f_per) = sjlj_fc_type_node;

      f_lsda = build_decl (BUILTINS_LOCATION,
                           FIELD_DECL, get_identifier ("__lsda"),
                           ptr_type_node);
      DECL_FIELD_CONTEXT (f_lsda) = sjlj_fc_type_node;

#ifdef DONT_USE_BUILTIN_SETJMP
#ifdef JMP_BUF_SIZE
      tmp = size_int (JMP_BUF_SIZE - 1);
#else
      /* This should be large enough for most systems; if it is not,
         JMP_BUF_SIZE should be defined with the proper value.  It will
         also tend to be larger than necessary for most systems; a more
         optimal port will define JMP_BUF_SIZE.  */
      tmp = size_int (FIRST_PSEUDO_REGISTER + 2 - 1);
#endif
#else
      /* Compute a minimally sized jump buffer.  We need room to store at
         least 3 pointers - stack pointer, frame pointer and return address.
         Plus for some targets we need room for an extra pointer - in the
         case of MIPS this is the global pointer.  This makes a total of four
         pointers, but to be safe we actually allocate room for 5.

         If pointers are smaller than words then we allocate enough room for
         5 words, just in case the backend needs this much room.  For more
         discussion on this issue see:
         http://gcc.gnu.org/ml/gcc-patches/2014-05/msg00313.html.  */
      if (POINTER_SIZE > BITS_PER_WORD)
        tmp = size_int (5 - 1);
      else
        tmp = size_int ((5 * BITS_PER_WORD / POINTER_SIZE) - 1);
#endif

      tmp = build_index_type (tmp);
      tmp = build_array_type (ptr_type_node, tmp);
      f_jbuf = build_decl (BUILTINS_LOCATION,
                           FIELD_DECL, get_identifier ("__jbuf"), tmp);
#ifdef DONT_USE_BUILTIN_SETJMP
      /* We don't know what alignment requirements the runtime's
         jmp_buf has.  Overestimate.  */
      DECL_ALIGN (f_jbuf) = BIGGEST_ALIGNMENT;
      DECL_USER_ALIGN (f_jbuf) = 1;
#endif
      DECL_FIELD_CONTEXT (f_jbuf) = sjlj_fc_type_node;

      TYPE_FIELDS (sjlj_fc_type_node) = f_prev;
      TREE_CHAIN (f_prev) = f_cs;
      TREE_CHAIN (f_cs) = f_data;
      TREE_CHAIN (f_data) = f_per;
      TREE_CHAIN (f_per) = f_lsda;
      TREE_CHAIN (f_lsda) = f_jbuf;

      layout_type (sjlj_fc_type_node);

      /* Cache the interesting field offsets so that we have
         easy access from rtl.  */
      sjlj_fc_call_site_ofs
        = (tree_to_uhwi (DECL_FIELD_OFFSET (f_cs))
           + tree_to_uhwi (DECL_FIELD_BIT_OFFSET (f_cs)) / BITS_PER_UNIT);
      sjlj_fc_data_ofs
        = (tree_to_uhwi (DECL_FIELD_OFFSET (f_data))
           + tree_to_uhwi (DECL_FIELD_BIT_OFFSET (f_data)) / BITS_PER_UNIT);
      sjlj_fc_personality_ofs
        = (tree_to_uhwi (DECL_FIELD_OFFSET (f_per))
           + tree_to_uhwi (DECL_FIELD_BIT_OFFSET (f_per)) / BITS_PER_UNIT);
      sjlj_fc_lsda_ofs
        = (tree_to_uhwi (DECL_FIELD_OFFSET (f_lsda))
           + tree_to_uhwi (DECL_FIELD_BIT_OFFSET (f_lsda)) / BITS_PER_UNIT);
      sjlj_fc_jbuf_ofs
        = (tree_to_uhwi (DECL_FIELD_OFFSET (f_jbuf))
           + tree_to_uhwi (DECL_FIELD_BIT_OFFSET (f_jbuf)) / BITS_PER_UNIT);
    }
}

void
init_eh_for_function (void)
{
  cfun->eh = ggc_cleared_alloc<eh_status> ();

  /* Reserve the zeroth entries so that real regions and landing pads
     are numbered from 1.  */
  vec_safe_push (cfun->eh->region_array, (eh_region)0);
  vec_safe_push (cfun->eh->lp_array, (eh_landing_pad)0);
}
\f
/* Routines to generate the exception tree somewhat directly.
   These are used from tree-eh.c when processing exception related
   nodes during tree optimization.  */

static eh_region
gen_eh_region (enum eh_region_type type, eh_region outer)
{
  eh_region new_eh;

  /* Insert a new blank region as a leaf in the tree.  */
  new_eh = ggc_cleared_alloc<eh_region_d> ();
  new_eh->type = type;
  new_eh->outer = outer;
  if (outer)
    {
      new_eh->next_peer = outer->inner;
      outer->inner = new_eh;
    }
  else
    {
      new_eh->next_peer = cfun->eh->region_tree;
      cfun->eh->region_tree = new_eh;
    }

  new_eh->index = vec_safe_length (cfun->eh->region_array);
  vec_safe_push (cfun->eh->region_array, new_eh);

  /* Copy the language's notion of whether to use __cxa_end_cleanup.  */
  if (targetm.arm_eabi_unwinder && lang_hooks.eh_use_cxa_end_cleanup)
    new_eh->use_cxa_end_cleanup = true;

  return new_eh;
}

eh_region
gen_eh_region_cleanup (eh_region outer)
{
  return gen_eh_region (ERT_CLEANUP, outer);
}

eh_region
gen_eh_region_try (eh_region outer)
{
  return gen_eh_region (ERT_TRY, outer);
}

eh_catch
gen_eh_region_catch (eh_region t, tree type_or_list)
{
  eh_catch c, l;
  tree type_list, type_node;

  gcc_assert (t->type == ERT_TRY);

  /* Make sure we always end up with a type list, to normalize further
     processing; then register each type against the runtime types map.  */
  type_list = type_or_list;
  if (type_or_list)
    {
      if (TREE_CODE (type_or_list) != TREE_LIST)
        type_list = tree_cons (NULL_TREE, type_or_list, NULL_TREE);

      type_node = type_list;
      for (; type_node; type_node = TREE_CHAIN (type_node))
        add_type_for_runtime (TREE_VALUE (type_node));
    }

  c = ggc_cleared_alloc<eh_catch_d> ();
  c->type_list = type_list;
  l = t->u.eh_try.last_catch;
  c->prev_catch = l;
  if (l)
    l->next_catch = c;
  else
    t->u.eh_try.first_catch = c;
  t->u.eh_try.last_catch = c;

  return c;
}

eh_region
gen_eh_region_allowed (eh_region outer, tree allowed)
{
  eh_region region = gen_eh_region (ERT_ALLOWED_EXCEPTIONS, outer);
  region->u.allowed.type_list = allowed;

  for (; allowed ; allowed = TREE_CHAIN (allowed))
    add_type_for_runtime (TREE_VALUE (allowed));

  return region;
}

eh_region
gen_eh_region_must_not_throw (eh_region outer)
{
  return gen_eh_region (ERT_MUST_NOT_THROW, outer);
}

eh_landing_pad
gen_eh_landing_pad (eh_region region)
{
  eh_landing_pad lp = ggc_cleared_alloc<eh_landing_pad_d> ();

  lp->next_lp = region->landing_pads;
  lp->region = region;
  lp->index = vec_safe_length (cfun->eh->lp_array);
  region->landing_pads = lp;

  vec_safe_push (cfun->eh->lp_array, lp);

  return lp;
}

eh_region
get_eh_region_from_number_fn (struct function *ifun, int i)
{
  return (*ifun->eh->region_array)[i];
}

eh_region
get_eh_region_from_number (int i)
{
  return get_eh_region_from_number_fn (cfun, i);
}

eh_landing_pad
get_eh_landing_pad_from_number_fn (struct function *ifun, int i)
{
  return (*ifun->eh->lp_array)[i];
}

eh_landing_pad
get_eh_landing_pad_from_number (int i)
{
  return get_eh_landing_pad_from_number_fn (cfun, i);
}

eh_region
get_eh_region_from_lp_number_fn (struct function *ifun, int i)
{
  if (i < 0)
    return (*ifun->eh->region_array)[-i];
  else if (i == 0)
    return NULL;
  else
    {
      eh_landing_pad lp;
      lp = (*ifun->eh->lp_array)[i];
      return lp->region;
    }
}

eh_region
get_eh_region_from_lp_number (int i)
{
  return get_eh_region_from_lp_number_fn (cfun, i);
}
\f
/* Returns true if the current function has exception handling regions.  */

bool
current_function_has_exception_handlers (void)
{
  return cfun->eh->region_tree != NULL;
}
\f
/* A subroutine of duplicate_eh_regions.  Copy the eh_region tree rooted
   at OLD_R into the current function, rooting the copy at OUTER.
   Labels are remapped with DATA->label_map.  */

struct duplicate_eh_regions_data
{
  duplicate_eh_regions_map label_map;
  void *label_map_data;
  hash_map<void *, void *> *eh_map;
};

static void
duplicate_eh_regions_1 (struct duplicate_eh_regions_data *data,
                        eh_region old_r, eh_region outer)
{
  eh_landing_pad old_lp, new_lp;
  eh_region new_r;

  new_r = gen_eh_region (old_r->type, outer);
  gcc_assert (!data->eh_map->put (old_r, new_r));

  switch (old_r->type)
    {
    case ERT_CLEANUP:
      break;

    case ERT_TRY:
      {
        eh_catch oc, nc;
        for (oc = old_r->u.eh_try.first_catch; oc ; oc = oc->next_catch)
          {
            /* We should be doing all our region duplication before and
               during inlining, which is before filter lists are created.  */
            gcc_assert (oc->filter_list == NULL);
            nc = gen_eh_region_catch (new_r, oc->type_list);
            nc->label = data->label_map (oc->label, data->label_map_data);
          }
      }
      break;

    case ERT_ALLOWED_EXCEPTIONS:
      new_r->u.allowed.type_list = old_r->u.allowed.type_list;
      if (old_r->u.allowed.label)
        new_r->u.allowed.label
          = data->label_map (old_r->u.allowed.label, data->label_map_data);
      else
        new_r->u.allowed.label = NULL_TREE;
      break;

    case ERT_MUST_NOT_THROW:
      new_r->u.must_not_throw.failure_loc
        = LOCATION_LOCUS (old_r->u.must_not_throw.failure_loc);
      new_r->u.must_not_throw.failure_decl
        = old_r->u.must_not_throw.failure_decl;
      break;
    }

  for (old_lp = old_r->landing_pads; old_lp ; old_lp = old_lp->next_lp)
    {
      /* Don't bother copying unused landing pads.  */
      if (old_lp->post_landing_pad == NULL)
        continue;

      new_lp = gen_eh_landing_pad (new_r);
      gcc_assert (!data->eh_map->put (old_lp, new_lp));

      new_lp->post_landing_pad
        = data->label_map (old_lp->post_landing_pad, data->label_map_data);
      EH_LANDING_PAD_NR (new_lp->post_landing_pad) = new_lp->index;
    }

  /* Make sure to preserve the original use of __cxa_end_cleanup.  */
  new_r->use_cxa_end_cleanup = old_r->use_cxa_end_cleanup;

  for (old_r = old_r->inner; old_r ; old_r = old_r->next_peer)
    duplicate_eh_regions_1 (data, old_r, new_r);
}

/* Duplicate the EH regions from IFUN rooted at COPY_REGION into
   the current function and root the tree below OUTER_REGION.
   The special case of COPY_REGION of NULL means all regions.
   Remap labels using MAP/MAP_DATA callback.  Return a pointer map
   that allows the caller to remap uses of both EH regions and
   EH landing pads.  */

hash_map<void *, void *> *
duplicate_eh_regions (struct function *ifun,
                      eh_region copy_region, int outer_lp,
                      duplicate_eh_regions_map map, void *map_data)
{
  struct duplicate_eh_regions_data data;
  eh_region outer_region;

#ifdef ENABLE_CHECKING
  verify_eh_tree (ifun);
#endif

  data.label_map = map;
  data.label_map_data = map_data;
  data.eh_map = new hash_map<void *, void *>;

  outer_region = get_eh_region_from_lp_number (outer_lp);

  /* Copy all the regions in the subtree.  */
  if (copy_region)
    duplicate_eh_regions_1 (&data, copy_region, outer_region);
  else
    {
      eh_region r;
      for (r = ifun->eh->region_tree; r ; r = r->next_peer)
        duplicate_eh_regions_1 (&data, r, outer_region);
    }

#ifdef ENABLE_CHECKING
  verify_eh_tree (cfun);
#endif

  return data.eh_map;
}

/* Return the region that is outer to both REGION_A and REGION_B in IFUN.  */

eh_region
eh_region_outermost (struct function *ifun, eh_region region_a,
                     eh_region region_b)
{
  sbitmap b_outer;

  gcc_assert (ifun->eh->region_array);
  gcc_assert (ifun->eh->region_tree);

  b_outer = sbitmap_alloc (ifun->eh->region_array->length ());
  bitmap_clear (b_outer);

  do
    {
      bitmap_set_bit (b_outer, region_b->index);
      region_b = region_b->outer;
    }
  while (region_b);

  do
    {
      if (bitmap_bit_p (b_outer, region_a->index))
        break;
      region_a = region_a->outer;
    }
  while (region_a);

  sbitmap_free (b_outer);
  return region_a;
}
\f
void
add_type_for_runtime (tree type)
{
  /* If TYPE is NOP_EXPR, it means that it already is a runtime type.  */
  if (TREE_CODE (type) == NOP_EXPR)
    return;

  bool existed = false;
  tree *slot = &type_to_runtime_map->get_or_insert (type, &existed);
  if (!existed)
    *slot = lang_hooks.eh_runtime_type (type);
}

tree
lookup_type_for_runtime (tree type)
{
  /* If TYPE is NOP_EXPR, it means that it already is a runtime type.  */
  if (TREE_CODE (type) == NOP_EXPR)
    return type;

  /* We should always have inserted the data earlier.  */
  return *type_to_runtime_map->get (type);
}
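
/* As a hedged example of what the map holds: for C++,
   lang_hooks.eh_runtime_type typically returns the address of the
   type's RTTI object wrapped in a NOP_EXPR, so that, conceptually,

       add_type_for_runtime (A_type);
       tree rt = lookup_type_for_runtime (A_type);
       // rt ~ NOP_EXPR around the ADDR_EXPR of A's type_info (&_ZTI1A)

   hands output_function_exception_table something the unwinder can
   compare against a thrown exception's type.  That wrapper is also why
   both functions above treat a NOP_EXPR argument as already converted.  */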

\f
/* Represent an entry in @TTypes for either catch actions
   or exception filter actions.  */
struct ttypes_filter {
  tree t;
  int filter;
};

/* Helper for ttypes_filter hashing.  */

struct ttypes_filter_hasher : typed_free_remove <ttypes_filter>
{
  typedef ttypes_filter *value_type;
  typedef tree_node *compare_type;
  static inline hashval_t hash (const ttypes_filter *);
  static inline bool equal (const ttypes_filter *, const tree_node *);
};

/* Compare ENTRY (a ttypes_filter entry in the hash table) with DATA
   (a tree) for a @TTypes type node we are thinking about adding.  */

inline bool
ttypes_filter_hasher::equal (const ttypes_filter *entry, const tree_node *data)
{
  return entry->t == data;
}

inline hashval_t
ttypes_filter_hasher::hash (const ttypes_filter *entry)
{
  return TREE_HASH (entry->t);
}

typedef hash_table<ttypes_filter_hasher> ttypes_hash_type;


/* Helper for ehspec hashing.  */

struct ehspec_hasher : typed_free_remove <ttypes_filter>
{
  typedef ttypes_filter *value_type;
  typedef ttypes_filter *compare_type;
  static inline hashval_t hash (const ttypes_filter *);
  static inline bool equal (const ttypes_filter *, const ttypes_filter *);
};

/* Compare ENTRY with DATA (both struct ttypes_filter) for a @TTypes
   exception specification list we are thinking about adding.  */
/* ??? Currently we use the type lists in the order given.  Someone
   should put these in some canonical order.  */

inline bool
ehspec_hasher::equal (const ttypes_filter *entry, const ttypes_filter *data)
{
  return type_list_equal (entry->t, data->t);
}

/* Hash function for exception specification lists.  */

inline hashval_t
ehspec_hasher::hash (const ttypes_filter *entry)
{
  hashval_t h = 0;
  tree list;

  for (list = entry->t; list ; list = TREE_CHAIN (list))
    h = (h << 5) + (h >> 27) + TREE_HASH (TREE_VALUE (list));
  return h;
}

typedef hash_table<ehspec_hasher> ehspec_hash_type;


/* Add TYPE (which may be NULL) to cfun->eh->ttype_data, using TYPES_HASH
   to speed up the search.  Return the filter value to be used.  */

static int
add_ttypes_entry (ttypes_hash_type *ttypes_hash, tree type)
{
  struct ttypes_filter **slot, *n;

  slot = ttypes_hash->find_slot_with_hash (type, (hashval_t) TREE_HASH (type),
                                           INSERT);

  if ((n = *slot) == NULL)
    {
      /* Filter value is a 1-based table index.  */

      n = XNEW (struct ttypes_filter);
      n->t = type;
      n->filter = vec_safe_length (cfun->eh->ttype_data) + 1;
      *slot = n;

      vec_safe_push (cfun->eh->ttype_data, type);
    }

  return n->filter;
}

/* Add LIST to cfun->eh->ehspec_data, using EHSPEC_HASH and TYPES_HASH
   to speed up the search.  Return the filter value to be used.  */

static int
add_ehspec_entry (ehspec_hash_type *ehspec_hash, ttypes_hash_type *ttypes_hash,
                  tree list)
{
  struct ttypes_filter **slot, *n;
  struct ttypes_filter dummy;

  dummy.t = list;
  slot = ehspec_hash->find_slot (&dummy, INSERT);

  if ((n = *slot) == NULL)
    {
      int len;

      if (targetm.arm_eabi_unwinder)
        len = vec_safe_length (cfun->eh->ehspec_data.arm_eabi);
      else
        len = vec_safe_length (cfun->eh->ehspec_data.other);

      /* Filter value is a -1-based byte index into a uleb128 buffer.  */

      n = XNEW (struct ttypes_filter);
      n->t = list;
      n->filter = -(len + 1);
      *slot = n;

      /* Generate a 0 terminated list of filter values.  */
      for (; list ; list = TREE_CHAIN (list))
        {
          if (targetm.arm_eabi_unwinder)
            vec_safe_push (cfun->eh->ehspec_data.arm_eabi, TREE_VALUE (list));
          else
            {
              /* Look up each type in the list and encode its filter
                 value as a uleb128.  */
              push_uleb128 (&cfun->eh->ehspec_data.other,
                            add_ttypes_entry (ttypes_hash, TREE_VALUE (list)));
            }
        }
      if (targetm.arm_eabi_unwinder)
        vec_safe_push (cfun->eh->ehspec_data.arm_eabi, NULL_TREE);
      else
        vec_safe_push (cfun->eh->ehspec_data.other, (uchar)0);
    }

  return n->filter;
}
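
/* A small worked example of the encoding above (filter values
   invented): for a  throw (A, B)  specification where A and B were
   assigned @TTypes filter values 1 and 2, the non-ARM path appends

       0x01 0x02 0x00    // uleb128-encoded filters, 0-terminated

   to ehspec_data.other, and records minus the 1-based byte position of
   the list's start as the spec's filter (-1 if it is the first list in
   the buffer).  The uleb128 encoding only matters once a filter value
   reaches 128: the value 200, for instance, would occupy the two bytes
   0xC8 0x01.  */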

/* Generate the action filter values to be used for CATCH and
   ALLOWED_EXCEPTIONS regions.  When using dwarf2 exception regions,
   we use lots of landing pads, and so every type or list can share
   the same filter value, which saves table space.  */

void
assign_filter_values (void)
{
  int i;
  eh_region r;
  eh_catch c;

  vec_alloc (cfun->eh->ttype_data, 16);
  if (targetm.arm_eabi_unwinder)
    vec_alloc (cfun->eh->ehspec_data.arm_eabi, 64);
  else
    vec_alloc (cfun->eh->ehspec_data.other, 64);

  ehspec_hash_type ehspec (31);
  ttypes_hash_type ttypes (31);

  for (i = 1; vec_safe_iterate (cfun->eh->region_array, i, &r); ++i)
    {
      if (r == NULL)
        continue;

      switch (r->type)
        {
        case ERT_TRY:
          for (c = r->u.eh_try.first_catch; c ; c = c->next_catch)
            {
              /* Whatever type_list is (NULL or a true list), we build
                 a list of filters for the region.  */
              c->filter_list = NULL_TREE;

              if (c->type_list != NULL)
                {
                  /* Get a filter value for each of the types caught and store
                     them in the region's dedicated list.  */
                  tree tp_node = c->type_list;

                  for ( ; tp_node; tp_node = TREE_CHAIN (tp_node))
                    {
                      int flt
                        = add_ttypes_entry (&ttypes, TREE_VALUE (tp_node));
                      tree flt_node = build_int_cst (integer_type_node, flt);

                      c->filter_list
                        = tree_cons (NULL_TREE, flt_node, c->filter_list);
                    }
                }
              else
                {
                  /* Get a filter value for the NULL list also, since it
                     will need an action record anyway.  */
                  int flt = add_ttypes_entry (&ttypes, NULL);
                  tree flt_node = build_int_cst (integer_type_node, flt);

                  c->filter_list
                    = tree_cons (NULL_TREE, flt_node, NULL);
                }
            }
          break;

        case ERT_ALLOWED_EXCEPTIONS:
          r->u.allowed.filter
            = add_ehspec_entry (&ehspec, &ttypes, r->u.allowed.type_list);
          break;

        default:
          break;
        }
    }
}
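
/* Illustrative result (the numbers depend on region order): for

       try { ... } catch (A) { ... } catch (B) { ... }

   the loop above might assign @TTypes entries A -> 1 and B -> 2 and
   set the two catches' filter_list to (1) and (2).  An
   ALLOWED_EXCEPTIONS region permitting only A would get a negative
   filter naming the byte position of its 0-terminated list { 1, 0 }
   in ehspec_data.  The hash tables guarantee that the same type maps
   to the same filter value everywhere within a function.  */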

/* Emit SEQ into the basic block just before INSN (which is assumed to
   be the first instruction of some existing BB) and return the newly
   produced block.  */
static basic_block
emit_to_new_bb_before (rtx_insn *seq, rtx insn)
{
  rtx_insn *last;
  basic_block bb;
  edge e;
  edge_iterator ei;

  /* If there happens to be a fallthru edge (possibly created by a
     cleanup_cfg call), we don't want it to go into a newly created
     landing pad or other EH construct.  */
  for (ei = ei_start (BLOCK_FOR_INSN (insn)->preds); (e = ei_safe_edge (ei)); )
    if (e->flags & EDGE_FALLTHRU)
      force_nonfallthru (e);
    else
      ei_next (&ei);
  last = emit_insn_before (seq, insn);
  if (BARRIER_P (last))
    last = PREV_INSN (last);
  bb = create_basic_block (seq, last, BLOCK_FOR_INSN (insn)->prev_bb);
  update_bb_for_insn (bb);
  bb->flags |= BB_SUPERBLOCK;
  return bb;
}
\f
/* A subroutine of dw2_build_landing_pads, also used for edge splitting
   at the rtl level.  Emit the code required by the target at a landing
   pad for the given region.  */

void
expand_dw2_landing_pad_for_region (eh_region region)
{
#ifdef HAVE_exception_receiver
  if (HAVE_exception_receiver)
    emit_insn (gen_exception_receiver ());
  else
#endif
#ifdef HAVE_nonlocal_goto_receiver
  if (HAVE_nonlocal_goto_receiver)
    emit_insn (gen_nonlocal_goto_receiver ());
  else
#endif
    { /* Nothing */ }

  if (region->exc_ptr_reg)
    emit_move_insn (region->exc_ptr_reg,
                    gen_rtx_REG (ptr_mode, EH_RETURN_DATA_REGNO (0)));
  if (region->filter_reg)
    emit_move_insn (region->filter_reg,
                    gen_rtx_REG (targetm.eh_return_filter_mode (),
                                 EH_RETURN_DATA_REGNO (1)));
}
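
/* On a typical dwarf2 target the landing pad built from the above is
   tiny; schematically (register numbers invented for illustration):

     Llp:                                  // lp->landing_pad
       (set (reg exc_ptr) (reg:DI r0))     // EH_RETURN_DATA_REGNO (0)
       (set (reg filter) (reg:SI r1))      // EH_RETURN_DATA_REGNO (1)
       // fall through to the post_landing_pad label

   plus whatever exception_receiver or nonlocal_goto_receiver pattern
   the target requires (e.g. to recompute a global pointer).  */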

/* Expand the extra code needed at landing pads for dwarf2 unwinding.  */

static void
dw2_build_landing_pads (void)
{
  int i;
  eh_landing_pad lp;
  int e_flags = EDGE_FALLTHRU;

  /* If we're going to partition blocks, we need to be able to add
     new landing pads later, which means that we need to hold on to
     the post-landing-pad block.  Prevent it from being merged away.
     We'll remove this bit after partitioning.  */
  if (flag_reorder_blocks_and_partition)
    e_flags |= EDGE_PRESERVE;

  for (i = 1; vec_safe_iterate (cfun->eh->lp_array, i, &lp); ++i)
    {
      basic_block bb;
      rtx_insn *seq;
      edge e;

      if (lp == NULL || lp->post_landing_pad == NULL)
        continue;

      start_sequence ();

      lp->landing_pad = gen_label_rtx ();
      emit_label (lp->landing_pad);
      LABEL_PRESERVE_P (lp->landing_pad) = 1;

      expand_dw2_landing_pad_for_region (lp->region);

      seq = get_insns ();
      end_sequence ();

      bb = emit_to_new_bb_before (seq, label_rtx (lp->post_landing_pad));
      e = make_edge (bb, bb->next_bb, e_flags);
      e->count = bb->count;
      e->probability = REG_BR_PROB_BASE;
      if (current_loops)
        {
          struct loop *loop = bb->next_bb->loop_father;
          /* If we created a pre-header block, add the new block to the
             outer loop, otherwise to the loop itself.  */
          if (bb->next_bb == loop->header)
            add_bb_to_loop (bb, loop_outer (loop));
          else
            add_bb_to_loop (bb, loop);
        }
    }
}

\f
static vec<int> sjlj_lp_call_site_index;

/* Process all active landing pads.  Assign each one a compact dispatch
   index, and a call-site index.  */

static int
sjlj_assign_call_site_values (void)
{
  action_hash_type ar_hash (31);
  int i, disp_index;
  eh_landing_pad lp;

  vec_alloc (crtl->eh.action_record_data, 64);

  disp_index = 0;
  call_site_base = 1;
  for (i = 1; vec_safe_iterate (cfun->eh->lp_array, i, &lp); ++i)
    if (lp && lp->post_landing_pad)
      {
        int action, call_site;

        /* First: build the action table.  */
        action = collect_one_action_chain (&ar_hash, lp->region);

        /* Next: assign call-site values.  In dwarf2 terms, this would be
           the region number assigned by convert_to_eh_region_ranges, but
           we handle no-action and must-not-throw differently.  */
        /* Map must-not-throw to otherwise unused call-site index 0.  */
        if (action == -2)
          call_site = 0;
        /* Map no-action to otherwise unused call-site index -1.  */
        else if (action == -1)
          call_site = -1;
        /* Otherwise, look it up in the table.  */
        else
          call_site = add_call_site (GEN_INT (disp_index), action, 0);
        sjlj_lp_call_site_index[i] = call_site;

        disp_index++;
      }

  return disp_index;
}

/* Emit code to record the current call-site index before every
   insn that can throw.  */

static void
sjlj_mark_call_sites (void)
{
  int last_call_site = -2;
  rtx_insn *insn;
  rtx mem;

  for (insn = get_insns (); insn ; insn = NEXT_INSN (insn))
    {
      eh_landing_pad lp;
      eh_region r;
      bool nothrow;
      int this_call_site;
      rtx_insn *before, *p;

      /* Reset value tracking at extended basic block boundaries.  */
      if (LABEL_P (insn))
        last_call_site = -2;

      if (! INSN_P (insn))
        continue;

      nothrow = get_eh_region_and_lp_from_rtx (insn, &r, &lp);
      if (nothrow)
        continue;
      if (lp)
        this_call_site = sjlj_lp_call_site_index[lp->index];
      else if (r == NULL)
        {
          /* Calls (and trapping insns) without notes are outside any
             exception handling region in this function.  Mark them as
             no action.  */
          this_call_site = -1;
        }
      else
        {
          gcc_assert (r->type == ERT_MUST_NOT_THROW);
          this_call_site = 0;
        }

      if (this_call_site != -1)
        crtl->uses_eh_lsda = 1;

      if (this_call_site == last_call_site)
        continue;

      /* Don't separate a call from its argument loads.  */
      before = insn;
      if (CALL_P (insn))
        before = find_first_parameter_load (insn, NULL);

      start_sequence ();
      mem = adjust_address (crtl->eh.sjlj_fc, TYPE_MODE (integer_type_node),
                            sjlj_fc_call_site_ofs);
      emit_move_insn (mem, gen_int_mode (this_call_site, GET_MODE (mem)));
      p = get_insns ();
      end_sequence ();

      emit_insn_before (p, before);
      last_call_site = this_call_site;
    }
}

/* Construct the SjLj_Function_Context.  */

static void
sjlj_emit_function_enter (rtx_code_label *dispatch_label)
{
  rtx_insn *fn_begin, *seq;
  rtx fc, mem;
  bool fn_begin_outside_block;
  rtx personality = get_personality_function (current_function_decl);

  fc = crtl->eh.sjlj_fc;

  start_sequence ();

  /* We're storing this libcall's address into memory instead of
     calling it directly.  Thus, we must call assemble_external_libcall
     here, as we cannot depend on emit_library_call to do it for us.  */
  assemble_external_libcall (personality);
  mem = adjust_address (fc, Pmode, sjlj_fc_personality_ofs);
  emit_move_insn (mem, personality);

  mem = adjust_address (fc, Pmode, sjlj_fc_lsda_ofs);
  if (crtl->uses_eh_lsda)
    {
      char buf[20];
      rtx sym;

      ASM_GENERATE_INTERNAL_LABEL (buf, "LLSDA", current_function_funcdef_no);
      sym = gen_rtx_SYMBOL_REF (Pmode, ggc_strdup (buf));
      SYMBOL_REF_FLAGS (sym) = SYMBOL_FLAG_LOCAL;
      emit_move_insn (mem, sym);
    }
  else
    emit_move_insn (mem, const0_rtx);

  if (dispatch_label)
    {
#ifdef DONT_USE_BUILTIN_SETJMP
      rtx x;
      x = emit_library_call_value (setjmp_libfunc, NULL_RTX, LCT_RETURNS_TWICE,
                                   TYPE_MODE (integer_type_node), 1,
                                   plus_constant (Pmode, XEXP (fc, 0),
                                                  sjlj_fc_jbuf_ofs), Pmode);

      emit_cmp_and_jump_insns (x, const0_rtx, NE, 0,
                               TYPE_MODE (integer_type_node), 0,
                               dispatch_label, REG_BR_PROB_BASE / 100);
#else
      expand_builtin_setjmp_setup (plus_constant (Pmode, XEXP (fc, 0),
                                                  sjlj_fc_jbuf_ofs),
                                   dispatch_label);
#endif
    }

  emit_library_call (unwind_sjlj_register_libfunc, LCT_NORMAL, VOIDmode,
                     1, XEXP (fc, 0), Pmode);

  seq = get_insns ();
  end_sequence ();

  /* ??? Instead of doing this at the beginning of the function,
     do this in a block that is at loop level 0 and dominates all
     can_throw_internal instructions.  */

  fn_begin_outside_block = true;
  for (fn_begin = get_insns (); ; fn_begin = NEXT_INSN (fn_begin))
    if (NOTE_P (fn_begin))
      {
        if (NOTE_KIND (fn_begin) == NOTE_INSN_FUNCTION_BEG)
          break;
        else if (NOTE_INSN_BASIC_BLOCK_P (fn_begin))
          fn_begin_outside_block = false;
      }

  if (fn_begin_outside_block)
    insert_insn_on_edge (seq, single_succ_edge (ENTRY_BLOCK_PTR_FOR_FN (cfun)));
  else
    emit_insn_after (seq, fn_begin);
}

/* Call back from expand_function_end to know where we should put
   the call to unwind_sjlj_unregister_libfunc if needed.  */

void
sjlj_emit_function_exit_after (rtx_insn *after)
{
  crtl->eh.sjlj_exit_after = after;
}

static void
sjlj_emit_function_exit (void)
{
  rtx_insn *seq, *insn;

  start_sequence ();

  emit_library_call (unwind_sjlj_unregister_libfunc, LCT_NORMAL, VOIDmode,
                     1, XEXP (crtl->eh.sjlj_fc, 0), Pmode);

  seq = get_insns ();
  end_sequence ();

  /* ??? Really this can be done in any block at loop level 0 that
     post-dominates all can_throw_internal instructions.  This is
     the last possible moment.  */

  insn = crtl->eh.sjlj_exit_after;
  if (LABEL_P (insn))
    insn = NEXT_INSN (insn);

  emit_insn_after (seq, insn);
}

static void
sjlj_emit_dispatch_table (rtx_code_label *dispatch_label, int num_dispatch)
{
  machine_mode unwind_word_mode = targetm.unwind_word_mode ();
  machine_mode filter_mode = targetm.eh_return_filter_mode ();
  eh_landing_pad lp;
  rtx mem, fc, before, exc_ptr_reg, filter_reg;
  rtx_insn *seq;
  rtx first_reachable_label;
  basic_block bb;
  eh_region r;
  edge e;
  int i, disp_index;
  vec<tree> dispatch_labels = vNULL;

  fc = crtl->eh.sjlj_fc;

  start_sequence ();

  emit_label (dispatch_label);

#ifndef DONT_USE_BUILTIN_SETJMP
  expand_builtin_setjmp_receiver (dispatch_label);

  /* The caller of expand_builtin_setjmp_receiver is responsible for
     making sure that the label doesn't vanish.  The only other caller
     is the expander for __builtin_setjmp_receiver, which places this
     label on the nonlocal_goto_label list.  Since we're modeling these
     CFG edges more exactly, we can use the forced_labels list instead.  */
  LABEL_PRESERVE_P (dispatch_label) = 1;
  forced_labels
    = gen_rtx_INSN_LIST (VOIDmode, dispatch_label, forced_labels);
#endif

  /* Load up exc_ptr and filter values from the function context.  */
  mem = adjust_address (fc, unwind_word_mode, sjlj_fc_data_ofs);
  if (unwind_word_mode != ptr_mode)
    {
#ifdef POINTERS_EXTEND_UNSIGNED
      mem = convert_memory_address (ptr_mode, mem);
#else
      mem = convert_to_mode (ptr_mode, mem, 0);
#endif
    }
  exc_ptr_reg = force_reg (ptr_mode, mem);

  mem = adjust_address (fc, unwind_word_mode,
                        sjlj_fc_data_ofs + GET_MODE_SIZE (unwind_word_mode));
  if (unwind_word_mode != filter_mode)
    mem = convert_to_mode (filter_mode, mem, 0);
  filter_reg = force_reg (filter_mode, mem);

  /* Jump to one of the directly reachable regions.  */

  disp_index = 0;
  first_reachable_label = NULL;

  /* If there's exactly one call site in the function, don't bother
     generating a switch statement.  */
  if (num_dispatch > 1)
    dispatch_labels.create (num_dispatch);

  for (i = 1; vec_safe_iterate (cfun->eh->lp_array, i, &lp); ++i)
    if (lp && lp->post_landing_pad)
      {
        rtx_insn *seq2;
        rtx label;

        start_sequence ();

        lp->landing_pad = dispatch_label;

        if (num_dispatch > 1)
          {
            tree t_label, case_elt, t;

            t_label = create_artificial_label (UNKNOWN_LOCATION);
            t = build_int_cst (integer_type_node, disp_index);
            case_elt = build_case_label (t, NULL, t_label);
            dispatch_labels.quick_push (case_elt);
            label = label_rtx (t_label);
          }
        else
          label = gen_label_rtx ();

        if (disp_index == 0)
          first_reachable_label = label;
        emit_label (label);

        r = lp->region;
        if (r->exc_ptr_reg)
          emit_move_insn (r->exc_ptr_reg, exc_ptr_reg);
        if (r->filter_reg)
          emit_move_insn (r->filter_reg, filter_reg);

        seq2 = get_insns ();
        end_sequence ();

        before = label_rtx (lp->post_landing_pad);
        bb = emit_to_new_bb_before (seq2, before);
        e = make_edge (bb, bb->next_bb, EDGE_FALLTHRU);
        e->count = bb->count;
        e->probability = REG_BR_PROB_BASE;
        if (current_loops)
          {
            struct loop *loop = bb->next_bb->loop_father;
            /* If we created a pre-header block, add the new block to the
               outer loop, otherwise to the loop itself.  */
            if (bb->next_bb == loop->header)
              add_bb_to_loop (bb, loop_outer (loop));
            else
              add_bb_to_loop (bb, loop);
            /* ??? For multiple dispatches we will end up with edges
               from the loop tree root into this loop, making it a
               multiple-entry loop.  Discard all affected loops.  */
            if (num_dispatch > 1)
              {
                for (loop = bb->loop_father;
                     loop_outer (loop); loop = loop_outer (loop))
                  mark_loop_for_removal (loop);
              }
          }

        disp_index++;
      }
  gcc_assert (disp_index == num_dispatch);

  if (num_dispatch > 1)
    {
      rtx disp = adjust_address (fc, TYPE_MODE (integer_type_node),
                                 sjlj_fc_call_site_ofs);
      expand_sjlj_dispatch_table (disp, dispatch_labels);
    }

  seq = get_insns ();
  end_sequence ();

  bb = emit_to_new_bb_before (seq, first_reachable_label);
  if (num_dispatch == 1)
    {
      e = make_edge (bb, bb->next_bb, EDGE_FALLTHRU);
      e->count = bb->count;
      e->probability = REG_BR_PROB_BASE;
      if (current_loops)
        {
          struct loop *loop = bb->next_bb->loop_father;
          /* If we created a pre-header block, add the new block to the
             outer loop, otherwise to the loop itself.  */
          if (bb->next_bb == loop->header)
            add_bb_to_loop (bb, loop_outer (loop));
          else
            add_bb_to_loop (bb, loop);
        }
    }
  else
    {
      /* We are not wiring up edges here, but as the dispatcher call
         is at function begin simply associate the block with the
         outermost (non-)loop.  */
      if (current_loops)
        add_bb_to_loop (bb, current_loops->tree_root);
    }
}

static void
sjlj_build_landing_pads (void)
{
  int num_dispatch;

  num_dispatch = vec_safe_length (cfun->eh->lp_array);
  if (num_dispatch == 0)
    return;
  sjlj_lp_call_site_index.safe_grow_cleared (num_dispatch);

  num_dispatch = sjlj_assign_call_site_values ();
  if (num_dispatch > 0)
    {
      rtx_code_label *dispatch_label = gen_label_rtx ();
      int align = STACK_SLOT_ALIGNMENT (sjlj_fc_type_node,
                                        TYPE_MODE (sjlj_fc_type_node),
                                        TYPE_ALIGN (sjlj_fc_type_node));
      crtl->eh.sjlj_fc
        = assign_stack_local (TYPE_MODE (sjlj_fc_type_node),
                              int_size_in_bytes (sjlj_fc_type_node),
                              align);

      sjlj_mark_call_sites ();
      sjlj_emit_function_enter (dispatch_label);
      sjlj_emit_dispatch_table (dispatch_label, num_dispatch);
      sjlj_emit_function_exit ();
    }

  /* If we do not have any landing pads, we may still need to register a
     personality routine and (empty) LSDA to handle must-not-throw regions.  */
  else if (function_needs_eh_personality (cfun) != eh_personality_none)
    {
      int align = STACK_SLOT_ALIGNMENT (sjlj_fc_type_node,
                                        TYPE_MODE (sjlj_fc_type_node),
                                        TYPE_ALIGN (sjlj_fc_type_node));
      crtl->eh.sjlj_fc
        = assign_stack_local (TYPE_MODE (sjlj_fc_type_node),
                              int_size_in_bytes (sjlj_fc_type_node),
                              align);

      sjlj_mark_call_sites ();
      sjlj_emit_function_enter (NULL);
      sjlj_emit_function_exit ();
    }

  sjlj_lp_call_site_index.release ();
}

/* After initial rtl generation, call back to finish generating
   exception support code.  */

void
finish_eh_generation (void)
{
  basic_block bb;

  /* Construct the landing pads.  */
  if (targetm_common.except_unwind_info (&global_options) == UI_SJLJ)
    sjlj_build_landing_pads ();
  else
    dw2_build_landing_pads ();
  break_superblocks ();

  if (targetm_common.except_unwind_info (&global_options) == UI_SJLJ
      /* Kludge for Alpha (see alpha_gp_save_rtx).  */
      || single_succ_edge (ENTRY_BLOCK_PTR_FOR_FN (cfun))->insns.r)
    commit_edge_insertions ();

  /* Redirect all EH edges from the post_landing_pad to the landing pad.  */
  FOR_EACH_BB_FN (bb, cfun)
    {
      eh_landing_pad lp;
      edge_iterator ei;
      edge e;

      lp = get_eh_landing_pad_from_rtx (BB_END (bb));

      FOR_EACH_EDGE (e, ei, bb->succs)
        if (e->flags & EDGE_EH)
          break;

      /* We should not have generated any new throwing insns during this
         pass, and we should not have lost any EH edges, so we only need
         to handle two cases here:
         (1) reachable handler and an existing edge to post-landing-pad,
         (2) no reachable handler and no edge.  */
      gcc_assert ((lp != NULL) == (e != NULL));
      if (lp != NULL)
        {
          gcc_assert (BB_HEAD (e->dest) == label_rtx (lp->post_landing_pad));

          redirect_edge_succ (e, BLOCK_FOR_INSN (lp->landing_pad));
          e->flags |= (CALL_P (BB_END (bb))
                       ? EDGE_ABNORMAL | EDGE_ABNORMAL_CALL
                       : EDGE_ABNORMAL);
        }
    }
}
\f
/* This section handles removing dead code for flow.  */

void
remove_eh_landing_pad (eh_landing_pad lp)
{
  eh_landing_pad *pp;

  for (pp = &lp->region->landing_pads; *pp != lp; pp = &(*pp)->next_lp)
    continue;
  *pp = lp->next_lp;

  if (lp->post_landing_pad)
    EH_LANDING_PAD_NR (lp->post_landing_pad) = 0;
  (*cfun->eh->lp_array)[lp->index] = NULL;
}

/* Splice the EH region at PP from the region tree.  */

static void
remove_eh_handler_splicer (eh_region *pp)
{
  eh_region region = *pp;
  eh_landing_pad lp;

  for (lp = region->landing_pads; lp ; lp = lp->next_lp)
    {
      if (lp->post_landing_pad)
        EH_LANDING_PAD_NR (lp->post_landing_pad) = 0;
      (*cfun->eh->lp_array)[lp->index] = NULL;
    }

  if (region->inner)
    {
      eh_region p, outer;
      outer = region->outer;

      *pp = p = region->inner;
      do
        {
          p->outer = outer;
          pp = &p->next_peer;
          p = *pp;
        }
      while (p);
    }
  *pp = region->next_peer;

  (*cfun->eh->region_array)[region->index] = NULL;
}

/* Splice a single EH region REGION from the region tree.

   To unlink REGION, we need to find the pointer to it with a relatively
   expensive search in REGION's outer region.  If you are going to
   remove a number of handlers, using remove_unreachable_eh_regions may
   be a better option.  */

void
remove_eh_handler (eh_region region)
{
  eh_region *pp, *pp_start, p, outer;

  outer = region->outer;
  if (outer)
    pp_start = &outer->inner;
  else
    pp_start = &cfun->eh->region_tree;
  for (pp = pp_start, p = *pp; p != region; pp = &p->next_peer, p = *pp)
    continue;

  remove_eh_handler_splicer (pp);
}

/* Worker for remove_unreachable_eh_regions.
   PP is a pointer to the region to start a region tree depth-first
   search from.  R_REACHABLE is the set of regions that have to be
   preserved.  */

static void
remove_unreachable_eh_regions_worker (eh_region *pp, sbitmap r_reachable)
{
  while (*pp)
    {
      eh_region region = *pp;
      remove_unreachable_eh_regions_worker (&region->inner, r_reachable);
      if (!bitmap_bit_p (r_reachable, region->index))
        remove_eh_handler_splicer (pp);
      else
        pp = &region->next_peer;
    }
}

/* Splice all EH regions *not* marked in R_REACHABLE from the region tree.
   Do this by traversing the EH tree top-down and splicing out regions
   that are not marked.  By removing regions from the leaves, we avoid
   costly searches in the region tree.  */

void
remove_unreachable_eh_regions (sbitmap r_reachable)
{
  remove_unreachable_eh_regions_worker (&cfun->eh->region_tree, r_reachable);
}

/* Invokes CALLBACK for every exception handler landing pad label.
   Only used by reload hackery; should not be used by new code.  */

void
for_each_eh_label (void (*callback) (rtx))
{
  eh_landing_pad lp;
  int i;

  for (i = 1; vec_safe_iterate (cfun->eh->lp_array, i, &lp); ++i)
    {
      if (lp)
        {
          rtx lab = lp->landing_pad;
          if (lab && LABEL_P (lab))
            (*callback) (lab);
        }
    }
}
\f
/* Create the REG_EH_REGION note for INSN, given its ECF_FLAGS for a
   call insn.

   At the gimple level, we use LP_NR
       > 0 : The statement transfers to landing pad LP_NR
       = 0 : The statement is outside any EH region
       < 0 : The statement is within MUST_NOT_THROW region -LP_NR.

   At the rtl level, we use LP_NR
       > 0 : The insn transfers to landing pad LP_NR
       = 0 : The insn cannot throw
       < 0 : The insn is within MUST_NOT_THROW region -LP_NR
       = INT_MIN : The insn cannot throw or execute a nonlocal-goto.
       missing note: The insn is outside any EH region.

   ??? This difference probably ought to be avoided.  We could stand
   to record nothrow for arbitrary gimple statements, and so avoid
   some moderately complex lookups in stmt_could_throw_p.  Perhaps
   NOTHROW should be mapped on both sides to INT_MIN.  Perhaps the
   no-nonlocal-goto property should be recorded elsewhere as a bit
   on the call_insn directly.  Perhaps we should make more use of
   attaching the trees to call_insns (reachable via symbol_ref in
   direct call cases) and just pull the data out of the trees.  */
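
/* Concretely (values illustrative), the rtl-level encodings read:

     (expr_list:REG_EH_REGION (const_int 2))       // may throw; transfers
                                                   // to landing pad 2
     (expr_list:REG_EH_REGION (const_int -3))      // inside MUST_NOT_THROW
                                                   // region 3
     (expr_list:REG_EH_REGION (const_int 0))       // cannot throw
     (expr_list:REG_EH_REGION (const_int INT_MIN)) // cannot throw, and no
                                                   // nonlocal goto
     no note                                       // may throw, but only
                                                   // to the caller  */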
1701
1702 void
1703 make_reg_eh_region_note (rtx insn, int ecf_flags, int lp_nr)
1704 {
1705 rtx value;
1706 if (ecf_flags & ECF_NOTHROW)
1707 value = const0_rtx;
1708 else if (lp_nr != 0)
1709 value = GEN_INT (lp_nr);
1710 else
1711 return;
1712 add_reg_note (insn, REG_EH_REGION, value);
1713 }
1714
1715 /* Create a REG_EH_REGION note for a CALL_INSN that cannot throw
1716 nor perform a non-local goto. Replace the region note if it
1717 already exists. */
1718
1719 void
1720 make_reg_eh_region_note_nothrow_nononlocal (rtx insn)
1721 {
1722 rtx note = find_reg_note (insn, REG_EH_REGION, NULL_RTX);
1723 rtx intmin = GEN_INT (INT_MIN);
1724
1725 if (note != 0)
1726 XEXP (note, 0) = intmin;
1727 else
1728 add_reg_note (insn, REG_EH_REGION, intmin);
1729 }
1730
1731 /* Return true if INSN could throw, assuming no REG_EH_REGION note
1732 to the contrary. */
1733
1734 bool
1735 insn_could_throw_p (const_rtx insn)
1736 {
1737 if (!flag_exceptions)
1738 return false;
1739 if (CALL_P (insn))
1740 return true;
1741 if (INSN_P (insn) && cfun->can_throw_non_call_exceptions)
1742 return may_trap_p (PATTERN (insn));
1743 return false;
1744 }
1745
1746 /* Copy an REG_EH_REGION note to each insn that might throw beginning
1747 at FIRST and ending at LAST. NOTE_OR_INSN is either the source insn
1748 to look for a note, or the note itself. */
1749
1750 void
1751 copy_reg_eh_region_note_forward (rtx note_or_insn, rtx_insn *first, rtx last)
1752 {
1753 rtx_insn *insn;
1754 rtx note = note_or_insn;
1755
1756 if (INSN_P (note_or_insn))
1757 {
1758 note = find_reg_note (note_or_insn, REG_EH_REGION, NULL_RTX);
1759 if (note == NULL)
1760 return;
1761 }
1762 note = XEXP (note, 0);
1763
1764 for (insn = first; insn != last ; insn = NEXT_INSN (insn))
1765 if (!find_reg_note (insn, REG_EH_REGION, NULL_RTX)
1766 && insn_could_throw_p (insn))
1767 add_reg_note (insn, REG_EH_REGION, note);
1768 }
1769
1770 /* Likewise, but iterate backward. */
1771
1772 void
1773 copy_reg_eh_region_note_backward (rtx note_or_insn, rtx_insn *last, rtx first)
1774 {
1775 rtx_insn *insn;
1776 rtx note = note_or_insn;
1777
1778 if (INSN_P (note_or_insn))
1779 {
1780 note = find_reg_note (note_or_insn, REG_EH_REGION, NULL_RTX);
1781 if (note == NULL)
1782 return;
1783 }
1784 note = XEXP (note, 0);
1785
1786 for (insn = last; insn != first; insn = PREV_INSN (insn))
1787 if (insn_could_throw_p (insn))
1788 add_reg_note (insn, REG_EH_REGION, note);
1789 }
1790
1791
1792 /* Extract all EH information from INSN. Return true if the insn
1793 was marked NOTHROW. */
1794
1795 static bool
1796 get_eh_region_and_lp_from_rtx (const_rtx insn, eh_region *pr,
1797 eh_landing_pad *plp)
1798 {
1799 eh_landing_pad lp = NULL;
1800 eh_region r = NULL;
1801 bool ret = false;
1802 rtx note;
1803 int lp_nr;
1804
1805 if (! INSN_P (insn))
1806 goto egress;
1807
1808 if (NONJUMP_INSN_P (insn)
1809 && GET_CODE (PATTERN (insn)) == SEQUENCE)
1810 insn = XVECEXP (PATTERN (insn), 0, 0);
1811
1812 note = find_reg_note (insn, REG_EH_REGION, NULL_RTX);
1813 if (!note)
1814 {
1815 ret = !insn_could_throw_p (insn);
1816 goto egress;
1817 }
1818
1819 lp_nr = INTVAL (XEXP (note, 0));
1820 if (lp_nr == 0 || lp_nr == INT_MIN)
1821 {
1822 ret = true;
1823 goto egress;
1824 }
1825
1826 if (lp_nr < 0)
1827 r = (*cfun->eh->region_array)[-lp_nr];
1828 else
1829 {
1830 lp = (*cfun->eh->lp_array)[lp_nr];
1831 r = lp->region;
1832 }
1833
1834 egress:
1835 *plp = lp;
1836 *pr = r;
1837 return ret;
1838 }
1839
1840 /* Return the landing pad to which INSN may go, or NULL if it does not
1841 have a reachable landing pad within this function. */
1842
1843 eh_landing_pad
1844 get_eh_landing_pad_from_rtx (const_rtx insn)
1845 {
1846 eh_landing_pad lp;
1847 eh_region r;
1848
1849 get_eh_region_and_lp_from_rtx (insn, &r, &lp);
1850 return lp;
1851 }
1852
1853 /* Return the region to which INSN may go, or NULL if it does not
1854 have a reachable region within this function. */
1855
1856 eh_region
1857 get_eh_region_from_rtx (const_rtx insn)
1858 {
1859 eh_landing_pad lp;
1860 eh_region r;
1861
1862 get_eh_region_and_lp_from_rtx (insn, &r, &lp);
1863 return r;
1864 }
1865
1866 /* Return true if INSN throws and is caught by something in this function. */
1867
1868 bool
1869 can_throw_internal (const_rtx insn)
1870 {
1871 return get_eh_landing_pad_from_rtx (insn) != NULL;
1872 }
1873
1874 /* Return true if INSN throws and escapes from the current function. */
1875
1876 bool
1877 can_throw_external (const_rtx insn)
1878 {
1879 eh_landing_pad lp;
1880 eh_region r;
1881 bool nothrow;
1882
1883 if (! INSN_P (insn))
1884 return false;
1885
1886 if (NONJUMP_INSN_P (insn)
1887 && GET_CODE (PATTERN (insn)) == SEQUENCE)
1888 {
1889 rtx_sequence *seq = as_a <rtx_sequence *> (PATTERN (insn));
1890 int i, n = seq->len ();
1891
1892 for (i = 0; i < n; i++)
1893 if (can_throw_external (seq->element (i)))
1894 return true;
1895
1896 return false;
1897 }
1898
1899 nothrow = get_eh_region_and_lp_from_rtx (insn, &r, &lp);
1900
1901 /* If we can't throw, we obviously can't throw external. */
1902 if (nothrow)
1903 return false;
1904
1905 /* If we have an internal landing pad, then we're not external. */
1906 if (lp != NULL)
1907 return false;
1908
1909 /* If we're not within an EH region, then we are external. */
1910 if (r == NULL)
1911 return true;
1912
1913 /* The only thing that ought to be left is MUST_NOT_THROW regions,
1914 which don't always have landing pads. */
1915 gcc_assert (r->type == ERT_MUST_NOT_THROW);
1916 return false;
1917 }
1918
1919 /* Return true if INSN cannot throw at all. */
1920
1921 bool
1922 insn_nothrow_p (const_rtx insn)
1923 {
1924 eh_landing_pad lp;
1925 eh_region r;
1926
1927 if (! INSN_P (insn))
1928 return true;
1929
1930 if (NONJUMP_INSN_P (insn)
1931 && GET_CODE (PATTERN (insn)) == SEQUENCE)
1932 {
1933 rtx_sequence *seq = as_a <rtx_sequence *> (PATTERN (insn));
1934 int i, n = seq->len ();
1935
1936 for (i = 0; i < n; i++)
1937 if (!insn_nothrow_p (seq->element (i)))
1938 return false;
1939
1940 return true;
1941 }
1942
1943 return get_eh_region_and_lp_from_rtx (insn, &r, &lp);
1944 }
1945
1946 /* Return true if INSN can perform a non-local goto. */
1947 /* ??? This test is here in this file because it (ab)uses REG_EH_REGION. */
1948
1949 bool
1950 can_nonlocal_goto (const_rtx insn)
1951 {
1952 if (nonlocal_goto_handler_labels && CALL_P (insn))
1953 {
1954 rtx note = find_reg_note (insn, REG_EH_REGION, NULL_RTX);
1955 if (!note || INTVAL (XEXP (note, 0)) != INT_MIN)
1956 return true;
1957 }
1958 return false;
1959 }
1960 \f
1961 /* Set TREE_NOTHROW and crtl->all_throwers_are_sibcalls. */
1962
1963 static unsigned int
1964 set_nothrow_function_flags (void)
1965 {
1966 rtx_insn *insn;
1967
1968 crtl->nothrow = 1;
1969
1970 /* Assume crtl->all_throwers_are_sibcalls until we encounter
1971 something that can throw an exception. We specifically exempt
1972 CALL_INSNs that are SIBLING_CALL_P: these are really jumps, so
1973 any exception they raise propagates directly to our caller. Most
1974 CALL_INSNs are not SIBLING_CALL_P, so this is optimistic. */
1975
1976 crtl->all_throwers_are_sibcalls = 1;
1977
1978 /* If we don't know that this implementation of the function will
1979 actually be used, then we must not set TREE_NOTHROW, since
1980 callers must not assume that this function does not throw. */
1981 if (TREE_NOTHROW (current_function_decl))
1982 return 0;
1983
1984 if (! flag_exceptions)
1985 return 0;
1986
1987 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
1988 if (can_throw_external (insn))
1989 {
1990 crtl->nothrow = 0;
1991
1992 if (!CALL_P (insn) || !SIBLING_CALL_P (insn))
1993 {
1994 crtl->all_throwers_are_sibcalls = 0;
1995 return 0;
1996 }
1997 }
1998
1999 if (crtl->nothrow
2000 && (cgraph_node::get (current_function_decl)->get_availability ()
2001 >= AVAIL_AVAILABLE))
2002 {
2003 struct cgraph_node *node = cgraph_node::get (current_function_decl);
2004 struct cgraph_edge *e;
2005 for (e = node->callers; e; e = e->next_caller)
2006 e->can_throw_external = false;
2007 node->set_nothrow_flag (true);
2008
2009 if (dump_file)
2010 fprintf (dump_file, "Marking function nothrow: %s\n\n",
2011 current_function_name ());
2012 }
2013 return 0;
2014 }
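
/* For example (an illustrative source-level sketch, not from any
   testcase): compiled with -fexceptions but without
   -fnon-call-exceptions,

     int sum (const int *p, int n)
     {
       int s = 0;
       while (n-- > 0)
         s += *p++;
       return s;
     }

   contains no CALL_INSNs and no insn for which insn_could_throw_p
   holds, so can_throw_external is false everywhere, crtl->nothrow
   stays set, and if this definition is at least AVAIL_AVAILABLE the
   cgraph node and all caller edges are marked nothrow too.  */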
2015
2016 namespace {
2017
2018 const pass_data pass_data_set_nothrow_function_flags =
2019 {
2020 RTL_PASS, /* type */
2021 "nothrow", /* name */
2022 OPTGROUP_NONE, /* optinfo_flags */
2023 TV_NONE, /* tv_id */
2024 0, /* properties_required */
2025 0, /* properties_provided */
2026 0, /* properties_destroyed */
2027 0, /* todo_flags_start */
2028 0, /* todo_flags_finish */
2029 };
2030
2031 class pass_set_nothrow_function_flags : public rtl_opt_pass
2032 {
2033 public:
2034 pass_set_nothrow_function_flags (gcc::context *ctxt)
2035 : rtl_opt_pass (pass_data_set_nothrow_function_flags, ctxt)
2036 {}
2037
2038 /* opt_pass methods: */
2039 virtual unsigned int execute (function *)
2040 {
2041 return set_nothrow_function_flags ();
2042 }
2043
2044 }; // class pass_set_nothrow_function_flags
2045
2046 } // anon namespace
2047
2048 rtl_opt_pass *
2049 make_pass_set_nothrow_function_flags (gcc::context *ctxt)
2050 {
2051 return new pass_set_nothrow_function_flags (ctxt);
2052 }
2053
2054 \f
2055 /* Various hooks for the unwind library. */
2056
2057 /* Expand the EH support builtin functions:
2058 __builtin_eh_pointer and __builtin_eh_filter. */
2059
2060 static eh_region
2061 expand_builtin_eh_common (tree region_nr_t)
2062 {
2063 HOST_WIDE_INT region_nr;
2064 eh_region region;
2065
2066 gcc_assert (tree_fits_shwi_p (region_nr_t));
2067 region_nr = tree_to_shwi (region_nr_t);
2068
2069 region = (*cfun->eh->region_array)[region_nr];
2070
2071 /* ??? We shouldn't have been able to delete an EH region without
2072 deleting all the code that depended on it. */
2073 gcc_assert (region != NULL);
2074
2075 return region;
2076 }
2077
2078 /* Expand to the exc_ptr value from the given eh region. */
2079
2080 rtx
2081 expand_builtin_eh_pointer (tree exp)
2082 {
2083 eh_region region
2084 = expand_builtin_eh_common (CALL_EXPR_ARG (exp, 0));
2085 if (region->exc_ptr_reg == NULL)
2086 region->exc_ptr_reg = gen_reg_rtx (ptr_mode);
2087 return region->exc_ptr_reg;
2088 }
2089
2090 /* Expand to the filter value from the given eh region. */
2091
2092 rtx
2093 expand_builtin_eh_filter (tree exp)
2094 {
2095 eh_region region
2096 = expand_builtin_eh_common (CALL_EXPR_ARG (exp, 0));
2097 if (region->filter_reg == NULL)
2098 region->filter_reg = gen_reg_rtx (targetm.eh_return_filter_mode ());
2099 return region->filter_reg;
2100 }
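
/* In lowered GIMPLE dumps these builtins appear in landing-pad and
   dispatch code, roughly like this (an illustrative sketch; exact dump
   syntax varies):

     <L1>:
     ptr_4 = __builtin_eh_pointer (1);
     filter_5 = __builtin_eh_filter (1);

   where the constant argument is the region number that
   expand_builtin_eh_common resolves against cfun->eh->region_array.  */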
2101
2102 /* Copy the exc_ptr and filter values from one landing pad's registers
2103 to another. This is used to inline the resx statement. */
2104
2105 rtx
2106 expand_builtin_eh_copy_values (tree exp)
2107 {
2108 eh_region dst
2109 = expand_builtin_eh_common (CALL_EXPR_ARG (exp, 0));
2110 eh_region src
2111 = expand_builtin_eh_common (CALL_EXPR_ARG (exp, 1));
2112 machine_mode fmode = targetm.eh_return_filter_mode ();
2113
2114 if (dst->exc_ptr_reg == NULL)
2115 dst->exc_ptr_reg = gen_reg_rtx (ptr_mode);
2116 if (src->exc_ptr_reg == NULL)
2117 src->exc_ptr_reg = gen_reg_rtx (ptr_mode);
2118
2119 if (dst->filter_reg == NULL)
2120 dst->filter_reg = gen_reg_rtx (fmode);
2121 if (src->filter_reg == NULL)
2122 src->filter_reg = gen_reg_rtx (fmode);
2123
2124 emit_move_insn (dst->exc_ptr_reg, src->exc_ptr_reg);
2125 emit_move_insn (dst->filter_reg, src->filter_reg);
2126
2127 return const0_rtx;
2128 }
2129
2130 /* Do any necessary initialization to access arbitrary stack frames.
2131 On the SPARC, this means flushing the register windows. */
2132
2133 void
2134 expand_builtin_unwind_init (void)
2135 {
2136 /* Set this so all the registers get saved in our frame; we need to be
2137 able to copy the saved values for any registers from frames we unwind. */
2138 crtl->saves_all_registers = 1;
2139
2140 #ifdef SETUP_FRAME_ADDRESSES
2141 SETUP_FRAME_ADDRESSES ();
2142 #endif
2143 }
2144
2145 /* Map a non-negative number to an eh return data register number; expands
2146 to -1 if no return data register is associated with the input number.
2147 At least the inputs 0 and 1 must be mapped; the target may provide more. */
2148
2149 rtx
2150 expand_builtin_eh_return_data_regno (tree exp)
2151 {
2152 tree which = CALL_EXPR_ARG (exp, 0);
2153 unsigned HOST_WIDE_INT iwhich;
2154
2155 if (TREE_CODE (which) != INTEGER_CST)
2156 {
2157 error ("argument of %<__builtin_eh_return_regno%> must be constant");
2158 return constm1_rtx;
2159 }
2160
2161 iwhich = tree_to_uhwi (which);
2162 iwhich = EH_RETURN_DATA_REGNO (iwhich);
2163 if (iwhich == INVALID_REGNUM)
2164 return constm1_rtx;
2165
2166 #ifdef DWARF_FRAME_REGNUM
2167 iwhich = DWARF_FRAME_REGNUM (iwhich);
2168 #else
2169 iwhich = DBX_REGISTER_NUMBER (iwhich);
2170 #endif
2171
2172 return GEN_INT (iwhich);
2173 }
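
/* A typical consumer is a personality routine installing the exception
   object and filter value before transferring control to a landing
   pad.  A hedged sketch using the Itanium EH ABI unwinder interface
   (this is not code from libgcc):

     _Unwind_SetGR (context, __builtin_eh_return_data_regno (0),
                    (_Unwind_Word) exception_object);
     _Unwind_SetGR (context, __builtin_eh_return_data_regno (1),
                    (_Unwind_Word) handler_switch_value);
     _Unwind_SetIP (context, landing_pad);
     return _URC_INSTALL_CONTEXT;  */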
2174
2175 /* Given a value extracted from the return address register or stack slot,
2176 return the actual address encoded in that value. */
2177
2178 rtx
2179 expand_builtin_extract_return_addr (tree addr_tree)
2180 {
2181 rtx addr = expand_expr (addr_tree, NULL_RTX, Pmode, EXPAND_NORMAL);
2182
2183 if (GET_MODE (addr) != Pmode
2184 && GET_MODE (addr) != VOIDmode)
2185 {
2186 #ifdef POINTERS_EXTEND_UNSIGNED
2187 addr = convert_memory_address (Pmode, addr);
2188 #else
2189 addr = convert_to_mode (Pmode, addr, 0);
2190 #endif
2191 }
2192
2193 /* First mask out any unwanted bits. */
2194 #ifdef MASK_RETURN_ADDR
2195 expand_and (Pmode, addr, MASK_RETURN_ADDR, addr);
2196 #endif
2197
2198 /* Then adjust to find the real return address. */
2199 #if defined (RETURN_ADDR_OFFSET)
2200 addr = plus_constant (Pmode, addr, RETURN_ADDR_OFFSET);
2201 #endif
2202
2203 return addr;
2204 }
2205
2206 /* Given an actual address in ADDR_TREE, do any necessary encoding
2207 and return the value to be stored in the return address register or
2208 stack slot so the epilogue will return to that address. */
2209
2210 rtx
2211 expand_builtin_frob_return_addr (tree addr_tree)
2212 {
2213 rtx addr = expand_expr (addr_tree, NULL_RTX, ptr_mode, EXPAND_NORMAL);
2214
2215 addr = convert_memory_address (Pmode, addr);
2216
2217 #ifdef RETURN_ADDR_OFFSET
2218 addr = force_reg (Pmode, addr);
2219 addr = plus_constant (Pmode, addr, -RETURN_ADDR_OFFSET);
2220 #endif
2221
2222 return addr;
2223 }
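
/* The two routines above are inverses: for an address A,
   __builtin_extract_return_addr (__builtin_frob_return_addr (A))
   should recover A, modulo any bits removed by MASK_RETURN_ADDR.
   Unwinders rely on this when reading a return address out of a frame
   and when installing a handler address back into one.  */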
2224
2225 /* Set up the epilogue with the magic bits we'll need to return to the
2226 exception handler. */
2227
2228 void
2229 expand_builtin_eh_return (tree stackadj_tree ATTRIBUTE_UNUSED,
2230 tree handler_tree)
2231 {
2232 rtx tmp;
2233
2234 #ifdef EH_RETURN_STACKADJ_RTX
2235 tmp = expand_expr (stackadj_tree, crtl->eh.ehr_stackadj,
2236 VOIDmode, EXPAND_NORMAL);
2237 tmp = convert_memory_address (Pmode, tmp);
2238 if (!crtl->eh.ehr_stackadj)
2239 crtl->eh.ehr_stackadj = copy_to_reg (tmp);
2240 else if (tmp != crtl->eh.ehr_stackadj)
2241 emit_move_insn (crtl->eh.ehr_stackadj, tmp);
2242 #endif
2243
2244 tmp = expand_expr (handler_tree, crtl->eh.ehr_handler,
2245 VOIDmode, EXPAND_NORMAL);
2246 tmp = convert_memory_address (Pmode, tmp);
2247 if (!crtl->eh.ehr_handler)
2248 crtl->eh.ehr_handler = copy_to_reg (tmp);
2249 else if (tmp != crtl->eh.ehr_handler)
2250 emit_move_insn (crtl->eh.ehr_handler, tmp);
2251
2252 if (!crtl->eh.ehr_label)
2253 crtl->eh.ehr_label = gen_label_rtx ();
2254 emit_jump (crtl->eh.ehr_label);
2255 }
2256
2257 /* Expand __builtin_eh_return. This exit path from the function loads up
2258 the eh return data registers, adjusts the stack, and branches to a
2259 given PC other than the normal return address. */
2260
2261 void
2262 expand_eh_return (void)
2263 {
2264 rtx_code_label *around_label;
2265
2266 if (! crtl->eh.ehr_label)
2267 return;
2268
2269 crtl->calls_eh_return = 1;
2270
2271 #ifdef EH_RETURN_STACKADJ_RTX
2272 emit_move_insn (EH_RETURN_STACKADJ_RTX, const0_rtx);
2273 #endif
2274
2275 around_label = gen_label_rtx ();
2276 emit_jump (around_label);
2277
2278 emit_label (crtl->eh.ehr_label);
2279 clobber_return_register ();
2280
2281 #ifdef EH_RETURN_STACKADJ_RTX
2282 emit_move_insn (EH_RETURN_STACKADJ_RTX, crtl->eh.ehr_stackadj);
2283 #endif
2284
2285 #ifdef HAVE_eh_return
2286 if (HAVE_eh_return)
2287 emit_insn (gen_eh_return (crtl->eh.ehr_handler));
2288 else
2289 #endif
2290 {
2291 #ifdef EH_RETURN_HANDLER_RTX
2292 emit_move_insn (EH_RETURN_HANDLER_RTX, crtl->eh.ehr_handler);
2293 #else
2294 error ("__builtin_eh_return not supported on this target");
2295 #endif
2296 }
2297
2298 emit_label (around_label);
2299 }
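
/* The code emitted above gives the function this shape (a pseudo-RTL
   sketch):

       jump around
     ehr_label:
       (return register clobbered)
       EH_RETURN_STACKADJ_RTX <- ehr_stackadj    [if defined]
       eh_return (ehr_handler)                   [if the target has it,
       or EH_RETURN_HANDLER_RTX <- ehr_handler]
     around:
       (normal epilogue)

   so the ordinary return path falls through unchanged and only the
   __builtin_eh_return path executes the extra moves.  */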
2300
2301 /* Convert a ptr_mode address ADDR_TREE to a Pmode address controlled by
2302 POINTERS_EXTEND_UNSIGNED and return it. */
2303
2304 rtx
2305 expand_builtin_extend_pointer (tree addr_tree)
2306 {
2307 rtx addr = expand_expr (addr_tree, NULL_RTX, ptr_mode, EXPAND_NORMAL);
2308 int extend;
2309
2310 #ifdef POINTERS_EXTEND_UNSIGNED
2311 extend = POINTERS_EXTEND_UNSIGNED;
2312 #else
2313 /* The previous EH code did an unsigned extend by default, so we do this also
2314 for consistency. */
2315 extend = 1;
2316 #endif
2317
2318 return convert_modes (targetm.unwind_word_mode (), ptr_mode, addr, extend);
2319 }
2320 \f
2321 static int
2322 add_action_record (action_hash_type *ar_hash, int filter, int next)
2323 {
2324 struct action_record **slot, *new_ar, tmp;
2325
2326 tmp.filter = filter;
2327 tmp.next = next;
2328 slot = ar_hash->find_slot (&tmp, INSERT);
2329
2330 if ((new_ar = *slot) == NULL)
2331 {
2332 new_ar = XNEW (struct action_record);
2333 new_ar->offset = crtl->eh.action_record_data->length () + 1;
2334 new_ar->filter = filter;
2335 new_ar->next = next;
2336 *slot = new_ar;
2337
2338 /* The filter value goes in untouched. The link to the next
2339 record is a "self-relative" byte offset, or zero to indicate
2340 that there is no next record. So convert the absolute 1-based
2341 indices we've been carrying around into a displacement. */
2342
2343 push_sleb128 (&crtl->eh.action_record_data, filter);
2344 if (next)
2345 next -= crtl->eh.action_record_data->length () + 1;
2346 push_sleb128 (&crtl->eh.action_record_data, next);
2347 }
2348
2349 return new_ar->offset;
2350 }
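
/* Worked example, derived from the code above: starting from an empty
   action_record_data vector,

     add_action_record (h, 1, 0) => offset 1, appends 0x01 0x00
                                    (filter 1, no next record);
     add_action_record (h, 2, 1) => offset 3, appends 0x02 for the
                                    filter, then next = 1 - (3 + 1) = -3,
                                    encoded as the single sleb128
                                    byte 0x7d.

   The table is then { 0x01, 0x00, 0x02, 0x7d }, with the second record
   chaining to the first through its self-relative offset.  */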
2351
2352 static int
2353 collect_one_action_chain (action_hash_type *ar_hash, eh_region region)
2354 {
2355 int next;
2356
2357 /* If we've reached the top of the region chain, then we have
2358 no actions, and require no landing pad. */
2359 if (region == NULL)
2360 return -1;
2361
2362 switch (region->type)
2363 {
2364 case ERT_CLEANUP:
2365 {
2366 eh_region r;
2367 /* A cleanup adds a zero filter to the beginning of the chain, but
2368 there are special cases to look out for. If there are *only*
2369 cleanups along a path, then it compresses to a zero action.
2370 Further, if there are multiple cleanups along a path, we only
2371 need to represent one of them, as that is enough to trigger
2372 entry to the landing pad at runtime. */
2373 next = collect_one_action_chain (ar_hash, region->outer);
2374 if (next <= 0)
2375 return 0;
2376 for (r = region->outer; r ; r = r->outer)
2377 if (r->type == ERT_CLEANUP)
2378 return next;
2379 return add_action_record (ar_hash, 0, next);
2380 }
2381
2382 case ERT_TRY:
2383 {
2384 eh_catch c;
2385
2386 /* Process the associated catch regions in reverse order.
2387 If there's a catch-all handler, then we don't need to
2388 search outer regions. Use a magic -3 value to record
2389 that we haven't done the outer search. */
2390 next = -3;
2391 for (c = region->u.eh_try.last_catch; c ; c = c->prev_catch)
2392 {
2393 if (c->type_list == NULL)
2394 {
2395 /* Retrieve the filter from the head of the filter list
2396 where we have stored it (see assign_filter_values). */
2397 int filter = TREE_INT_CST_LOW (TREE_VALUE (c->filter_list));
2398 next = add_action_record (ar_hash, filter, 0);
2399 }
2400 else
2401 {
2402 /* Once the outer search is done, trigger an action record for
2403 each filter we have. */
2404 tree flt_node;
2405
2406 if (next == -3)
2407 {
2408 next = collect_one_action_chain (ar_hash, region->outer);
2409
2410 /* If there is no next action, terminate the chain. */
2411 if (next == -1)
2412 next = 0;
2413 /* If all outer actions are cleanups or must_not_throw,
2414 there will be no action record for them, since we want
2415 to encode those states directly in the call-site record.
2416 Add a cleanup action to the chain to catch these. */
2417 else if (next <= 0)
2418 next = add_action_record (ar_hash, 0, 0);
2419 }
2420
2421 flt_node = c->filter_list;
2422 for (; flt_node; flt_node = TREE_CHAIN (flt_node))
2423 {
2424 int filter = TREE_INT_CST_LOW (TREE_VALUE (flt_node));
2425 next = add_action_record (ar_hash, filter, next);
2426 }
2427 }
2428 }
2429 return next;
2430 }
2431
2432 case ERT_ALLOWED_EXCEPTIONS:
2433 /* An exception specification adds its filter to the
2434 beginning of the chain. */
2435 next = collect_one_action_chain (ar_hash, region->outer);
2436
2437 /* If there is no next action, terminate the chain. */
2438 if (next == -1)
2439 next = 0;
2440 /* If all outer actions are cleanups or must_not_throw,
2441 there will be no action record for them, since we want
2442 to encode those states directly in the call-site record.
2443 Add a cleanup action to the chain to catch these. */
2444 else if (next <= 0)
2445 next = add_action_record (ar_hash, 0, 0);
2446
2447 return add_action_record (ar_hash, region->u.allowed.filter, next);
2448
2449 case ERT_MUST_NOT_THROW:
2450 /* A must-not-throw region with no inner handlers or cleanups
2451 requires no call-site entry. Note that this differs from
2452 the no handler or cleanup case in that we do require an lsda
2453 to be generated. Return a magic -2 value to record this. */
2454 return -2;
2455 }
2456
2457 gcc_unreachable ();
2458 }
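
/* To summarize the return conventions of collect_one_action_chain:

     > 0  the 1-based offset of an action record in action_record_data;
       0  the path contains only cleanups and compresses to a zero
          action;
      -1  no actions at all, so no landing pad is required;
      -2  a must-not-throw region: no call-site entry, but an lsda must
          still be generated;
      -3  (internal) the outer search has not been performed yet.  */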
2459
2460 static int
2461 add_call_site (rtx landing_pad, int action, int section)
2462 {
2463 call_site_record record;
2464
2465 record = ggc_alloc<call_site_record_d> ();
2466 record->landing_pad = landing_pad;
2467 record->action = action;
2468
2469 vec_safe_push (crtl->eh.call_site_record_v[section], record);
2470
2471 return call_site_base + crtl->eh.call_site_record_v[section]->length () - 1;
2472 }
2473
2474 static rtx_note *
2475 emit_note_eh_region_end (rtx_insn *insn)
2476 {
2477 rtx_insn *next = NEXT_INSN (insn);
2478
2479 /* Make sure we do not split a call and its corresponding
2480 CALL_ARG_LOCATION note. */
2481 if (next && NOTE_P (next)
2482 && NOTE_KIND (next) == NOTE_INSN_CALL_ARG_LOCATION)
2483 insn = next;
2484
2485 return emit_note_after (NOTE_INSN_EH_REGION_END, insn);
2486 }
2487
2488 /* Turn REG_EH_REGION notes back into NOTE_INSN_EH_REGION notes.
2489 The new note numbers will not refer to region numbers, but
2490 instead to call site entries. */
2491
2492 static unsigned int
2493 convert_to_eh_region_ranges (void)
2494 {
2495 rtx insn;
2496 rtx_insn *iter;
2497 rtx_note *note;
2498 action_hash_type ar_hash (31);
2499 int last_action = -3;
2500 rtx_insn *last_action_insn = NULL;
2501 rtx last_landing_pad = NULL_RTX;
2502 rtx_insn *first_no_action_insn = NULL;
2503 int call_site = 0;
2504 int cur_sec = 0;
2505 rtx section_switch_note = NULL_RTX;
2506 rtx_insn *first_no_action_insn_before_switch = NULL;
2507 rtx_insn *last_no_action_insn_before_switch = NULL;
2508 int saved_call_site_base = call_site_base;
2509
2510 vec_alloc (crtl->eh.action_record_data, 64);
2511
2512 for (iter = get_insns (); iter ; iter = NEXT_INSN (iter))
2513 if (INSN_P (iter))
2514 {
2515 eh_landing_pad lp;
2516 eh_region region;
2517 bool nothrow;
2518 int this_action;
2519 rtx this_landing_pad;
2520
2521 insn = iter;
2522 if (NONJUMP_INSN_P (insn)
2523 && GET_CODE (PATTERN (insn)) == SEQUENCE)
2524 insn = XVECEXP (PATTERN (insn), 0, 0);
2525
2526 nothrow = get_eh_region_and_lp_from_rtx (insn, &region, &lp);
2527 if (nothrow)
2528 continue;
2529 if (region)
2530 this_action = collect_one_action_chain (&ar_hash, region);
2531 else
2532 this_action = -1;
2533
2534 /* The existence of catch handlers or must-not-throw regions
2535 implies that an lsda is needed (even if empty). */
2536 if (this_action != -1)
2537 crtl->uses_eh_lsda = 1;
2538
2539 /* Delay creation of region notes for no-action regions
2540 until we're sure that an lsda will be required. */
2541 else if (last_action == -3)
2542 {
2543 first_no_action_insn = iter;
2544 last_action = -1;
2545 }
2546
2547 if (this_action >= 0)
2548 this_landing_pad = lp->landing_pad;
2549 else
2550 this_landing_pad = NULL_RTX;
2551
2552 /* Differing actions or landing pads implies a change in call-site
2553 info, which implies some EH_REGION note should be emitted. */
2554 if (last_action != this_action
2555 || last_landing_pad != this_landing_pad)
2556 {
2557 /* If there is a queued no-action region in the other section
2558 with hot/cold partitioning, emit it now. */
2559 if (first_no_action_insn_before_switch)
2560 {
2561 gcc_assert (this_action != -1
2562 && last_action == (first_no_action_insn
2563 ? -1 : -3));
2564 call_site = add_call_site (NULL_RTX, 0, 0);
2565 note = emit_note_before (NOTE_INSN_EH_REGION_BEG,
2566 first_no_action_insn_before_switch);
2567 NOTE_EH_HANDLER (note) = call_site;
2568 note
2569 = emit_note_eh_region_end (last_no_action_insn_before_switch);
2570 NOTE_EH_HANDLER (note) = call_site;
2571 gcc_assert (last_action != -3
2572 || (last_action_insn
2573 == last_no_action_insn_before_switch));
2574 first_no_action_insn_before_switch = NULL;
2575 last_no_action_insn_before_switch = NULL;
2576 call_site_base++;
2577 }
2578 /* If we'd not seen a previous action (-3) or the previous
2579 action was must-not-throw (-2), then we do not need an
2580 end note. */
2581 if (last_action >= -1)
2582 {
2583 /* If we delayed the creation of the begin, do it now. */
2584 if (first_no_action_insn)
2585 {
2586 call_site = add_call_site (NULL_RTX, 0, cur_sec);
2587 note = emit_note_before (NOTE_INSN_EH_REGION_BEG,
2588 first_no_action_insn);
2589 NOTE_EH_HANDLER (note) = call_site;
2590 first_no_action_insn = NULL;
2591 }
2592
2593 note = emit_note_eh_region_end (last_action_insn);
2594 NOTE_EH_HANDLER (note) = call_site;
2595 }
2596
2597 /* If the new action is must-not-throw, then no region notes
2598 are created. */
2599 if (this_action >= -1)
2600 {
2601 call_site = add_call_site (this_landing_pad,
2602 this_action < 0 ? 0 : this_action,
2603 cur_sec);
2604 note = emit_note_before (NOTE_INSN_EH_REGION_BEG, iter);
2605 NOTE_EH_HANDLER (note) = call_site;
2606 }
2607
2608 last_action = this_action;
2609 last_landing_pad = this_landing_pad;
2610 }
2611 last_action_insn = iter;
2612 }
2613 else if (NOTE_P (iter)
2614 && NOTE_KIND (iter) == NOTE_INSN_SWITCH_TEXT_SECTIONS)
2615 {
2616 gcc_assert (section_switch_note == NULL_RTX);
2617 gcc_assert (flag_reorder_blocks_and_partition);
2618 section_switch_note = iter;
2619 if (first_no_action_insn)
2620 {
2621 first_no_action_insn_before_switch = first_no_action_insn;
2622 last_no_action_insn_before_switch = last_action_insn;
2623 first_no_action_insn = NULL;
2624 gcc_assert (last_action == -1);
2625 last_action = -3;
2626 }
2627 /* Force closing of the current EH region before the section
2628 switch and opening of a new one afterwards. */
2629 else if (last_action != -3)
2630 last_landing_pad = pc_rtx;
2631 if (crtl->eh.call_site_record_v[cur_sec])
2632 call_site_base += crtl->eh.call_site_record_v[cur_sec]->length ();
2633 cur_sec++;
2634 gcc_assert (crtl->eh.call_site_record_v[cur_sec] == NULL);
2635 vec_alloc (crtl->eh.call_site_record_v[cur_sec], 10);
2636 }
2637
2638 if (last_action >= -1 && ! first_no_action_insn)
2639 {
2640 note = emit_note_eh_region_end (last_action_insn);
2641 NOTE_EH_HANDLER (note) = call_site;
2642 }
2643
2644 call_site_base = saved_call_site_base;
2645
2646 return 0;
2647 }
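
/* After this pass the insn stream carries paired region notes whose
   numbers are call-site indices rather than region numbers, e.g.
   (an illustrative sketch):

     NOTE_INSN_EH_REGION_BEG 0
       (call_insn ...)
     NOTE_INSN_EH_REGION_END 0

   where each index names an entry pushed by add_call_site onto
   crtl->eh.call_site_record_v for the current section.  */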
2648
2649 namespace {
2650
2651 const pass_data pass_data_convert_to_eh_region_ranges =
2652 {
2653 RTL_PASS, /* type */
2654 "eh_ranges", /* name */
2655 OPTGROUP_NONE, /* optinfo_flags */
2656 TV_NONE, /* tv_id */
2657 0, /* properties_required */
2658 0, /* properties_provided */
2659 0, /* properties_destroyed */
2660 0, /* todo_flags_start */
2661 0, /* todo_flags_finish */
2662 };
2663
2664 class pass_convert_to_eh_region_ranges : public rtl_opt_pass
2665 {
2666 public:
2667 pass_convert_to_eh_region_ranges (gcc::context *ctxt)
2668 : rtl_opt_pass (pass_data_convert_to_eh_region_ranges, ctxt)
2669 {}
2670
2671 /* opt_pass methods: */
2672 virtual bool gate (function *);
2673 virtual unsigned int execute (function *)
2674 {
2675 return convert_to_eh_region_ranges ();
2676 }
2677
2678 }; // class pass_convert_to_eh_region_ranges
2679
2680 bool
2681 pass_convert_to_eh_region_ranges::gate (function *)
2682 {
2683 /* Nothing to do for SJLJ exceptions or if no regions created. */
2684 if (cfun->eh->region_tree == NULL)
2685 return false;
2686 if (targetm_common.except_unwind_info (&global_options) == UI_SJLJ)
2687 return false;
2688 return true;
2689 }
2690
2691 } // anon namespace
2692
2693 rtl_opt_pass *
2694 make_pass_convert_to_eh_region_ranges (gcc::context *ctxt)
2695 {
2696 return new pass_convert_to_eh_region_ranges (ctxt);
2697 }
2698 \f
2699 static void
2700 push_uleb128 (vec<uchar, va_gc> **data_area, unsigned int value)
2701 {
2702 do
2703 {
2704 unsigned char byte = value & 0x7f;
2705 value >>= 7;
2706 if (value)
2707 byte |= 0x80;
2708 vec_safe_push (*data_area, byte);
2709 }
2710 while (value);
2711 }
2712
2713 static void
2714 push_sleb128 (vec<uchar, va_gc> **data_area, int value)
2715 {
2716 unsigned char byte;
2717 int more;
2718
2719 do
2720 {
2721 byte = value & 0x7f;
2722 value >>= 7;
2723 more = ! ((value == 0 && (byte & 0x40) == 0)
2724 || (value == -1 && (byte & 0x40) != 0));
2725 if (more)
2726 byte |= 0x80;
2727 vec_safe_push (*data_area, byte);
2728 }
2729 while (more);
2730 }
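
/* Worked examples for the two encoders above (the same values are used
   as examples in the DWARF specification):

     push_uleb128 (&v, 624485)    appends 0xe5 0x8e 0x26;
     push_sleb128 (&v, -123456)   appends 0xc0 0xbb 0x78.

   Each byte carries seven payload bits, least significant group first,
   with the high bit set on all but the final byte.  */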
2731
2732 \f
2733 #ifndef HAVE_AS_LEB128
2734 static int
2735 dw2_size_of_call_site_table (int section)
2736 {
2737 int n = vec_safe_length (crtl->eh.call_site_record_v[section]);
2738 int size = n * (4 + 4 + 4);
2739 int i;
2740
2741 for (i = 0; i < n; ++i)
2742 {
2743 struct call_site_record_d *cs =
2744 (*crtl->eh.call_site_record_v[section])[i];
2745 size += size_of_uleb128 (cs->action);
2746 }
2747
2748 return size;
2749 }
2750
2751 static int
2752 sjlj_size_of_call_site_table (void)
2753 {
2754 int n = vec_safe_length (crtl->eh.call_site_record_v[0]);
2755 int size = 0;
2756 int i;
2757
2758 for (i = 0; i < n; ++i)
2759 {
2760 struct call_site_record_d *cs =
2761 (*crtl->eh.call_site_record_v[0])[i];
2762 size += size_of_uleb128 (INTVAL (cs->landing_pad));
2763 size += size_of_uleb128 (cs->action);
2764 }
2765
2766 return size;
2767 }
2768 #endif
2769
2770 static void
2771 dw2_output_call_site_table (int cs_format, int section)
2772 {
2773 int n = vec_safe_length (crtl->eh.call_site_record_v[section]);
2774 int i;
2775 const char *begin;
2776
2777 if (section == 0)
2778 begin = current_function_func_begin_label;
2779 else if (first_function_block_is_cold)
2780 begin = crtl->subsections.hot_section_label;
2781 else
2782 begin = crtl->subsections.cold_section_label;
2783
2784 for (i = 0; i < n; ++i)
2785 {
2786 struct call_site_record_d *cs = (*crtl->eh.call_site_record_v[section])[i];
2787 char reg_start_lab[32];
2788 char reg_end_lab[32];
2789 char landing_pad_lab[32];
2790
2791 ASM_GENERATE_INTERNAL_LABEL (reg_start_lab, "LEHB", call_site_base + i);
2792 ASM_GENERATE_INTERNAL_LABEL (reg_end_lab, "LEHE", call_site_base + i);
2793
2794 if (cs->landing_pad)
2795 ASM_GENERATE_INTERNAL_LABEL (landing_pad_lab, "L",
2796 CODE_LABEL_NUMBER (cs->landing_pad));
2797
2798 /* ??? Perhaps use insn length scaling if the assembler supports
2799 generic arithmetic. */
2800 /* ??? Perhaps use attr_length to choose data1 or data2 instead of
2801 data4 if the function is small enough. */
2802 if (cs_format == DW_EH_PE_uleb128)
2803 {
2804 dw2_asm_output_delta_uleb128 (reg_start_lab, begin,
2805 "region %d start", i);
2806 dw2_asm_output_delta_uleb128 (reg_end_lab, reg_start_lab,
2807 "length");
2808 if (cs->landing_pad)
2809 dw2_asm_output_delta_uleb128 (landing_pad_lab, begin,
2810 "landing pad");
2811 else
2812 dw2_asm_output_data_uleb128 (0, "landing pad");
2813 }
2814 else
2815 {
2816 dw2_asm_output_delta (4, reg_start_lab, begin,
2817 "region %d start", i);
2818 dw2_asm_output_delta (4, reg_end_lab, reg_start_lab, "length");
2819 if (cs->landing_pad)
2820 dw2_asm_output_delta (4, landing_pad_lab, begin,
2821 "landing pad");
2822 else
2823 dw2_asm_output_data (4, 0, "landing pad");
2824 }
2825 dw2_asm_output_data_uleb128 (cs->action, "action");
2826 }
2827
2828 call_site_base += n;
2829 }
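
/* With the DW_EH_PE_udata4 format each iteration above emits assembly
   along these lines (an illustrative sketch; label names and comment
   syntax are target-dependent):

     .long     .LEHB0-<begin>      # region 0 start
     .long     .LEHE0-.LEHB0       # length
     .long     .L5-<begin>         # landing pad (or 0 if none)
     .uleb128  0x3                 # action

   With DW_EH_PE_uleb128 the three 4-byte deltas become uleb128 deltas
   instead.  */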
2830
2831 static void
2832 sjlj_output_call_site_table (void)
2833 {
2834 int n = vec_safe_length (crtl->eh.call_site_record_v[0]);
2835 int i;
2836
2837 for (i = 0; i < n; ++i)
2838 {
2839 struct call_site_record_d *cs = (*crtl->eh.call_site_record_v[0])[i];
2840
2841 dw2_asm_output_data_uleb128 (INTVAL (cs->landing_pad),
2842 "region %d landing pad", i);
2843 dw2_asm_output_data_uleb128 (cs->action, "action");
2844 }
2845
2846 call_site_base += n;
2847 }
2848
2849 /* Switch to the section that should be used for exception tables. */
2850
2851 static void
2852 switch_to_exception_section (const char * ARG_UNUSED (fnname))
2853 {
2854 section *s;
2855
2856 if (exception_section)
2857 s = exception_section;
2858 else
2859 {
2860 /* Compute the section and cache it into exception_section,
2861 unless it depends on the function name. */
2862 if (targetm_common.have_named_sections)
2863 {
2864 int flags;
2865
2866 if (EH_TABLES_CAN_BE_READ_ONLY)
2867 {
2868 int tt_format =
2869 ASM_PREFERRED_EH_DATA_FORMAT (/*code=*/0, /*global=*/1);
2870 flags = ((! flag_pic
2871 || ((tt_format & 0x70) != DW_EH_PE_absptr
2872 && (tt_format & 0x70) != DW_EH_PE_aligned))
2873 ? 0 : SECTION_WRITE);
2874 }
2875 else
2876 flags = SECTION_WRITE;
2877
2878 #ifdef HAVE_LD_EH_GC_SECTIONS
2879 if (flag_function_sections
2880 || (DECL_COMDAT_GROUP (current_function_decl) && HAVE_COMDAT_GROUP))
2881 {
2882 char *section_name = XNEWVEC (char, strlen (fnname) + 32);
2883 /* The EH table must match the code section, so only mark
2884 it linkonce if we have COMDAT groups to tie them together. */
2885 if (DECL_COMDAT_GROUP (current_function_decl) && HAVE_COMDAT_GROUP)
2886 flags |= SECTION_LINKONCE;
2887 sprintf (section_name, ".gcc_except_table.%s", fnname);
2888 s = get_section (section_name, flags, current_function_decl);
2889 free (section_name);
2890 }
2891 else
2892 #endif
2893 exception_section
2894 = s = get_section (".gcc_except_table", flags, NULL);
2895 }
2896 else
2897 exception_section
2898 = s = flag_pic ? data_section : readonly_data_section;
2899 }
2900
2901 switch_to_section (s);
2902 }
2903
2904
2905 /* Output a reference from an exception table to the type_info object TYPE.
2906 TT_FORMAT and TT_FORMAT_SIZE describe the DWARF encoding method used for
2907 the value. */
2908
2909 static void
2910 output_ttype (tree type, int tt_format, int tt_format_size)
2911 {
2912 rtx value;
2913 bool is_public = true;
2914
2915 if (type == NULL_TREE)
2916 value = const0_rtx;
2917 else
2918 {
2919 /* FIXME lto. pass_ipa_free_lang_data changes all types to
2920 runtime types so TYPE should already be a runtime type
2921 reference. When pass_ipa_free_lang_data is made a default
2922 pass, we can then remove the call to lookup_type_for_runtime
2923 below. */
2924 if (TYPE_P (type))
2925 type = lookup_type_for_runtime (type);
2926
2927 value = expand_expr (type, NULL_RTX, VOIDmode, EXPAND_INITIALIZER);
2928
2929 /* Let cgraph know that the rtti decl is used. Not all of the
2930 paths below go through assemble_integer, which would take
2931 care of this for us. */
2932 STRIP_NOPS (type);
2933 if (TREE_CODE (type) == ADDR_EXPR)
2934 {
2935 type = TREE_OPERAND (type, 0);
2936 if (TREE_CODE (type) == VAR_DECL)
2937 is_public = TREE_PUBLIC (type);
2938 }
2939 else
2940 gcc_assert (TREE_CODE (type) == INTEGER_CST);
2941 }
2942
2943 /* Allow the target to override the type table entry format. */
2944 if (targetm.asm_out.ttype (value))
2945 return;
2946
2947 if (tt_format == DW_EH_PE_absptr || tt_format == DW_EH_PE_aligned)
2948 assemble_integer (value, tt_format_size,
2949 tt_format_size * BITS_PER_UNIT, 1);
2950 else
2951 dw2_asm_output_encoded_addr_rtx (tt_format, value, is_public, NULL);
2952 }
2953
2954 static void
2955 output_one_function_exception_table (int section)
2956 {
2957 int tt_format, cs_format, lp_format, i;
2958 #ifdef HAVE_AS_LEB128
2959 char ttype_label[32];
2960 char cs_after_size_label[32];
2961 char cs_end_label[32];
2962 #else
2963 int call_site_len;
2964 #endif
2965 int have_tt_data;
2966 int tt_format_size = 0;
2967
2968 have_tt_data = (vec_safe_length (cfun->eh->ttype_data)
2969 || (targetm.arm_eabi_unwinder
2970 ? vec_safe_length (cfun->eh->ehspec_data.arm_eabi)
2971 : vec_safe_length (cfun->eh->ehspec_data.other)));
2972
2973 /* Indicate the format of the @TType entries. */
2974 if (! have_tt_data)
2975 tt_format = DW_EH_PE_omit;
2976 else
2977 {
2978 tt_format = ASM_PREFERRED_EH_DATA_FORMAT (/*code=*/0, /*global=*/1);
2979 #ifdef HAVE_AS_LEB128
2980 ASM_GENERATE_INTERNAL_LABEL (ttype_label,
2981 section ? "LLSDATTC" : "LLSDATT",
2982 current_function_funcdef_no);
2983 #endif
2984 tt_format_size = size_of_encoded_value (tt_format);
2985
2986 assemble_align (tt_format_size * BITS_PER_UNIT);
2987 }
2988
2989 targetm.asm_out.internal_label (asm_out_file, section ? "LLSDAC" : "LLSDA",
2990 current_function_funcdef_no);
2991
2992 /* The LSDA header. */
2993
2994 /* Indicate the format of the landing pad start pointer. An omitted
2995 field implies @LPStart == @Start. */
2996 /* Currently we always put @LPStart == @Start. This field would
2997 be most useful in moving the landing pads completely out of
2998 line to another section, but it could also be used to minimize
2999 the size of uleb128 landing pad offsets. */
3000 lp_format = DW_EH_PE_omit;
3001 dw2_asm_output_data (1, lp_format, "@LPStart format (%s)",
3002 eh_data_format_name (lp_format));
3003
3004 /* @LPStart pointer would go here. */
3005
3006 dw2_asm_output_data (1, tt_format, "@TType format (%s)",
3007 eh_data_format_name (tt_format));
3008
3009 #ifndef HAVE_AS_LEB128
3010 if (targetm_common.except_unwind_info (&global_options) == UI_SJLJ)
3011 call_site_len = sjlj_size_of_call_site_table ();
3012 else
3013 call_site_len = dw2_size_of_call_site_table (section);
3014 #endif
3015
3016 /* A pc-relative 4-byte displacement to the @TType data. */
3017 if (have_tt_data)
3018 {
3019 #ifdef HAVE_AS_LEB128
3020 char ttype_after_disp_label[32];
3021 ASM_GENERATE_INTERNAL_LABEL (ttype_after_disp_label,
3022 section ? "LLSDATTDC" : "LLSDATTD",
3023 current_function_funcdef_no);
3024 dw2_asm_output_delta_uleb128 (ttype_label, ttype_after_disp_label,
3025 "@TType base offset");
3026 ASM_OUTPUT_LABEL (asm_out_file, ttype_after_disp_label);
3027 #else
3028 /* Ugh. Alignment complicates things. */
3029 unsigned int before_disp, after_disp, last_disp, disp;
3030
3031 before_disp = 1 + 1;
3032 after_disp = (1 + size_of_uleb128 (call_site_len)
3033 + call_site_len
3034 + vec_safe_length (crtl->eh.action_record_data)
3035 + (vec_safe_length (cfun->eh->ttype_data)
3036 * tt_format_size));
3037
3038 disp = after_disp;
3039 do
3040 {
3041 unsigned int disp_size, pad;
3042
3043 last_disp = disp;
3044 disp_size = size_of_uleb128 (disp);
3045 pad = before_disp + disp_size + after_disp;
3046 if (pad % tt_format_size)
3047 pad = tt_format_size - (pad % tt_format_size);
3048 else
3049 pad = 0;
3050 disp = after_disp + pad;
3051 }
3052 while (disp != last_disp);
3053
3054 dw2_asm_output_data_uleb128 (disp, "@TType base offset");
3055 #endif
3056 }
3057
3058 /* Indicate the format of the call-site offsets. */
3059 #ifdef HAVE_AS_LEB128
3060 cs_format = DW_EH_PE_uleb128;
3061 #else
3062 cs_format = DW_EH_PE_udata4;
3063 #endif
3064 dw2_asm_output_data (1, cs_format, "call-site format (%s)",
3065 eh_data_format_name (cs_format));
3066
3067 #ifdef HAVE_AS_LEB128
3068 ASM_GENERATE_INTERNAL_LABEL (cs_after_size_label,
3069 section ? "LLSDACSBC" : "LLSDACSB",
3070 current_function_funcdef_no);
3071 ASM_GENERATE_INTERNAL_LABEL (cs_end_label,
3072 section ? "LLSDACSEC" : "LLSDACSE",
3073 current_function_funcdef_no);
3074 dw2_asm_output_delta_uleb128 (cs_end_label, cs_after_size_label,
3075 "Call-site table length");
3076 ASM_OUTPUT_LABEL (asm_out_file, cs_after_size_label);
3077 if (targetm_common.except_unwind_info (&global_options) == UI_SJLJ)
3078 sjlj_output_call_site_table ();
3079 else
3080 dw2_output_call_site_table (cs_format, section);
3081 ASM_OUTPUT_LABEL (asm_out_file, cs_end_label);
3082 #else
3083 dw2_asm_output_data_uleb128 (call_site_len, "Call-site table length");
3084 if (targetm_common.except_unwind_info (&global_options) == UI_SJLJ)
3085 sjlj_output_call_site_table ();
3086 else
3087 dw2_output_call_site_table (cs_format, section);
3088 #endif
3089
3090 /* ??? Decode and interpret the data for flag_debug_asm. */
3091 {
3092 uchar uc;
3093 FOR_EACH_VEC_ELT (*crtl->eh.action_record_data, i, uc)
3094 dw2_asm_output_data (1, uc, i ? NULL : "Action record table");
3095 }
3096
3097 if (have_tt_data)
3098 assemble_align (tt_format_size * BITS_PER_UNIT);
3099
3100 i = vec_safe_length (cfun->eh->ttype_data);
3101 while (i-- > 0)
3102 {
3103 tree type = (*cfun->eh->ttype_data)[i];
3104 output_ttype (type, tt_format, tt_format_size);
3105 }
3106
3107 #ifdef HAVE_AS_LEB128
3108 if (have_tt_data)
3109 ASM_OUTPUT_LABEL (asm_out_file, ttype_label);
3110 #endif
3111
3112 /* ??? Decode and interpret the data for flag_debug_asm. */
3113 if (targetm.arm_eabi_unwinder)
3114 {
3115 tree type;
3116 for (i = 0;
3117 vec_safe_iterate (cfun->eh->ehspec_data.arm_eabi, i, &type); ++i)
3118 output_ttype (type, tt_format, tt_format_size);
3119 }
3120 else
3121 {
3122 uchar uc;
3123 for (i = 0;
3124 vec_safe_iterate (cfun->eh->ehspec_data.other, i, &uc); ++i)
3125 dw2_asm_output_data (1, uc,
3126 i ? NULL : "Exception specification table");
3127 }
3128 }
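
/* Putting the pieces together, the LSDA emitted above is laid out as:

     1 byte    @LPStart format (always DW_EH_PE_omit here)
     1 byte    @TType format
     uleb128   @TType base offset            [if there is type data]
     1 byte    call-site table format
     uleb128   call-site table length
     ...       call-site table
     ...       action record table
     ...       @TType table (type_info references, via output_ttype)
     ...       exception specification table

   which is the structure the runtime personality routine walks to find
   the action for a given call site.  */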
3129
3130 void
3131 output_function_exception_table (const char *fnname)
3132 {
3133 rtx personality = get_personality_function (current_function_decl);
3134
3135 /* Not all functions need an exception table. */
3136 if (! crtl->uses_eh_lsda)
3137 return;
3138
3139 if (personality)
3140 {
3141 assemble_external_libcall (personality);
3142
3143 if (targetm.asm_out.emit_except_personality)
3144 targetm.asm_out.emit_except_personality (personality);
3145 }
3146
3147 switch_to_exception_section (fnname);
3148
3149 /* If the target wants a label to begin the table, emit it here. */
3150 targetm.asm_out.emit_except_table_label (asm_out_file);
3151
3152 output_one_function_exception_table (0);
3153 if (crtl->eh.call_site_record_v[1])
3154 output_one_function_exception_table (1);
3155
3156 switch_to_section (current_function_section ());
3157 }
3158
3159 void
3160 set_eh_throw_stmt_table (function *fun, hash_map<gimple, int> *table)
3161 {
3162 fun->eh->throw_stmt_table = table;
3163 }
3164
3165 hash_map<gimple, int> *
3166 get_eh_throw_stmt_table (struct function *fun)
3167 {
3168 return fun->eh->throw_stmt_table;
3169 }
3170 \f
3171 /* Determine if the function needs an EH personality function. */
3172
3173 enum eh_personality_kind
3174 function_needs_eh_personality (struct function *fn)
3175 {
3176 enum eh_personality_kind kind = eh_personality_none;
3177 eh_region i;
3178
3179 FOR_ALL_EH_REGION_FN (i, fn)
3180 {
3181 switch (i->type)
3182 {
3183 case ERT_CLEANUP:
3184 /* Can do with any personality including the generic C one. */
3185 kind = eh_personality_any;
3186 break;
3187
3188 case ERT_TRY:
3189 case ERT_ALLOWED_EXCEPTIONS:
3190 /* Always needs an EH personality function. The generic C
3191 personality doesn't handle these even for empty type lists. */
3192 return eh_personality_lang;
3193
3194 case ERT_MUST_NOT_THROW:
3195 /* Always needs an EH personality function. The language may specify
3196 which abort routine must be used, e.g. std::terminate. */
3197 return eh_personality_lang;
3198 }
3199 }
3200
3201 return kind;
3202 }
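
/* For example, a C++ function whose only EH constructs are object
   destructors produces ERT_CLEANUP regions only and can therefore be
   unwound with any personality, including the generic C one. As soon
   as it contains a try/catch block (ERT_TRY) or an exception
   specification (ERT_ALLOWED_EXCEPTIONS), the language-specific
   personality is required.  */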
3203 \f
3204 /* Dump EH information to OUT. */
3205
3206 void
3207 dump_eh_tree (FILE * out, struct function *fun)
3208 {
3209 eh_region i;
3210 int depth = 0;
3211 static const char *const type_name[] = {
3212 "cleanup", "try", "allowed_exceptions", "must_not_throw"
3213 };
3214
3215 i = fun->eh->region_tree;
3216 if (!i)
3217 return;
3218
3219 fprintf (out, "Eh tree:\n");
3220 while (1)
3221 {
3222 fprintf (out, " %*s %i %s", depth * 2, "",
3223 i->index, type_name[(int) i->type]);
3224
3225 if (i->landing_pads)
3226 {
3227 eh_landing_pad lp;
3228
3229 fprintf (out, " land:");
3230 if (current_ir_type () == IR_GIMPLE)
3231 {
3232 for (lp = i->landing_pads; lp ; lp = lp->next_lp)
3233 {
3234 fprintf (out, "{%i,", lp->index);
3235 print_generic_expr (out, lp->post_landing_pad, 0);
3236 fputc ('}', out);
3237 if (lp->next_lp)
3238 fputc (',', out);
3239 }
3240 }
3241 else
3242 {
3243 for (lp = i->landing_pads; lp ; lp = lp->next_lp)
3244 {
3245 fprintf (out, "{%i,", lp->index);
3246 if (lp->landing_pad)
3247 fprintf (out, "%i%s,", INSN_UID (lp->landing_pad),
3248 NOTE_P (lp->landing_pad) ? "(del)" : "");
3249 else
3250 fprintf (out, "(nil),");
3251 if (lp->post_landing_pad)
3252 {
3253 rtx lab = label_rtx (lp->post_landing_pad);
3254 fprintf (out, "%i%s}", INSN_UID (lab),
3255 NOTE_P (lab) ? "(del)" : "");
3256 }
3257 else
3258 fprintf (out, "(nil)}");
3259 if (lp->next_lp)
3260 fputc (',', out);
3261 }
3262 }
3263 }
3264
3265 switch (i->type)
3266 {
3267 case ERT_CLEANUP:
3268 case ERT_MUST_NOT_THROW:
3269 break;
3270
3271 case ERT_TRY:
3272 {
3273 eh_catch c;
3274 fprintf (out, " catch:");
3275 for (c = i->u.eh_try.first_catch; c; c = c->next_catch)
3276 {
3277 fputc ('{', out);
3278 if (c->label)
3279 {
3280 fprintf (out, "lab:");
3281 print_generic_expr (out, c->label, 0);
3282 fputc (';', out);
3283 }
3284 print_generic_expr (out, c->type_list, 0);
3285 fputc ('}', out);
3286 if (c->next_catch)
3287 fputc (',', out);
3288 }
3289 }
3290 break;
3291
3292 case ERT_ALLOWED_EXCEPTIONS:
3293 fprintf (out, " filter :%i types:", i->u.allowed.filter);
3294 print_generic_expr (out, i->u.allowed.type_list, 0);
3295 break;
3296 }
3297 fputc ('\n', out);
3298
3299 /* If there are sub-regions, process them. */
3300 if (i->inner)
3301 i = i->inner, depth++;
3302 /* If there are peers, process them. */
3303 else if (i->next_peer)
3304 i = i->next_peer;
3305 /* Otherwise, step back up the tree to the next peer. */
3306 else
3307 {
3308 do
3309 {
3310 i = i->outer;
3311 depth--;
3312 if (i == NULL)
3313 return;
3314 }
3315 while (i->next_peer == NULL);
3316 i = i->next_peer;
3317 }
3318 }
3319 }
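
/* For a try block wrapping a cleanup, the output looks roughly like
   this (an illustrative sketch; labels and type names depend on the
   IR being dumped):

     Eh tree:
        1 try land:{1,<L2>} catch:{struct E}
          2 cleanup land:{2,<L3>}
*/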
3320
3321 /* Dump the EH tree for FN on stderr. */
3322
3323 DEBUG_FUNCTION void
3324 debug_eh_tree (struct function *fn)
3325 {
3326 dump_eh_tree (stderr, fn);
3327 }
3328
3329 /* Verify invariants on EH data structures. */
3330
3331 DEBUG_FUNCTION void
3332 verify_eh_tree (struct function *fun)
3333 {
3334 eh_region r, outer;
3335 int nvisited_lp, nvisited_r;
3336 int count_lp, count_r, depth, i;
3337 eh_landing_pad lp;
3338 bool err = false;
3339
3340 if (!fun->eh->region_tree)
3341 return;
3342
3343 count_r = 0;
3344 for (i = 1; vec_safe_iterate (fun->eh->region_array, i, &r); ++i)
3345 if (r)
3346 {
3347 if (r->index == i)
3348 count_r++;
3349 else
3350 {
3351 error ("region_array is corrupted for region %i", r->index);
3352 err = true;
3353 }
3354 }
3355
3356 count_lp = 0;
3357 for (i = 1; vec_safe_iterate (fun->eh->lp_array, i, &lp); ++i)
3358 if (lp)
3359 {
3360 if (lp->index == i)
3361 count_lp++;
3362 else
3363 {
3364 error ("lp_array is corrupted for lp %i", lp->index);
3365 err = true;
3366 }
3367 }
3368
3369 depth = nvisited_lp = nvisited_r = 0;
3370 outer = NULL;
3371 r = fun->eh->region_tree;
3372 while (1)
3373 {
3374 if ((*fun->eh->region_array)[r->index] != r)
3375 {
3376 error ("region_array is corrupted for region %i", r->index);
3377 err = true;
3378 }
3379 if (r->outer != outer)
3380 {
3381 error ("outer block of region %i is wrong", r->index);
3382 err = true;
3383 }
3384 if (depth < 0)
3385 {
3386 error ("negative nesting depth of region %i", r->index);
3387 err = true;
3388 }
3389 nvisited_r++;
3390
3391 for (lp = r->landing_pads; lp ; lp = lp->next_lp)
3392 {
3393 if ((*fun->eh->lp_array)[lp->index] != lp)
3394 {
3395 error ("lp_array is corrupted for lp %i", lp->index);
3396 err = true;
3397 }
3398 if (lp->region != r)
3399 {
3400 error ("region of lp %i is wrong", lp->index);
3401 err = true;
3402 }
3403 nvisited_lp++;
3404 }
3405
3406 if (r->inner)
3407 outer = r, r = r->inner, depth++;
3408 else if (r->next_peer)
3409 r = r->next_peer;
3410 else
3411 {
3412 do
3413 {
3414 r = r->outer;
3415 if (r == NULL)
3416 goto region_done;
3417 depth--;
3418 outer = r->outer;
3419 }
3420 while (r->next_peer == NULL);
3421 r = r->next_peer;
3422 }
3423 }
3424 region_done:
3425 if (depth != 0)
3426 {
3427 error ("tree list ends on depth %i", depth);
3428 err = true;
3429 }
3430 if (count_r != nvisited_r)
3431 {
3432 error ("region_array does not match region_tree");
3433 err = true;
3434 }
3435 if (count_lp != nvisited_lp)
3436 {
3437 error ("lp_array does not match region_tree");
3438 err = true;
3439 }
3440
3441 if (err)
3442 {
3443 dump_eh_tree (stderr, fun);
3444 internal_error ("verify_eh_tree failed");
3445 }
3446 }
3447 \f
3448 #include "gt-except.h"