gcc/except.c

1 /* Implements exception handling.
2 Copyright (C) 1989-2015 Free Software Foundation, Inc.
3 Contributed by Mike Stump <mrs@cygnus.com>.
4
5 This file is part of GCC.
6
7 GCC is free software; you can redistribute it and/or modify it under
8 the terms of the GNU General Public License as published by the Free
9 Software Foundation; either version 3, or (at your option) any later
10 version.
11
12 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13 WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
15 for more details.
16
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
20
21
22 /* An exception is an event that can be "thrown" from within a
23 function. This event can then be "caught" by the callers of
24 the function.
25
26 The representation of exceptions changes several times during
27 the compilation process:
28
29 In the beginning, in the front end, we have the GENERIC trees
30 TRY_CATCH_EXPR, TRY_FINALLY_EXPR, WITH_CLEANUP_EXPR,
31 CLEANUP_POINT_EXPR, CATCH_EXPR, and EH_FILTER_EXPR.
32
33 During initial gimplification (gimplify.c) these are lowered
34 to the GIMPLE_TRY, GIMPLE_CATCH, and GIMPLE_EH_FILTER nodes.
35 The WITH_CLEANUP_EXPR and CLEANUP_POINT_EXPR nodes are converted
36 into GIMPLE_TRY_FINALLY nodes; the others are a more direct 1-1
37 conversion.
38
39 During pass_lower_eh (tree-eh.c) we record the nested structure
40 of the TRY nodes in EH_REGION nodes in CFUN->EH->REGION_TREE.
41 We expand the eh_protect_cleanup_actions langhook into MUST_NOT_THROW
42 regions at this time. We can then flatten the statements within
43 the TRY nodes to straight-line code. Statements that had been within
44 TRY nodes that can throw are recorded within CFUN->EH->THROW_STMT_TABLE,
45 so that we may remember what action is supposed to be taken if
46 a given statement does throw. During this lowering process,
47 we create an EH_LANDING_PAD node for each EH_REGION that has
48 some code within the function that needs to be executed if a
49 throw does happen. We also create RESX statements that are
50 used to transfer control from an inner EH_REGION to an outer
51 EH_REGION. We also create EH_DISPATCH statements as placeholders
52 for a runtime type comparison that should be made in order to
53 select the action to perform among different CATCH and EH_FILTER
54 regions.
55
56 During pass_lower_eh_dispatch (tree-eh.c), which is run after
57 all inlining is complete, we are able to run assign_filter_values,
58 which allows us to map the set of types manipulated by all of the
59 CATCH and EH_FILTER regions to a set of integers. This set of integers
60 will be how the exception runtime communicates with the code generated
61 within the function. We then expand the GIMPLE_EH_DISPATCH statements
62 to a switch or conditional branches that use the argument provided by
63 the runtime (__builtin_eh_filter) and the set of integers we computed
64 in assign_filter_values.
65
66 During pass_lower_resx (tree-eh.c), which is run near the end
67 of optimization, we expand RESX statements. If the eh region
68 that is outer to the RESX statement is a MUST_NOT_THROW, then
69 the RESX expands to some form of abort statement. If the eh
70 region that is outer to the RESX statement is within the current
71 function, then the RESX expands to a bookkeeping call
72 (__builtin_eh_copy_values) and a goto. Otherwise, the next
73 handler for the exception must be within a function somewhere
74 up the call chain, so we call back into the exception runtime
75 (__builtin_unwind_resume).
76
77 During pass_expand (cfgexpand.c), we generate REG_EH_REGION notes
78 that create an rtl to eh_region mapping that corresponds to the
79 gimple to eh_region mapping that had been recorded in the
80 THROW_STMT_TABLE.
81
82 Then, via finish_eh_generation, we generate the real landing pads
83 to which the runtime will actually transfer control. These new
84 landing pads perform whatever bookkeeping is needed by the target
85 backend in order to resume execution within the current function.
86 Each of these new landing pads falls through into the post_landing_pad
87 label which had been used within the CFG up to this point. All
88 exception edges within the CFG are redirected to the new landing pads.
89 If the target uses setjmp to implement exceptions, the various extra
90 calls into the runtime to register and unregister the current stack
91 frame are emitted at this time.
92
93 During pass_convert_to_eh_region_ranges (except.c), we transform
94 the REG_EH_REGION notes attached to individual insns into
95 non-overlapping ranges of insns bounded by NOTE_INSN_EH_REGION_BEG
96 and NOTE_INSN_EH_REGION_END. Each insn within such ranges has the
97 same associated action within the exception region tree, meaning
98 that (1) the exception is caught by the same landing pad within the
99 current function, (2) the exception is blocked by the runtime with
100 a MUST_NOT_THROW region, or (3) the exception is not handled at all
101 within the current function.
102
103 Finally, during assembly generation, we call
104 output_function_exception_table (except.c) to emit the tables with
105 which the exception runtime can determine if a given stack frame
106 handles a given exception, and if so what filter value to provide
107 to the function when the non-local control transfer is effected.
108 If the target uses dwarf2 unwinding to implement exceptions, then
109 output_call_frame_info (dwarf2out.c) emits the required unwind data. */
110
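/* As a schematic example of the pipeline above (abbreviated, not a
   verbatim dump): given

     void f () { try { g (); } catch (E) { h (); } }

   gimplification yields a GIMPLE_TRY whose handler is a GIMPLE_CATCH
   for E; pass_lower_eh turns that into an ERT_TRY region with a
   landing pad, records the call to g () in THROW_STMT_TABLE, and
   emits a GIMPLE_EH_DISPATCH for the region; pass_lower_eh_dispatch
   assigns E a filter value and lowers the dispatch to a comparison
   against __builtin_eh_filter (); the rtl passes then emit the real
   landing pad, the call-site ranges, and the exception table that
   the runtime consults during unwinding.  */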
111
112 #include "config.h"
113 #include "system.h"
114 #include "coretypes.h"
115 #include "backend.h"
116 #include "rtl.h"
117 #include "alias.h"
118 #include "tree.h"
119 #include "fold-const.h"
120 #include "stringpool.h"
121 #include "stor-layout.h"
122 #include "flags.h"
123 #include "insn-codes.h"
124 #include "optabs.h"
125 #include "insn-config.h"
126 #include "expmed.h"
127 #include "dojump.h"
128 #include "explow.h"
129 #include "calls.h"
130 #include "emit-rtl.h"
131 #include "varasm.h"
132 #include "stmt.h"
133 #include "expr.h"
134 #include "libfuncs.h"
135 #include "except.h"
136 #include "output.h"
137 #include "dwarf2asm.h"
138 #include "dwarf2out.h"
139 #include "dwarf2.h"
140 #include "toplev.h"
141 #include "intl.h"
142 #include "tm_p.h"
143 #include "target.h"
144 #include "common/common-target.h"
145 #include "langhooks.h"
146 #include "cfgrtl.h"
147 #include "cgraph.h"
148 #include "diagnostic.h"
149 #include "tree-pretty-print.h"
150 #include "tree-pass.h"
151 #include "cfgloop.h"
152 #include "builtins.h"
153 #include "tree-hash-traits.h"
154
155 static GTY(()) int call_site_base;
156
157 static GTY (()) hash_map<tree_hash, tree> *type_to_runtime_map;
158
159 /* Describe the SjLj_Function_Context structure. */
160 static GTY(()) tree sjlj_fc_type_node;
161 static int sjlj_fc_call_site_ofs;
162 static int sjlj_fc_data_ofs;
163 static int sjlj_fc_personality_ofs;
164 static int sjlj_fc_lsda_ofs;
165 static int sjlj_fc_jbuf_ofs;
166 \f
167
168 struct GTY(()) call_site_record_d
169 {
170 rtx landing_pad;
171 int action;
172 };
173
174 /* In the following structure and associated functions,
175 we represent entries in the action table as 1-based indices.
176 Special cases are:
177
178 0: null action record, non-null landing pad; implies cleanups
179 -1: null action record, null landing pad; implies no action
180 -2: no call-site entry; implies must_not_throw
181 -3: we have yet to process outer regions
182
183 Further, no special cases apply to the "next" field of the record.
184 For next, 0 means end of list. */
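/* A schematic example (the records are really uleb128/sleb128 pairs
   built by the action-table machinery, so the offsets here are
   illustrative): for an insn inside a catch with filter value 1
   whose region is nested in a cleanup, the chain might be

     record 1: { filter = 0, next = 0 }   cleanup, end of chain
     record 2: { filter = 1, next = 1 }   the catch, then record 1

   and the insn's action index would name record 2.  */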
185
186 struct action_record
187 {
188 int offset;
189 int filter;
190 int next;
191 };
192
193 /* Hashtable helpers. */
194
195 struct action_record_hasher : free_ptr_hash <action_record>
196 {
197 static inline hashval_t hash (const action_record *);
198 static inline bool equal (const action_record *, const action_record *);
199 };
200
201 inline hashval_t
202 action_record_hasher::hash (const action_record *entry)
203 {
204 return entry->next * 1009 + entry->filter;
205 }
206
207 inline bool
208 action_record_hasher::equal (const action_record *entry,
209 const action_record *data)
210 {
211 return entry->filter == data->filter && entry->next == data->next;
212 }
213
214 typedef hash_table<action_record_hasher> action_hash_type;
215 \f
216 static bool get_eh_region_and_lp_from_rtx (const_rtx, eh_region *,
217 eh_landing_pad *);
218
219 static void dw2_build_landing_pads (void);
220
221 static int collect_one_action_chain (action_hash_type *, eh_region);
222 static int add_call_site (rtx, int, int);
223
224 static void push_uleb128 (vec<uchar, va_gc> **, unsigned int);
225 static void push_sleb128 (vec<uchar, va_gc> **, int);
226 #ifndef HAVE_AS_LEB128
227 static int dw2_size_of_call_site_table (int);
228 static int sjlj_size_of_call_site_table (void);
229 #endif
230 static void dw2_output_call_site_table (int, int);
231 static void sjlj_output_call_site_table (void);
232
233 \f
234 void
235 init_eh (void)
236 {
237 if (! flag_exceptions)
238 return;
239
240 type_to_runtime_map = hash_map<tree_hash, tree>::create_ggc (31);
241
242 /* Create the SjLj_Function_Context structure. This should match
243 the definition in unwind-sjlj.c. */
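/* For reference, the runtime-side layout is roughly (see
   unwind-sjlj.c for the authoritative definition):

     struct SjLj_Function_Context
     {
       struct SjLj_Function_Context *prev;
       int call_site;
       _Unwind_Word data[4];
       _Unwind_Personality_Fn personality;
       void *lsda;
       ...jump buffer...
     };

   The FIELD_DECLs built below mirror these members one for one.  */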
244 if (targetm_common.except_unwind_info (&global_options) == UI_SJLJ)
245 {
246 tree f_jbuf, f_per, f_lsda, f_prev, f_cs, f_data, tmp;
247
248 sjlj_fc_type_node = lang_hooks.types.make_type (RECORD_TYPE);
249
250 f_prev = build_decl (BUILTINS_LOCATION,
251 FIELD_DECL, get_identifier ("__prev"),
252 build_pointer_type (sjlj_fc_type_node));
253 DECL_FIELD_CONTEXT (f_prev) = sjlj_fc_type_node;
254
255 f_cs = build_decl (BUILTINS_LOCATION,
256 FIELD_DECL, get_identifier ("__call_site"),
257 integer_type_node);
258 DECL_FIELD_CONTEXT (f_cs) = sjlj_fc_type_node;
259
260 tmp = build_index_type (size_int (4 - 1));
261 tmp = build_array_type (lang_hooks.types.type_for_mode
262 (targetm.unwind_word_mode (), 1),
263 tmp);
264 f_data = build_decl (BUILTINS_LOCATION,
265 FIELD_DECL, get_identifier ("__data"), tmp);
266 DECL_FIELD_CONTEXT (f_data) = sjlj_fc_type_node;
267
268 f_per = build_decl (BUILTINS_LOCATION,
269 FIELD_DECL, get_identifier ("__personality"),
270 ptr_type_node);
271 DECL_FIELD_CONTEXT (f_per) = sjlj_fc_type_node;
272
273 f_lsda = build_decl (BUILTINS_LOCATION,
274 FIELD_DECL, get_identifier ("__lsda"),
275 ptr_type_node);
276 DECL_FIELD_CONTEXT (f_lsda) = sjlj_fc_type_node;
277
278 #ifdef DONT_USE_BUILTIN_SETJMP
279 #ifdef JMP_BUF_SIZE
280 tmp = size_int (JMP_BUF_SIZE - 1);
281 #else
282 /* This should be large enough for most systems; if it is not,
283 JMP_BUF_SIZE should be defined with the proper value. It will
284 also tend to be larger than necessary for most systems; a port
285 wanting the optimal size should define JMP_BUF_SIZE. */
286 tmp = size_int (FIRST_PSEUDO_REGISTER + 2 - 1);
287 #endif
288 #else
289 /* Compute a minimally sized jump buffer. We need room to store at
290 least 3 pointers - stack pointer, frame pointer and return address.
291 Plus for some targets we need room for an extra pointer - in the
292 case of MIPS this is the global pointer. This makes a total of four
293 pointers, but to be safe we actually allocate room for 5.
294
295 If pointers are smaller than words then we allocate enough room for
296 5 words, just in case the backend needs this much room. For more
297 discussion on this issue see:
298 http://gcc.gnu.org/ml/gcc-patches/2014-05/msg00313.html. */
299 if (POINTER_SIZE > BITS_PER_WORD)
300 tmp = size_int (5 - 1);
301 else
302 tmp = size_int ((5 * BITS_PER_WORD / POINTER_SIZE) - 1);
303 #endif
304
305 tmp = build_index_type (tmp);
306 tmp = build_array_type (ptr_type_node, tmp);
307 f_jbuf = build_decl (BUILTINS_LOCATION,
308 FIELD_DECL, get_identifier ("__jbuf"), tmp);
309 #ifdef DONT_USE_BUILTIN_SETJMP
310 /* We don't know what alignment requirements the runtime's
311 jmp_buf has. Overestimate. */
312 DECL_ALIGN (f_jbuf) = BIGGEST_ALIGNMENT;
313 DECL_USER_ALIGN (f_jbuf) = 1;
314 #endif
315 DECL_FIELD_CONTEXT (f_jbuf) = sjlj_fc_type_node;
316
317 TYPE_FIELDS (sjlj_fc_type_node) = f_prev;
318 TREE_CHAIN (f_prev) = f_cs;
319 TREE_CHAIN (f_cs) = f_data;
320 TREE_CHAIN (f_data) = f_per;
321 TREE_CHAIN (f_per) = f_lsda;
322 TREE_CHAIN (f_lsda) = f_jbuf;
323
324 layout_type (sjlj_fc_type_node);
325
326 /* Cache the interesting field offsets so that we have
327 easy access from rtl. */
328 sjlj_fc_call_site_ofs
329 = (tree_to_uhwi (DECL_FIELD_OFFSET (f_cs))
330 + tree_to_uhwi (DECL_FIELD_BIT_OFFSET (f_cs)) / BITS_PER_UNIT);
331 sjlj_fc_data_ofs
332 = (tree_to_uhwi (DECL_FIELD_OFFSET (f_data))
333 + tree_to_uhwi (DECL_FIELD_BIT_OFFSET (f_data)) / BITS_PER_UNIT);
334 sjlj_fc_personality_ofs
335 = (tree_to_uhwi (DECL_FIELD_OFFSET (f_per))
336 + tree_to_uhwi (DECL_FIELD_BIT_OFFSET (f_per)) / BITS_PER_UNIT);
337 sjlj_fc_lsda_ofs
338 = (tree_to_uhwi (DECL_FIELD_OFFSET (f_lsda))
339 + tree_to_uhwi (DECL_FIELD_BIT_OFFSET (f_lsda)) / BITS_PER_UNIT);
340 sjlj_fc_jbuf_ofs
341 = (tree_to_uhwi (DECL_FIELD_OFFSET (f_jbuf))
342 + tree_to_uhwi (DECL_FIELD_BIT_OFFSET (f_jbuf)) / BITS_PER_UNIT);
343 }
344 }
345
346 void
347 init_eh_for_function (void)
348 {
349 cfun->eh = ggc_cleared_alloc<eh_status> ();
350
351 /* Make sure the zeroth entries are used, so index 0 can mean "none". */
352 vec_safe_push (cfun->eh->region_array, (eh_region)0);
353 vec_safe_push (cfun->eh->lp_array, (eh_landing_pad)0);
354 }
355 \f
356 /* Routines to generate the exception tree somewhat directly.
357 These are used from tree-eh.c when processing exception related
358 nodes during tree optimization. */
359
360 static eh_region
361 gen_eh_region (enum eh_region_type type, eh_region outer)
362 {
363 eh_region new_eh;
364
365 /* Insert a new blank region as a leaf in the tree. */
366 new_eh = ggc_cleared_alloc<eh_region_d> ();
367 new_eh->type = type;
368 new_eh->outer = outer;
369 if (outer)
370 {
371 new_eh->next_peer = outer->inner;
372 outer->inner = new_eh;
373 }
374 else
375 {
376 new_eh->next_peer = cfun->eh->region_tree;
377 cfun->eh->region_tree = new_eh;
378 }
379
380 new_eh->index = vec_safe_length (cfun->eh->region_array);
381 vec_safe_push (cfun->eh->region_array, new_eh);
382
383 /* Copy the language's notion of whether to use __cxa_end_cleanup. */
384 if (targetm.arm_eabi_unwinder && lang_hooks.eh_use_cxa_end_cleanup)
385 new_eh->use_cxa_end_cleanup = true;
386
387 return new_eh;
388 }
389
390 eh_region
391 gen_eh_region_cleanup (eh_region outer)
392 {
393 return gen_eh_region (ERT_CLEANUP, outer);
394 }
395
396 eh_region
397 gen_eh_region_try (eh_region outer)
398 {
399 return gen_eh_region (ERT_TRY, outer);
400 }
401
402 eh_catch
403 gen_eh_region_catch (eh_region t, tree type_or_list)
404 {
405 eh_catch c, l;
406 tree type_list, type_node;
407
408 gcc_assert (t->type == ERT_TRY);
409
410 /* Make sure we always end up with a type list, to normalize further
411 processing; then register each type against the runtime types map. */
412 type_list = type_or_list;
413 if (type_or_list)
414 {
415 if (TREE_CODE (type_or_list) != TREE_LIST)
416 type_list = tree_cons (NULL_TREE, type_or_list, NULL_TREE);
417
418 type_node = type_list;
419 for (; type_node; type_node = TREE_CHAIN (type_node))
420 add_type_for_runtime (TREE_VALUE (type_node));
421 }
422
423 c = ggc_cleared_alloc<eh_catch_d> ();
424 c->type_list = type_list;
425 l = t->u.eh_try.last_catch;
426 c->prev_catch = l;
427 if (l)
428 l->next_catch = c;
429 else
430 t->u.eh_try.first_catch = c;
431 t->u.eh_try.last_catch = c;
432
433 return c;
434 }
435
436 eh_region
437 gen_eh_region_allowed (eh_region outer, tree allowed)
438 {
439 eh_region region = gen_eh_region (ERT_ALLOWED_EXCEPTIONS, outer);
440 region->u.allowed.type_list = allowed;
441
442 for (; allowed ; allowed = TREE_CHAIN (allowed))
443 add_type_for_runtime (TREE_VALUE (allowed));
444
445 return region;
446 }
447
448 eh_region
449 gen_eh_region_must_not_throw (eh_region outer)
450 {
451 return gen_eh_region (ERT_MUST_NOT_THROW, outer);
452 }
453
454 eh_landing_pad
455 gen_eh_landing_pad (eh_region region)
456 {
457 eh_landing_pad lp = ggc_cleared_alloc<eh_landing_pad_d> ();
458
459 lp->next_lp = region->landing_pads;
460 lp->region = region;
461 lp->index = vec_safe_length (cfun->eh->lp_array);
462 region->landing_pads = lp;
463
464 vec_safe_push (cfun->eh->lp_array, lp);
465
466 return lp;
467 }
468
469 eh_region
470 get_eh_region_from_number_fn (struct function *ifun, int i)
471 {
472 return (*ifun->eh->region_array)[i];
473 }
474
475 eh_region
476 get_eh_region_from_number (int i)
477 {
478 return get_eh_region_from_number_fn (cfun, i);
479 }
480
481 eh_landing_pad
482 get_eh_landing_pad_from_number_fn (struct function *ifun, int i)
483 {
484 return (*ifun->eh->lp_array)[i];
485 }
486
487 eh_landing_pad
488 get_eh_landing_pad_from_number (int i)
489 {
490 return get_eh_landing_pad_from_number_fn (cfun, i);
491 }
492
493 eh_region
494 get_eh_region_from_lp_number_fn (struct function *ifun, int i)
495 {
496 if (i < 0)
497 return (*ifun->eh->region_array)[-i];
498 else if (i == 0)
499 return NULL;
500 else
501 {
502 eh_landing_pad lp;
503 lp = (*ifun->eh->lp_array)[i];
504 return lp->region;
505 }
506 }
507
508 eh_region
509 get_eh_region_from_lp_number (int i)
510 {
511 return get_eh_region_from_lp_number_fn (cfun, i);
512 }
513 \f
514 /* Returns true if the current function has exception handling regions. */
515
516 bool
517 current_function_has_exception_handlers (void)
518 {
519 return cfun->eh->region_tree != NULL;
520 }
521 \f
522 /* A subroutine of duplicate_eh_regions. Copy the eh_region tree at OLD.
523 Root it at OUTER, and apply LP_OFFSET to the lp numbers. */
524
525 struct duplicate_eh_regions_data
526 {
527 duplicate_eh_regions_map label_map;
528 void *label_map_data;
529 hash_map<void *, void *> *eh_map;
530 };
531
532 static void
533 duplicate_eh_regions_1 (struct duplicate_eh_regions_data *data,
534 eh_region old_r, eh_region outer)
535 {
536 eh_landing_pad old_lp, new_lp;
537 eh_region new_r;
538
539 new_r = gen_eh_region (old_r->type, outer);
540 gcc_assert (!data->eh_map->put (old_r, new_r));
541
542 switch (old_r->type)
543 {
544 case ERT_CLEANUP:
545 break;
546
547 case ERT_TRY:
548 {
549 eh_catch oc, nc;
550 for (oc = old_r->u.eh_try.first_catch; oc ; oc = oc->next_catch)
551 {
552 /* We should be doing all our region duplication before and
553 during inlining, which is before filter lists are created. */
554 gcc_assert (oc->filter_list == NULL);
555 nc = gen_eh_region_catch (new_r, oc->type_list);
556 nc->label = data->label_map (oc->label, data->label_map_data);
557 }
558 }
559 break;
560
561 case ERT_ALLOWED_EXCEPTIONS:
562 new_r->u.allowed.type_list = old_r->u.allowed.type_list;
563 if (old_r->u.allowed.label)
564 new_r->u.allowed.label
565 = data->label_map (old_r->u.allowed.label, data->label_map_data);
566 else
567 new_r->u.allowed.label = NULL_TREE;
568 break;
569
570 case ERT_MUST_NOT_THROW:
571 new_r->u.must_not_throw.failure_loc =
572 LOCATION_LOCUS (old_r->u.must_not_throw.failure_loc);
573 new_r->u.must_not_throw.failure_decl =
574 old_r->u.must_not_throw.failure_decl;
575 break;
576 }
577
578 for (old_lp = old_r->landing_pads; old_lp ; old_lp = old_lp->next_lp)
579 {
580 /* Don't bother copying unused landing pads. */
581 if (old_lp->post_landing_pad == NULL)
582 continue;
583
584 new_lp = gen_eh_landing_pad (new_r);
585 gcc_assert (!data->eh_map->put (old_lp, new_lp));
586
587 new_lp->post_landing_pad
588 = data->label_map (old_lp->post_landing_pad, data->label_map_data);
589 EH_LANDING_PAD_NR (new_lp->post_landing_pad) = new_lp->index;
590 }
591
592 /* Make sure to preserve the original use of __cxa_end_cleanup. */
593 new_r->use_cxa_end_cleanup = old_r->use_cxa_end_cleanup;
594
595 for (old_r = old_r->inner; old_r ; old_r = old_r->next_peer)
596 duplicate_eh_regions_1 (data, old_r, new_r);
597 }
598
599 /* Duplicate the EH regions from IFUN rooted at COPY_REGION into
600 the current function and root the tree below OUTER_REGION.
601 The special case of COPY_REGION of NULL means all regions.
602 Remap labels using MAP/MAP_DATA callback. Return a pointer map
603 that allows the caller to remap uses of both EH regions and
604 EH landing pads. */
605
606 hash_map<void *, void *> *
607 duplicate_eh_regions (struct function *ifun,
608 eh_region copy_region, int outer_lp,
609 duplicate_eh_regions_map map, void *map_data)
610 {
611 struct duplicate_eh_regions_data data;
612 eh_region outer_region;
613
614 #ifdef ENABLE_CHECKING
615 verify_eh_tree (ifun);
616 #endif
617
618 data.label_map = map;
619 data.label_map_data = map_data;
620 data.eh_map = new hash_map<void *, void *>;
621
622 outer_region = get_eh_region_from_lp_number_fn (cfun, outer_lp);
623
624 /* Copy all the regions in the subtree. */
625 if (copy_region)
626 duplicate_eh_regions_1 (&data, copy_region, outer_region);
627 else
628 {
629 eh_region r;
630 for (r = ifun->eh->region_tree; r ; r = r->next_peer)
631 duplicate_eh_regions_1 (&data, r, outer_region);
632 }
633
634 #ifdef ENABLE_CHECKING
635 verify_eh_tree (cfun);
636 #endif
637
638 return data.eh_map;
639 }
640
641 /* Return the region that is outer to both REGION_A and REGION_B in IFUN. */
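/* For example, given the region tree 1 -> { 2 -> { 3 }, 4 }, the
   outermost region common to 3 and 4 is 1: the ancestor chain of
   REGION_B (4, then 1) is marked first, and walking REGION_A
   upward (3, 2, 1) stops at the first marked region.  */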
642
643 eh_region
644 eh_region_outermost (struct function *ifun, eh_region region_a,
645 eh_region region_b)
646 {
647 sbitmap b_outer;
648
649 gcc_assert (ifun->eh->region_array);
650 gcc_assert (ifun->eh->region_tree);
651
652 b_outer = sbitmap_alloc (ifun->eh->region_array->length ());
653 bitmap_clear (b_outer);
654
655 do
656 {
657 bitmap_set_bit (b_outer, region_b->index);
658 region_b = region_b->outer;
659 }
660 while (region_b);
661
662 do
663 {
664 if (bitmap_bit_p (b_outer, region_a->index))
665 break;
666 region_a = region_a->outer;
667 }
668 while (region_a);
669
670 sbitmap_free (b_outer);
671 return region_a;
672 }
673 \f
674 void
675 add_type_for_runtime (tree type)
676 {
677 /* If TYPE is NOP_EXPR, it means that it already is a runtime type. */
678 if (TREE_CODE (type) == NOP_EXPR)
679 return;
680
681 bool existed = false;
682 tree *slot = &type_to_runtime_map->get_or_insert (type, &existed);
683 if (!existed)
684 *slot = lang_hooks.eh_runtime_type (type);
685 }
686
687 tree
688 lookup_type_for_runtime (tree type)
689 {
690 /* If TYPE is NOP_EXPR, it means that it already is a runtime type. */
691 if (TREE_CODE (type) == NOP_EXPR)
692 return type;
693
694 /* We should have always inserted the data earlier. */
695 return *type_to_runtime_map->get (type);
696 }
697
698 \f
699 /* Represent an entry in @TTypes for either catch actions
700 or exception filter actions. */
701 struct ttypes_filter {
702 tree t;
703 int filter;
704 };
705
706 /* Helper for ttypes_filter hashing. */
707
708 struct ttypes_filter_hasher : free_ptr_hash <ttypes_filter>
709 {
710 typedef tree_node *compare_type;
711 static inline hashval_t hash (const ttypes_filter *);
712 static inline bool equal (const ttypes_filter *, const tree_node *);
713 };
714
715 /* Compare ENTRY (a ttypes_filter entry in the hash table) with DATA
716 (a tree) for a @TTypes type node we are thinking about adding. */
717
718 inline bool
719 ttypes_filter_hasher::equal (const ttypes_filter *entry, const tree_node *data)
720 {
721 return entry->t == data;
722 }
723
724 inline hashval_t
725 ttypes_filter_hasher::hash (const ttypes_filter *entry)
726 {
727 return TREE_HASH (entry->t);
728 }
729
730 typedef hash_table<ttypes_filter_hasher> ttypes_hash_type;
731
732
733 /* Helper for ehspec hashing. */
734
735 struct ehspec_hasher : free_ptr_hash <ttypes_filter>
736 {
737 static inline hashval_t hash (const ttypes_filter *);
738 static inline bool equal (const ttypes_filter *, const ttypes_filter *);
739 };
740
741 /* Compare ENTRY with DATA (both struct ttypes_filter) for a @TTypes
742 exception specification list we are thinking about adding. */
743 /* ??? Currently we use the type lists in the order given. Someone
744 should put these in some canonical order. */
745
746 inline bool
747 ehspec_hasher::equal (const ttypes_filter *entry, const ttypes_filter *data)
748 {
749 return type_list_equal (entry->t, data->t);
750 }
751
752 /* Hash function for exception specification lists. */
753
754 inline hashval_t
755 ehspec_hasher::hash (const ttypes_filter *entry)
756 {
757 hashval_t h = 0;
758 tree list;
759
760 for (list = entry->t; list ; list = TREE_CHAIN (list))
761 h = (h << 5) + (h >> 27) + TREE_HASH (TREE_VALUE (list));
762 return h;
763 }
764
765 typedef hash_table<ehspec_hasher> ehspec_hash_type;
766
767
768 /* Add TYPE (which may be NULL) to cfun->eh->ttype_data, using TYPES_HASH
769 to speed up the search. Return the filter value to be used. */
770
771 static int
772 add_ttypes_entry (ttypes_hash_type *ttypes_hash, tree type)
773 {
774 struct ttypes_filter **slot, *n;
775
776 slot = ttypes_hash->find_slot_with_hash (type, (hashval_t) TREE_HASH (type),
777 INSERT);
778
779 if ((n = *slot) == NULL)
780 {
781 /* Filter value is a 1-based table index. */
782
783 n = XNEW (struct ttypes_filter);
784 n->t = type;
785 n->filter = vec_safe_length (cfun->eh->ttype_data) + 1;
786 *slot = n;
787
788 vec_safe_push (cfun->eh->ttype_data, type);
789 }
790
791 return n->filter;
792 }
793
794 /* Add LIST to cfun->eh->ehspec_data, using EHSPEC_HASH and TYPES_HASH
795 to speed up the search. Return the filter value to be used. */
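/* Schematically: for the C++ specification throw (A, B), assuming A
   and B were assigned ttype filters 1 and 2 by add_ttypes_entry, the
   bytes appended to ehspec_data are the uleb128 values { 1, 2, 0 },
   and the value returned is minus the 1-based offset of the first of
   those bytes within the buffer.  */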
796
797 static int
798 add_ehspec_entry (ehspec_hash_type *ehspec_hash, ttypes_hash_type *ttypes_hash,
799 tree list)
800 {
801 struct ttypes_filter **slot, *n;
802 struct ttypes_filter dummy;
803
804 dummy.t = list;
805 slot = ehspec_hash->find_slot (&dummy, INSERT);
806
807 if ((n = *slot) == NULL)
808 {
809 int len;
810
811 if (targetm.arm_eabi_unwinder)
812 len = vec_safe_length (cfun->eh->ehspec_data.arm_eabi);
813 else
814 len = vec_safe_length (cfun->eh->ehspec_data.other);
815
816 /* Filter value is a -1-based byte index into a uleb128 buffer. */
817
818 n = XNEW (struct ttypes_filter);
819 n->t = list;
820 n->filter = -(len + 1);
821 *slot = n;
822
823 /* Generate a 0-terminated list of filter values. */
824 for (; list ; list = TREE_CHAIN (list))
825 {
826 if (targetm.arm_eabi_unwinder)
827 vec_safe_push (cfun->eh->ehspec_data.arm_eabi, TREE_VALUE (list));
828 else
829 {
830 /* Look up each type in the list and encode its filter
831 value as a uleb128. */
832 push_uleb128 (&cfun->eh->ehspec_data.other,
833 add_ttypes_entry (ttypes_hash, TREE_VALUE (list)));
834 }
835 }
836 if (targetm.arm_eabi_unwinder)
837 vec_safe_push (cfun->eh->ehspec_data.arm_eabi, NULL_TREE);
838 else
839 vec_safe_push (cfun->eh->ehspec_data.other, (uchar)0);
840 }
841
842 return n->filter;
843 }
844
845 /* Generate the action filter values to be used for CATCH and
846 ALLOWED_EXCEPTIONS regions. When using dwarf2 exception regions,
847 we use lots of landing pads, and so every type or list can share
848 the same filter value, which saves table space. */
849
850 void
851 assign_filter_values (void)
852 {
853 int i;
854 eh_region r;
855 eh_catch c;
856
857 vec_alloc (cfun->eh->ttype_data, 16);
858 if (targetm.arm_eabi_unwinder)
859 vec_alloc (cfun->eh->ehspec_data.arm_eabi, 64);
860 else
861 vec_alloc (cfun->eh->ehspec_data.other, 64);
862
863 ehspec_hash_type ehspec (31);
864 ttypes_hash_type ttypes (31);
865
866 for (i = 1; vec_safe_iterate (cfun->eh->region_array, i, &r); ++i)
867 {
868 if (r == NULL)
869 continue;
870
871 switch (r->type)
872 {
873 case ERT_TRY:
874 for (c = r->u.eh_try.first_catch; c ; c = c->next_catch)
875 {
876 /* Whatever type_list is (NULL or true list), we build a list
877 of filters for the region. */
878 c->filter_list = NULL_TREE;
879
880 if (c->type_list != NULL)
881 {
882 /* Get a filter value for each of the types caught and store
883 them in the region's dedicated list. */
884 tree tp_node = c->type_list;
885
886 for ( ; tp_node; tp_node = TREE_CHAIN (tp_node))
887 {
888 int flt
889 = add_ttypes_entry (&ttypes, TREE_VALUE (tp_node));
890 tree flt_node = build_int_cst (integer_type_node, flt);
891
892 c->filter_list
893 = tree_cons (NULL_TREE, flt_node, c->filter_list);
894 }
895 }
896 else
897 {
898 /* Get a filter value for the NULL list also since it
899 will need an action record anyway. */
900 int flt = add_ttypes_entry (&ttypes, NULL);
901 tree flt_node = build_int_cst (integer_type_node, flt);
902
903 c->filter_list
904 = tree_cons (NULL_TREE, flt_node, NULL);
905 }
906 }
907 break;
908
909 case ERT_ALLOWED_EXCEPTIONS:
910 r->u.allowed.filter
911 = add_ehspec_entry (&ehspec, &ttypes, r->u.allowed.type_list);
912 break;
913
914 default:
915 break;
916 }
917 }
918 }
919
920 /* Emit SEQ into a basic block just before INSN (which is assumed to be
921 the first instruction of some existing BB) and return the newly
922 produced block. */
923 static basic_block
924 emit_to_new_bb_before (rtx_insn *seq, rtx insn)
925 {
926 rtx_insn *last;
927 basic_block bb;
928 edge e;
929 edge_iterator ei;
930
931 /* If there happens to be a fallthru edge (possibly created by a
932 cleanup_cfg call), we don't want it to go into a newly created
933 landing pad or other EH construct. */
934 for (ei = ei_start (BLOCK_FOR_INSN (insn)->preds); (e = ei_safe_edge (ei)); )
935 if (e->flags & EDGE_FALLTHRU)
936 force_nonfallthru (e);
937 else
938 ei_next (&ei);
939 last = emit_insn_before (seq, insn);
940 if (BARRIER_P (last))
941 last = PREV_INSN (last);
942 bb = create_basic_block (seq, last, BLOCK_FOR_INSN (insn)->prev_bb);
943 update_bb_for_insn (bb);
944 bb->flags |= BB_SUPERBLOCK;
945 return bb;
946 }
947 \f
948 /* A subroutine of dw2_build_landing_pads, also used for edge splitting
949 at the rtl level. Emit the code required by the target at a landing
950 pad for the given region. */
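/* Schematically, the landing pad emitted for a region amounts to:

     Llp:
       <target exception_receiver / nonlocal_goto_receiver, if any>
       exc_ptr_reg = EH_RETURN_DATA_REGNO (0);
       filter_reg = EH_RETURN_DATA_REGNO (1);
       -- fall through to the post_landing_pad label

   with each register copy emitted only when the region uses it.  */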
951
952 void
953 expand_dw2_landing_pad_for_region (eh_region region)
954 {
955 if (targetm.have_exception_receiver ())
956 emit_insn (targetm.gen_exception_receiver ());
957 else if (targetm.have_nonlocal_goto_receiver ())
958 emit_insn (targetm.gen_nonlocal_goto_receiver ());
959 else
960 { /* Nothing */ }
961
962 if (region->exc_ptr_reg)
963 emit_move_insn (region->exc_ptr_reg,
964 gen_rtx_REG (ptr_mode, EH_RETURN_DATA_REGNO (0)));
965 if (region->filter_reg)
966 emit_move_insn (region->filter_reg,
967 gen_rtx_REG (targetm.eh_return_filter_mode (),
968 EH_RETURN_DATA_REGNO (1)));
969 }
970
971 /* Expand the extra code needed at landing pads for dwarf2 unwinding. */
972
973 static void
974 dw2_build_landing_pads (void)
975 {
976 int i;
977 eh_landing_pad lp;
978 int e_flags = EDGE_FALLTHRU;
979
980 /* If we're going to partition blocks, we need to be able to add
981 new landing pads later, which means that we need to hold on to
982 the post-landing-pad block. Prevent it from being merged away.
983 We'll remove this bit after partitioning. */
984 if (flag_reorder_blocks_and_partition)
985 e_flags |= EDGE_PRESERVE;
986
987 for (i = 1; vec_safe_iterate (cfun->eh->lp_array, i, &lp); ++i)
988 {
989 basic_block bb;
990 rtx_insn *seq;
991 edge e;
992
993 if (lp == NULL || lp->post_landing_pad == NULL)
994 continue;
995
996 start_sequence ();
997
998 lp->landing_pad = gen_label_rtx ();
999 emit_label (lp->landing_pad);
1000 LABEL_PRESERVE_P (lp->landing_pad) = 1;
1001
1002 expand_dw2_landing_pad_for_region (lp->region);
1003
1004 seq = get_insns ();
1005 end_sequence ();
1006
1007 bb = emit_to_new_bb_before (seq, label_rtx (lp->post_landing_pad));
1008 e = make_edge (bb, bb->next_bb, e_flags);
1009 e->count = bb->count;
1010 e->probability = REG_BR_PROB_BASE;
1011 if (current_loops)
1012 {
1013 struct loop *loop = bb->next_bb->loop_father;
1014 /* If we created a pre-header block, add the new block to the
1015 outer loop, otherwise to the loop itself. */
1016 if (bb->next_bb == loop->header)
1017 add_bb_to_loop (bb, loop_outer (loop));
1018 else
1019 add_bb_to_loop (bb, loop);
1020 }
1021 }
1022 }
1023
1024 \f
1025 static vec<int> sjlj_lp_call_site_index;
1026
1027 /* Process all active landing pads. Assign each one a compact dispatch
1028 index, and a call-site index. */
1029
1030 static int
1031 sjlj_assign_call_site_values (void)
1032 {
1033 action_hash_type ar_hash (31);
1034 int i, disp_index;
1035 eh_landing_pad lp;
1036
1037 vec_alloc (crtl->eh.action_record_data, 64);
1038
1039 disp_index = 0;
1040 call_site_base = 1;
1041 for (i = 1; vec_safe_iterate (cfun->eh->lp_array, i, &lp); ++i)
1042 if (lp && lp->post_landing_pad)
1043 {
1044 int action, call_site;
1045
1046 /* First: build the action table. */
1047 action = collect_one_action_chain (&ar_hash, lp->region);
1048
1049 /* Next: assign call-site values. In dwarf2 terms, this would be
1050 the region number assigned by convert_to_eh_region_ranges, but
1051 handles no-action and must-not-throw differently. */
1052 /* Map must-not-throw to otherwise unused call-site index 0. */
1053 if (action == -2)
1054 call_site = 0;
1055 /* Map no-action to otherwise unused call-site index -1. */
1056 else if (action == -1)
1057 call_site = -1;
1058 /* Otherwise, look it up in the table. */
1059 else
1060 call_site = add_call_site (GEN_INT (disp_index), action, 0);
1061 sjlj_lp_call_site_index[i] = call_site;
1062
1063 disp_index++;
1064 }
1065
1066 return disp_index;
1067 }
1068
1069 /* Emit code to record the current call-site index before every
1070 insn that can throw. */
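/* Schematically, before each such insn we emit the equivalent of

     fc.call_site = N;

   where N is the index assigned by sjlj_assign_call_site_values
   (0 for must-not-throw, -1 for no action), so that after a longjmp
   the dispatcher can tell which landing pad to enter.  */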
1071
1072 static void
1073 sjlj_mark_call_sites (void)
1074 {
1075 int last_call_site = -2;
1076 rtx_insn *insn;
1077 rtx mem;
1078
1079 for (insn = get_insns (); insn ; insn = NEXT_INSN (insn))
1080 {
1081 eh_landing_pad lp;
1082 eh_region r;
1083 bool nothrow;
1084 int this_call_site;
1085 rtx_insn *before, *p;
1086
1087 /* Reset value tracking at extended basic block boundaries. */
1088 if (LABEL_P (insn))
1089 last_call_site = -2;
1090
1091 /* If the function allocates dynamic stack space, the context must
1092 be updated after every allocation/deallocation accordingly. */
1093 if (NOTE_P (insn) && NOTE_KIND (insn) == NOTE_INSN_UPDATE_SJLJ_CONTEXT)
1094 {
1095 rtx buf_addr;
1096
1097 start_sequence ();
1098 buf_addr = plus_constant (Pmode, XEXP (crtl->eh.sjlj_fc, 0),
1099 sjlj_fc_jbuf_ofs);
1100 expand_builtin_update_setjmp_buf (buf_addr);
1101 p = get_insns ();
1102 end_sequence ();
1103 emit_insn_before (p, insn);
1104 }
1105
1106 if (! INSN_P (insn))
1107 continue;
1108
1109 nothrow = get_eh_region_and_lp_from_rtx (insn, &r, &lp);
1110 if (nothrow)
1111 continue;
1112 if (lp)
1113 this_call_site = sjlj_lp_call_site_index[lp->index];
1114 else if (r == NULL)
1115 {
1116 /* Calls (and trapping insns) without notes are outside any
1117 exception handling region in this function. Mark them as
1118 no action. */
1119 this_call_site = -1;
1120 }
1121 else
1122 {
1123 gcc_assert (r->type == ERT_MUST_NOT_THROW);
1124 this_call_site = 0;
1125 }
1126
1127 if (this_call_site != -1)
1128 crtl->uses_eh_lsda = 1;
1129
1130 if (this_call_site == last_call_site)
1131 continue;
1132
1133 /* Don't separate a call from its argument loads. */
1134 before = insn;
1135 if (CALL_P (insn))
1136 before = find_first_parameter_load (insn, NULL);
1137
1138 start_sequence ();
1139 mem = adjust_address (crtl->eh.sjlj_fc, TYPE_MODE (integer_type_node),
1140 sjlj_fc_call_site_ofs);
1141 emit_move_insn (mem, gen_int_mode (this_call_site, GET_MODE (mem)));
1142 p = get_insns ();
1143 end_sequence ();
1144
1145 emit_insn_before (p, before);
1146 last_call_site = this_call_site;
1147 }
1148 }
1149
1150 /* Construct the SjLj_Function_Context. */
1151
1152 static void
1153 sjlj_emit_function_enter (rtx_code_label *dispatch_label)
1154 {
1155 rtx_insn *fn_begin, *seq;
1156 rtx fc, mem;
1157 bool fn_begin_outside_block;
1158 rtx personality = get_personality_function (current_function_decl);
1159
1160 fc = crtl->eh.sjlj_fc;
1161
1162 start_sequence ();
1163
1164 /* We're storing this libcall's address into memory instead of
1165 calling it directly. Thus, we must call assemble_external_libcall
1166 here, as we cannot depend on emit_library_call to do it for us. */
1167 assemble_external_libcall (personality);
1168 mem = adjust_address (fc, Pmode, sjlj_fc_personality_ofs);
1169 emit_move_insn (mem, personality);
1170
1171 mem = adjust_address (fc, Pmode, sjlj_fc_lsda_ofs);
1172 if (crtl->uses_eh_lsda)
1173 {
1174 char buf[20];
1175 rtx sym;
1176
1177 ASM_GENERATE_INTERNAL_LABEL (buf, "LLSDA", current_function_funcdef_no);
1178 sym = gen_rtx_SYMBOL_REF (Pmode, ggc_strdup (buf));
1179 SYMBOL_REF_FLAGS (sym) = SYMBOL_FLAG_LOCAL;
1180 emit_move_insn (mem, sym);
1181 }
1182 else
1183 emit_move_insn (mem, const0_rtx);
1184
1185 if (dispatch_label)
1186 {
1187 #ifdef DONT_USE_BUILTIN_SETJMP
1188 rtx x;
1189 x = emit_library_call_value (setjmp_libfunc, NULL_RTX, LCT_RETURNS_TWICE,
1190 TYPE_MODE (integer_type_node), 1,
1191 plus_constant (Pmode, XEXP (fc, 0),
1192 sjlj_fc_jbuf_ofs), Pmode);
1193
1194 emit_cmp_and_jump_insns (x, const0_rtx, NE, 0,
1195 TYPE_MODE (integer_type_node), 0,
1196 dispatch_label, REG_BR_PROB_BASE / 100);
1197 #else
1198 expand_builtin_setjmp_setup (plus_constant (Pmode, XEXP (fc, 0),
1199 sjlj_fc_jbuf_ofs),
1200 dispatch_label);
1201 #endif
1202 }
1203
1204 emit_library_call (unwind_sjlj_register_libfunc, LCT_NORMAL, VOIDmode,
1205 1, XEXP (fc, 0), Pmode);
1206
1207 seq = get_insns ();
1208 end_sequence ();
1209
1210 /* ??? Instead of doing this at the beginning of the function,
1211 do this in a block that is at loop level 0 and dominates all
1212 can_throw_internal instructions. */
1213
1214 fn_begin_outside_block = true;
1215 for (fn_begin = get_insns (); ; fn_begin = NEXT_INSN (fn_begin))
1216 if (NOTE_P (fn_begin))
1217 {
1218 if (NOTE_KIND (fn_begin) == NOTE_INSN_FUNCTION_BEG)
1219 break;
1220 else if (NOTE_INSN_BASIC_BLOCK_P (fn_begin))
1221 fn_begin_outside_block = false;
1222 }
1223
1224 if (fn_begin_outside_block)
1225 insert_insn_on_edge (seq, single_succ_edge (ENTRY_BLOCK_PTR_FOR_FN (cfun)));
1226 else
1227 emit_insn_after (seq, fn_begin);
1228 }
1229
1230 /* Call back from expand_function_end to know where we should put
1231 the call to unwind_sjlj_unregister_libfunc if needed. */
1232
1233 void
1234 sjlj_emit_function_exit_after (rtx_insn *after)
1235 {
1236 crtl->eh.sjlj_exit_after = after;
1237 }
1238
1239 static void
1240 sjlj_emit_function_exit (void)
1241 {
1242 rtx_insn *seq, *insn;
1243
1244 start_sequence ();
1245
1246 emit_library_call (unwind_sjlj_unregister_libfunc, LCT_NORMAL, VOIDmode,
1247 1, XEXP (crtl->eh.sjlj_fc, 0), Pmode);
1248
1249 seq = get_insns ();
1250 end_sequence ();
1251
1252 /* ??? Really this can be done in any block at loop level 0 that
1253 post-dominates all can_throw_internal instructions. This is
1254 the last possible moment. */
1255
1256 insn = crtl->eh.sjlj_exit_after;
1257 if (LABEL_P (insn))
1258 insn = NEXT_INSN (insn);
1259
1260 emit_insn_after (seq, insn);
1261 }
1262
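/* Emit the dispatch code that runs when the runtime longjmps back
   into this function.  Schematically, for NUM_DISPATCH > 1 the
   emitted sequence amounts to:

     dispatch_label:
       exc_ptr = fc.data[0];
       filter = fc.data[1];
       switch (fc.call_site)
         {
         case 0: goto post_landing_pad_0;
         case 1: goto post_landing_pad_1;
         ...
         }

   where each case first copies exc_ptr/filter into the registers
   of the corresponding region.  */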
1263 static void
1264 sjlj_emit_dispatch_table (rtx_code_label *dispatch_label, int num_dispatch)
1265 {
1266 machine_mode unwind_word_mode = targetm.unwind_word_mode ();
1267 machine_mode filter_mode = targetm.eh_return_filter_mode ();
1268 eh_landing_pad lp;
1269 rtx mem, fc, exc_ptr_reg, filter_reg;
1270 rtx_insn *seq;
1271 basic_block bb;
1272 eh_region r;
1273 edge e;
1274 int i, disp_index;
1275 vec<tree> dispatch_labels = vNULL;
1276
1277 fc = crtl->eh.sjlj_fc;
1278
1279 start_sequence ();
1280
1281 emit_label (dispatch_label);
1282
1283 #ifndef DONT_USE_BUILTIN_SETJMP
1284 expand_builtin_setjmp_receiver (dispatch_label);
1285
1286 /* The caller of expand_builtin_setjmp_receiver is responsible for
1287 making sure that the label doesn't vanish. The only other caller
1288 is the expander for __builtin_setjmp_receiver, which places this
1289 label on the nonlocal_goto_label list. Since we're modeling these
1290 CFG edges more exactly, we can use the forced_labels list instead. */
1291 LABEL_PRESERVE_P (dispatch_label) = 1;
1292 forced_labels
1293 = gen_rtx_INSN_LIST (VOIDmode, dispatch_label, forced_labels);
1294 #endif
1295
1296 /* Load up exc_ptr and filter values from the function context. */
1297 mem = adjust_address (fc, unwind_word_mode, sjlj_fc_data_ofs);
1298 if (unwind_word_mode != ptr_mode)
1299 {
1300 #ifdef POINTERS_EXTEND_UNSIGNED
1301 mem = convert_memory_address (ptr_mode, mem);
1302 #else
1303 mem = convert_to_mode (ptr_mode, mem, 0);
1304 #endif
1305 }
1306 exc_ptr_reg = force_reg (ptr_mode, mem);
1307
1308 mem = adjust_address (fc, unwind_word_mode,
1309 sjlj_fc_data_ofs + GET_MODE_SIZE (unwind_word_mode));
1310 if (unwind_word_mode != filter_mode)
1311 mem = convert_to_mode (filter_mode, mem, 0);
1312 filter_reg = force_reg (filter_mode, mem);
1313
1314 /* Jump to one of the directly reachable regions. */
1315
1316 disp_index = 0;
1317 rtx_code_label *first_reachable_label = NULL;
1318
1319 /* If there's exactly one call site in the function, don't bother
1320 generating a switch statement. */
1321 if (num_dispatch > 1)
1322 dispatch_labels.create (num_dispatch);
1323
1324 for (i = 1; vec_safe_iterate (cfun->eh->lp_array, i, &lp); ++i)
1325 if (lp && lp->post_landing_pad)
1326 {
1327 rtx_insn *seq2;
1328 rtx_code_label *label;
1329
1330 start_sequence ();
1331
1332 lp->landing_pad = dispatch_label;
1333
1334 if (num_dispatch > 1)
1335 {
1336 tree t_label, case_elt, t;
1337
1338 t_label = create_artificial_label (UNKNOWN_LOCATION);
1339 t = build_int_cst (integer_type_node, disp_index);
1340 case_elt = build_case_label (t, NULL, t_label);
1341 dispatch_labels.quick_push (case_elt);
1342 label = jump_target_rtx (t_label);
1343 }
1344 else
1345 label = gen_label_rtx ();
1346
1347 if (disp_index == 0)
1348 first_reachable_label = label;
1349 emit_label (label);
1350
1351 r = lp->region;
1352 if (r->exc_ptr_reg)
1353 emit_move_insn (r->exc_ptr_reg, exc_ptr_reg);
1354 if (r->filter_reg)
1355 emit_move_insn (r->filter_reg, filter_reg);
1356
1357 seq2 = get_insns ();
1358 end_sequence ();
1359
1360 rtx_insn *before = label_rtx (lp->post_landing_pad);
1361 bb = emit_to_new_bb_before (seq2, before);
1362 e = make_edge (bb, bb->next_bb, EDGE_FALLTHRU);
1363 e->count = bb->count;
1364 e->probability = REG_BR_PROB_BASE;
1365 if (current_loops)
1366 {
1367 struct loop *loop = bb->next_bb->loop_father;
1368 /* If we created a pre-header block, add the new block to the
1369 outer loop, otherwise to the loop itself. */
1370 if (bb->next_bb == loop->header)
1371 add_bb_to_loop (bb, loop_outer (loop));
1372 else
1373 add_bb_to_loop (bb, loop);
1374 /* ??? For multiple dispatches we will end up with edges
1375 from the loop tree root into this loop, making it a
1376 multiple-entry loop. Discard all affected loops. */
1377 if (num_dispatch > 1)
1378 {
1379 for (loop = bb->loop_father;
1380 loop_outer (loop); loop = loop_outer (loop))
1381 mark_loop_for_removal (loop);
1382 }
1383 }
1384
1385 disp_index++;
1386 }
1387 gcc_assert (disp_index == num_dispatch);
1388
1389 if (num_dispatch > 1)
1390 {
1391 rtx disp = adjust_address (fc, TYPE_MODE (integer_type_node),
1392 sjlj_fc_call_site_ofs);
1393 expand_sjlj_dispatch_table (disp, dispatch_labels);
1394 }
1395
1396 seq = get_insns ();
1397 end_sequence ();
1398
1399 bb = emit_to_new_bb_before (seq, first_reachable_label);
1400 if (num_dispatch == 1)
1401 {
1402 e = make_edge (bb, bb->next_bb, EDGE_FALLTHRU);
1403 e->count = bb->count;
1404 e->probability = REG_BR_PROB_BASE;
1405 if (current_loops)
1406 {
1407 struct loop *loop = bb->next_bb->loop_father;
1408 /* If we created a pre-header block, add the new block to the
1409 outer loop, otherwise to the loop itself. */
1410 if (bb->next_bb == loop->header)
1411 add_bb_to_loop (bb, loop_outer (loop));
1412 else
1413 add_bb_to_loop (bb, loop);
1414 }
1415 }
1416 else
1417 {
1418 /* We are not wiring up edges here; since the dispatcher call
1419 is at the beginning of the function, simply associate the
1420 block with the outermost (non-)loop. */
1421 if (current_loops)
1422 add_bb_to_loop (bb, current_loops->tree_root);
1423 }
1424 }
1425
1426 static void
1427 sjlj_build_landing_pads (void)
1428 {
1429 int num_dispatch;
1430
1431 num_dispatch = vec_safe_length (cfun->eh->lp_array);
1432 if (num_dispatch == 0)
1433 return;
1434 sjlj_lp_call_site_index.safe_grow_cleared (num_dispatch);
1435
1436 num_dispatch = sjlj_assign_call_site_values ();
1437 if (num_dispatch > 0)
1438 {
1439 rtx_code_label *dispatch_label = gen_label_rtx ();
1440 int align = STACK_SLOT_ALIGNMENT (sjlj_fc_type_node,
1441 TYPE_MODE (sjlj_fc_type_node),
1442 TYPE_ALIGN (sjlj_fc_type_node));
1443 crtl->eh.sjlj_fc
1444 = assign_stack_local (TYPE_MODE (sjlj_fc_type_node),
1445 int_size_in_bytes (sjlj_fc_type_node),
1446 align);
1447
1448 sjlj_mark_call_sites ();
1449 sjlj_emit_function_enter (dispatch_label);
1450 sjlj_emit_dispatch_table (dispatch_label, num_dispatch);
1451 sjlj_emit_function_exit ();
1452 }
1453
1454 /* If we do not have any landing pads, we may still need to register a
1455 personality routine and an (empty) LSDA to handle must-not-throw regions. */
1456 else if (function_needs_eh_personality (cfun) != eh_personality_none)
1457 {
1458 int align = STACK_SLOT_ALIGNMENT (sjlj_fc_type_node,
1459 TYPE_MODE (sjlj_fc_type_node),
1460 TYPE_ALIGN (sjlj_fc_type_node));
1461 crtl->eh.sjlj_fc
1462 = assign_stack_local (TYPE_MODE (sjlj_fc_type_node),
1463 int_size_in_bytes (sjlj_fc_type_node),
1464 align);
1465
1466 sjlj_mark_call_sites ();
1467 sjlj_emit_function_enter (NULL);
1468 sjlj_emit_function_exit ();
1469 }
1470
1471 sjlj_lp_call_site_index.release ();
1472 }
1473
1474 /* Update the sjlj function context. This function should be called
1475 whenever we allocate or deallocate dynamic stack space. */
1476
1477 void
1478 update_sjlj_context (void)
1479 {
1480 if (!flag_exceptions)
1481 return;
1482
1483 emit_note (NOTE_INSN_UPDATE_SJLJ_CONTEXT);
1484 }
1485
1486 /* After initial rtl generation, call back to finish generating
1487 exception support code. */
1488
1489 void
1490 finish_eh_generation (void)
1491 {
1492 basic_block bb;
1493
1494 /* Construct the landing pads. */
1495 if (targetm_common.except_unwind_info (&global_options) == UI_SJLJ)
1496 sjlj_build_landing_pads ();
1497 else
1498 dw2_build_landing_pads ();
1499 break_superblocks ();
1500
1501 if (targetm_common.except_unwind_info (&global_options) == UI_SJLJ
1502 /* Kludge for Alpha (see alpha_gp_save_rtx). */
1503 || single_succ_edge (ENTRY_BLOCK_PTR_FOR_FN (cfun))->insns.r)
1504 commit_edge_insertions ();
1505
1506 /* Redirect all EH edges from the post_landing_pad to the landing pad. */
1507 FOR_EACH_BB_FN (bb, cfun)
1508 {
1509 eh_landing_pad lp;
1510 edge_iterator ei;
1511 edge e;
1512
1513 lp = get_eh_landing_pad_from_rtx (BB_END (bb));
1514
1515 FOR_EACH_EDGE (e, ei, bb->succs)
1516 if (e->flags & EDGE_EH)
1517 break;
1518
1519 /* We should not have generated any new throwing insns during this
1520 pass, and we should not have lost any EH edges, so we only need
1521 to handle two cases here:
1522 (1) reachable handler and an existing edge to post-landing-pad,
1523 (2) no reachable handler and no edge. */
1524 gcc_assert ((lp != NULL) == (e != NULL));
1525 if (lp != NULL)
1526 {
1527 gcc_assert (BB_HEAD (e->dest) == label_rtx (lp->post_landing_pad));
1528
1529 redirect_edge_succ (e, BLOCK_FOR_INSN (lp->landing_pad));
1530 e->flags |= (CALL_P (BB_END (bb))
1531 ? EDGE_ABNORMAL | EDGE_ABNORMAL_CALL
1532 : EDGE_ABNORMAL);
1533 }
1534 }
1535 }
1536 \f
1537 /* This section handles removing dead code for flow. */
1538
1539 void
1540 remove_eh_landing_pad (eh_landing_pad lp)
1541 {
1542 eh_landing_pad *pp;
1543
1544 for (pp = &lp->region->landing_pads; *pp != lp; pp = &(*pp)->next_lp)
1545 continue;
1546 *pp = lp->next_lp;
1547
1548 if (lp->post_landing_pad)
1549 EH_LANDING_PAD_NR (lp->post_landing_pad) = 0;
1550 (*cfun->eh->lp_array)[lp->index] = NULL;
1551 }
1552
1553 /* Splice the EH region at PP from the region tree. */
1554
1555 static void
1556 remove_eh_handler_splicer (eh_region *pp)
1557 {
1558 eh_region region = *pp;
1559 eh_landing_pad lp;
1560
1561 for (lp = region->landing_pads; lp ; lp = lp->next_lp)
1562 {
1563 if (lp->post_landing_pad)
1564 EH_LANDING_PAD_NR (lp->post_landing_pad) = 0;
1565 (*cfun->eh->lp_array)[lp->index] = NULL;
1566 }
1567
1568 if (region->inner)
1569 {
1570 eh_region p, outer;
1571 outer = region->outer;
1572
1573 *pp = p = region->inner;
1574 do
1575 {
1576 p->outer = outer;
1577 pp = &p->next_peer;
1578 p = *pp;
1579 }
1580 while (p);
1581 }
1582 *pp = region->next_peer;
1583
1584 (*cfun->eh->region_array)[region->index] = NULL;
1585 }
1586
1587 /* Splice a single EH region REGION from the region tree.
1588
1589 To unlink REGION, we need to find the pointer to it with a relatively
1590 expensive search in REGION's outer region. If you are going to
1591 remove a number of handlers, using remove_unreachable_eh_regions may
1592 be a better option. */
1593
1594 void
1595 remove_eh_handler (eh_region region)
1596 {
1597 eh_region *pp, *pp_start, p, outer;
1598
1599 outer = region->outer;
1600 if (outer)
1601 pp_start = &outer->inner;
1602 else
1603 pp_start = &cfun->eh->region_tree;
1604 for (pp = pp_start, p = *pp; p != region; pp = &p->next_peer, p = *pp)
1605 continue;
1606
1607 remove_eh_handler_splicer (pp);
1608 }
1609
1610 /* Worker for remove_unreachable_eh_regions.
1611 PP is a pointer to the region to start a region tree depth-first
1612 search from. R_REACHABLE is the set of regions that have to be
1613 preserved. */
1614
1615 static void
1616 remove_unreachable_eh_regions_worker (eh_region *pp, sbitmap r_reachable)
1617 {
1618 while (*pp)
1619 {
1620 eh_region region = *pp;
1621 remove_unreachable_eh_regions_worker (&region->inner, r_reachable);
1622 if (!bitmap_bit_p (r_reachable, region->index))
1623 remove_eh_handler_splicer (pp);
1624 else
1625 pp = &region->next_peer;
1626 }
1627 }
1628
1629 /* Splice all EH regions *not* marked in R_REACHABLE from the region tree.
1630 Do this by traversing the EH tree top-down and splice out regions that
1631 are not marked. By removing regions from the leaves, we avoid costly
1632 searches in the region tree. */
1633
1634 void
1635 remove_unreachable_eh_regions (sbitmap r_reachable)
1636 {
1637 remove_unreachable_eh_regions_worker (&cfun->eh->region_tree, r_reachable);
1638 }
1639
1640 /* Invokes CALLBACK for every exception handler landing pad label.
1641 Only used by reload hackery; should not be used by new code. */
1642
1643 void
1644 for_each_eh_label (void (*callback) (rtx))
1645 {
1646 eh_landing_pad lp;
1647 int i;
1648
1649 for (i = 1; vec_safe_iterate (cfun->eh->lp_array, i, &lp); ++i)
1650 {
1651 if (lp)
1652 {
1653 rtx_code_label *lab = lp->landing_pad;
1654 if (lab && LABEL_P (lab))
1655 (*callback) (lab);
1656 }
1657 }
1658 }
1659 \f
1660 /* Create the REG_EH_REGION note for INSN, given its ECF_FLAGS for a
1661 call insn.
1662
1663 At the gimple level, we use LP_NR
1664 > 0 : The statement transfers to landing pad LP_NR
1665 = 0 : The statement is outside any EH region
1666 < 0 : The statement is within MUST_NOT_THROW region -LP_NR.
1667
1668 At the rtl level, we use LP_NR
1669 > 0 : The insn transfers to landing pad LP_NR
1670 = 0 : The insn cannot throw
1671 < 0 : The insn is within MUST_NOT_THROW region -LP_NR
1672 = INT_MIN : The insn cannot throw or execute a nonlocal-goto.
1673 missing note: The insn is outside any EH region.
1674
1675 ??? This difference probably ought to be avoided. We could stand
1676 to record nothrow for arbitrary gimple statements, and so avoid
1677 some moderately complex lookups in stmt_could_throw_p. Perhaps
1678 NOTHROW should be mapped on both sides to INT_MIN. Perhaps the
1679 no-nonlocal-goto property should be recorded elsewhere as a bit
1680 on the call_insn directly. Perhaps we should make more use of
1681 attaching the trees to call_insns (reachable via symbol_ref in
1682 direct call cases) and just pull the data out of the trees. */
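/* For example (schematic): a call that may unwind to landing pad 3
   gets a REG_EH_REGION note of (const_int 3); a call inside
   MUST_NOT_THROW region 2 gets (const_int -2); a call known not to
   throw gets (const_int 0); INT_MIN additionally excludes a
   nonlocal goto; and a note-less call is outside any EH region.  */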
1683
1684 void
1685 make_reg_eh_region_note (rtx_insn *insn, int ecf_flags, int lp_nr)
1686 {
1687 rtx value;
1688 if (ecf_flags & ECF_NOTHROW)
1689 value = const0_rtx;
1690 else if (lp_nr != 0)
1691 value = GEN_INT (lp_nr);
1692 else
1693 return;
1694 add_reg_note (insn, REG_EH_REGION, value);
1695 }
1696
1697 /* Create a REG_EH_REGION note for a CALL_INSN that cannot throw
1698 nor perform a non-local goto. Replace the region note if it
1699 already exists. */
1700
1701 void
1702 make_reg_eh_region_note_nothrow_nononlocal (rtx_insn *insn)
1703 {
1704 rtx note = find_reg_note (insn, REG_EH_REGION, NULL_RTX);
1705 rtx intmin = GEN_INT (INT_MIN);
1706
1707 if (note != 0)
1708 XEXP (note, 0) = intmin;
1709 else
1710 add_reg_note (insn, REG_EH_REGION, intmin);
1711 }
1712
1713 /* Return true if INSN could throw, assuming no REG_EH_REGION note
1714 to the contrary. */
1715
1716 bool
1717 insn_could_throw_p (const_rtx insn)
1718 {
1719 if (!flag_exceptions)
1720 return false;
1721 if (CALL_P (insn))
1722 return true;
1723 if (INSN_P (insn) && cfun->can_throw_non_call_exceptions)
1724 return may_trap_p (PATTERN (insn));
1725 return false;
1726 }
1727
1728 /* Copy a REG_EH_REGION note to each insn that might throw, beginning
1729 at FIRST and ending at LAST. NOTE_OR_INSN is either the source insn
1730 to look for a note, or the note itself. */
1731
1732 void
1733 copy_reg_eh_region_note_forward (rtx note_or_insn, rtx_insn *first, rtx last)
1734 {
1735 rtx_insn *insn;
1736 rtx note = note_or_insn;
1737
1738 if (INSN_P (note_or_insn))
1739 {
1740 note = find_reg_note (note_or_insn, REG_EH_REGION, NULL_RTX);
1741 if (note == NULL)
1742 return;
1743 }
1744 note = XEXP (note, 0);
1745
1746 for (insn = first; insn != last ; insn = NEXT_INSN (insn))
1747 if (!find_reg_note (insn, REG_EH_REGION, NULL_RTX)
1748 && insn_could_throw_p (insn))
1749 add_reg_note (insn, REG_EH_REGION, note);
1750 }
1751
1752 /* Likewise, but iterate backward. */
1753
1754 void
1755 copy_reg_eh_region_note_backward (rtx note_or_insn, rtx_insn *last, rtx first)
1756 {
1757 rtx_insn *insn;
1758 rtx note = note_or_insn;
1759
1760 if (INSN_P (note_or_insn))
1761 {
1762 note = find_reg_note (note_or_insn, REG_EH_REGION, NULL_RTX);
1763 if (note == NULL)
1764 return;
1765 }
1766 note = XEXP (note, 0);
1767
1768 for (insn = last; insn != first; insn = PREV_INSN (insn))
1769 if (insn_could_throw_p (insn))
1770 add_reg_note (insn, REG_EH_REGION, note);
1771 }
1772
1773
1774 /* Extract all EH information from INSN. Return true if the insn
1775 was marked NOTHROW. */
1776
1777 static bool
1778 get_eh_region_and_lp_from_rtx (const_rtx insn, eh_region *pr,
1779 eh_landing_pad *plp)
1780 {
1781 eh_landing_pad lp = NULL;
1782 eh_region r = NULL;
1783 bool ret = false;
1784 rtx note;
1785 int lp_nr;
1786
1787 if (! INSN_P (insn))
1788 goto egress;
1789
1790 if (NONJUMP_INSN_P (insn)
1791 && GET_CODE (PATTERN (insn)) == SEQUENCE)
1792 insn = XVECEXP (PATTERN (insn), 0, 0);
1793
1794 note = find_reg_note (insn, REG_EH_REGION, NULL_RTX);
1795 if (!note)
1796 {
1797 ret = !insn_could_throw_p (insn);
1798 goto egress;
1799 }
1800
1801 lp_nr = INTVAL (XEXP (note, 0));
1802 if (lp_nr == 0 || lp_nr == INT_MIN)
1803 {
1804 ret = true;
1805 goto egress;
1806 }
1807
1808 if (lp_nr < 0)
1809 r = (*cfun->eh->region_array)[-lp_nr];
1810 else
1811 {
1812 lp = (*cfun->eh->lp_array)[lp_nr];
1813 r = lp->region;
1814 }
1815
1816 egress:
1817 *plp = lp;
1818 *pr = r;
1819 return ret;
1820 }
1821
1822 /* Return the landing pad to which INSN may go, or NULL if it does not
1823 have a reachable landing pad within this function. */
1824
1825 eh_landing_pad
1826 get_eh_landing_pad_from_rtx (const_rtx insn)
1827 {
1828 eh_landing_pad lp;
1829 eh_region r;
1830
1831 get_eh_region_and_lp_from_rtx (insn, &r, &lp);
1832 return lp;
1833 }
1834
1835 /* Return the region to which INSN may go, or NULL if it does not
1836 have a reachable region within this function. */
1837
1838 eh_region
1839 get_eh_region_from_rtx (const_rtx insn)
1840 {
1841 eh_landing_pad lp;
1842 eh_region r;
1843
1844 get_eh_region_and_lp_from_rtx (insn, &r, &lp);
1845 return r;
1846 }
1847
1848 /* Return true if INSN throws and is caught by something in this function. */
1849
1850 bool
1851 can_throw_internal (const_rtx insn)
1852 {
1853 return get_eh_landing_pad_from_rtx (insn) != NULL;
1854 }
1855
1856 /* Return true if INSN throws and escapes from the current function. */
1857
1858 bool
1859 can_throw_external (const_rtx insn)
1860 {
1861 eh_landing_pad lp;
1862 eh_region r;
1863 bool nothrow;
1864
1865 if (! INSN_P (insn))
1866 return false;
1867
1868 if (NONJUMP_INSN_P (insn)
1869 && GET_CODE (PATTERN (insn)) == SEQUENCE)
1870 {
1871 rtx_sequence *seq = as_a <rtx_sequence *> (PATTERN (insn));
1872 int i, n = seq->len ();
1873
1874 for (i = 0; i < n; i++)
1875 if (can_throw_external (seq->element (i)))
1876 return true;
1877
1878 return false;
1879 }
1880
1881 nothrow = get_eh_region_and_lp_from_rtx (insn, &r, &lp);
1882
1883 /* If we can't throw, we obviously can't throw external. */
1884 if (nothrow)
1885 return false;
1886
1887 /* If we have an internal landing pad, then we're not external. */
1888 if (lp != NULL)
1889 return false;
1890
1891 /* If we're not within an EH region, then we are external. */
1892 if (r == NULL)
1893 return true;
1894
1895 /* The only thing that ought to be left is MUST_NOT_THROW regions,
1896 which don't always have landing pads. */
1897 gcc_assert (r->type == ERT_MUST_NOT_THROW);
1898 return false;
1899 }
1900
1901 /* Return true if INSN cannot throw at all. */
1902
1903 bool
1904 insn_nothrow_p (const_rtx insn)
1905 {
1906 eh_landing_pad lp;
1907 eh_region r;
1908
1909 if (! INSN_P (insn))
1910 return true;
1911
1912 if (NONJUMP_INSN_P (insn)
1913 && GET_CODE (PATTERN (insn)) == SEQUENCE)
1914 {
1915 rtx_sequence *seq = as_a <rtx_sequence *> (PATTERN (insn));
1916 int i, n = seq->len ();
1917
1918 for (i = 0; i < n; i++)
1919 if (!insn_nothrow_p (seq->element (i)))
1920 return false;
1921
1922 return true;
1923 }
1924
1925 return get_eh_region_and_lp_from_rtx (insn, &r, &lp);
1926 }
1927
1928 /* Return true if INSN can perform a non-local goto. */
1929 /* ??? This test is here in this file because it (ab)uses REG_EH_REGION. */
1930
1931 bool
1932 can_nonlocal_goto (const rtx_insn *insn)
1933 {
1934 if (nonlocal_goto_handler_labels && CALL_P (insn))
1935 {
1936 rtx note = find_reg_note (insn, REG_EH_REGION, NULL_RTX);
1937 if (!note || INTVAL (XEXP (note, 0)) != INT_MIN)
1938 return true;
1939 }
1940 return false;
1941 }
1942 \f
1943 /* Set TREE_NOTHROW and crtl->all_throwers_are_sibcalls. */
1944
1945 static unsigned int
1946 set_nothrow_function_flags (void)
1947 {
1948 rtx_insn *insn;
1949
1950 crtl->nothrow = 1;
1951
1952 /* Assume crtl->all_throwers_are_sibcalls until we encounter
1953 something that can throw an exception. We specifically exempt
1954 CALL_INSNs that are SIBLING_CALL_P, as these are really jumps,
1955 and can't throw. Most CALL_INSNs are not SIBLING_CALL_P, so this
1956 is optimistic. */
1957
1958 crtl->all_throwers_are_sibcalls = 1;
1959
1960 /* If we don't know that this implementation of the function will
1961 actually be used, then we must not set TREE_NOTHROW, since
1962 callers must not assume that this function does not throw. */
1963 if (TREE_NOTHROW (current_function_decl))
1964 return 0;
1965
1966 if (! flag_exceptions)
1967 return 0;
1968
1969 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
1970 if (can_throw_external (insn))
1971 {
1972 crtl->nothrow = 0;
1973
1974 if (!CALL_P (insn) || !SIBLING_CALL_P (insn))
1975 {
1976 crtl->all_throwers_are_sibcalls = 0;
1977 return 0;
1978 }
1979 }
1980
1981 if (crtl->nothrow
1982 && (cgraph_node::get (current_function_decl)->get_availability ()
1983 >= AVAIL_AVAILABLE))
1984 {
1985 struct cgraph_node *node = cgraph_node::get (current_function_decl);
1986 struct cgraph_edge *e;
1987 for (e = node->callers; e; e = e->next_caller)
1988 e->can_throw_external = false;
1989 node->set_nothrow_flag (true);
1990
1991 if (dump_file)
1992 fprintf (dump_file, "Marking function nothrow: %s\n\n",
1993 current_function_name ());
1994 }
1995 return 0;
1996 }
1997
1998 namespace {
1999
2000 const pass_data pass_data_set_nothrow_function_flags =
2001 {
2002 RTL_PASS, /* type */
2003 "nothrow", /* name */
2004 OPTGROUP_NONE, /* optinfo_flags */
2005 TV_NONE, /* tv_id */
2006 0, /* properties_required */
2007 0, /* properties_provided */
2008 0, /* properties_destroyed */
2009 0, /* todo_flags_start */
2010 0, /* todo_flags_finish */
2011 };
2012
2013 class pass_set_nothrow_function_flags : public rtl_opt_pass
2014 {
2015 public:
2016 pass_set_nothrow_function_flags (gcc::context *ctxt)
2017 : rtl_opt_pass (pass_data_set_nothrow_function_flags, ctxt)
2018 {}
2019
2020 /* opt_pass methods: */
2021 virtual unsigned int execute (function *)
2022 {
2023 return set_nothrow_function_flags ();
2024 }
2025
2026 }; // class pass_set_nothrow_function_flags
2027
2028 } // anon namespace
2029
2030 rtl_opt_pass *
2031 make_pass_set_nothrow_function_flags (gcc::context *ctxt)
2032 {
2033 return new pass_set_nothrow_function_flags (ctxt);
2034 }
2035
2036 \f
2037 /* Various hooks for unwind library. */
2038
2039 /* Expand the EH support builtin functions:
2040 __builtin_eh_pointer and __builtin_eh_filter. */
2041
2042 static eh_region
2043 expand_builtin_eh_common (tree region_nr_t)
2044 {
2045 HOST_WIDE_INT region_nr;
2046 eh_region region;
2047
2048 gcc_assert (tree_fits_shwi_p (region_nr_t));
2049 region_nr = tree_to_shwi (region_nr_t);
2050
2051 region = (*cfun->eh->region_array)[region_nr];
2052
2053 /* ??? We shouldn't have been able to delete an eh region without
2054 deleting all the code that depended on it. */
2055 gcc_assert (region != NULL);
2056
2057 return region;
2058 }
2059
2060 /* Expand to the exc_ptr value from the given eh region. */
2061
2062 rtx
2063 expand_builtin_eh_pointer (tree exp)
2064 {
2065 eh_region region
2066 = expand_builtin_eh_common (CALL_EXPR_ARG (exp, 0));
2067 if (region->exc_ptr_reg == NULL)
2068 region->exc_ptr_reg = gen_reg_rtx (ptr_mode);
2069 return region->exc_ptr_reg;
2070 }
2071
2072 /* Expand to the filter value from the given eh region. */
2073
2074 rtx
2075 expand_builtin_eh_filter (tree exp)
2076 {
2077 eh_region region
2078 = expand_builtin_eh_common (CALL_EXPR_ARG (exp, 0));
2079 if (region->filter_reg == NULL)
2080 region->filter_reg = gen_reg_rtx (targetm.eh_return_filter_mode ());
2081 return region->filter_reg;
2082 }
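
/* A sketch of how the two builtins above appear in GIMPLE (names and
   exact shape are illustrative): after pass_lower_eh_dispatch, the
   dispatch code for try region 1 looks roughly like

     filter_6 = __builtin_eh_filter (1);
     if (filter_6 == 2) goto <catch_A>; else goto <outer>;

   and a handler obtains the exception object via
   __builtin_eh_pointer (1).  The argument is the region number used
   for the region_array lookup in expand_builtin_eh_common.  */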
2083
2084 /* Copy the exc_ptr and filter values from one region's registers
2085 to another's. This is used to inline the resx statement. */
2086
2087 rtx
2088 expand_builtin_eh_copy_values (tree exp)
2089 {
2090 eh_region dst
2091 = expand_builtin_eh_common (CALL_EXPR_ARG (exp, 0));
2092 eh_region src
2093 = expand_builtin_eh_common (CALL_EXPR_ARG (exp, 1));
2094 machine_mode fmode = targetm.eh_return_filter_mode ();
2095
2096 if (dst->exc_ptr_reg == NULL)
2097 dst->exc_ptr_reg = gen_reg_rtx (ptr_mode);
2098 if (src->exc_ptr_reg == NULL)
2099 src->exc_ptr_reg = gen_reg_rtx (ptr_mode);
2100
2101 if (dst->filter_reg == NULL)
2102 dst->filter_reg = gen_reg_rtx (fmode);
2103 if (src->filter_reg == NULL)
2104 src->filter_reg = gen_reg_rtx (fmode);
2105
2106 emit_move_insn (dst->exc_ptr_reg, src->exc_ptr_reg);
2107 emit_move_insn (dst->filter_reg, src->filter_reg);
2108
2109 return const0_rtx;
2110 }
2111
2112 /* Do any necessary initialization to access arbitrary stack frames.
2113 On the SPARC, this means flushing the register windows. */
2114
2115 void
2116 expand_builtin_unwind_init (void)
2117 {
2118 /* Set this so all the registers get saved in our frame; we need to be
2119 able to copy the saved values for any registers from frames we unwind. */
2120 crtl->saves_all_registers = 1;
2121
2122 #ifdef SETUP_FRAME_ADDRESSES
2123 SETUP_FRAME_ADDRESSES ();
2124 #endif
2125 }
2126
2127 /* Map a non-negative number to an eh return data register number; expands
2128 to -1 if no return data register is associated with the input number.
2129 At least the inputs 0 and 1 must be mapped; the target may provide more. */
2130
2131 rtx
2132 expand_builtin_eh_return_data_regno (tree exp)
2133 {
2134 tree which = CALL_EXPR_ARG (exp, 0);
2135 unsigned HOST_WIDE_INT iwhich;
2136
2137 if (TREE_CODE (which) != INTEGER_CST)
2138 {
2139 error ("argument of %<__builtin_eh_return_regno%> must be constant");
2140 return constm1_rtx;
2141 }
2142
2143 iwhich = tree_to_uhwi (which);
2144 iwhich = EH_RETURN_DATA_REGNO (iwhich);
2145 if (iwhich == INVALID_REGNUM)
2146 return constm1_rtx;
2147
2148 #ifdef DWARF_FRAME_REGNUM
2149 iwhich = DWARF_FRAME_REGNUM (iwhich);
2150 #else
2151 iwhich = DBX_REGISTER_NUMBER (iwhich);
2152 #endif
2153
2154 return GEN_INT (iwhich);
2155 }
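
/* For reference, personality routines in libgcc/libstdc++ use this
   builtin to deposit the exception object and filter value before
   transferring control to a landing pad, roughly (a paraphrase, not a
   verbatim quote):

     _Unwind_SetGR (context, __builtin_eh_return_data_regno (0),
                    __builtin_extend_pointer (ue_header));
     _Unwind_SetGR (context, __builtin_eh_return_data_regno (1),
                    handler_switch_value);

   __builtin_extend_pointer is expanded by expand_builtin_extend_pointer
   later in this file.  */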
2156
2157 /* Given a value extracted from the return address register or stack slot,
2158 return the actual address encoded in that value. */
2159
2160 rtx
2161 expand_builtin_extract_return_addr (tree addr_tree)
2162 {
2163 rtx addr = expand_expr (addr_tree, NULL_RTX, Pmode, EXPAND_NORMAL);
2164
2165 if (GET_MODE (addr) != Pmode
2166 && GET_MODE (addr) != VOIDmode)
2167 {
2168 #ifdef POINTERS_EXTEND_UNSIGNED
2169 addr = convert_memory_address (Pmode, addr);
2170 #else
2171 addr = convert_to_mode (Pmode, addr, 0);
2172 #endif
2173 }
2174
2175 /* First mask out any unwanted bits. */
2176 rtx mask = MASK_RETURN_ADDR;
2177 if (mask)
2178 expand_and (Pmode, addr, mask, addr);
2179
2180 /* Then adjust to find the real return address. */
2181 if (RETURN_ADDR_OFFSET)
2182 addr = plus_constant (Pmode, addr, RETURN_ADDR_OFFSET);
2183
2184 return addr;
2185 }
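
/* Most targets define neither macro, in which case the function above
   reduces to a mode conversion.  As a hedged example of the exceptions:
   SPARC defines RETURN_ADDR_OFFSET because the call instruction leaves
   the return register pointing 8 bytes before the actual return point,
   and targets whose saved PC carries extra state bits define
   MASK_RETURN_ADDR to strip them.  */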
2186
2187 /* Given an actual address in addr_tree, do any necessary encoding
2188 and return the value to be stored in the return address register or
2189 stack slot so the epilogue will return to that address. */
2190
2191 rtx
2192 expand_builtin_frob_return_addr (tree addr_tree)
2193 {
2194 rtx addr = expand_expr (addr_tree, NULL_RTX, ptr_mode, EXPAND_NORMAL);
2195
2196 addr = convert_memory_address (Pmode, addr);
2197
2198 if (RETURN_ADDR_OFFSET)
2199 {
2200 addr = force_reg (Pmode, addr);
2201 addr = plus_constant (Pmode, addr, -RETURN_ADDR_OFFSET);
2202 }
2203
2204 return addr;
2205 }
2206
2207 /* Set up the epilogue with the magic bits we'll need to return to the
2208 exception handler. */
2209
2210 void
2211 expand_builtin_eh_return (tree stackadj_tree ATTRIBUTE_UNUSED,
2212 tree handler_tree)
2213 {
2214 rtx tmp;
2215
2216 #ifdef EH_RETURN_STACKADJ_RTX
2217 tmp = expand_expr (stackadj_tree, crtl->eh.ehr_stackadj,
2218 VOIDmode, EXPAND_NORMAL);
2219 tmp = convert_memory_address (Pmode, tmp);
2220 if (!crtl->eh.ehr_stackadj)
2221 crtl->eh.ehr_stackadj = copy_to_reg (tmp);
2222 else if (tmp != crtl->eh.ehr_stackadj)
2223 emit_move_insn (crtl->eh.ehr_stackadj, tmp);
2224 #endif
2225
2226 tmp = expand_expr (handler_tree, crtl->eh.ehr_handler,
2227 VOIDmode, EXPAND_NORMAL);
2228 tmp = convert_memory_address (Pmode, tmp);
2229 if (!crtl->eh.ehr_handler)
2230 crtl->eh.ehr_handler = copy_to_reg (tmp);
2231 else if (tmp != crtl->eh.ehr_handler)
2232 emit_move_insn (crtl->eh.ehr_handler, tmp);
2233
2234 if (!crtl->eh.ehr_label)
2235 crtl->eh.ehr_label = gen_label_rtx ();
2236 emit_jump (crtl->eh.ehr_label);
2237 }
2238
2239 /* Expand __builtin_eh_return. This exit path from the function loads up
2240 the eh return data registers, adjusts the stack, and branches to a
2241 given PC other than the normal return address. */
2242
2243 void
2244 expand_eh_return (void)
2245 {
2246 rtx_code_label *around_label;
2247
2248 if (! crtl->eh.ehr_label)
2249 return;
2250
2251 crtl->calls_eh_return = 1;
2252
2253 #ifdef EH_RETURN_STACKADJ_RTX
2254 emit_move_insn (EH_RETURN_STACKADJ_RTX, const0_rtx);
2255 #endif
2256
2257 around_label = gen_label_rtx ();
2258 emit_jump (around_label);
2259
2260 emit_label (crtl->eh.ehr_label);
2261 clobber_return_register ();
2262
2263 #ifdef EH_RETURN_STACKADJ_RTX
2264 emit_move_insn (EH_RETURN_STACKADJ_RTX, crtl->eh.ehr_stackadj);
2265 #endif
2266
2267 #ifdef HAVE_eh_return
2268 if (HAVE_eh_return)
2269 emit_insn (gen_eh_return (crtl->eh.ehr_handler));
2270 else
2271 #endif
2272 {
2273 #ifdef EH_RETURN_HANDLER_RTX
2274 emit_move_insn (EH_RETURN_HANDLER_RTX, crtl->eh.ehr_handler);
2275 #else
2276 error ("__builtin_eh_return not supported on this target");
2277 #endif
2278 }
2279
2280 emit_label (around_label);
2281 }
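
/* The code emitted above has this shape (a sketch; the stack adjustment
   and the eh_return pattern are target-dependent):

       jump around                  ; normal paths skip the EH return
     ehr_label:
       (clobber return registers)
       EH_RETURN_STACKADJ_RTX = ehr_stackadj
       eh_return (ehr_handler)      ; or EH_RETURN_HANDLER_RTX = ehr_handler
     around:
       ... the normal epilogue follows ...  */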
2282
2283 /* Convert a ptr_mode address ADDR_TREE to a Pmode address controlled by
2284 POINTERS_EXTEND_UNSIGNED and return it. */
2285
2286 rtx
2287 expand_builtin_extend_pointer (tree addr_tree)
2288 {
2289 rtx addr = expand_expr (addr_tree, NULL_RTX, ptr_mode, EXPAND_NORMAL);
2290 int extend;
2291
2292 #ifdef POINTERS_EXTEND_UNSIGNED
2293 extend = POINTERS_EXTEND_UNSIGNED;
2294 #else
2295 /* The previous EH code did an unsigned extend by default, so we do this also
2296 for consistency. */
2297 extend = 1;
2298 #endif
2299
2300 return convert_modes (targetm.unwind_word_mode (), ptr_mode, addr, extend);
2301 }
2302 \f
2303 static int
2304 add_action_record (action_hash_type *ar_hash, int filter, int next)
2305 {
2306 struct action_record **slot, *new_ar, tmp;
2307
2308 tmp.filter = filter;
2309 tmp.next = next;
2310 slot = ar_hash->find_slot (&tmp, INSERT);
2311
2312 if ((new_ar = *slot) == NULL)
2313 {
2314 new_ar = XNEW (struct action_record);
2315 new_ar->offset = crtl->eh.action_record_data->length () + 1;
2316 new_ar->filter = filter;
2317 new_ar->next = next;
2318 *slot = new_ar;
2319
2320 /* The filter value goes in untouched. The link to the next
2321 record is a "self-relative" byte offset, or zero to indicate
2322 that there is no next record. So convert the absolute 1-based
2323 indices we've been carrying around into a displacement. */
2324
2325 push_sleb128 (&crtl->eh.action_record_data, filter);
2326 if (next)
2327 next -= crtl->eh.action_record_data->length () + 1;
2328 push_sleb128 (&crtl->eh.action_record_data, next);
2329 }
2330
2331 return new_ar->offset;
2332 }
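
/* A worked example of the encoding above, assuming an initially empty
   action_record_data vector:

     add_action_record (&h, 2, 0)  -> offset 1, appends bytes 0x02 0x00
     add_action_record (&h, 1, 1)  -> offset 3, appends bytes 0x01 0x7d

   For the second record the stored link is 1 - 4 = -3 (0x7d as a
   sleb128), i.e. a self-relative displacement from data position 4
   back to the record at offset 1.  */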
2333
2334 static int
2335 collect_one_action_chain (action_hash_type *ar_hash, eh_region region)
2336 {
2337 int next;
2338
2339 /* If we've reached the top of the region chain, then we have
2340 no actions, and require no landing pad. */
2341 if (region == NULL)
2342 return -1;
2343
2344 switch (region->type)
2345 {
2346 case ERT_CLEANUP:
2347 {
2348 eh_region r;
2349 /* A cleanup adds a zero filter to the beginning of the chain, but
2350 there are special cases to look out for. If there are *only*
2351 cleanups along a path, then it compresses to a zero action.
2352 Further, if there are multiple cleanups along a path, we only
2353 need to represent one of them, as that is enough to trigger
2354 entry to the landing pad at runtime. */
2355 next = collect_one_action_chain (ar_hash, region->outer);
2356 if (next <= 0)
2357 return 0;
2358 for (r = region->outer; r ; r = r->outer)
2359 if (r->type == ERT_CLEANUP)
2360 return next;
2361 return add_action_record (ar_hash, 0, next);
2362 }
2363
2364 case ERT_TRY:
2365 {
2366 eh_catch c;
2367
2368 /* Process the associated catch regions in reverse order.
2369 If there's a catch-all handler, then we don't need to
2370 search outer regions. Use a magic -3 value to record
2371 that we haven't done the outer search. */
2372 next = -3;
2373 for (c = region->u.eh_try.last_catch; c ; c = c->prev_catch)
2374 {
2375 if (c->type_list == NULL)
2376 {
2377 /* Retrieve the filter from the head of the filter list
2378 where we have stored it (see assign_filter_values). */
2379 int filter = TREE_INT_CST_LOW (TREE_VALUE (c->filter_list));
2380 next = add_action_record (ar_hash, filter, 0);
2381 }
2382 else
2383 {
2384 /* Once the outer search is done, trigger an action record for
2385 each filter we have. */
2386 tree flt_node;
2387
2388 if (next == -3)
2389 {
2390 next = collect_one_action_chain (ar_hash, region->outer);
2391
2392 /* If there is no next action, terminate the chain. */
2393 if (next == -1)
2394 next = 0;
2395 /* If all outer actions are cleanups or must_not_throw,
2396 we'll have no action record for it, since we want to
2397 encode these states directly in the call-site record.
2398 Add a cleanup action to the chain to catch these. */
2399 else if (next <= 0)
2400 next = add_action_record (ar_hash, 0, 0);
2401 }
2402
2403 flt_node = c->filter_list;
2404 for (; flt_node; flt_node = TREE_CHAIN (flt_node))
2405 {
2406 int filter = TREE_INT_CST_LOW (TREE_VALUE (flt_node));
2407 next = add_action_record (ar_hash, filter, next);
2408 }
2409 }
2410 }
2411 return next;
2412 }
2413
2414 case ERT_ALLOWED_EXCEPTIONS:
2415 /* An exception specification adds its filter to the
2416 beginning of the chain. */
2417 next = collect_one_action_chain (ar_hash, region->outer);
2418
2419 /* If there is no next action, terminate the chain. */
2420 if (next == -1)
2421 next = 0;
2422 /* If all outer actions are cleanups or must_not_throw,
2423 we'll have no action record for it, since we want to
2424 encode these states directly in the call-site record.
2425 Add a cleanup action to the chain to catch these. */
2426 else if (next <= 0)
2427 next = add_action_record (ar_hash, 0, 0);
2428
2429 return add_action_record (ar_hash, region->u.allowed.filter, next);
2430
2431 case ERT_MUST_NOT_THROW:
2432 /* A must-not-throw region with no inner handlers or cleanups
2433 requires no call-site entry. Note that this differs from
2434 the no handler or cleanup case in that we do require an lsda
2435 to be generated. Return a magic -2 value to record this. */
2436 return -2;
2437 }
2438
2439 gcc_unreachable ();
2440 }
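
/* Summary of the return-value protocol of collect_one_action_chain:
   a positive value is a 1-based offset into action_record_data; 0 means
   a cleanups-only chain, encoded as action 0 in the call-site record
   with no action-table entry; -1 means no actions and no landing pad;
   -2 means must-not-throw (an lsda is required but no call-site entry
   is); -3 is purely internal, the "outer search not yet done"
   sentinel.  */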
2441
2442 static int
2443 add_call_site (rtx landing_pad, int action, int section)
2444 {
2445 call_site_record record;
2446
2447 record = ggc_alloc<call_site_record_d> ();
2448 record->landing_pad = landing_pad;
2449 record->action = action;
2450
2451 vec_safe_push (crtl->eh.call_site_record_v[section], record);
2452
2453 return call_site_base + crtl->eh.call_site_record_v[section]->length () - 1;
2454 }
2455
2456 static rtx_note *
2457 emit_note_eh_region_end (rtx_insn *insn)
2458 {
2459 rtx_insn *next = NEXT_INSN (insn);
2460
2461 /* Make sure we do not split a call and its corresponding
2462 CALL_ARG_LOCATION note. */
2463 if (next && NOTE_P (next)
2464 && NOTE_KIND (next) == NOTE_INSN_CALL_ARG_LOCATION)
2465 insn = next;
2466
2467 return emit_note_after (NOTE_INSN_EH_REGION_END, insn);
2468 }
2469
2470 /* Turn REG_EH_REGION notes back into NOTE_INSN_EH_REGION notes.
2471 The new note numbers will not refer to region numbers, but
2472 instead to call site entries. */
2473
2474 static unsigned int
2475 convert_to_eh_region_ranges (void)
2476 {
2477 rtx insn;
2478 rtx_insn *iter;
2479 rtx_note *note;
2480 action_hash_type ar_hash (31);
2481 int last_action = -3;
2482 rtx_insn *last_action_insn = NULL;
2483 rtx last_landing_pad = NULL_RTX;
2484 rtx_insn *first_no_action_insn = NULL;
2485 int call_site = 0;
2486 int cur_sec = 0;
2487 rtx_insn *section_switch_note = NULL;
2488 rtx_insn *first_no_action_insn_before_switch = NULL;
2489 rtx_insn *last_no_action_insn_before_switch = NULL;
2490 int saved_call_site_base = call_site_base;
2491
2492 vec_alloc (crtl->eh.action_record_data, 64);
2493
2494 for (iter = get_insns (); iter ; iter = NEXT_INSN (iter))
2495 if (INSN_P (iter))
2496 {
2497 eh_landing_pad lp;
2498 eh_region region;
2499 bool nothrow;
2500 int this_action;
2501 rtx_code_label *this_landing_pad;
2502
2503 insn = iter;
2504 if (NONJUMP_INSN_P (insn)
2505 && GET_CODE (PATTERN (insn)) == SEQUENCE)
2506 insn = XVECEXP (PATTERN (insn), 0, 0);
2507
2508 nothrow = get_eh_region_and_lp_from_rtx (insn, &region, &lp);
2509 if (nothrow)
2510 continue;
2511 if (region)
2512 this_action = collect_one_action_chain (&ar_hash, region);
2513 else
2514 this_action = -1;
2515
2516 /* Existence of catch handlers or must-not-throw regions
2517 implies that an lsda is needed (even if empty). */
2518 if (this_action != -1)
2519 crtl->uses_eh_lsda = 1;
2520
2521 /* Delay creation of region notes for no-action regions
2522 until we're sure that an lsda will be required. */
2523 else if (last_action == -3)
2524 {
2525 first_no_action_insn = iter;
2526 last_action = -1;
2527 }
2528
2529 if (this_action >= 0)
2530 this_landing_pad = lp->landing_pad;
2531 else
2532 this_landing_pad = NULL;
2533
2534 /* Differing actions or landing pads imply a change in call-site
2535 info, which implies some EH_REGION note should be emitted. */
2536 if (last_action != this_action
2537 || last_landing_pad != this_landing_pad)
2538 {
2539 /* If there is a queued no-action region in the other section
2540 with hot/cold partitioning, emit it now. */
2541 if (first_no_action_insn_before_switch)
2542 {
2543 gcc_assert (this_action != -1
2544 && last_action == (first_no_action_insn
2545 ? -1 : -3));
2546 call_site = add_call_site (NULL_RTX, 0, 0);
2547 note = emit_note_before (NOTE_INSN_EH_REGION_BEG,
2548 first_no_action_insn_before_switch);
2549 NOTE_EH_HANDLER (note) = call_site;
2550 note
2551 = emit_note_eh_region_end (last_no_action_insn_before_switch);
2552 NOTE_EH_HANDLER (note) = call_site;
2553 gcc_assert (last_action != -3
2554 || (last_action_insn
2555 == last_no_action_insn_before_switch));
2556 first_no_action_insn_before_switch = NULL;
2557 last_no_action_insn_before_switch = NULL;
2558 call_site_base++;
2559 }
2560 /* If we'd not seen a previous action (-3) or the previous
2561 action was must-not-throw (-2), then we do not need an
2562 end note. */
2563 if (last_action >= -1)
2564 {
2565 /* If we delayed the creation of the begin, do it now. */
2566 if (first_no_action_insn)
2567 {
2568 call_site = add_call_site (NULL_RTX, 0, cur_sec);
2569 note = emit_note_before (NOTE_INSN_EH_REGION_BEG,
2570 first_no_action_insn);
2571 NOTE_EH_HANDLER (note) = call_site;
2572 first_no_action_insn = NULL;
2573 }
2574
2575 note = emit_note_eh_region_end (last_action_insn);
2576 NOTE_EH_HANDLER (note) = call_site;
2577 }
2578
2579 /* If the new action is must-not-throw, then no region notes
2580 are created. */
2581 if (this_action >= -1)
2582 {
2583 call_site = add_call_site (this_landing_pad,
2584 this_action < 0 ? 0 : this_action,
2585 cur_sec);
2586 note = emit_note_before (NOTE_INSN_EH_REGION_BEG, iter);
2587 NOTE_EH_HANDLER (note) = call_site;
2588 }
2589
2590 last_action = this_action;
2591 last_landing_pad = this_landing_pad;
2592 }
2593 last_action_insn = iter;
2594 }
2595 else if (NOTE_P (iter)
2596 && NOTE_KIND (iter) == NOTE_INSN_SWITCH_TEXT_SECTIONS)
2597 {
2598 gcc_assert (section_switch_note == NULL_RTX);
2599 gcc_assert (flag_reorder_blocks_and_partition);
2600 section_switch_note = iter;
2601 if (first_no_action_insn)
2602 {
2603 first_no_action_insn_before_switch = first_no_action_insn;
2604 last_no_action_insn_before_switch = last_action_insn;
2605 first_no_action_insn = NULL;
2606 gcc_assert (last_action == -1);
2607 last_action = -3;
2608 }
2609 /* Force closing of current EH region before section switch and
2610 opening a new one afterwards. */
2611 else if (last_action != -3)
2612 last_landing_pad = pc_rtx;
2613 if (crtl->eh.call_site_record_v[cur_sec])
2614 call_site_base += crtl->eh.call_site_record_v[cur_sec]->length ();
2615 cur_sec++;
2616 gcc_assert (crtl->eh.call_site_record_v[cur_sec] == NULL);
2617 vec_alloc (crtl->eh.call_site_record_v[cur_sec], 10);
2618 }
2619
2620 if (last_action >= -1 && ! first_no_action_insn)
2621 {
2622 note = emit_note_eh_region_end (last_action_insn);
2623 NOTE_EH_HANDLER (note) = call_site;
2624 }
2625
2626 call_site_base = saved_call_site_base;
2627
2628 return 0;
2629 }
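
/* After this pass, each run of insns sharing one call-site entry is
   bracketed like so (a sketch):

     NOTE_INSN_EH_REGION_BEG   ; NOTE_EH_HANDLER = call-site index N
       ... insns with identical action and landing pad ...
     NOTE_INSN_EH_REGION_END   ; NOTE_EH_HANDLER = N

   Must-not-throw spans get no notes at all, and no-action spans are
   materialized only once it is known that an lsda will be required.  */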
2630
2631 namespace {
2632
2633 const pass_data pass_data_convert_to_eh_region_ranges =
2634 {
2635 RTL_PASS, /* type */
2636 "eh_ranges", /* name */
2637 OPTGROUP_NONE, /* optinfo_flags */
2638 TV_NONE, /* tv_id */
2639 0, /* properties_required */
2640 0, /* properties_provided */
2641 0, /* properties_destroyed */
2642 0, /* todo_flags_start */
2643 0, /* todo_flags_finish */
2644 };
2645
2646 class pass_convert_to_eh_region_ranges : public rtl_opt_pass
2647 {
2648 public:
2649 pass_convert_to_eh_region_ranges (gcc::context *ctxt)
2650 : rtl_opt_pass (pass_data_convert_to_eh_region_ranges, ctxt)
2651 {}
2652
2653 /* opt_pass methods: */
2654 virtual bool gate (function *);
2655 virtual unsigned int execute (function *)
2656 {
2657 return convert_to_eh_region_ranges ();
2658 }
2659
2660 }; // class pass_convert_to_eh_region_ranges
2661
2662 bool
2663 pass_convert_to_eh_region_ranges::gate (function *)
2664 {
2665 /* Nothing to do for SJLJ exceptions or if no regions created. */
2666 if (cfun->eh->region_tree == NULL)
2667 return false;
2668 if (targetm_common.except_unwind_info (&global_options) == UI_SJLJ)
2669 return false;
2670 return true;
2671 }
2672
2673 } // anon namespace
2674
2675 rtl_opt_pass *
2676 make_pass_convert_to_eh_region_ranges (gcc::context *ctxt)
2677 {
2678 return new pass_convert_to_eh_region_ranges (ctxt);
2679 }
2680 \f
2681 static void
2682 push_uleb128 (vec<uchar, va_gc> **data_area, unsigned int value)
2683 {
2684 do
2685 {
2686 unsigned char byte = value & 0x7f;
2687 value >>= 7;
2688 if (value)
2689 byte |= 0x80;
2690 vec_safe_push (*data_area, byte);
2691 }
2692 while (value);
2693 }
2694
2695 static void
2696 push_sleb128 (vec<uchar, va_gc> **data_area, int value)
2697 {
2698 unsigned char byte;
2699 int more;
2700
2701 do
2702 {
2703 byte = value & 0x7f;
2704 value >>= 7;
2705 more = ! ((value == 0 && (byte & 0x40) == 0)
2706 || (value == -1 && (byte & 0x40) != 0));
2707 if (more)
2708 byte |= 0x80;
2709 vec_safe_push (*data_area, byte);
2710 }
2711 while (more);
2712 }
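
/* These produce the standard DWARF LEB128 encodings; for example, using
   the two worked examples from the DWARF specification:

     push_uleb128 (&v, 624485)   appends 0xe5 0x8e 0x26
     push_sleb128 (&v, -123456)  appends 0xc0 0xbb 0x78  */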
2713
2714 \f
2715 #ifndef HAVE_AS_LEB128
2716 static int
2717 dw2_size_of_call_site_table (int section)
2718 {
2719 int n = vec_safe_length (crtl->eh.call_site_record_v[section]);
2720 int size = n * (4 + 4 + 4);
2721 int i;
2722
2723 for (i = 0; i < n; ++i)
2724 {
2725 struct call_site_record_d *cs =
2726 (*crtl->eh.call_site_record_v[section])[i];
2727 size += size_of_uleb128 (cs->action);
2728 }
2729
2730 return size;
2731 }
2732
2733 static int
2734 sjlj_size_of_call_site_table (void)
2735 {
2736 int n = vec_safe_length (crtl->eh.call_site_record_v[0]);
2737 int size = 0;
2738 int i;
2739
2740 for (i = 0; i < n; ++i)
2741 {
2742 struct call_site_record_d *cs =
2743 (*crtl->eh.call_site_record_v[0])[i];
2744 size += size_of_uleb128 (INTVAL (cs->landing_pad));
2745 size += size_of_uleb128 (cs->action);
2746 }
2747
2748 return size;
2749 }
2750 #endif
2751
2752 static void
2753 dw2_output_call_site_table (int cs_format, int section)
2754 {
2755 int n = vec_safe_length (crtl->eh.call_site_record_v[section]);
2756 int i;
2757 const char *begin;
2758
2759 if (section == 0)
2760 begin = current_function_func_begin_label;
2761 else if (first_function_block_is_cold)
2762 begin = crtl->subsections.hot_section_label;
2763 else
2764 begin = crtl->subsections.cold_section_label;
2765
2766 for (i = 0; i < n; ++i)
2767 {
2768 struct call_site_record_d *cs = (*crtl->eh.call_site_record_v[section])[i];
2769 char reg_start_lab[32];
2770 char reg_end_lab[32];
2771 char landing_pad_lab[32];
2772
2773 ASM_GENERATE_INTERNAL_LABEL (reg_start_lab, "LEHB", call_site_base + i);
2774 ASM_GENERATE_INTERNAL_LABEL (reg_end_lab, "LEHE", call_site_base + i);
2775
2776 if (cs->landing_pad)
2777 ASM_GENERATE_INTERNAL_LABEL (landing_pad_lab, "L",
2778 CODE_LABEL_NUMBER (cs->landing_pad));
2779
2780 /* ??? Perhaps use insn length scaling if the assembler supports
2781 generic arithmetic. */
2782 /* ??? Perhaps use attr_length to choose data1 or data2 instead of
2783 data4 if the function is small enough. */
2784 if (cs_format == DW_EH_PE_uleb128)
2785 {
2786 dw2_asm_output_delta_uleb128 (reg_start_lab, begin,
2787 "region %d start", i);
2788 dw2_asm_output_delta_uleb128 (reg_end_lab, reg_start_lab,
2789 "length");
2790 if (cs->landing_pad)
2791 dw2_asm_output_delta_uleb128 (landing_pad_lab, begin,
2792 "landing pad");
2793 else
2794 dw2_asm_output_data_uleb128 (0, "landing pad");
2795 }
2796 else
2797 {
2798 dw2_asm_output_delta (4, reg_start_lab, begin,
2799 "region %d start", i);
2800 dw2_asm_output_delta (4, reg_end_lab, reg_start_lab, "length");
2801 if (cs->landing_pad)
2802 dw2_asm_output_delta (4, landing_pad_lab, begin,
2803 "landing pad");
2804 else
2805 dw2_asm_output_data (4, 0, "landing pad");
2806 }
2807 dw2_asm_output_data_uleb128 (cs->action, "action");
2808 }
2809
2810 call_site_base += n;
2811 }
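
/* With an leb128-capable assembler the loop above emits entries such as
   (a sketch of typical output; label names and the assembler comment
   character vary by target and function):

     .uleb128 .LEHB0-.LFB1     ; region 0 start
     .uleb128 .LEHE0-.LEHB0    ; length
     .uleb128 .L15-.LFB1       ; landing pad
     .uleb128 0x1              ; action

   In the DW_EH_PE_udata4 case each delta becomes a fixed 4-byte
   field.  */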
2812
2813 static void
2814 sjlj_output_call_site_table (void)
2815 {
2816 int n = vec_safe_length (crtl->eh.call_site_record_v[0]);
2817 int i;
2818
2819 for (i = 0; i < n; ++i)
2820 {
2821 struct call_site_record_d *cs = (*crtl->eh.call_site_record_v[0])[i];
2822
2823 dw2_asm_output_data_uleb128 (INTVAL (cs->landing_pad),
2824 "region %d landing pad", i);
2825 dw2_asm_output_data_uleb128 (cs->action, "action");
2826 }
2827
2828 call_site_base += n;
2829 }
2830
2831 /* Switch to the section that should be used for exception tables. */
2832
2833 static void
2834 switch_to_exception_section (const char * ARG_UNUSED (fnname))
2835 {
2836 section *s;
2837
2838 if (exception_section)
2839 s = exception_section;
2840 else
2841 {
2842 /* Compute the section and cache it into exception_section,
2843 unless it depends on the function name. */
2844 if (targetm_common.have_named_sections)
2845 {
2846 int flags;
2847
2848 if (EH_TABLES_CAN_BE_READ_ONLY)
2849 {
2850 int tt_format =
2851 ASM_PREFERRED_EH_DATA_FORMAT (/*code=*/0, /*global=*/1);
2852 flags = ((! flag_pic
2853 || ((tt_format & 0x70) != DW_EH_PE_absptr
2854 && (tt_format & 0x70) != DW_EH_PE_aligned))
2855 ? 0 : SECTION_WRITE);
2856 }
2857 else
2858 flags = SECTION_WRITE;
2859
2860 #ifdef HAVE_LD_EH_GC_SECTIONS
2861 if (flag_function_sections
2862 || (DECL_COMDAT_GROUP (current_function_decl) && HAVE_COMDAT_GROUP))
2863 {
2864 char *section_name = XNEWVEC (char, strlen (fnname) + 32);
2865 /* The EH table must match the code section, so only mark
2866 it linkonce if we have COMDAT groups to tie them together. */
2867 if (DECL_COMDAT_GROUP (current_function_decl) && HAVE_COMDAT_GROUP)
2868 flags |= SECTION_LINKONCE;
2869 sprintf (section_name, ".gcc_except_table.%s", fnname);
2870 s = get_section (section_name, flags, current_function_decl);
2871 free (section_name);
2872 }
2873 else
2874 #endif
2875 exception_section
2876 = s = get_section (".gcc_except_table", flags, NULL);
2877 }
2878 else
2879 exception_section
2880 = s = flag_pic ? data_section : readonly_data_section;
2881 }
2882
2883 switch_to_section (s);
2884 }
2885
2886
2887 /* Output a reference from an exception table to the type_info object TYPE.
2888 TT_FORMAT and TT_FORMAT_SIZE describe the DWARF encoding method used for
2889 the value. */
2890
2891 static void
2892 output_ttype (tree type, int tt_format, int tt_format_size)
2893 {
2894 rtx value;
2895 bool is_public = true;
2896
2897 if (type == NULL_TREE)
2898 value = const0_rtx;
2899 else
2900 {
2901 /* FIXME lto. pass_ipa_free_lang_data changes all types to
2902 runtime types so TYPE should already be a runtime type
2903 reference. When pass_ipa_free_lang_data is made a default
2904 pass, we can then remove the call to lookup_type_for_runtime
2905 below. */
2906 if (TYPE_P (type))
2907 type = lookup_type_for_runtime (type);
2908
2909 value = expand_expr (type, NULL_RTX, VOIDmode, EXPAND_INITIALIZER);
2910
2911 /* Let cgraph know that the rtti decl is used. Not all of the
2912 paths below go through assemble_integer, which would take
2913 care of this for us. */
2914 STRIP_NOPS (type);
2915 if (TREE_CODE (type) == ADDR_EXPR)
2916 {
2917 type = TREE_OPERAND (type, 0);
2918 if (TREE_CODE (type) == VAR_DECL)
2919 is_public = TREE_PUBLIC (type);
2920 }
2921 else
2922 gcc_assert (TREE_CODE (type) == INTEGER_CST);
2923 }
2924
2925 /* Allow the target to override the type table entry format. */
2926 if (targetm.asm_out.ttype (value))
2927 return;
2928
2929 if (tt_format == DW_EH_PE_absptr || tt_format == DW_EH_PE_aligned)
2930 assemble_integer (value, tt_format_size,
2931 tt_format_size * BITS_PER_UNIT, 1);
2932 else
2933 dw2_asm_output_encoded_addr_rtx (tt_format, value, is_public, NULL);
2934 }
2935
2936 static void
2937 output_one_function_exception_table (int section)
2938 {
2939 int tt_format, cs_format, lp_format, i;
2940 #ifdef HAVE_AS_LEB128
2941 char ttype_label[32];
2942 char cs_after_size_label[32];
2943 char cs_end_label[32];
2944 #else
2945 int call_site_len;
2946 #endif
2947 int have_tt_data;
2948 int tt_format_size = 0;
2949
2950 have_tt_data = (vec_safe_length (cfun->eh->ttype_data)
2951 || (targetm.arm_eabi_unwinder
2952 ? vec_safe_length (cfun->eh->ehspec_data.arm_eabi)
2953 : vec_safe_length (cfun->eh->ehspec_data.other)));
2954
2955 /* Indicate the format of the @TType entries. */
2956 if (! have_tt_data)
2957 tt_format = DW_EH_PE_omit;
2958 else
2959 {
2960 tt_format = ASM_PREFERRED_EH_DATA_FORMAT (/*code=*/0, /*global=*/1);
2961 #ifdef HAVE_AS_LEB128
2962 ASM_GENERATE_INTERNAL_LABEL (ttype_label,
2963 section ? "LLSDATTC" : "LLSDATT",
2964 current_function_funcdef_no);
2965 #endif
2966 tt_format_size = size_of_encoded_value (tt_format);
2967
2968 assemble_align (tt_format_size * BITS_PER_UNIT);
2969 }
2970
2971 targetm.asm_out.internal_label (asm_out_file, section ? "LLSDAC" : "LLSDA",
2972 current_function_funcdef_no);
2973
2974 /* The LSDA header. */
2975
2976 /* Indicate the format of the landing pad start pointer. An omitted
2977 field implies @LPStart == @Start. */
2978 /* Currently we always put @LPStart == @Start. This field would
2979 be most useful in moving the landing pads completely out of
2980 line to another section, but it could also be used to minimize
2981 the size of uleb128 landing pad offsets. */
2982 lp_format = DW_EH_PE_omit;
2983 dw2_asm_output_data (1, lp_format, "@LPStart format (%s)",
2984 eh_data_format_name (lp_format));
2985
2986 /* @LPStart pointer would go here. */
2987
2988 dw2_asm_output_data (1, tt_format, "@TType format (%s)",
2989 eh_data_format_name (tt_format));
2990
2991 #ifndef HAVE_AS_LEB128
2992 if (targetm_common.except_unwind_info (&global_options) == UI_SJLJ)
2993 call_site_len = sjlj_size_of_call_site_table ();
2994 else
2995 call_site_len = dw2_size_of_call_site_table (section);
2996 #endif
2997
2998 /* A pc-relative 4-byte displacement to the @TType data. */
2999 if (have_tt_data)
3000 {
3001 #ifdef HAVE_AS_LEB128
3002 char ttype_after_disp_label[32];
3003 ASM_GENERATE_INTERNAL_LABEL (ttype_after_disp_label,
3004 section ? "LLSDATTDC" : "LLSDATTD",
3005 current_function_funcdef_no);
3006 dw2_asm_output_delta_uleb128 (ttype_label, ttype_after_disp_label,
3007 "@TType base offset");
3008 ASM_OUTPUT_LABEL (asm_out_file, ttype_after_disp_label);
3009 #else
3010 /* Ugh. Alignment complicates things. */
3011 unsigned int before_disp, after_disp, last_disp, disp;
3012
3013 before_disp = 1 + 1;
3014 after_disp = (1 + size_of_uleb128 (call_site_len)
3015 + call_site_len
3016 + vec_safe_length (crtl->eh.action_record_data)
3017 + (vec_safe_length (cfun->eh->ttype_data)
3018 * tt_format_size));
3019
3020 disp = after_disp;
3021 do
3022 {
3023 unsigned int disp_size, pad;
3024
3025 last_disp = disp;
3026 disp_size = size_of_uleb128 (disp);
3027 pad = before_disp + disp_size + after_disp;
3028 if (pad % tt_format_size)
3029 pad = tt_format_size - (pad % tt_format_size);
3030 else
3031 pad = 0;
3032 disp = after_disp + pad;
3033 }
3034 while (disp != last_disp);
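
/* The iteration is needed because the size of the uleb128 encoding of
   DISP feeds back into the padding and hence into DISP itself.  For
   example, with tt_format_size == 4 and after_disp == 120: the first
   pass gives disp_size 1, pad 1, disp 121, and the second pass
   reproduces disp 121, reaching the fixed point.  */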
3035
3036 dw2_asm_output_data_uleb128 (disp, "@TType base offset");
3037 #endif
3038 }
3039
3040 /* Indicate the format of the call-site offsets. */
3041 #ifdef HAVE_AS_LEB128
3042 cs_format = DW_EH_PE_uleb128;
3043 #else
3044 cs_format = DW_EH_PE_udata4;
3045 #endif
3046 dw2_asm_output_data (1, cs_format, "call-site format (%s)",
3047 eh_data_format_name (cs_format));
3048
3049 #ifdef HAVE_AS_LEB128
3050 ASM_GENERATE_INTERNAL_LABEL (cs_after_size_label,
3051 section ? "LLSDACSBC" : "LLSDACSB",
3052 current_function_funcdef_no);
3053 ASM_GENERATE_INTERNAL_LABEL (cs_end_label,
3054 section ? "LLSDACSEC" : "LLSDACSE",
3055 current_function_funcdef_no);
3056 dw2_asm_output_delta_uleb128 (cs_end_label, cs_after_size_label,
3057 "Call-site table length");
3058 ASM_OUTPUT_LABEL (asm_out_file, cs_after_size_label);
3059 if (targetm_common.except_unwind_info (&global_options) == UI_SJLJ)
3060 sjlj_output_call_site_table ();
3061 else
3062 dw2_output_call_site_table (cs_format, section);
3063 ASM_OUTPUT_LABEL (asm_out_file, cs_end_label);
3064 #else
3065 dw2_asm_output_data_uleb128 (call_site_len, "Call-site table length");
3066 if (targetm_common.except_unwind_info (&global_options) == UI_SJLJ)
3067 sjlj_output_call_site_table ();
3068 else
3069 dw2_output_call_site_table (cs_format, section);
3070 #endif
3071
3072 /* ??? Decode and interpret the data for flag_debug_asm. */
3073 {
3074 uchar uc;
3075 FOR_EACH_VEC_ELT (*crtl->eh.action_record_data, i, uc)
3076 dw2_asm_output_data (1, uc, i ? NULL : "Action record table");
3077 }
3078
3079 if (have_tt_data)
3080 assemble_align (tt_format_size * BITS_PER_UNIT);
3081
3082 i = vec_safe_length (cfun->eh->ttype_data);
3083 while (i-- > 0)
3084 {
3085 tree type = (*cfun->eh->ttype_data)[i];
3086 output_ttype (type, tt_format, tt_format_size);
3087 }
3088
3089 #ifdef HAVE_AS_LEB128
3090 if (have_tt_data)
3091 ASM_OUTPUT_LABEL (asm_out_file, ttype_label);
3092 #endif
3093
3094 /* ??? Decode and interpret the data for flag_debug_asm. */
3095 if (targetm.arm_eabi_unwinder)
3096 {
3097 tree type;
3098 for (i = 0;
3099 vec_safe_iterate (cfun->eh->ehspec_data.arm_eabi, i, &type); ++i)
3100 output_ttype (type, tt_format, tt_format_size);
3101 }
3102 else
3103 {
3104 uchar uc;
3105 for (i = 0;
3106 vec_safe_iterate (cfun->eh->ehspec_data.other, i, &uc); ++i)
3107 dw2_asm_output_data (1, uc,
3108 i ? NULL : "Exception specification table");
3109 }
3110 }
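
/* The overall LSDA layout emitted above, in order:

     LLSDA label
     @LPStart format byte (always DW_EH_PE_omit here)
     @TType format byte
     [uleb128 @TType base offset, when there is @TType data]
     call-site format byte
     uleb128 call-site table length
     call-site table
     action record table
     [aligned @TType table, emitted in reverse order]
     [exception specification table]  */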
3111
3112 void
3113 output_function_exception_table (const char *fnname)
3114 {
3115 rtx personality = get_personality_function (current_function_decl);
3116
3117 /* Not all functions need anything. */
3118 if (! crtl->uses_eh_lsda)
3119 return;
3120
3121 if (personality)
3122 {
3123 assemble_external_libcall (personality);
3124
3125 if (targetm.asm_out.emit_except_personality)
3126 targetm.asm_out.emit_except_personality (personality);
3127 }
3128
3129 switch_to_exception_section (fnname);
3130
3131 /* If the target wants a label to begin the table, emit it here. */
3132 targetm.asm_out.emit_except_table_label (asm_out_file);
3133
3134 output_one_function_exception_table (0);
3135 if (crtl->eh.call_site_record_v[1])
3136 output_one_function_exception_table (1);
3137
3138 switch_to_section (current_function_section ());
3139 }
3140
3141 void
3142 set_eh_throw_stmt_table (function *fun, hash_map<gimple, int> *table)
3143 {
3144 fun->eh->throw_stmt_table = table;
3145 }
3146
3147 hash_map<gimple, int> *
3148 get_eh_throw_stmt_table (struct function *fun)
3149 {
3150 return fun->eh->throw_stmt_table;
3151 }
3152 \f
3153 /* Determine if the function needs an EH personality function. */
3154
3155 enum eh_personality_kind
3156 function_needs_eh_personality (struct function *fn)
3157 {
3158 enum eh_personality_kind kind = eh_personality_none;
3159 eh_region i;
3160
3161 FOR_ALL_EH_REGION_FN (i, fn)
3162 {
3163 switch (i->type)
3164 {
3165 case ERT_CLEANUP:
3166 /* Can do with any personality including the generic C one. */
3167 kind = eh_personality_any;
3168 break;
3169
3170 case ERT_TRY:
3171 case ERT_ALLOWED_EXCEPTIONS:
3172 /* Always needs an EH personality function. The generic C
3173 personality doesn't handle these even for empty type lists. */
3174 return eh_personality_lang;
3175
3176 case ERT_MUST_NOT_THROW:
3177 /* Always needs an EH personality function. The language may specify
3178 which abort routine must be used, e.g. std::terminate. */
3179 return eh_personality_lang;
3180 }
3181 }
3182
3183 return kind;
3184 }
3185 \f
3186 /* Dump EH information to OUT. */
3187
3188 void
3189 dump_eh_tree (FILE * out, struct function *fun)
3190 {
3191 eh_region i;
3192 int depth = 0;
3193 static const char *const type_name[] = {
3194 "cleanup", "try", "allowed_exceptions", "must_not_throw"
3195 };
3196
3197 i = fun->eh->region_tree;
3198 if (!i)
3199 return;
3200
3201 fprintf (out, "Eh tree:\n");
3202 while (1)
3203 {
3204 fprintf (out, " %*s %i %s", depth * 2, "",
3205 i->index, type_name[(int) i->type]);
3206
3207 if (i->landing_pads)
3208 {
3209 eh_landing_pad lp;
3210
3211 fprintf (out, " land:");
3212 if (current_ir_type () == IR_GIMPLE)
3213 {
3214 for (lp = i->landing_pads; lp ; lp = lp->next_lp)
3215 {
3216 fprintf (out, "{%i,", lp->index);
3217 print_generic_expr (out, lp->post_landing_pad, 0);
3218 fputc ('}', out);
3219 if (lp->next_lp)
3220 fputc (',', out);
3221 }
3222 }
3223 else
3224 {
3225 for (lp = i->landing_pads; lp ; lp = lp->next_lp)
3226 {
3227 fprintf (out, "{%i,", lp->index);
3228 if (lp->landing_pad)
3229 fprintf (out, "%i%s,", INSN_UID (lp->landing_pad),
3230 NOTE_P (lp->landing_pad) ? "(del)" : "");
3231 else
3232 fprintf (out, "(nil),");
3233 if (lp->post_landing_pad)
3234 {
3235 rtx_insn *lab = label_rtx (lp->post_landing_pad);
3236 fprintf (out, "%i%s}", INSN_UID (lab),
3237 NOTE_P (lab) ? "(del)" : "");
3238 }
3239 else
3240 fprintf (out, "(nil)}");
3241 if (lp->next_lp)
3242 fputc (',', out);
3243 }
3244 }
3245 }
3246
3247 switch (i->type)
3248 {
3249 case ERT_CLEANUP:
3250 case ERT_MUST_NOT_THROW:
3251 break;
3252
3253 case ERT_TRY:
3254 {
3255 eh_catch c;
3256 fprintf (out, " catch:");
3257 for (c = i->u.eh_try.first_catch; c; c = c->next_catch)
3258 {
3259 fputc ('{', out);
3260 if (c->label)
3261 {
3262 fprintf (out, "lab:");
3263 print_generic_expr (out, c->label, 0);
3264 fputc (';', out);
3265 }
3266 print_generic_expr (out, c->type_list, 0);
3267 fputc ('}', out);
3268 if (c->next_catch)
3269 fputc (',', out);
3270 }
3271 }
3272 break;
3273
3274 case ERT_ALLOWED_EXCEPTIONS:
3275 fprintf (out, " filter :%i types:", i->u.allowed.filter);
3276 print_generic_expr (out, i->u.allowed.type_list, 0);
3277 break;
3278 }
3279 fputc ('\n', out);
3280
3281 /* If there are sub-regions, process them. */
3282 if (i->inner)
3283 i = i->inner, depth++;
3284 /* If there are peers, process them. */
3285 else if (i->next_peer)
3286 i = i->next_peer;
3287 /* Otherwise, step back up the tree to the next peer. */
3288 else
3289 {
3290 do
3291 {
3292 i = i->outer;
3293 depth--;
3294 if (i == NULL)
3295 return;
3296 }
3297 while (i->next_peer == NULL);
3298 i = i->next_peer;
3299 }
3300 }
3301 }
3302
3303 /* Dump the EH tree for FN on stderr. */
3304
3305 DEBUG_FUNCTION void
3306 debug_eh_tree (struct function *fn)
3307 {
3308 dump_eh_tree (stderr, fn);
3309 }
3310
3311 /* Verify invariants on EH data structures. */
3312
3313 DEBUG_FUNCTION void
3314 verify_eh_tree (struct function *fun)
3315 {
3316 eh_region r, outer;
3317 int nvisited_lp, nvisited_r;
3318 int count_lp, count_r, depth, i;
3319 eh_landing_pad lp;
3320 bool err = false;
3321
3322 if (!fun->eh->region_tree)
3323 return;
3324
3325 count_r = 0;
3326 for (i = 1; vec_safe_iterate (fun->eh->region_array, i, &r); ++i)
3327 if (r)
3328 {
3329 if (r->index == i)
3330 count_r++;
3331 else
3332 {
3333 error ("region_array is corrupted for region %i", r->index);
3334 err = true;
3335 }
3336 }
3337
3338 count_lp = 0;
3339 for (i = 1; vec_safe_iterate (fun->eh->lp_array, i, &lp); ++i)
3340 if (lp)
3341 {
3342 if (lp->index == i)
3343 count_lp++;
3344 else
3345 {
3346 error ("lp_array is corrupted for lp %i", lp->index);
3347 err = true;
3348 }
3349 }
3350
3351 depth = nvisited_lp = nvisited_r = 0;
3352 outer = NULL;
3353 r = fun->eh->region_tree;
3354 while (1)
3355 {
3356 if ((*fun->eh->region_array)[r->index] != r)
3357 {
3358 error ("region_array is corrupted for region %i", r->index);
3359 err = true;
3360 }
3361 if (r->outer != outer)
3362 {
3363 error ("outer block of region %i is wrong", r->index);
3364 err = true;
3365 }
3366 if (depth < 0)
3367 {
3368 error ("negative nesting depth of region %i", r->index);
3369 err = true;
3370 }
3371 nvisited_r++;
3372
3373 for (lp = r->landing_pads; lp ; lp = lp->next_lp)
3374 {
3375 if ((*fun->eh->lp_array)[lp->index] != lp)
3376 {
3377 error ("lp_array is corrupted for lp %i", lp->index);
3378 err = true;
3379 }
3380 if (lp->region != r)
3381 {
3382 error ("region of lp %i is wrong", lp->index);
3383 err = true;
3384 }
3385 nvisited_lp++;
3386 }
3387
3388 if (r->inner)
3389 outer = r, r = r->inner, depth++;
3390 else if (r->next_peer)
3391 r = r->next_peer;
3392 else
3393 {
3394 do
3395 {
3396 r = r->outer;
3397 if (r == NULL)
3398 goto region_done;
3399 depth--;
3400 outer = r->outer;
3401 }
3402 while (r->next_peer == NULL);
3403 r = r->next_peer;
3404 }
3405 }
3406 region_done:
3407 if (depth != 0)
3408 {
3409 error ("tree list ends on depth %i", depth);
3410 err = true;
3411 }
3412 if (count_r != nvisited_r)
3413 {
3414 error ("region_array does not match region_tree");
3415 err = true;
3416 }
3417 if (count_lp != nvisited_lp)
3418 {
3419 error ("lp_array does not match region_tree");
3420 err = true;
3421 }
3422
3423 if (err)
3424 {
3425 dump_eh_tree (stderr, fun);
3426 internal_error ("verify_eh_tree failed");
3427 }
3428 }
3429 \f
3430 #include "gt-except.h"