/* Source capture: gcc/except.c from thirdparty/gcc.git (git.ipfire.org blame
   view).  Associated ChangeLog entry at capture time:
   "Make-lang.in (treelang-warn): Set to $(STRICT_WARN)."  */
97bb6c17 1/* Implements exception handling.
1ed5443b 2 Copyright (C) 1989, 1992, 1993, 1994, 1995, 1996, 1997, 1998,
48b3d385 3 1999, 2000, 2001, 2002, 2003, 2004 Free Software Foundation, Inc.
97ecdf3e 4 Contributed by Mike Stump <mrs@cygnus.com>.
5
f12b58b3 6This file is part of GCC.
97ecdf3e 7
f12b58b3 8GCC is free software; you can redistribute it and/or modify it under
9the terms of the GNU General Public License as published by the Free
10Software Foundation; either version 2, or (at your option) any later
11version.
97ecdf3e 12
f12b58b3 13GCC is distributed in the hope that it will be useful, but WITHOUT ANY
14WARRANTY; without even the implied warranty of MERCHANTABILITY or
15FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
16for more details.
97ecdf3e 17
18You should have received a copy of the GNU General Public License
f12b58b3 19along with GCC; see the file COPYING. If not, write to the Free
20Software Foundation, 59 Temple Place - Suite 330, Boston, MA
2102111-1307, USA. */
97ecdf3e 22
23
97bb6c17 24/* An exception is an event that can be signaled from within a
25 function. This event can then be "caught" or "trapped" by the
26 callers of this function. This potentially allows program flow to
ad87de1e 27 be transferred to any arbitrary code associated with a function call
97bb6c17 28 several levels up the stack.
29
30 The intended use for this mechanism is for signaling "exceptional
31 events" in an out-of-band fashion, hence its name. The C++ language
32 (and many other OO-styled or functional languages) practically
33 requires such a mechanism, as otherwise it becomes very difficult
34 or even impossible to signal failure conditions in complex
35 situations. The traditional C++ example is when an error occurs in
36 the process of constructing an object; without such a mechanism, it
37 is impossible to signal that the error occurs without adding global
38 state variables and error checks around every object construction.
39
40 The act of causing this event to occur is referred to as "throwing
41 an exception". (Alternate terms include "raising an exception" or
42 "signaling an exception".) The term "throw" is used because control
43 is returned to the callers of the function that is signaling the
44 exception, and thus there is the concept of "throwing" the
45 exception up the call stack.
46
df4b504c 47 [ Add updated documentation on how to use this. ] */
97ecdf3e 48
49
50#include "config.h"
405711de 51#include "system.h"
805e22b2 52#include "coretypes.h"
53#include "tm.h"
97ecdf3e 54#include "rtl.h"
55#include "tree.h"
56#include "flags.h"
97ecdf3e 57#include "function.h"
97ecdf3e 58#include "expr.h"
d8fc4d0b 59#include "libfuncs.h"
97ecdf3e 60#include "insn-config.h"
df4b504c 61#include "except.h"
62#include "integrate.h"
63#include "hard-reg-set.h"
64#include "basic-block.h"
97ecdf3e 65#include "output.h"
df4b504c 66#include "dwarf2asm.h"
67#include "dwarf2out.h"
ad5818ae 68#include "dwarf2.h"
12874aaf 69#include "toplev.h"
df4b504c 70#include "hashtab.h"
723766b6 71#include "intl.h"
a7b0c170 72#include "ggc.h"
075136a2 73#include "tm_p.h"
02c8b767 74#include "target.h"
a1f71e15 75#include "langhooks.h"
3b919cde 76#include "cgraph.h"
df4b504c 77
78/* Provide defaults for stuff that may not be defined when using
79 sjlj exceptions. */
df4b504c 80#ifndef EH_RETURN_DATA_REGNO
81#define EH_RETURN_DATA_REGNO(N) INVALID_REGNUM
98ad18c5 82#endif
83
8591d03a 84
df4b504c 85/* Protect cleanup actions with must-not-throw regions, with a call
86 to the given failure handler. */
35cb5232 87tree (*lang_protect_cleanup_actions) (void);
8591d03a 88
df4b504c 89/* Return true if type A catches type B. */
35cb5232 90int (*lang_eh_type_covers) (tree a, tree b);
8591d03a 91
df4b504c 92/* Map a type to a runtime object to match type. */
35cb5232 93tree (*lang_eh_runtime_type) (tree);
97ecdf3e 94
/* A hash table entry mapping an exception handler label to its
   EH region.  */

struct ehl_map_entry GTY(())
{
  rtx label;
  struct eh_region *region;
};
102
252cc619 103static GTY(()) int call_site_base;
1f3233d1 104static GTY ((param_is (union tree_node)))
105 htab_t type_to_runtime_map;
df4b504c 106
107/* Describe the SjLj_Function_Context structure. */
1f3233d1 108static GTY(()) tree sjlj_fc_type_node;
df4b504c 109static int sjlj_fc_call_site_ofs;
110static int sjlj_fc_data_ofs;
111static int sjlj_fc_personality_ofs;
112static int sjlj_fc_lsda_ofs;
113static int sjlj_fc_jbuf_ofs;
114\f
115/* Describes one exception region. */
/* Describes one exception region.  Regions form a tree that mirrors
   the nesting of exception constructs; siblings are chained through
   next_peer and children hang off inner.  */
struct eh_region GTY(())
{
  /* The immediately surrounding region.  */
  struct eh_region *outer;

  /* The list of immediately contained regions.  */
  struct eh_region *inner;
  struct eh_region *next_peer;

  /* An identifier for this region.  */
  int region_number;

  /* When a region is deleted, its parents inherit the REG_EH_REGION
     numbers already assigned.  */
  bitmap aka;

  /* Each region does exactly one thing.  */
  enum eh_region_type
  {
    ERT_UNKNOWN = 0,
    ERT_CLEANUP,
    ERT_TRY,
    ERT_CATCH,
    ERT_ALLOWED_EXCEPTIONS,
    ERT_MUST_NOT_THROW,
    ERT_THROW,
    ERT_FIXUP
  } type;

  /* Holds the action to perform based on the preceding type.  The GTY
     "desc" below selects the active union member from that tag.  */
  union eh_region_u {
    /* A list of catch blocks, a surrounding try block,
       and the label for continuing after a catch.  */
    struct eh_region_u_try {
      struct eh_region *catch;
      struct eh_region *last_catch;
      struct eh_region *prev_try;
      rtx continue_label;
    } GTY ((tag ("ERT_TRY"))) try;

    /* The list through the catch handlers, the list of type objects
       matched, and the list of associated filters.  */
    struct eh_region_u_catch {
      struct eh_region *next_catch;
      struct eh_region *prev_catch;
      tree type_list;
      tree filter_list;
    } GTY ((tag ("ERT_CATCH"))) catch;

    /* A tree_list of allowed types.  */
    struct eh_region_u_allowed {
      tree type_list;
      int filter;
    } GTY ((tag ("ERT_ALLOWED_EXCEPTIONS"))) allowed;

    /* The type given by a call to "throw foo();", or discovered
       for a throw.  */
    struct eh_region_u_throw {
      tree type;
    } GTY ((tag ("ERT_THROW"))) throw;

    /* Retain the cleanup expression even after expansion so that
       we can match up fixup regions.  */
    struct eh_region_u_cleanup {
      tree exp;
      struct eh_region *prev_try;
    } GTY ((tag ("ERT_CLEANUP"))) cleanup;

    /* The real region (by expression and by pointer) that fixup code
       should live in.  See resolve_fixup_regions.  */
    struct eh_region_u_fixup {
      tree cleanup_exp;
      struct eh_region *real_region;
      bool resolved;
    } GTY ((tag ("ERT_FIXUP"))) fixup;
  } GTY ((desc ("%0.type"))) u;

  /* Entry point for this region's handler before landing pads are built.  */
  rtx label;
  tree tree_label;

  /* Entry point for this region's handler from the runtime eh library.  */
  rtx landing_pad;

  /* Entry point for this region's handler from an inner region.  */
  rtx post_landing_pad;

  /* The RESX insn for handing off control to the next outermost handler,
     if appropriate.  */
  rtx resume;

  /* True if something in this region may throw.  */
  unsigned may_contain_throw : 1;
};
ec37ccb4 210
/* Associates a landing pad with an action record index; one element of
   the per-function call-site data (see add_call_site and the
   *_output_call_site_table routines).  */
struct call_site_record GTY(())
{
  rtx landing_pad;
  int action;
};
216
/* Used to save exception status for each function.  */
struct eh_status GTY(())
{
  /* The tree of all regions for this function.  */
  struct eh_region *region_tree;

  /* The same information as an indexable array.  */
  struct eh_region ** GTY ((length ("%h.last_region_number"))) region_array;

  /* The most recently open region.  */
  struct eh_region *cur_region;

  /* This is the region for which we are processing catch blocks.  */
  struct eh_region *try_region;

  /* Pseudos holding the exception dispatch filter and exception object
     pointer; created lazily by get_exception_filter and
     get_exception_pointer.  */
  rtx filter;
  rtx exc_ptr;

  /* Nonzero once landing pads have been generated for this function.  */
  int built_landing_pads;
  /* Highest region number assigned so far.  */
  int last_region_number;

  /* Data being accumulated for the exception tables.  */
  varray_type ttype_data;
  varray_type ehspec_data;
  varray_type action_record_data;

  /* Map from handler label to region; maintained by add_ehl_entry.  */
  htab_t GTY ((param_is (struct ehl_map_entry))) exception_handler_label_map;

  /* Growable array of call-site records, with used/size counts.  */
  struct call_site_record * GTY ((length ("%h.call_site_data_used")))
    call_site_data;
  int call_site_data_used;
  int call_site_data_size;

  /* Registers used by the eh-return machinery.  */
  rtx ehr_stackadj;
  rtx ehr_handler;
  rtx ehr_label;

  /* SjLj-specific: the function context object and exit placement.  */
  rtx sjlj_fc;
  rtx sjlj_exit_after;
};
d63ea2f2 256
df4b504c 257\f
35cb5232 258static int t2r_eq (const void *, const void *);
259static hashval_t t2r_hash (const void *);
260static void add_type_for_runtime (tree);
261static tree lookup_type_for_runtime (tree);
262
35cb5232 263static void resolve_fixup_regions (void);
264static void remove_fixup_regions (void);
265static void remove_unreachable_regions (rtx);
266static void convert_from_eh_region_ranges_1 (rtx *, int *, int);
267
268static struct eh_region *duplicate_eh_region_1 (struct eh_region *,
269 struct inline_remap *);
270static void duplicate_eh_region_2 (struct eh_region *, struct eh_region **);
271static int ttypes_filter_eq (const void *, const void *);
272static hashval_t ttypes_filter_hash (const void *);
273static int ehspec_filter_eq (const void *, const void *);
274static hashval_t ehspec_filter_hash (const void *);
275static int add_ttypes_entry (htab_t, tree);
276static int add_ehspec_entry (htab_t, htab_t, tree);
277static void assign_filter_values (void);
278static void build_post_landing_pads (void);
279static void connect_post_landing_pads (void);
280static void dw2_build_landing_pads (void);
df4b504c 281
282struct sjlj_lp_info;
35cb5232 283static bool sjlj_find_directly_reachable_regions (struct sjlj_lp_info *);
284static void sjlj_assign_call_site_values (rtx, struct sjlj_lp_info *);
285static void sjlj_mark_call_sites (struct sjlj_lp_info *);
286static void sjlj_emit_function_enter (rtx);
287static void sjlj_emit_function_exit (void);
288static void sjlj_emit_dispatch_table (rtx, struct sjlj_lp_info *);
289static void sjlj_build_landing_pads (void);
290
291static hashval_t ehl_hash (const void *);
292static int ehl_eq (const void *, const void *);
293static void add_ehl_entry (rtx, struct eh_region *);
294static void remove_exception_handler_label (rtx);
295static void remove_eh_handler (struct eh_region *);
296static int for_each_eh_label_1 (void **, void *);
df4b504c 297
/* The return value of reachable_next_level: how far the given region
   goes toward handling a given exception.  */
enum reachable_code
{
  /* The given exception is not processed by the given region.  */
  RNL_NOT_CAUGHT,
  /* The given exception may need processing by the given region.  */
  RNL_MAYBE_CAUGHT,
  /* The given exception is completely processed by the given region.  */
  RNL_CAUGHT,
  /* The given exception is completely processed by the runtime.  */
  RNL_BLOCKED
};
d63ea2f2 310
4ee9c684 311struct reachable_info;
35cb5232 312static enum reachable_code reachable_next_level (struct eh_region *, tree,
313 struct reachable_info *);
314
315static int action_record_eq (const void *, const void *);
316static hashval_t action_record_hash (const void *);
317static int add_action_record (htab_t, int, int);
318static int collect_one_action_chain (htab_t, struct eh_region *);
319static int add_call_site (rtx, int);
320
321static void push_uleb128 (varray_type *, unsigned int);
322static void push_sleb128 (varray_type *, int);
df4b504c 323#ifndef HAVE_AS_LEB128
35cb5232 324static int dw2_size_of_call_site_table (void);
325static int sjlj_size_of_call_site_table (void);
df4b504c 326#endif
35cb5232 327static void dw2_output_call_site_table (void);
328static void sjlj_output_call_site_table (void);
d63ea2f2 329
df4b504c 330\f
331/* Routine to see if exception handling is turned on.
6ef828f9 332 DO_WARN is nonzero if we want to inform the user that exception
1ed5443b 333 handling is turned off.
97ecdf3e 334
df4b504c 335 This is used to ensure that -fexceptions has been specified if the
336 compiler tries to use any exception-specific functions. */
97ecdf3e 337
df4b504c 338int
35cb5232 339doing_eh (int do_warn)
df4b504c 340{
341 if (! flag_exceptions)
342 {
343 static int warned = 0;
344 if (! warned && do_warn)
345 {
346 error ("exception handling disabled, use -fexceptions to enable");
347 warned = 1;
348 }
349 return 0;
350 }
351 return 1;
97ecdf3e 352}
353
df4b504c 354\f
355void
35cb5232 356init_eh (void)
97ecdf3e 357{
df4b504c 358 if (! flag_exceptions)
359 return;
97ecdf3e 360
1f3233d1 361 type_to_runtime_map = htab_create_ggc (31, t2r_hash, t2r_eq, NULL);
97ecdf3e 362
df4b504c 363 /* Create the SjLj_Function_Context structure. This should match
364 the definition in unwind-sjlj.c. */
365 if (USING_SJLJ_EXCEPTIONS)
366 {
367 tree f_jbuf, f_per, f_lsda, f_prev, f_cs, f_data, tmp;
97ecdf3e 368
dc24ddbd 369 sjlj_fc_type_node = lang_hooks.types.make_type (RECORD_TYPE);
011a7f23 370
df4b504c 371 f_prev = build_decl (FIELD_DECL, get_identifier ("__prev"),
372 build_pointer_type (sjlj_fc_type_node));
373 DECL_FIELD_CONTEXT (f_prev) = sjlj_fc_type_node;
011a7f23 374
df4b504c 375 f_cs = build_decl (FIELD_DECL, get_identifier ("__call_site"),
376 integer_type_node);
377 DECL_FIELD_CONTEXT (f_cs) = sjlj_fc_type_node;
97ecdf3e 378
7016c612 379 tmp = build_index_type (build_int_cst (NULL_TREE, 4 - 1));
dc24ddbd 380 tmp = build_array_type (lang_hooks.types.type_for_mode (word_mode, 1),
771d21fa 381 tmp);
df4b504c 382 f_data = build_decl (FIELD_DECL, get_identifier ("__data"), tmp);
383 DECL_FIELD_CONTEXT (f_data) = sjlj_fc_type_node;
011a7f23 384
df4b504c 385 f_per = build_decl (FIELD_DECL, get_identifier ("__personality"),
386 ptr_type_node);
387 DECL_FIELD_CONTEXT (f_per) = sjlj_fc_type_node;
97ecdf3e 388
df4b504c 389 f_lsda = build_decl (FIELD_DECL, get_identifier ("__lsda"),
390 ptr_type_node);
391 DECL_FIELD_CONTEXT (f_lsda) = sjlj_fc_type_node;
ed74c60e 392
df4b504c 393#ifdef DONT_USE_BUILTIN_SETJMP
394#ifdef JMP_BUF_SIZE
7016c612 395 tmp = build_int_cst (NULL_TREE, JMP_BUF_SIZE - 1);
df4b504c 396#else
397 /* Should be large enough for most systems, if it is not,
398 JMP_BUF_SIZE should be defined with the proper value. It will
399 also tend to be larger than necessary for most systems, a more
400 optimal port will define JMP_BUF_SIZE. */
7016c612 401 tmp = build_int_cst (NULL_TREE, FIRST_PSEUDO_REGISTER + 2 - 1);
df4b504c 402#endif
403#else
5b8192c3 404 /* builtin_setjmp takes a pointer to 5 words. */
7016c612 405 tmp = build_int_cst (NULL_TREE, 5 * BITS_PER_WORD / POINTER_SIZE - 1);
df4b504c 406#endif
407 tmp = build_index_type (tmp);
408 tmp = build_array_type (ptr_type_node, tmp);
409 f_jbuf = build_decl (FIELD_DECL, get_identifier ("__jbuf"), tmp);
410#ifdef DONT_USE_BUILTIN_SETJMP
411 /* We don't know what the alignment requirements of the
412 runtime's jmp_buf has. Overestimate. */
413 DECL_ALIGN (f_jbuf) = BIGGEST_ALIGNMENT;
414 DECL_USER_ALIGN (f_jbuf) = 1;
415#endif
416 DECL_FIELD_CONTEXT (f_jbuf) = sjlj_fc_type_node;
417
418 TYPE_FIELDS (sjlj_fc_type_node) = f_prev;
419 TREE_CHAIN (f_prev) = f_cs;
420 TREE_CHAIN (f_cs) = f_data;
421 TREE_CHAIN (f_data) = f_per;
422 TREE_CHAIN (f_per) = f_lsda;
423 TREE_CHAIN (f_lsda) = f_jbuf;
424
425 layout_type (sjlj_fc_type_node);
426
427 /* Cache the interesting field offsets so that we have
428 easy access from rtl. */
429 sjlj_fc_call_site_ofs
430 = (tree_low_cst (DECL_FIELD_OFFSET (f_cs), 1)
431 + tree_low_cst (DECL_FIELD_BIT_OFFSET (f_cs), 1) / BITS_PER_UNIT);
432 sjlj_fc_data_ofs
433 = (tree_low_cst (DECL_FIELD_OFFSET (f_data), 1)
434 + tree_low_cst (DECL_FIELD_BIT_OFFSET (f_data), 1) / BITS_PER_UNIT);
435 sjlj_fc_personality_ofs
436 = (tree_low_cst (DECL_FIELD_OFFSET (f_per), 1)
437 + tree_low_cst (DECL_FIELD_BIT_OFFSET (f_per), 1) / BITS_PER_UNIT);
438 sjlj_fc_lsda_ofs
439 = (tree_low_cst (DECL_FIELD_OFFSET (f_lsda), 1)
440 + tree_low_cst (DECL_FIELD_BIT_OFFSET (f_lsda), 1) / BITS_PER_UNIT);
441 sjlj_fc_jbuf_ofs
442 = (tree_low_cst (DECL_FIELD_OFFSET (f_jbuf), 1)
443 + tree_low_cst (DECL_FIELD_BIT_OFFSET (f_jbuf), 1) / BITS_PER_UNIT);
444 }
97ecdf3e 445}
446
df4b504c 447void
35cb5232 448init_eh_for_function (void)
97ecdf3e 449{
f0af5a88 450 cfun->eh = ggc_alloc_cleared (sizeof (struct eh_status));
8f8dcce4 451}
df4b504c 452\f
4ee9c684 453/* Routines to generate the exception tree somewhat directly.
454 These are used from tree-eh.c when processing exception related
455 nodes during tree optimization. */
456
457static struct eh_region *
458gen_eh_region (enum eh_region_type type, struct eh_region *outer)
459{
460 struct eh_region *new;
461
462#ifdef ENABLE_CHECKING
611234b4 463 gcc_assert (doing_eh (0));
4ee9c684 464#endif
465
466 /* Insert a new blank region as a leaf in the tree. */
467 new = ggc_alloc_cleared (sizeof (*new));
468 new->type = type;
469 new->outer = outer;
470 if (outer)
471 {
472 new->next_peer = outer->inner;
473 outer->inner = new;
474 }
475 else
476 {
477 new->next_peer = cfun->eh->region_tree;
478 cfun->eh->region_tree = new;
479 }
480
481 new->region_number = ++cfun->eh->last_region_number;
482
483 return new;
484}
485
486struct eh_region *
487gen_eh_region_cleanup (struct eh_region *outer, struct eh_region *prev_try)
488{
489 struct eh_region *cleanup = gen_eh_region (ERT_CLEANUP, outer);
490 cleanup->u.cleanup.prev_try = prev_try;
491 return cleanup;
492}
493
/* Open a new ERT_TRY region nested inside OUTER.  */
struct eh_region *
gen_eh_region_try (struct eh_region *outer)
{
  return gen_eh_region (ERT_TRY, outer);
}
499
500struct eh_region *
501gen_eh_region_catch (struct eh_region *t, tree type_or_list)
502{
503 struct eh_region *c, *l;
504 tree type_list, type_node;
505
506 /* Ensure to always end up with a type list to normalize further
507 processing, then register each type against the runtime types map. */
508 type_list = type_or_list;
509 if (type_or_list)
510 {
511 if (TREE_CODE (type_or_list) != TREE_LIST)
512 type_list = tree_cons (NULL_TREE, type_or_list, NULL_TREE);
513
514 type_node = type_list;
515 for (; type_node; type_node = TREE_CHAIN (type_node))
516 add_type_for_runtime (TREE_VALUE (type_node));
517 }
518
519 c = gen_eh_region (ERT_CATCH, t->outer);
520 c->u.catch.type_list = type_list;
521 l = t->u.try.last_catch;
522 c->u.catch.prev_catch = l;
523 if (l)
524 l->u.catch.next_catch = c;
525 else
526 t->u.try.catch = c;
527 t->u.try.last_catch = c;
528
529 return c;
530}
531
532struct eh_region *
533gen_eh_region_allowed (struct eh_region *outer, tree allowed)
534{
535 struct eh_region *region = gen_eh_region (ERT_ALLOWED_EXCEPTIONS, outer);
536 region->u.allowed.type_list = allowed;
537
538 for (; allowed ; allowed = TREE_CHAIN (allowed))
539 add_type_for_runtime (TREE_VALUE (allowed));
540
541 return region;
542}
543
/* Open a new ERT_MUST_NOT_THROW region nested inside OUTER.  */
struct eh_region *
gen_eh_region_must_not_throw (struct eh_region *outer)
{
  return gen_eh_region (ERT_MUST_NOT_THROW, outer);
}
549
/* Return the identifying number of REGION.  */
int
get_eh_region_number (struct eh_region *region)
{
  return region->region_number;
}
555
/* Return true if something within REGION may throw.  */
bool
get_eh_region_may_contain_throw (struct eh_region *region)
{
  return region->may_contain_throw;
}
561
/* Return the tree-level handler label of REGION.  */
tree
get_eh_region_tree_label (struct eh_region *region)
{
  return region->tree_label;
}
567
/* Set the tree-level handler label of REGION to LAB.  */
void
set_eh_region_tree_label (struct eh_region *region, tree lab)
{
  region->tree_label = lab;
}
573\f
4ee9c684 574void
575expand_resx_expr (tree exp)
576{
577 int region_nr = TREE_INT_CST_LOW (TREE_OPERAND (exp, 0));
578 struct eh_region *reg = cfun->eh->region_array[region_nr];
579
580 reg->resume = emit_jump_insn (gen_rtx_RESX (VOIDmode, region_nr));
581 emit_barrier ();
582}
583
95cedffb 584/* Note that the current EH region (if any) may contain a throw, or a
585 call to a function which itself may contain a throw. */
586
587void
4ee9c684 588note_eh_region_may_contain_throw (struct eh_region *region)
95cedffb 589{
95cedffb 590 while (region && !region->may_contain_throw)
591 {
592 region->may_contain_throw = 1;
593 region = region->outer;
594 }
595}
596
/* As note_eh_region_may_contain_throw, for the region currently open
   in cfun.  */
void
note_current_region_may_contain_throw (void)
{
  note_eh_region_may_contain_throw (cfun->eh->cur_region);
}
602
603
1639c6db 604/* Return an rtl expression for a pointer to the exception object
df4b504c 605 within a handler. */
97ecdf3e 606
607rtx
35cb5232 608get_exception_pointer (struct function *fun)
97ecdf3e 609{
572fdaa3 610 rtx exc_ptr = fun->eh->exc_ptr;
611 if (fun == cfun && ! exc_ptr)
df4b504c 612 {
f797387a 613 exc_ptr = gen_reg_rtx (ptr_mode);
572fdaa3 614 fun->eh->exc_ptr = exc_ptr;
df4b504c 615 }
616 return exc_ptr;
617}
97ecdf3e 618
1639c6db 619/* Return an rtl expression for the exception dispatch filter
620 within a handler. */
621
4ee9c684 622rtx
35cb5232 623get_exception_filter (struct function *fun)
1639c6db 624{
572fdaa3 625 rtx filter = fun->eh->filter;
626 if (fun == cfun && ! filter)
1639c6db 627 {
95215948 628 filter = gen_reg_rtx (targetm.eh_return_filter_mode ());
572fdaa3 629 fun->eh->filter = filter;
1639c6db 630 }
631 return filter;
632}
df4b504c 633\f
634/* This section is for the exception handling specific optimization pass. */
732992fa 635
/* Random access the exception region tree: build region_array, a
   table indexed by region number.  */

void
collect_eh_region_array (void)
{
  struct eh_region **array, *i;

  i = cfun->eh->region_tree;
  if (! i)
    return;

  array = ggc_alloc_cleared ((cfun->eh->last_region_number + 1)
			     * sizeof (*array));
  cfun->eh->region_array = array;

  /* Iterative depth-first walk over the whole region tree, recording
     each region at its region_number slot.  */
  while (1)
    {
      array[i->region_number] = i;

      /* If there are sub-regions, process them.  */
      if (i->inner)
	i = i->inner;
      /* If there are peers, process them.  */
      else if (i->next_peer)
	i = i->next_peer;
      /* Otherwise, step back up the tree to the next peer.  */
      else
	{
	  do {
	    i = i->outer;
	    if (i == NULL)
	      return;
	  } while (i->next_peer == NULL);
	  i = i->next_peer;
	}
    }
}
673
/* Resolve the real region for FIXUP: locate the ERT_CLEANUP region
   expanded from the same cleanup expression, and take that cleanup's
   parent (itself resolved first if it is another fixup) as the region
   the fixup code actually lives in.  */
static void
resolve_one_fixup_region (struct eh_region *fixup)
{
  struct eh_region *cleanup, *real;
  int j, n;

  n = cfun->eh->last_region_number;
  cleanup = 0;

  /* Find the cleanup region sharing this fixup's cleanup expression.  */
  for (j = 1; j <= n; ++j)
    {
      cleanup = cfun->eh->region_array[j];
      if (cleanup && cleanup->type == ERT_CLEANUP
	  && cleanup->u.cleanup.exp == fixup->u.fixup.cleanup_exp)
	break;
    }
  /* A matching cleanup region must exist.  */
  gcc_assert (j <= n);

  real = cleanup->outer;
  if (real && real->type == ERT_FIXUP)
    {
      /* Chase chained fixups, resolving them recursively first.  */
      if (!real->u.fixup.resolved)
	resolve_one_fixup_region (real);
      real = real->u.fixup.real_region;
    }

  fixup->u.fixup.real_region = real;
  fixup->u.fixup.resolved = true;
}
703
df4b504c 704static void
35cb5232 705resolve_fixup_regions (void)
8591d03a 706{
618fa93e 707 int i, n = cfun->eh->last_region_number;
8591d03a 708
df4b504c 709 for (i = 1; i <= n; ++i)
710 {
711 struct eh_region *fixup = cfun->eh->region_array[i];
8591d03a 712
618fa93e 713 if (!fixup || fixup->type != ERT_FIXUP || fixup->u.fixup.resolved)
df4b504c 714 continue;
8591d03a 715
618fa93e 716 resolve_one_fixup_region (fixup);
df4b504c 717 }
8591d03a 718}
8591d03a 719
/* Now that we've discovered what region actually encloses a fixup
   (via resolve_fixup_regions), we can shuffle pointers and remove
   them from the tree.  */

static void
remove_fixup_regions (void)
{
  int i;
  rtx insn, note;
  struct eh_region *fixup;

  /* Walk the insn chain and adjust the REG_EH_REGION numbers
     for instructions referencing fixup regions.  This is only
     strictly necessary for fixup regions with no parent, but
     doesn't hurt to do it for all regions.  */
  for (insn = get_insns(); insn ; insn = NEXT_INSN (insn))
    if (INSN_P (insn)
	&& (note = find_reg_note (insn, REG_EH_REGION, NULL))
	&& INTVAL (XEXP (note, 0)) > 0
	&& (fixup = cfun->eh->region_array[INTVAL (XEXP (note, 0))])
	&& fixup->type == ERT_FIXUP)
      {
	if (fixup->u.fixup.real_region)
	  /* Retarget the note at the region the fixup resolved to.  */
	  XEXP (note, 0) = GEN_INT (fixup->u.fixup.real_region->region_number);
	else
	  /* No enclosing region: the insn cannot throw to a handler.  */
	  remove_note (insn, note);
      }

  /* Remove the fixup regions from the tree.  */
  for (i = cfun->eh->last_region_number; i > 0; --i)
    {
      fixup = cfun->eh->region_array[i];
      if (! fixup)
	continue;

      /* Allow GC to maybe free some memory.  */
      if (fixup->type == ERT_CLEANUP)
	fixup->u.cleanup.exp = NULL_TREE;

      if (fixup->type != ERT_FIXUP)
	continue;

      if (fixup->inner)
	{
	  struct eh_region *parent, *p, **pp;

	  parent = fixup->u.fixup.real_region;

	  /* Fix up the children's parent pointers; find the end of
	     the list.  */
	  for (p = fixup->inner; ; p = p->next_peer)
	    {
	      p->outer = parent;
	      if (! p->next_peer)
		break;
	    }

	  /* In the tree of cleanups, only outer-inner ordering matters.
	     So link the children back in anywhere at the correct level.  */
	  if (parent)
	    pp = &parent->inner;
	  else
	    pp = &cfun->eh->region_tree;
	  p->next_peer = *pp;
	  *pp = fixup->inner;
	  fixup->inner = NULL;
	}

      remove_eh_handler (fixup);
    }
}
790
/* Remove all regions whose labels are not reachable from insns.  */

static void
remove_unreachable_regions (rtx insns)
{
  int i, *uid_region_num;
  bool *reachable;
  struct eh_region *r;
  rtx insn;

  uid_region_num = xcalloc (get_max_uid (), sizeof(int));
  reachable = xcalloc (cfun->eh->last_region_number + 1, sizeof(bool));

  /* Map the INSN_UID of each region's label and resume insn back to
     the region number.  Each uid may belong to at most one region.  */
  for (i = cfun->eh->last_region_number; i > 0; --i)
    {
      r = cfun->eh->region_array[i];
      if (!r || r->region_number != i)
	continue;

      if (r->resume)
	{
	  gcc_assert (!uid_region_num[INSN_UID (r->resume)]);
	  uid_region_num[INSN_UID (r->resume)] = i;
	}
      if (r->label)
	{
	  gcc_assert (!uid_region_num[INSN_UID (r->label)]);
	  uid_region_num[INSN_UID (r->label)] = i;
	}
    }

  /* A region is reachable if its label or resume insn is still in the
     insn stream.  */
  for (insn = insns; insn; insn = NEXT_INSN (insn))
    reachable[uid_region_num[INSN_UID (insn)]] = true;

  for (i = cfun->eh->last_region_number; i > 0; --i)
    {
      r = cfun->eh->region_array[i];
      if (r && r->region_number == i && !reachable[i])
	{
	  bool kill_it = true;
	  switch (r->type)
	    {
	    case ERT_THROW:
	      /* Don't remove ERT_THROW regions if their outer region
		 is reachable.  */
	      if (r->outer && reachable[r->outer->region_number])
		kill_it = false;
	      break;

	    case ERT_MUST_NOT_THROW:
	      /* MUST_NOT_THROW regions are implementable solely in the
		 runtime, but their existence continues to affect calls
		 within that region.  Never delete them here.  */
	      kill_it = false;
	      break;

	    case ERT_TRY:
	      {
		/* TRY regions are reachable if any of its CATCH regions
		   are reachable.  */
		struct eh_region *c;
		for (c = r->u.try.catch; c ; c = c->u.catch.next_catch)
		  if (reachable[c->region_number])
		    {
		      kill_it = false;
		      break;
		    }
		break;
	      }

	    default:
	      break;
	    }

	  if (kill_it)
	    remove_eh_handler (r);
	}
    }

  free (reachable);
  free (uid_region_num);
}
873
/* Turn NOTE_INSN_EH_REGION notes into REG_EH_REGION notes for each
   can_throw instruction in the region.  ORIG_SP points to scratch
   space used as a stack of enclosing region numbers while walking
   nested region notes; CUR is the region number in effect at entry
   (0 for none).  The region notes themselves are removed.  */

static void
convert_from_eh_region_ranges_1 (rtx *pinsns, int *orig_sp, int cur)
{
  int *sp = orig_sp;
  rtx insn, next;

  for (insn = *pinsns; insn ; insn = next)
    {
      next = NEXT_INSN (insn);
      if (NOTE_P (insn))
	{
	  int kind = NOTE_LINE_NUMBER (insn);
	  if (kind == NOTE_INSN_EH_REGION_BEG
	      || kind == NOTE_INSN_EH_REGION_END)
	    {
	      if (kind == NOTE_INSN_EH_REGION_BEG)
		{
		  struct eh_region *r;

		  /* Push the enclosing region; switch to the new one.  */
		  *sp++ = cur;
		  cur = NOTE_EH_HANDLER (insn);

		  r = cfun->eh->region_array[cur];
		  if (r->type == ERT_FIXUP)
		    {
		      /* A fixup stands in for its resolved real region.  */
		      r = r->u.fixup.real_region;
		      cur = r ? r->region_number : 0;
		    }
		  else if (r->type == ERT_CATCH)
		    {
		      /* Insns in a catch handler are attributed to the
			 region enclosing the catch.  */
		      r = r->outer;
		      cur = r ? r->region_number : 0;
		    }
		}
	      else
		cur = *--sp;

	      /* Delete the region note, keeping *pinsns valid if it was
		 the first insn.  */
	      if (insn == *pinsns)
		*pinsns = next;
	      remove_insn (insn);
	      continue;
	    }
	}
      else if (INSN_P (insn))
	{
	  if (cur > 0
	      && ! find_reg_note (insn, REG_EH_REGION, NULL_RTX)
	      /* Calls can always potentially throw exceptions, unless
		 they have a REG_EH_REGION note with a value of 0 or less.
		 Which should be the only possible kind so far.  */
	      && (CALL_P (insn)
		  /* If we wanted exceptions for non-call insns, then
		     any may_trap_p instruction could throw.  */
		  || (flag_non_call_exceptions
		      && GET_CODE (PATTERN (insn)) != CLOBBER
		      && GET_CODE (PATTERN (insn)) != USE
		      && may_trap_p (PATTERN (insn)))))
	    {
	      REG_NOTES (insn) = alloc_EXPR_LIST (REG_EH_REGION, GEN_INT (cur),
						  REG_NOTES (insn));
	    }
	}
    }

  /* Region begin/end notes must have been properly nested.  */
  gcc_assert (sp == orig_sp);
}
8591d03a 943
4ee9c684 944static void
945collect_rtl_labels_from_trees (void)
946{
947 int i, n = cfun->eh->last_region_number;
948 for (i = 1; i <= n; ++i)
949 {
950 struct eh_region *reg = cfun->eh->region_array[i];
951 if (reg && reg->tree_label)
952 reg->label = DECL_RTL_IF_SET (reg->tree_label);
953 }
954}
955
/* Convert region-note based EH data to the REG_EH_REGION based
   representation (or, when the region array was already built by the
   tree optimizers, just pick up the rtl labels), then prune regions
   that turned out to be unreachable.  */
void
convert_from_eh_region_ranges (void)
{
  rtx insns = get_insns ();

  if (cfun->eh->region_array)
    {
      /* If the region array already exists, assume we're coming from
	 optimize_function_tree.  In this case all we need to do is
	 collect the rtl labels that correspond to the tree labels
	 that we allocated earlier.  */
      collect_rtl_labels_from_trees ();
    }
  else
    {
      int *stack;

      collect_eh_region_array ();
      resolve_fixup_regions ();

      /* One stack slot per region number bounds the note nesting.  */
      stack = xmalloc (sizeof (int) * (cfun->eh->last_region_number + 1));
      convert_from_eh_region_ranges_1 (&insns, stack, 0);
      free (stack);

      remove_fixup_regions ();
    }

  remove_unreachable_regions (insns);
}
985
/* Enter LABEL -> REGION into the exception_handler_label_map (REGION
   may be null, as for the sjlj return label) and keep LABEL from
   being deleted as unreferenced.  */
static void
add_ehl_entry (rtx label, struct eh_region *region)
{
  struct ehl_map_entry **slot, *entry;

  LABEL_PRESERVE_P (label) = 1;

  entry = ggc_alloc (sizeof (*entry));
  entry->label = label;
  entry->region = region;

  slot = (struct ehl_map_entry **)
    htab_find_slot (cfun->eh->exception_handler_label_map, entry, INSERT);

  /* Before landing pad creation, each exception handler has its own
     label.  After landing pad creation, the exception handlers may
     share landing pads.  This is ok, since maybe_remove_eh_handler
     only requires the 1-1 mapping before landing pad creation.  */
  gcc_assert (!*slot || cfun->eh->built_landing_pads);

  *slot = entry;
}
1008
/* Rebuild cfun->eh->exception_handler_label_map from scratch, mapping
   each live EH label (handler label, or landing pad once pads have been
   built) back to its region.  */

void
find_exception_handler_labels (void)
{
  int i;

  /* Start from an empty map, creating it on first use.  */
  if (cfun->eh->exception_handler_label_map)
    htab_empty (cfun->eh->exception_handler_label_map);
  else
    {
      /* ??? The expansion factor here (3/2) must be greater than the htab
	 occupancy factor (4/3) to avoid unnecessary resizing.  */
      cfun->eh->exception_handler_label_map
        = htab_create_ggc (cfun->eh->last_region_number * 3 / 2,
			   ehl_hash, ehl_eq, NULL);
    }

  if (cfun->eh->region_tree == NULL)
    return;

  for (i = cfun->eh->last_region_number; i > 0; --i)
    {
      struct eh_region *region = cfun->eh->region_array[i];
      rtx lab;

      /* Skip empty slots and duplicate entries for the same region.  */
      if (! region || region->region_number != i)
	continue;
      /* After landing pads exist, they are what control transfers to.  */
      if (cfun->eh->built_landing_pads)
	lab = region->landing_pad;
      else
	lab = region->label;

      if (lab)
	add_ehl_entry (lab, region);
    }

  /* For sjlj exceptions, need the return label to remain live until
     after landing pad generation.  */
  if (USING_SJLJ_EXCEPTIONS && ! cfun->eh->built_landing_pads)
    add_ehl_entry (return_label, NULL);
}
1049
7cb6ef9c 1050bool
35cb5232 1051current_function_has_exception_handlers (void)
7cb6ef9c 1052{
1053 int i;
1054
1055 for (i = cfun->eh->last_region_number; i > 0; --i)
1056 {
1057 struct eh_region *region = cfun->eh->region_array[i];
1058
1059 if (! region || region->region_number != i)
1060 continue;
1061 if (region->type != ERT_THROW)
1062 return true;
1063 }
1064
1065 return false;
1066}
df4b504c 1067\f
1068static struct eh_region *
35cb5232 1069duplicate_eh_region_1 (struct eh_region *o, struct inline_remap *map)
97ecdf3e 1070{
f0af5a88 1071 struct eh_region *n = ggc_alloc_cleared (sizeof (struct eh_region));
97ecdf3e 1072
df4b504c 1073 n->region_number = o->region_number + cfun->eh->last_region_number;
1074 n->type = o->type;
97ecdf3e 1075
df4b504c 1076 switch (n->type)
1077 {
1078 case ERT_CLEANUP:
1079 case ERT_MUST_NOT_THROW:
1080 break;
8591d03a 1081
df4b504c 1082 case ERT_TRY:
1083 if (o->u.try.continue_label)
1084 n->u.try.continue_label
1085 = get_label_from_map (map,
1086 CODE_LABEL_NUMBER (o->u.try.continue_label));
1087 break;
8591d03a 1088
df4b504c 1089 case ERT_CATCH:
5c15c916 1090 n->u.catch.type_list = o->u.catch.type_list;
df4b504c 1091 break;
8591d03a 1092
df4b504c 1093 case ERT_ALLOWED_EXCEPTIONS:
1094 n->u.allowed.type_list = o->u.allowed.type_list;
1095 break;
1096
1097 case ERT_THROW:
1098 n->u.throw.type = o->u.throw.type;
1ed5443b 1099
df4b504c 1100 default:
611234b4 1101 gcc_unreachable ();
df4b504c 1102 }
1103
1104 if (o->label)
1105 n->label = get_label_from_map (map, CODE_LABEL_NUMBER (o->label));
1639c6db 1106 if (o->resume)
777ddd59 1107 {
1639c6db 1108 n->resume = map->insn_map[INSN_UID (o->resume)];
611234b4 1109 gcc_assert (n->resume);
8591d03a 1110 }
97ecdf3e 1111
df4b504c 1112 return n;
97ecdf3e 1113}
1114
df4b504c 1115static void
35cb5232 1116duplicate_eh_region_2 (struct eh_region *o, struct eh_region **n_array)
3ec33c57 1117{
df4b504c 1118 struct eh_region *n = n_array[o->region_number];
3ec33c57 1119
df4b504c 1120 switch (n->type)
1121 {
1122 case ERT_TRY:
1123 n->u.try.catch = n_array[o->u.try.catch->region_number];
1124 n->u.try.last_catch = n_array[o->u.try.last_catch->region_number];
1125 break;
97bb6c17 1126
df4b504c 1127 case ERT_CATCH:
1128 if (o->u.catch.next_catch)
ff385626 1129 n->u.catch.next_catch = n_array[o->u.catch.next_catch->region_number];
df4b504c 1130 if (o->u.catch.prev_catch)
ff385626 1131 n->u.catch.prev_catch = n_array[o->u.catch.prev_catch->region_number];
df4b504c 1132 break;
97bb6c17 1133
df4b504c 1134 default:
1135 break;
1136 }
97ecdf3e 1137
df4b504c 1138 if (o->outer)
1139 n->outer = n_array[o->outer->region_number];
1140 if (o->inner)
1141 n->inner = n_array[o->inner->region_number];
1142 if (o->next_peer)
1143 n->next_peer = n_array[o->next_peer->region_number];
1ed5443b 1144}
df4b504c 1145
/* Duplicate all EH regions of inlined function IFUN into the current
   function, translating labels/insns through MAP, and splice the copied
   region tree under the current region (or at top level).  Returns the
   region-number offset that was applied to the copies (the current
   function's previous last_region_number), or 0 if IFUN has no
   regions.  */

int
duplicate_eh_regions (struct function *ifun, struct inline_remap *map)
{
  int ifun_last_region_number = ifun->eh->last_region_number;
  struct eh_region **n_array, *root, *cur;
  int i;

  if (ifun_last_region_number == 0)
    return 0;

  /* n_array maps IFUN's original region numbers to the new copies.  */
  n_array = xcalloc (ifun_last_region_number + 1, sizeof (*n_array));

  /* First pass: copy each region in isolation.  */
  for (i = 1; i <= ifun_last_region_number; ++i)
    {
      cur = ifun->eh->region_array[i];
      if (!cur || cur->region_number != i)
	continue;
      n_array[i] = duplicate_eh_region_1 (cur, map);
    }
  /* Second pass: fix up inter-region pointers in the copies.  */
  for (i = 1; i <= ifun_last_region_number; ++i)
    {
      cur = ifun->eh->region_array[i];
      if (!cur || cur->region_number != i)
	continue;
      duplicate_eh_region_2 (cur, n_array);
    }

  /* Splice the copied tree in as the last peer under the current
     region, or as the last top-level peer if there is none.  */
  root = n_array[ifun->eh->region_tree->region_number];
  cur = cfun->eh->cur_region;
  if (cur)
    {
      struct eh_region *p = cur->inner;
      if (p)
	{
	  while (p->next_peer)
	    p = p->next_peer;
	  p->next_peer = root;
	}
      else
	cur->inner = root;

      /* Copies that were roots in IFUN now nest inside CUR.  */
      for (i = 1; i <= ifun_last_region_number; ++i)
	if (n_array[i] && n_array[i]->outer == NULL)
	  n_array[i]->outer = cur;
    }
  else
    {
      struct eh_region *p = cfun->eh->region_tree;
      if (p)
	{
	  while (p->next_peer)
	    p = p->next_peer;
	  p->next_peer = root;
	}
      else
	cfun->eh->region_tree = root;
    }

  free (n_array);

  i = cfun->eh->last_region_number;
  cfun->eh->last_region_number = i + ifun_last_region_number;
  return i;
}
1210
df4b504c 1211\f
df4b504c 1212static int
35cb5232 1213t2r_eq (const void *pentry, const void *pdata)
506b6864 1214{
df4b504c 1215 tree entry = (tree) pentry;
1216 tree data = (tree) pdata;
506b6864 1217
df4b504c 1218 return TREE_PURPOSE (entry) == data;
506b6864 1219}
1220
df4b504c 1221static hashval_t
35cb5232 1222t2r_hash (const void *pentry)
df4b504c 1223{
1224 tree entry = (tree) pentry;
908e5f41 1225 return TREE_HASH (TREE_PURPOSE (entry));
df4b504c 1226}
506b6864 1227
df4b504c 1228static void
35cb5232 1229add_type_for_runtime (tree type)
df4b504c 1230{
1231 tree *slot;
506b6864 1232
df4b504c 1233 slot = (tree *) htab_find_slot_with_hash (type_to_runtime_map, type,
908e5f41 1234 TREE_HASH (type), INSERT);
df4b504c 1235 if (*slot == NULL)
1236 {
1237 tree runtime = (*lang_eh_runtime_type) (type);
1238 *slot = tree_cons (type, runtime, NULL_TREE);
1239 }
1240}
1ed5443b 1241
df4b504c 1242static tree
35cb5232 1243lookup_type_for_runtime (tree type)
df4b504c 1244{
1245 tree *slot;
8cd32046 1246
df4b504c 1247 slot = (tree *) htab_find_slot_with_hash (type_to_runtime_map, type,
908e5f41 1248 TREE_HASH (type), NO_INSERT);
8cd32046 1249
3fb1e43b 1250 /* We should have always inserted the data earlier. */
df4b504c 1251 return TREE_VALUE (*slot);
1252}
506b6864 1253
df4b504c 1254\f
/* Represent an entry in @TTypes for either catch actions
   or exception filter actions.  */
struct ttypes_filter GTY(())
{
  tree t;	/* The caught type, or an exception-spec type list.  */
  int filter;	/* The filter value assigned for T (1-based for types,
		   negative byte index for exception specs).  */
};
8cd32046 1262
df4b504c 1263/* Compare ENTRY (a ttypes_filter entry in the hash table) with DATA
1264 (a tree) for a @TTypes type node we are thinking about adding. */
8cd32046 1265
df4b504c 1266static int
35cb5232 1267ttypes_filter_eq (const void *pentry, const void *pdata)
df4b504c 1268{
1269 const struct ttypes_filter *entry = (const struct ttypes_filter *) pentry;
1270 tree data = (tree) pdata;
8cd32046 1271
df4b504c 1272 return entry->t == data;
506b6864 1273}
1274
df4b504c 1275static hashval_t
35cb5232 1276ttypes_filter_hash (const void *pentry)
df4b504c 1277{
1278 const struct ttypes_filter *entry = (const struct ttypes_filter *) pentry;
908e5f41 1279 return TREE_HASH (entry->t);
df4b504c 1280}
97ecdf3e 1281
df4b504c 1282/* Compare ENTRY with DATA (both struct ttypes_filter) for a @TTypes
1283 exception specification list we are thinking about adding. */
1284/* ??? Currently we use the type lists in the order given. Someone
1285 should put these in some canonical order. */
1286
1287static int
35cb5232 1288ehspec_filter_eq (const void *pentry, const void *pdata)
97ecdf3e 1289{
df4b504c 1290 const struct ttypes_filter *entry = (const struct ttypes_filter *) pentry;
1291 const struct ttypes_filter *data = (const struct ttypes_filter *) pdata;
1292
1293 return type_list_equal (entry->t, data->t);
97ecdf3e 1294}
1295
df4b504c 1296/* Hash function for exception specification lists. */
97ecdf3e 1297
df4b504c 1298static hashval_t
35cb5232 1299ehspec_filter_hash (const void *pentry)
97ecdf3e 1300{
df4b504c 1301 const struct ttypes_filter *entry = (const struct ttypes_filter *) pentry;
1302 hashval_t h = 0;
1303 tree list;
1304
1305 for (list = entry->t; list ; list = TREE_CHAIN (list))
908e5f41 1306 h = (h << 5) + (h >> 27) + TREE_HASH (TREE_VALUE (list));
df4b504c 1307 return h;
97ecdf3e 1308}
1309
908e5f41 1310/* Add TYPE (which may be NULL) to cfun->eh->ttype_data, using TYPES_HASH
1311 to speed up the search. Return the filter value to be used. */
97ecdf3e 1312
df4b504c 1313static int
35cb5232 1314add_ttypes_entry (htab_t ttypes_hash, tree type)
97ecdf3e 1315{
df4b504c 1316 struct ttypes_filter **slot, *n;
97ecdf3e 1317
df4b504c 1318 slot = (struct ttypes_filter **)
908e5f41 1319 htab_find_slot_with_hash (ttypes_hash, type, TREE_HASH (type), INSERT);
df4b504c 1320
1321 if ((n = *slot) == NULL)
97ecdf3e 1322 {
df4b504c 1323 /* Filter value is a 1 based table index. */
97bb6c17 1324
f0af5a88 1325 n = xmalloc (sizeof (*n));
df4b504c 1326 n->t = type;
1327 n->filter = VARRAY_ACTIVE_SIZE (cfun->eh->ttype_data) + 1;
1328 *slot = n;
1329
1330 VARRAY_PUSH_TREE (cfun->eh->ttype_data, type);
97ecdf3e 1331 }
df4b504c 1332
1333 return n->filter;
97ecdf3e 1334}
1335
/* Add LIST to cfun->eh->ehspec_data, using EHSPEC_HASH and TYPES_HASH
   to speed up the search.  Return the filter value to be used.  */

static int
add_ehspec_entry (htab_t ehspec_hash, htab_t ttypes_hash, tree list)
{
  struct ttypes_filter **slot, *n;
  struct ttypes_filter dummy;

  /* A stack dummy suffices for the lookup: ehspec_filter_eq/hash only
     inspect the T field.  */
  dummy.t = list;
  slot = (struct ttypes_filter **)
    htab_find_slot (ehspec_hash, &dummy, INSERT);

  if ((n = *slot) == NULL)
    {
      /* Filter value is a -1 based byte index into a uleb128 buffer.  */

      n = xmalloc (sizeof (*n));
      n->t = list;
      n->filter = -(VARRAY_ACTIVE_SIZE (cfun->eh->ehspec_data) + 1);
      *slot = n;

      /* Look up each type in the list and encode its filter
	 value as a uleb128.  Terminate the list with 0.  */
      for (; list ; list = TREE_CHAIN (list))
	push_uleb128 (&cfun->eh->ehspec_data,
		      add_ttypes_entry (ttypes_hash, TREE_VALUE (list)));
      VARRAY_PUSH_UCHAR (cfun->eh->ehspec_data, 0);
    }

  return n->filter;
}
1368
/* Generate the action filter values to be used for CATCH and
   ALLOWED_EXCEPTIONS regions.  When using dwarf2 exception regions,
   we use lots of landing pads, and so every type or list can share
   the same filter value, which saves table space.  */

static void
assign_filter_values (void)
{
  int i;
  htab_t ttypes, ehspec;

  /* ttype_data and ehspec_data are the output tables that the filter
     values computed here index into.  */
  VARRAY_TREE_INIT (cfun->eh->ttype_data, 16, "ttype_data");
  VARRAY_UCHAR_INIT (cfun->eh->ehspec_data, 64, "ehspec_data");

  /* Temporary hash tables for deduplicating filter values.  */
  ttypes = htab_create (31, ttypes_filter_hash, ttypes_filter_eq, free);
  ehspec = htab_create (31, ehspec_filter_hash, ehspec_filter_eq, free);

  for (i = cfun->eh->last_region_number; i > 0; --i)
    {
      struct eh_region *r = cfun->eh->region_array[i];

      /* Mind we don't process a region more than once.  */
      if (!r || r->region_number != i)
	continue;

      switch (r->type)
	{
	case ERT_CATCH:
	  /* Whatever type_list is (NULL or true list), we build a list
	     of filters for the region.  */
	  r->u.catch.filter_list = NULL_TREE;

	  if (r->u.catch.type_list != NULL)
	    {
	      /* Get a filter value for each of the types caught and store
		 them in the region's dedicated list.  */
	      tree tp_node = r->u.catch.type_list;

	      for (;tp_node; tp_node = TREE_CHAIN (tp_node))
		{
		  int flt = add_ttypes_entry (ttypes, TREE_VALUE (tp_node));
		  tree flt_node = build_int_cst (NULL_TREE, flt);

		  r->u.catch.filter_list
		    = tree_cons (NULL_TREE, flt_node, r->u.catch.filter_list);
		}
	    }
	  else
	    {
	      /* Get a filter value for the NULL list also since it will need
		 an action record anyway.  */
	      int flt = add_ttypes_entry (ttypes, NULL);
	      tree flt_node = build_int_cst (NULL_TREE, flt);

	      r->u.catch.filter_list
		= tree_cons (NULL_TREE, flt_node, r->u.catch.filter_list);
	    }

	  break;

	case ERT_ALLOWED_EXCEPTIONS:
	  r->u.allowed.filter
	    = add_ehspec_entry (ehspec, ttypes, r->u.allowed.type_list);
	  break;

	default:
	  break;
	}
    }

  htab_delete (ttypes);
  htab_delete (ehspec);
}
1442
/* Emit SEQ into basic block just before INSN (that is assumed to be
   first instruction of some existing BB and return the newly
   produced block.  */
static basic_block
emit_to_new_bb_before (rtx seq, rtx insn)
{
  rtx last;
  basic_block bb;
  edge e;

  /* If there happens to be an fallthru edge (possibly created by cleanup_cfg
     call), we don't want it to go into newly created landing pad or other EH
     construct.  */
  for (e = BLOCK_FOR_INSN (insn)->pred; e; e = e->pred_next)
    if (e->flags & EDGE_FALLTHRU)
      force_nonfallthru (e);
  last = emit_insn_before (seq, insn);
  /* A trailing barrier must stay outside the new block.  */
  if (BARRIER_P (last))
    last = PREV_INSN (last);
  bb = create_basic_block (seq, last, BLOCK_FOR_INSN (insn)->prev_bb);
  update_bb_for_insn (bb);
  bb->flags |= BB_SUPERBLOCK;
  return bb;
}
1467
/* Generate the code to actually handle exceptions, which will follow the
   landing pads.  */

static void
build_post_landing_pads (void)
{
  int i;

  for (i = cfun->eh->last_region_number; i > 0; --i)
    {
      struct eh_region *region = cfun->eh->region_array[i];
      rtx seq;

      /* Mind we don't process a region more than once.  */
      if (!region || region->region_number != i)
	continue;

      switch (region->type)
	{
	case ERT_TRY:
	  /* ??? Collect the set of all non-overlapping catch handlers
	       all the way up the chain until blocked by a cleanup.  */
	  /* ??? Outer try regions can share landing pads with inner
	     try regions if the types are completely non-overlapping,
	     and there are no intervening cleanups.  */

	  region->post_landing_pad = gen_label_rtx ();

	  start_sequence ();

	  emit_label (region->post_landing_pad);

	  /* ??? It is mighty inconvenient to call back into the
	     switch statement generation code in expand_end_case.
	     Rapid prototyping sez a sequence of ifs.  */
	  {
	    struct eh_region *c;
	    for (c = region->u.try.catch; c ; c = c->u.catch.next_catch)
	      {
		if (c->u.catch.type_list == NULL)
		  /* A catch-all handler needs no type test.  */
		  emit_jump (c->label);
		else
		  {
		    /* Need for one cmp/jump per type caught. Each type
		       list entry has a matching entry in the filter list
		       (see assign_filter_values).  */
		    tree tp_node = c->u.catch.type_list;
		    tree flt_node = c->u.catch.filter_list;

		    for (; tp_node; )
		      {
			emit_cmp_and_jump_insns
			  (cfun->eh->filter,
			   GEN_INT (tree_low_cst (TREE_VALUE (flt_node), 0)),
			   EQ, NULL_RTX,
			   targetm.eh_return_filter_mode (), 0, c->label);

			tp_node = TREE_CHAIN (tp_node);
			flt_node = TREE_CHAIN (flt_node);
		      }
		  }
	      }
	  }

	  /* We delay the generation of the _Unwind_Resume until we generate
	     landing pads.  We emit a marker here so as to get good control
	     flow data in the meantime.  */
	  region->resume
	    = emit_jump_insn (gen_rtx_RESX (VOIDmode, region->region_number));
	  emit_barrier ();

	  seq = get_insns ();
	  end_sequence ();

	  emit_to_new_bb_before (seq, region->u.try.catch->label);

	  break;

	case ERT_ALLOWED_EXCEPTIONS:
	  region->post_landing_pad = gen_label_rtx ();

	  start_sequence ();

	  emit_label (region->post_landing_pad);

	  /* A single filter comparison decides whether the exception is
	     in the allowed set.  */
	  emit_cmp_and_jump_insns (cfun->eh->filter,
				   GEN_INT (region->u.allowed.filter),
				   EQ, NULL_RTX,
				   targetm.eh_return_filter_mode (), 0, region->label);

	  /* We delay the generation of the _Unwind_Resume until we generate
	     landing pads.  We emit a marker here so as to get good control
	     flow data in the meantime.  */
	  region->resume
	    = emit_jump_insn (gen_rtx_RESX (VOIDmode, region->region_number));
	  emit_barrier ();

	  seq = get_insns ();
	  end_sequence ();

	  emit_to_new_bb_before (seq, region->label);
	  break;

	case ERT_CLEANUP:
	case ERT_MUST_NOT_THROW:
	  /* These are entered directly at their handler label.  */
	  region->post_landing_pad = region->label;
	  break;

	case ERT_CATCH:
	case ERT_THROW:
	  /* Nothing to do.  */
	  break;

	default:
	  gcc_unreachable ();
	}
    }
}
6c74b671 1586
/* Replace RESX patterns with jumps to the next handler if any, or calls to
   _Unwind_Resume otherwise.  */

static void
connect_post_landing_pads (void)
{
  int i;

  for (i = cfun->eh->last_region_number; i > 0; --i)
    {
      struct eh_region *region = cfun->eh->region_array[i];
      struct eh_region *outer;
      rtx seq;
      rtx barrier;

      /* Mind we don't process a region more than once.  */
      if (!region || region->region_number != i)
	continue;

      /* If there is no RESX, or it has been deleted by flow, there's
	 nothing to fix up.  */
      if (! region->resume || INSN_DELETED_P (region->resume))
	continue;

      /* Search for another landing pad in this function.  */
      for (outer = region->outer; outer ; outer = outer->outer)
	if (outer->post_landing_pad)
	  break;

      start_sequence ();

      if (outer)
	{
	  edge e;
	  basic_block src, dest;

	  /* Transfer control to the enclosing handler and rewire the
	     CFG edge accordingly.  */
	  emit_jump (outer->post_landing_pad);
	  src = BLOCK_FOR_INSN (region->resume);
	  dest = BLOCK_FOR_INSN (outer->post_landing_pad);
	  while (src->succ)
	    remove_edge (src->succ);
	  e = make_edge (src, dest, 0);
	  e->probability = REG_BR_PROB_BASE;
	  e->count = src->count;
	}
      else
	{
	  emit_library_call (unwind_resume_libfunc, LCT_THROW,
			     VOIDmode, 1, cfun->eh->exc_ptr, ptr_mode);

	  /* What we just emitted was a throwing libcall, so it got a
	     barrier automatically added after it.  If the last insn in
	     the libcall sequence isn't the barrier, it's because the
	     target emits multiple insns for a call, and there are insns
	     after the actual call insn (which are redundant and would be
	     optimized away).  The barrier is inserted exactly after the
	     call insn, so let's go get that and delete the insns after
	     it, because below we need the barrier to be the last insn in
	     the sequence.  */
	  delete_insns_since (NEXT_INSN (last_call_insn ()));
	}

      seq = get_insns ();
      end_sequence ();
      barrier = emit_insn_before (seq, region->resume);
      /* Avoid duplicate barrier.  */
      gcc_assert (BARRIER_P (barrier));
      delete_insn (barrier);
      delete_insn (region->resume);

      /* ??? From tree-ssa we can wind up with catch regions whose
	 label is not instantiated, but whose resx is present.  Now
	 that we've dealt with the resx, kill the region.  */
      if (region->label == NULL && region->type == ERT_CLEANUP)
	remove_eh_handler (region);
    }
}
1664
1665\f
/* Build a dwarf2-style landing pad for every region that needs one:
   emit the pad label, copy the unwinder's exception pointer and filter
   value out of the EH return data registers, and fall through into the
   region's post-landing pad.  */

static void
dw2_build_landing_pads (void)
{
  int i;
  unsigned int j;

  for (i = cfun->eh->last_region_number; i > 0; --i)
    {
      struct eh_region *region = cfun->eh->region_array[i];
      rtx seq;
      basic_block bb;
      bool clobbers_hard_regs = false;
      edge e;

      /* Mind we don't process a region more than once.  */
      if (!region || region->region_number != i)
	continue;

      /* Only cleanup, try, and allowed-exceptions regions get pads.  */
      if (region->type != ERT_CLEANUP
	  && region->type != ERT_TRY
	  && region->type != ERT_ALLOWED_EXCEPTIONS)
	continue;

      start_sequence ();

      region->landing_pad = gen_label_rtx ();
      emit_label (region->landing_pad);

#ifdef HAVE_exception_receiver
      if (HAVE_exception_receiver)
	emit_insn (gen_exception_receiver ());
      else
#endif
#ifdef HAVE_nonlocal_goto_receiver
	if (HAVE_nonlocal_goto_receiver)
	  emit_insn (gen_nonlocal_goto_receiver ());
	else
#endif
	  { /* Nothing */ }

      /* If the eh_return data registers are call-saved, then we
	 won't have considered them clobbered from the call that
	 threw.  Kill them now.  */
      for (j = 0; ; ++j)
	{
	  unsigned r = EH_RETURN_DATA_REGNO (j);
	  if (r == INVALID_REGNUM)
	    break;
	  if (! call_used_regs[r])
	    {
	      emit_insn (gen_rtx_CLOBBER (VOIDmode, gen_rtx_REG (Pmode, r)));
	      clobbers_hard_regs = true;
	    }
	}

      if (clobbers_hard_regs)
	{
	  /* @@@ This is a kludge.  Not all machine descriptions define a
	     blockage insn, but we must not allow the code we just generated
	     to be reordered by scheduling.  So emit an ASM_INPUT to act as
	     blockage insn.  */
	  emit_insn (gen_rtx_ASM_INPUT (VOIDmode, ""));
	}

      /* Copy the unwinder's data out of the EH return registers.  */
      emit_move_insn (cfun->eh->exc_ptr,
		      gen_rtx_REG (ptr_mode, EH_RETURN_DATA_REGNO (0)));
      emit_move_insn (cfun->eh->filter,
		      gen_rtx_REG (targetm.eh_return_filter_mode (),
				   EH_RETURN_DATA_REGNO (1)));

      seq = get_insns ();
      end_sequence ();

      bb = emit_to_new_bb_before (seq, region->post_landing_pad);
      e = make_edge (bb, bb->next_bb, EDGE_FALLTHRU);
      e->count = bb->count;
      e->probability = REG_BR_PROB_BASE;
    }
}
1745
df4b504c 1746\f
/* Per-region bookkeeping used while laying out sjlj exception
   dispatch (one entry per EH region number).  */
struct sjlj_lp_info
{
  int directly_reachable;	/* Nonzero if some insn can throw to it.  */
  int action_index;		/* Index into the action record table.  */
  int dispatch_index;		/* Value dispatched on at the common pad.  */
  int call_site_index;		/* Value stored in the function context
				   before each call site.  */
};
97ecdf3e 1754
/* Walk the insn stream and mark, in LP_INFO, every region that some
   throwing insn can transfer control to directly.  Returns true if at
   least one such region was found.  */

static bool
sjlj_find_directly_reachable_regions (struct sjlj_lp_info *lp_info)
{
  rtx insn;
  bool found_one = false;

  for (insn = get_insns (); insn ; insn = NEXT_INSN (insn))
    {
      struct eh_region *region;
      enum reachable_code rc;
      tree type_thrown;
      rtx note;

      if (! INSN_P (insn))
	continue;

      /* Only insns with a positive REG_EH_REGION note can throw.  */
      note = find_reg_note (insn, REG_EH_REGION, NULL_RTX);
      if (!note || INTVAL (XEXP (note, 0)) <= 0)
	continue;

      region = cfun->eh->region_array[INTVAL (XEXP (note, 0))];

      /* A THROW region supplies the thrown type; searching starts from
	 its enclosing region.  */
      type_thrown = NULL_TREE;
      if (region->type == ERT_THROW)
	{
	  type_thrown = region->u.throw.type;
	  region = region->outer;
	}

      /* Find the first containing region that might handle the exception.
	 That's the landing pad to which we will transfer control.  */
      rc = RNL_NOT_CAUGHT;
      for (; region; region = region->outer)
	{
	  rc = reachable_next_level (region, type_thrown, NULL);
	  if (rc != RNL_NOT_CAUGHT)
	    break;
	}
      if (rc == RNL_MAYBE_CAUGHT || rc == RNL_CAUGHT)
	{
	  lp_info[region->region_number].directly_reachable = 1;
	  found_one = true;
	}
    }

  return found_one;
}
23ceb7b2 1802
/* Fill in the action, dispatch, and call-site indices in LP_INFO for
   every directly reachable region, pointing each region's landing pad
   at the common DISPATCH_LABEL.  */

static void
sjlj_assign_call_site_values (rtx dispatch_label, struct sjlj_lp_info *lp_info)
{
  htab_t ar_hash;
  int i, index;

  /* First task: build the action table.  */

  VARRAY_UCHAR_INIT (cfun->eh->action_record_data, 64, "action_record_data");
  ar_hash = htab_create (31, action_record_hash, action_record_eq, free);

  for (i = cfun->eh->last_region_number; i > 0; --i)
    if (lp_info[i].directly_reachable)
      {
	struct eh_region *r = cfun->eh->region_array[i];
	r->landing_pad = dispatch_label;
	lp_info[i].action_index = collect_one_action_chain (ar_hash, r);
	if (lp_info[i].action_index != -1)
	  cfun->uses_eh_lsda = 1;
      }

  htab_delete (ar_hash);

  /* Next: assign dispatch values.  In dwarf2 terms, this would be the
     landing pad label for the region.  For sjlj though, there is one
     common landing pad from which we dispatch to the post-landing pads.

     A region receives a dispatch index if it is directly reachable
     and requires in-function processing.  Regions that share post-landing
     pads may share dispatch indices.  */
  /* ??? Post-landing pad sharing doesn't actually happen at the moment
     (see build_post_landing_pads) so we don't bother checking for it.  */

  index = 0;
  for (i = cfun->eh->last_region_number; i > 0; --i)
    if (lp_info[i].directly_reachable)
      lp_info[i].dispatch_index = index++;

  /* Finally: assign call-site values.  If dwarf2 terms, this would be
     the region number assigned by convert_to_eh_region_ranges, but
     handles no-action and must-not-throw differently.  */

  call_site_base = 1;
  for (i = cfun->eh->last_region_number; i > 0; --i)
    if (lp_info[i].directly_reachable)
      {
	int action = lp_info[i].action_index;

	/* Map must-not-throw to otherwise unused call-site index 0.  */
	if (action == -2)
	  index = 0;
	/* Map no-action to otherwise unused call-site index -1.  */
	else if (action == -1)
	  index = -1;
	/* Otherwise, look it up in the table.  */
	else
	  index = add_call_site (GEN_INT (lp_info[i].dispatch_index), action);

	lp_info[i].call_site_index = index;
      }
}
8591d03a 1864
/* Before every insn that can throw, emit a store of its call-site
   index (from LP_INFO) into the SjLj function context, skipping stores
   that would be redundant with the previous one in the same extended
   basic block.  */

static void
sjlj_mark_call_sites (struct sjlj_lp_info *lp_info)
{
  int last_call_site = -2;
  rtx insn, mem;

  for (insn = get_insns (); insn ; insn = NEXT_INSN (insn))
    {
      struct eh_region *region;
      int this_call_site;
      rtx note, before, p;

      /* Reset value tracking at extended basic block boundaries.  */
      if (LABEL_P (insn))
	last_call_site = -2;

      if (! INSN_P (insn))
	continue;

      note = find_reg_note (insn, REG_EH_REGION, NULL_RTX);
      if (!note)
	{
	  /* Calls (and trapping insns) without notes are outside any
	     exception handling region in this function.  Mark them as
	     no action.  */
	  if (CALL_P (insn)
	      || (flag_non_call_exceptions
		  && may_trap_p (PATTERN (insn))))
	    this_call_site = -1;
	  else
	    continue;
	}
      else
	{
	  /* Calls that are known to not throw need not be marked.  */
	  if (INTVAL (XEXP (note, 0)) <= 0)
	    continue;

	  region = cfun->eh->region_array[INTVAL (XEXP (note, 0))];
	  this_call_site = lp_info[region->region_number].call_site_index;
	}

      /* Skip redundant stores of the same call-site value.  */
      if (this_call_site == last_call_site)
	continue;

      /* Don't separate a call from its argument loads.  */
      before = insn;
      if (CALL_P (insn))
	before = find_first_parameter_load (insn, NULL_RTX);

      start_sequence ();
      mem = adjust_address (cfun->eh->sjlj_fc, TYPE_MODE (integer_type_node),
			    sjlj_fc_call_site_ofs);
      emit_move_insn (mem, GEN_INT (this_call_site));
      p = get_insns ();
      end_sequence ();

      emit_insn_before (p, before);
      last_call_site = this_call_site;
    }
}
97ecdf3e 1926
/* Construct the SjLj_Function_Context: fill in the personality routine
   and LSDA pointers, set up the setjmp buffer that DISPATCH_LABEL is
   reached through, register the context with the runtime, and insert
   all of this at the start of the function.  */

static void
sjlj_emit_function_enter (rtx dispatch_label)
{
  rtx fn_begin, fc, mem, seq;

  fc = cfun->eh->sjlj_fc;

  start_sequence ();

  /* We're storing this libcall's address into memory instead of
     calling it directly.  Thus, we must call assemble_external_libcall
     here, as we can not depend on emit_library_call to do it for us.  */
  assemble_external_libcall (eh_personality_libfunc);
  mem = adjust_address (fc, Pmode, sjlj_fc_personality_ofs);
  emit_move_insn (mem, eh_personality_libfunc);

  /* Store the LSDA address, or null if this function has no LSDA.  */
  mem = adjust_address (fc, Pmode, sjlj_fc_lsda_ofs);
  if (cfun->uses_eh_lsda)
    {
      char buf[20];
      rtx sym;

      ASM_GENERATE_INTERNAL_LABEL (buf, "LLSDA", current_function_funcdef_no);
      sym = gen_rtx_SYMBOL_REF (Pmode, ggc_strdup (buf));
      SYMBOL_REF_FLAGS (sym) = SYMBOL_FLAG_LOCAL;
      emit_move_insn (mem, sym);
    }
  else
    emit_move_insn (mem, const0_rtx);

#ifdef DONT_USE_BUILTIN_SETJMP
  {
    rtx x, note;
    x = emit_library_call_value (setjmp_libfunc, NULL_RTX, LCT_RETURNS_TWICE,
				 TYPE_MODE (integer_type_node), 1,
				 plus_constant (XEXP (fc, 0),
						sjlj_fc_jbuf_ofs), Pmode);

    /* Tell the optimizers the setjmp normally returns zero.  */
    note = emit_note (NOTE_INSN_EXPECTED_VALUE);
    NOTE_EXPECTED_VALUE (note) = gen_rtx_EQ (VOIDmode, x, const0_rtx);

    emit_cmp_and_jump_insns (x, const0_rtx, NE, 0,
			     TYPE_MODE (integer_type_node), 0, dispatch_label);
  }
#else
  expand_builtin_setjmp_setup (plus_constant (XEXP (fc, 0), sjlj_fc_jbuf_ofs),
			       dispatch_label);
#endif

  emit_library_call (unwind_sjlj_register_libfunc, LCT_NORMAL, VOIDmode,
		     1, XEXP (fc, 0), Pmode);

  seq = get_insns ();
  end_sequence ();

  /* ??? Instead of doing this at the beginning of the function,
     do this in a block that is at loop level 0 and dominates all
     can_throw_internal instructions.  */

  for (fn_begin = get_insns (); ; fn_begin = NEXT_INSN (fn_begin))
    if (NOTE_P (fn_begin)
	&& (NOTE_LINE_NUMBER (fn_begin) == NOTE_INSN_FUNCTION_BEG
	    || NOTE_LINE_NUMBER (fn_begin) == NOTE_INSN_BASIC_BLOCK))
      break;
  if (NOTE_LINE_NUMBER (fn_begin) == NOTE_INSN_FUNCTION_BEG)
    insert_insn_on_edge (seq, ENTRY_BLOCK_PTR->succ);
  else
    {
      /* The function-begin note lies inside the first real block;
	 insert the sequence right after it.  */
      rtx last = BB_END (ENTRY_BLOCK_PTR->succ->dest);
      for (; ; fn_begin = NEXT_INSN (fn_begin))
	if ((NOTE_P (fn_begin)
	     && NOTE_LINE_NUMBER (fn_begin) == NOTE_INSN_FUNCTION_BEG)
	    || fn_begin == last)
	  break;
      emit_insn_after (seq, fn_begin);
    }
}
2006
df4b504c 2007/* Call back from expand_function_end to know where we should put
2008 the call to unwind_sjlj_unregister_libfunc if needed. */
97bb6c17 2009
df4b504c 2010void
35cb5232 2011sjlj_emit_function_exit_after (rtx after)
df4b504c 2012{
2013 cfun->eh->sjlj_exit_after = after;
2014}
97ecdf3e 2015
2016static void
35cb5232 2017sjlj_emit_function_exit (void)
df4b504c 2018{
2019 rtx seq;
54f7a985 2020 edge e;
97ecdf3e 2021
df4b504c 2022 start_sequence ();
fbba5463 2023
df4b504c 2024 emit_library_call (unwind_sjlj_unregister_libfunc, LCT_NORMAL, VOIDmode,
2025 1, XEXP (cfun->eh->sjlj_fc, 0), Pmode);
d63ea2f2 2026
df4b504c 2027 seq = get_insns ();
2028 end_sequence ();
97ecdf3e 2029
df4b504c 2030 /* ??? Really this can be done in any block at loop level 0 that
2031 post-dominates all can_throw_internal instructions. This is
2032 the last possible moment. */
011a7f23 2033
54f7a985 2034 for (e = EXIT_BLOCK_PTR->pred; e; e = e->pred_next)
2035 if (e->flags & EDGE_FALLTHRU)
2036 break;
2037 if (e)
2038 {
2039 rtx insn;
2040
2041 /* Figure out whether the place we are supposed to insert libcall
2042 is inside the last basic block or after it. In the other case
2043 we need to emit to edge. */
611234b4 2044 gcc_assert (e->src->next_bb == EXIT_BLOCK_PTR);
4cde3e4d 2045 for (insn = BB_HEAD (e->src); ; insn = NEXT_INSN (insn))
54f7a985 2046 {
4cde3e4d 2047 if (insn == cfun->eh->sjlj_exit_after)
2048 {
2049 if (LABEL_P (insn))
2050 insn = NEXT_INSN (insn);
2051 emit_insn_after (seq, insn);
2052 return;
2053 }
2054 if (insn == BB_END (e->src))
2055 break;
54f7a985 2056 }
4cde3e4d 2057 insert_insn_on_edge (seq, e);
54f7a985 2058 }
011a7f23 2059}
2060
df4b504c 2061static void
35cb5232 2062sjlj_emit_dispatch_table (rtx dispatch_label, struct sjlj_lp_info *lp_info)
fbba5463 2063{
df4b504c 2064 int i, first_reachable;
2065 rtx mem, dispatch, seq, fc;
54f7a985 2066 rtx before;
2067 basic_block bb;
2068 edge e;
df4b504c 2069
2070 fc = cfun->eh->sjlj_fc;
2071
2072 start_sequence ();
2073
2074 emit_label (dispatch_label);
1ed5443b 2075
df4b504c 2076#ifndef DONT_USE_BUILTIN_SETJMP
2077 expand_builtin_setjmp_receiver (dispatch_label);
2078#endif
2079
2080 /* Load up dispatch index, exc_ptr and filter values from the
2081 function context. */
e513d163 2082 mem = adjust_address (fc, TYPE_MODE (integer_type_node),
2083 sjlj_fc_call_site_ofs);
df4b504c 2084 dispatch = copy_to_reg (mem);
2085
e513d163 2086 mem = adjust_address (fc, word_mode, sjlj_fc_data_ofs);
a64908cd 2087 if (word_mode != ptr_mode)
df4b504c 2088 {
2089#ifdef POINTERS_EXTEND_UNSIGNED
a64908cd 2090 mem = convert_memory_address (ptr_mode, mem);
df4b504c 2091#else
a64908cd 2092 mem = convert_to_mode (ptr_mode, mem, 0);
df4b504c 2093#endif
2094 }
2095 emit_move_insn (cfun->eh->exc_ptr, mem);
2096
e513d163 2097 mem = adjust_address (fc, word_mode, sjlj_fc_data_ofs + UNITS_PER_WORD);
df4b504c 2098 emit_move_insn (cfun->eh->filter, mem);
97ecdf3e 2099
df4b504c 2100 /* Jump to one of the directly reachable regions. */
2101 /* ??? This really ought to be using a switch statement. */
2102
2103 first_reachable = 0;
2104 for (i = cfun->eh->last_region_number; i > 0; --i)
173f0bec 2105 {
d33336ad 2106 if (! lp_info[i].directly_reachable)
df4b504c 2107 continue;
173f0bec 2108
df4b504c 2109 if (! first_reachable)
2110 {
2111 first_reachable = i;
2112 continue;
2113 }
d63ea2f2 2114
2b96c5f6 2115 emit_cmp_and_jump_insns (dispatch, GEN_INT (lp_info[i].dispatch_index),
2116 EQ, NULL_RTX, TYPE_MODE (integer_type_node), 0,
df4b504c 2117 cfun->eh->region_array[i]->post_landing_pad);
173f0bec 2118 }
011a7f23 2119
df4b504c 2120 seq = get_insns ();
2121 end_sequence ();
97ecdf3e 2122
54f7a985 2123 before = cfun->eh->region_array[first_reachable]->post_landing_pad;
2124
2125 bb = emit_to_new_bb_before (seq, before);
2126 e = make_edge (bb, bb->next_bb, EDGE_FALLTHRU);
2127 e->count = bb->count;
2128 e->probability = REG_BR_PROB_BASE;
fbba5463 2129}
2130
df4b504c 2131static void
35cb5232 2132sjlj_build_landing_pads (void)
fbba5463 2133{
df4b504c 2134 struct sjlj_lp_info *lp_info;
fbba5463 2135
f0af5a88 2136 lp_info = xcalloc (cfun->eh->last_region_number + 1,
2137 sizeof (struct sjlj_lp_info));
fbba5463 2138
df4b504c 2139 if (sjlj_find_directly_reachable_regions (lp_info))
2140 {
2141 rtx dispatch_label = gen_label_rtx ();
fbba5463 2142
df4b504c 2143 cfun->eh->sjlj_fc
2144 = assign_stack_local (TYPE_MODE (sjlj_fc_type_node),
2145 int_size_in_bytes (sjlj_fc_type_node),
2146 TYPE_ALIGN (sjlj_fc_type_node));
97ecdf3e 2147
df4b504c 2148 sjlj_assign_call_site_values (dispatch_label, lp_info);
2149 sjlj_mark_call_sites (lp_info);
173f0bec 2150
df4b504c 2151 sjlj_emit_function_enter (dispatch_label);
2152 sjlj_emit_dispatch_table (dispatch_label, lp_info);
2153 sjlj_emit_function_exit ();
2154 }
173f0bec 2155
df4b504c 2156 free (lp_info);
97ecdf3e 2157}
fbba5463 2158
fbba5463 2159void
35cb5232 2160finish_eh_generation (void)
fbba5463 2161{
54f7a985 2162 basic_block bb;
2163
df4b504c 2164 /* Nothing to do if no regions created. */
2165 if (cfun->eh->region_tree == NULL)
fbba5463 2166 return;
2167
df4b504c 2168 /* The object here is to provide find_basic_blocks with detailed
2169 information (via reachable_handlers) on how exception control
2170 flows within the function. In this first pass, we can include
2171 type information garnered from ERT_THROW and ERT_ALLOWED_EXCEPTIONS
2172 regions, and hope that it will be useful in deleting unreachable
2173 handlers. Subsequently, we will generate landing pads which will
2174 connect many of the handlers, and then type information will not
2175 be effective. Still, this is a win over previous implementations. */
2176
df4b504c 2177 /* These registers are used by the landing pads. Make sure they
2178 have been generated. */
572fdaa3 2179 get_exception_pointer (cfun);
2180 get_exception_filter (cfun);
df4b504c 2181
2182 /* Construct the landing pads. */
2183
2184 assign_filter_values ();
2185 build_post_landing_pads ();
2186 connect_post_landing_pads ();
2187 if (USING_SJLJ_EXCEPTIONS)
2188 sjlj_build_landing_pads ();
2189 else
2190 dw2_build_landing_pads ();
fbba5463 2191
df4b504c 2192 cfun->eh->built_landing_pads = 1;
fbba5463 2193
df4b504c 2194 /* We've totally changed the CFG. Start over. */
2195 find_exception_handler_labels ();
54f7a985 2196 break_superblocks ();
2197 if (USING_SJLJ_EXCEPTIONS)
2198 commit_edge_insertions ();
2199 FOR_EACH_BB (bb)
2200 {
2201 edge e, next;
2202 bool eh = false;
2203 for (e = bb->succ; e; e = next)
2204 {
2205 next = e->succ_next;
2206 if (e->flags & EDGE_EH)
2207 {
2208 remove_edge (e);
2209 eh = true;
2210 }
2211 }
2212 if (eh)
4ee9c684 2213 rtl_make_eh_edge (NULL, bb, BB_END (bb));
54f7a985 2214 }
fbba5463 2215}
97ecdf3e 2216\f
8f8dcce4 2217static hashval_t
35cb5232 2218ehl_hash (const void *pentry)
8f8dcce4 2219{
2220 struct ehl_map_entry *entry = (struct ehl_map_entry *) pentry;
2221
2222 /* 2^32 * ((sqrt(5) - 1) / 2) */
2223 const hashval_t scaled_golden_ratio = 0x9e3779b9;
2224 return CODE_LABEL_NUMBER (entry->label) * scaled_golden_ratio;
2225}
2226
2227static int
35cb5232 2228ehl_eq (const void *pentry, const void *pdata)
8f8dcce4 2229{
2230 struct ehl_map_entry *entry = (struct ehl_map_entry *) pentry;
2231 struct ehl_map_entry *data = (struct ehl_map_entry *) pdata;
2232
2233 return entry->label == data->label;
2234}
2235
df4b504c 2236/* This section handles removing dead code for flow. */
732992fa 2237
8f8dcce4 2238/* Remove LABEL from exception_handler_label_map. */
732992fa 2239
df4b504c 2240static void
35cb5232 2241remove_exception_handler_label (rtx label)
732992fa 2242{
8f8dcce4 2243 struct ehl_map_entry **slot, tmp;
30618d5e 2244
8f8dcce4 2245 /* If exception_handler_label_map was not built yet,
f491db07 2246 there is nothing to do. */
1f3233d1 2247 if (cfun->eh->exception_handler_label_map == NULL)
f491db07 2248 return;
2249
8f8dcce4 2250 tmp.label = label;
2251 slot = (struct ehl_map_entry **)
1f3233d1 2252 htab_find_slot (cfun->eh->exception_handler_label_map, &tmp, NO_INSERT);
611234b4 2253 gcc_assert (slot);
732992fa 2254
1f3233d1 2255 htab_clear_slot (cfun->eh->exception_handler_label_map, (void **) slot);
732992fa 2256}
2257
df4b504c 2258/* Splice REGION from the region tree etc. */
97bb6c17 2259
98b4572e 2260static void
35cb5232 2261remove_eh_handler (struct eh_region *region)
97ecdf3e 2262{
cd7e4c20 2263 struct eh_region **pp, **pp_start, *p, *outer, *inner;
df4b504c 2264 rtx lab;
97ecdf3e 2265
df4b504c 2266 /* For the benefit of efficiently handling REG_EH_REGION notes,
2267 replace this region in the region array with its containing
2268 region. Note that previous region deletions may result in
8f8dcce4 2269 multiple copies of this region in the array, so we have a
2270 list of alternate numbers by which we are known. */
2271
cd7e4c20 2272 outer = region->outer;
2273 cfun->eh->region_array[region->region_number] = outer;
8f8dcce4 2274 if (region->aka)
2275 {
2276 int i;
2277 EXECUTE_IF_SET_IN_BITMAP (region->aka, 0, i,
cd7e4c20 2278 { cfun->eh->region_array[i] = outer; });
8f8dcce4 2279 }
2280
cd7e4c20 2281 if (outer)
8f8dcce4 2282 {
cd7e4c20 2283 if (!outer->aka)
1f3233d1 2284 outer->aka = BITMAP_GGC_ALLOC ();
8f8dcce4 2285 if (region->aka)
cd7e4c20 2286 bitmap_a_or_b (outer->aka, outer->aka, region->aka);
2287 bitmap_set_bit (outer->aka, region->region_number);
8f8dcce4 2288 }
df4b504c 2289
2290 if (cfun->eh->built_landing_pads)
2291 lab = region->landing_pad;
2292 else
2293 lab = region->label;
2294 if (lab)
2295 remove_exception_handler_label (lab);
2296
cd7e4c20 2297 if (outer)
2298 pp_start = &outer->inner;
df4b504c 2299 else
cd7e4c20 2300 pp_start = &cfun->eh->region_tree;
2301 for (pp = pp_start, p = *pp; p != region; pp = &p->next_peer, p = *pp)
df4b504c 2302 continue;
cd7e4c20 2303 *pp = region->next_peer;
97bb6c17 2304
cd7e4c20 2305 inner = region->inner;
2306 if (inner)
97ecdf3e 2307 {
cd7e4c20 2308 for (p = inner; p->next_peer ; p = p->next_peer)
2309 p->outer = outer;
2310 p->outer = outer;
2311
2312 p->next_peer = *pp_start;
2313 *pp_start = inner;
97ecdf3e 2314 }
98b4572e 2315
df4b504c 2316 if (region->type == ERT_CATCH)
2317 {
2318 struct eh_region *try, *next, *prev;
98b4572e 2319
df4b504c 2320 for (try = region->next_peer;
2321 try->type == ERT_CATCH;
2322 try = try->next_peer)
2323 continue;
611234b4 2324 gcc_assert (try->type == ERT_TRY);
98b4572e 2325
df4b504c 2326 next = region->u.catch.next_catch;
2327 prev = region->u.catch.prev_catch;
98b4572e 2328
df4b504c 2329 if (next)
2330 next->u.catch.prev_catch = prev;
2331 else
2332 try->u.try.last_catch = prev;
2333 if (prev)
2334 prev->u.catch.next_catch = next;
2335 else
2336 {
2337 try->u.try.catch = next;
2338 if (! next)
2339 remove_eh_handler (try);
2340 }
2341 }
97ecdf3e 2342}
2343
df4b504c 2344/* LABEL heads a basic block that is about to be deleted. If this
2345 label corresponds to an exception region, we may be able to
2346 delete the region. */
97ecdf3e 2347
2348void
35cb5232 2349maybe_remove_eh_handler (rtx label)
97ecdf3e 2350{
8f8dcce4 2351 struct ehl_map_entry **slot, tmp;
2352 struct eh_region *region;
97ecdf3e 2353
df4b504c 2354 /* ??? After generating landing pads, it's not so simple to determine
2355 if the region data is completely unused. One must examine the
2356 landing pad and the post landing pad, and whether an inner try block
2357 is referencing the catch handlers directly. */
2358 if (cfun->eh->built_landing_pads)
97ecdf3e 2359 return;
2360
8f8dcce4 2361 tmp.label = label;
2362 slot = (struct ehl_map_entry **)
1f3233d1 2363 htab_find_slot (cfun->eh->exception_handler_label_map, &tmp, NO_INSERT);
8f8dcce4 2364 if (! slot)
2365 return;
2366 region = (*slot)->region;
2367 if (! region)
2368 return;
2369
2370 /* Flow will want to remove MUST_NOT_THROW regions as unreachable
2371 because there is no path to the fallback call to terminate.
2372 But the region continues to affect call-site data until there
2373 are no more contained calls, which we don't see here. */
2374 if (region->type == ERT_MUST_NOT_THROW)
a7b0c170 2375 {
1f3233d1 2376 htab_clear_slot (cfun->eh->exception_handler_label_map, (void **) slot);
8f8dcce4 2377 region->label = NULL_RTX;
a7b0c170 2378 }
8f8dcce4 2379 else
2380 remove_eh_handler (region);
2381}
2382
2383/* Invokes CALLBACK for every exception handler label. Only used by old
2384 loop hackery; should not be used by new code. */
2385
2386void
35cb5232 2387for_each_eh_label (void (*callback) (rtx))
8f8dcce4 2388{
1f3233d1 2389 htab_traverse (cfun->eh->exception_handler_label_map, for_each_eh_label_1,
48b3d385 2390 (void *) &callback);
a7b0c170 2391}
2392
8f8dcce4 2393static int
35cb5232 2394for_each_eh_label_1 (void **pentry, void *data)
8f8dcce4 2395{
2396 struct ehl_map_entry *entry = *(struct ehl_map_entry **)pentry;
48b3d385 2397 void (*callback) (rtx) = *(void (**) (rtx)) data;
8f8dcce4 2398
2399 (*callback) (entry->label);
2400 return 1;
2401}
bf6102ae 2402
2403/* Invoke CALLBACK for every exception region in the current function. */
2404
2405void
2406for_each_eh_region (void (*callback) (struct eh_region *))
2407{
2408 int i, n = cfun->eh->last_region_number;
2409 for (i = 1; i <= n; ++i)
2410 {
2411 struct eh_region *region = cfun->eh->region_array[i];
2412 if (region)
2413 (*callback) (region);
2414 }
2415}
df4b504c 2416\f
2417/* This section describes CFG exception edges for flow. */
a7b0c170 2418
df4b504c 2419/* For communicating between calls to reachable_next_level. */
4ee9c684 2420struct reachable_info
a7b0c170 2421{
df4b504c 2422 tree types_caught;
2423 tree types_allowed;
4ee9c684 2424 void (*callback) (struct eh_region *, void *);
2425 void *callback_data;
2426 bool saw_any_handlers;
df4b504c 2427};
a7b0c170 2428
df4b504c 2429/* A subroutine of reachable_next_level. Return true if TYPE, or a
2430 base class of TYPE, is in HANDLED. */
a7b0c170 2431
4ee9c684 2432int
35cb5232 2433check_handled (tree handled, tree type)
a7b0c170 2434{
df4b504c 2435 tree t;
2436
2437 /* We can check for exact matches without front-end help. */
2438 if (! lang_eh_type_covers)
d3a0267f 2439 {
df4b504c 2440 for (t = handled; t ; t = TREE_CHAIN (t))
2441 if (TREE_VALUE (t) == type)
2442 return 1;
2443 }
2444 else
2445 {
2446 for (t = handled; t ; t = TREE_CHAIN (t))
2447 if ((*lang_eh_type_covers) (TREE_VALUE (t), type))
2448 return 1;
d3a0267f 2449 }
df4b504c 2450
2451 return 0;
a7b0c170 2452}
2453
df4b504c 2454/* A subroutine of reachable_next_level. If we are collecting a list
2455 of handlers, add one. After landing pad generation, reference
2456 it instead of the handlers themselves. Further, the handlers are
1ed5443b 2457 all wired together, so by referencing one, we've got them all.
df4b504c 2458 Before landing pad generation we reference each handler individually.
2459
2460 LP_REGION contains the landing pad; REGION is the handler. */
a7b0c170 2461
2462static void
4ee9c684 2463add_reachable_handler (struct reachable_info *info,
2464 struct eh_region *lp_region, struct eh_region *region)
a7b0c170 2465{
df4b504c 2466 if (! info)
2467 return;
2468
4ee9c684 2469 info->saw_any_handlers = true;
2470
df4b504c 2471 if (cfun->eh->built_landing_pads)
4ee9c684 2472 info->callback (lp_region, info->callback_data);
df4b504c 2473 else
4ee9c684 2474 info->callback (region, info->callback_data);
a7b0c170 2475}
2476
1ed5443b 2477/* Process one level of exception regions for reachability.
df4b504c 2478 If TYPE_THROWN is non-null, then it is the *exact* type being
2479 propagated. If INFO is non-null, then collect handler labels
2480 and caught/allowed type information between invocations. */
a7b0c170 2481
df4b504c 2482static enum reachable_code
35cb5232 2483reachable_next_level (struct eh_region *region, tree type_thrown,
2484 struct reachable_info *info)
a7b0c170 2485{
df4b504c 2486 switch (region->type)
2487 {
2488 case ERT_CLEANUP:
2489 /* Before landing-pad generation, we model control flow
2490 directly to the individual handlers. In this way we can
2491 see that catch handler types may shadow one another. */
2492 add_reachable_handler (info, region, region);
2493 return RNL_MAYBE_CAUGHT;
2494
2495 case ERT_TRY:
2496 {
2497 struct eh_region *c;
2498 enum reachable_code ret = RNL_NOT_CAUGHT;
3c3bb268 2499
df4b504c 2500 for (c = region->u.try.catch; c ; c = c->u.catch.next_catch)
2501 {
2502 /* A catch-all handler ends the search. */
5c15c916 2503 if (c->u.catch.type_list == NULL)
df4b504c 2504 {
2505 add_reachable_handler (info, region, c);
2506 return RNL_CAUGHT;
2507 }
2508
2509 if (type_thrown)
2510 {
5d4f270c 2511 /* If we have at least one type match, end the search. */
5c15c916 2512 tree tp_node = c->u.catch.type_list;
1ed5443b 2513
5c15c916 2514 for (; tp_node; tp_node = TREE_CHAIN (tp_node))
df4b504c 2515 {
5c15c916 2516 tree type = TREE_VALUE (tp_node);
2517
2518 if (type == type_thrown
2519 || (lang_eh_type_covers
2520 && (*lang_eh_type_covers) (type, type_thrown)))
2521 {
2522 add_reachable_handler (info, region, c);
2523 return RNL_CAUGHT;
2524 }
df4b504c 2525 }
2526
2527 /* If we have definitive information of a match failure,
2528 the catch won't trigger. */
2529 if (lang_eh_type_covers)
2530 return RNL_NOT_CAUGHT;
2531 }
2532
5c15c916 2533 /* At this point, we either don't know what type is thrown or
2534 don't have front-end assistance to help deciding if it is
2535 covered by one of the types in the list for this region.
1ed5443b 2536
5c15c916 2537 We'd then like to add this region to the list of reachable
2538 handlers since it is indeed potentially reachable based on the
1ed5443b 2539 information we have.
2540
5c15c916 2541 Actually, this handler is for sure not reachable if all the
2542 types it matches have already been caught. That is, it is only
2543 potentially reachable if at least one of the types it catches
2544 has not been previously caught. */
2545
df4b504c 2546 if (! info)
2547 ret = RNL_MAYBE_CAUGHT;
5c15c916 2548 else
df4b504c 2549 {
5c15c916 2550 tree tp_node = c->u.catch.type_list;
2551 bool maybe_reachable = false;
df4b504c 2552
5c15c916 2553 /* Compute the potential reachability of this handler and
2554 update the list of types caught at the same time. */
2555 for (; tp_node; tp_node = TREE_CHAIN (tp_node))
2556 {
2557 tree type = TREE_VALUE (tp_node);
2558
2559 if (! check_handled (info->types_caught, type))
2560 {
2561 info->types_caught
2562 = tree_cons (NULL, type, info->types_caught);
1ed5443b 2563
5c15c916 2564 maybe_reachable = true;
2565 }
2566 }
1ed5443b 2567
5c15c916 2568 if (maybe_reachable)
2569 {
2570 add_reachable_handler (info, region, c);
1ed5443b 2571
5c15c916 2572 /* ??? If the catch type is a base class of every allowed
2573 type, then we know we can stop the search. */
2574 ret = RNL_MAYBE_CAUGHT;
2575 }
df4b504c 2576 }
2577 }
a7b0c170 2578
df4b504c 2579 return ret;
2580 }
a7b0c170 2581
df4b504c 2582 case ERT_ALLOWED_EXCEPTIONS:
2583 /* An empty list of types definitely ends the search. */
2584 if (region->u.allowed.type_list == NULL_TREE)
2585 {
2586 add_reachable_handler (info, region, region);
2587 return RNL_CAUGHT;
2588 }
a7b0c170 2589
df4b504c 2590 /* Collect a list of lists of allowed types for use in detecting
2591 when a catch may be transformed into a catch-all. */
2592 if (info)
2593 info->types_allowed = tree_cons (NULL_TREE,
2594 region->u.allowed.type_list,
2595 info->types_allowed);
1ed5443b 2596
734c98be 2597 /* If we have definitive information about the type hierarchy,
df4b504c 2598 then we can tell if the thrown type will pass through the
2599 filter. */
2600 if (type_thrown && lang_eh_type_covers)
2601 {
2602 if (check_handled (region->u.allowed.type_list, type_thrown))
2603 return RNL_NOT_CAUGHT;
2604 else
2605 {
2606 add_reachable_handler (info, region, region);
2607 return RNL_CAUGHT;
2608 }
2609 }
c788feb1 2610
df4b504c 2611 add_reachable_handler (info, region, region);
2612 return RNL_MAYBE_CAUGHT;
c788feb1 2613
df4b504c 2614 case ERT_CATCH:
98667efb 2615 /* Catch regions are handled by their controlling try region. */
df4b504c 2616 return RNL_NOT_CAUGHT;
c788feb1 2617
df4b504c 2618 case ERT_MUST_NOT_THROW:
2619 /* Here we end our search, since no exceptions may propagate.
2620 If we've touched down at some landing pad previous, then the
2621 explicit function call we generated may be used. Otherwise
2622 the call is made by the runtime. */
4ee9c684 2623 if (info && info->saw_any_handlers)
c788feb1 2624 {
df4b504c 2625 add_reachable_handler (info, region, region);
ff385626 2626 return RNL_CAUGHT;
c788feb1 2627 }
df4b504c 2628 else
2629 return RNL_BLOCKED;
c788feb1 2630
df4b504c 2631 case ERT_THROW:
2632 case ERT_FIXUP:
1ed5443b 2633 case ERT_UNKNOWN:
df4b504c 2634 /* Shouldn't see these here. */
611234b4 2635 gcc_unreachable ();
df4b504c 2636 break;
611234b4 2637 default:
2638 gcc_unreachable ();
c788feb1 2639 }
3c3bb268 2640}
97ecdf3e 2641
4ee9c684 2642/* Invoke CALLBACK on each region reachable from REGION_NUMBER. */
97ecdf3e 2643
4ee9c684 2644void
2645foreach_reachable_handler (int region_number, bool is_resx,
2646 void (*callback) (struct eh_region *, void *),
2647 void *callback_data)
97ecdf3e 2648{
df4b504c 2649 struct reachable_info info;
2650 struct eh_region *region;
2651 tree type_thrown;
97ecdf3e 2652
df4b504c 2653 memset (&info, 0, sizeof (info));
4ee9c684 2654 info.callback = callback;
2655 info.callback_data = callback_data;
97ecdf3e 2656
df4b504c 2657 region = cfun->eh->region_array[region_number];
f929a98a 2658
df4b504c 2659 type_thrown = NULL_TREE;
4ee9c684 2660 if (is_resx)
385df8c5 2661 {
2662 /* A RESX leaves a region instead of entering it. Thus the
2663 region itself may have been deleted out from under us. */
2664 if (region == NULL)
4ee9c684 2665 return;
385df8c5 2666 region = region->outer;
2667 }
2668 else if (region->type == ERT_THROW)
df4b504c 2669 {
2670 type_thrown = region->u.throw.type;
2671 region = region->outer;
2672 }
8b4f3d64 2673
0d3f1801 2674 while (region)
2675 {
2676 if (reachable_next_level (region, type_thrown, &info) >= RNL_CAUGHT)
ce9beb5c 2677 break;
0d3f1801 2678 /* If we have processed one cleanup, there is no point in
2679 processing any more of them. Each cleanup will have an edge
2680 to the next outer cleanup region, so the flow graph will be
2681 accurate. */
2682 if (region->type == ERT_CLEANUP)
2683 region = region->u.cleanup.prev_try;
2684 else
2685 region = region->outer;
2686 }
4ee9c684 2687}
2688
2689/* Retrieve a list of labels of exception handlers which can be
2690 reached by a given insn. */
2691
2692static void
2693arh_to_landing_pad (struct eh_region *region, void *data)
2694{
2695 rtx *p_handlers = data;
2696 if (! *p_handlers)
2697 *p_handlers = alloc_INSN_LIST (region->landing_pad, NULL_RTX);
2698}
2699
2700static void
2701arh_to_label (struct eh_region *region, void *data)
2702{
2703 rtx *p_handlers = data;
2704 *p_handlers = alloc_INSN_LIST (region->label, *p_handlers);
2705}
2706
2707rtx
2708reachable_handlers (rtx insn)
2709{
2710 bool is_resx = false;
2711 rtx handlers = NULL;
2712 int region_number;
2713
6d7dc5b9 2714 if (JUMP_P (insn)
4ee9c684 2715 && GET_CODE (PATTERN (insn)) == RESX)
2716 {
2717 region_number = XINT (PATTERN (insn), 0);
2718 is_resx = true;
2719 }
2720 else
2721 {
2722 rtx note = find_reg_note (insn, REG_EH_REGION, NULL_RTX);
2723 if (!note || INTVAL (XEXP (note, 0)) <= 0)
2724 return NULL;
2725 region_number = INTVAL (XEXP (note, 0));
2726 }
35cb5232 2727
4ee9c684 2728 foreach_reachable_handler (region_number, is_resx,
2729 (cfun->eh->built_landing_pads
2730 ? arh_to_landing_pad
2731 : arh_to_label),
2732 &handlers);
2733
2734 return handlers;
f929a98a 2735}
2736
df4b504c 2737/* Determine if the given INSN can throw an exception that is caught
2738 within the function. */
97ecdf3e 2739
df4b504c 2740bool
4ee9c684 2741can_throw_internal_1 (int region_number)
97ecdf3e 2742{
df4b504c 2743 struct eh_region *region;
2744 tree type_thrown;
4ee9c684 2745
2746 region = cfun->eh->region_array[region_number];
2747
2748 type_thrown = NULL_TREE;
2749 if (region->type == ERT_THROW)
2750 {
2751 type_thrown = region->u.throw.type;
2752 region = region->outer;
2753 }
2754
2755 /* If this exception is ignored by each and every containing region,
2756 then control passes straight out. The runtime may handle some
2757 regions, which also do not require processing internally. */
2758 for (; region; region = region->outer)
2759 {
2760 enum reachable_code how = reachable_next_level (region, type_thrown, 0);
2761 if (how == RNL_BLOCKED)
2762 return false;
2763 if (how != RNL_NOT_CAUGHT)
2764 return true;
2765 }
2766
2767 return false;
2768}
2769
2770bool
2771can_throw_internal (rtx insn)
2772{
df4b504c 2773 rtx note;
d63ea2f2 2774
df4b504c 2775 if (! INSN_P (insn))
2776 return false;
97bb6c17 2777
6d7dc5b9 2778 if (JUMP_P (insn)
bcb141cd 2779 && GET_CODE (PATTERN (insn)) == RESX
2780 && XINT (PATTERN (insn), 0) > 0)
2781 return can_throw_internal_1 (XINT (PATTERN (insn), 0));
2782
6d7dc5b9 2783 if (NONJUMP_INSN_P (insn)
df4b504c 2784 && GET_CODE (PATTERN (insn)) == SEQUENCE)
2785 insn = XVECEXP (PATTERN (insn), 0, 0);
97ecdf3e 2786
df4b504c 2787 /* Every insn that might throw has an EH_REGION note. */
2788 note = find_reg_note (insn, REG_EH_REGION, NULL_RTX);
2789 if (!note || INTVAL (XEXP (note, 0)) <= 0)
2790 return false;
97ecdf3e 2791
4ee9c684 2792 return can_throw_internal_1 (INTVAL (XEXP (note, 0)));
2793}
2794
2795/* Determine if the given INSN can throw an exception that is
2796 visible outside the function. */
2797
2798bool
2799can_throw_external_1 (int region_number)
2800{
2801 struct eh_region *region;
2802 tree type_thrown;
2803
2804 region = cfun->eh->region_array[region_number];
97ecdf3e 2805
df4b504c 2806 type_thrown = NULL_TREE;
2807 if (region->type == ERT_THROW)
2808 {
2809 type_thrown = region->u.throw.type;
2810 region = region->outer;
2811 }
97ecdf3e 2812
4ee9c684 2813 /* If the exception is caught or blocked by any containing region,
2814 then it is not seen by any calling function. */
2815 for (; region ; region = region->outer)
2816 if (reachable_next_level (region, type_thrown, NULL) >= RNL_CAUGHT)
2817 return false;
97ecdf3e 2818
4ee9c684 2819 return true;
df4b504c 2820}
97ecdf3e 2821
df4b504c 2822bool
35cb5232 2823can_throw_external (rtx insn)
97ecdf3e 2824{
df4b504c 2825 rtx note;
97ecdf3e 2826
df4b504c 2827 if (! INSN_P (insn))
2828 return false;
2829
6d7dc5b9 2830 if (NONJUMP_INSN_P (insn)
df4b504c 2831 && GET_CODE (PATTERN (insn)) == SEQUENCE)
2832 insn = XVECEXP (PATTERN (insn), 0, 0);
2833
df4b504c 2834 note = find_reg_note (insn, REG_EH_REGION, NULL_RTX);
2835 if (!note)
2836 {
2837 /* Calls (and trapping insns) without notes are outside any
2838 exception handling region in this function. We have to
2839 assume it might throw. Given that the front end and middle
2840 ends mark known NOTHROW functions, this isn't so wildly
2841 inaccurate. */
6d7dc5b9 2842 return (CALL_P (insn)
df4b504c 2843 || (flag_non_call_exceptions
2844 && may_trap_p (PATTERN (insn))));
2845 }
2846 if (INTVAL (XEXP (note, 0)) <= 0)
2847 return false;
2848
4ee9c684 2849 return can_throw_external_1 (INTVAL (XEXP (note, 0)));
97ecdf3e 2850}
da5038a3 2851
da2f1613 2852/* Set TREE_NOTHROW and cfun->all_throwers_are_sibcalls. */
ed74c60e 2853
04396483 2854void
35cb5232 2855set_nothrow_function_flags (void)
da5038a3 2856{
2857 rtx insn;
35cb5232 2858
da2f1613 2859 TREE_NOTHROW (current_function_decl) = 1;
da5038a3 2860
04396483 2861 /* Assume cfun->all_throwers_are_sibcalls until we encounter
2862 something that can throw an exception. We specifically exempt
2863 CALL_INSNs that are SIBLING_CALL_P, as these are really jumps,
2864 and can't throw. Most CALL_INSNs are not SIBLING_CALL_P, so this
2865 is optimistic. */
da5038a3 2866
04396483 2867 cfun->all_throwers_are_sibcalls = 1;
2868
2869 if (! flag_exceptions)
2870 return;
35cb5232 2871
da5038a3 2872 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
df4b504c 2873 if (can_throw_external (insn))
04396483 2874 {
da2f1613 2875 TREE_NOTHROW (current_function_decl) = 0;
04396483 2876
6d7dc5b9 2877 if (!CALL_P (insn) || !SIBLING_CALL_P (insn))
04396483 2878 {
2879 cfun->all_throwers_are_sibcalls = 0;
2880 return;
2881 }
2882 }
2883
df4b504c 2884 for (insn = current_function_epilogue_delay_list; insn;
2885 insn = XEXP (insn, 1))
04396483 2886 if (can_throw_external (insn))
2887 {
da2f1613 2888 TREE_NOTHROW (current_function_decl) = 0;
b9cf3f63 2889
6d7dc5b9 2890 if (!CALL_P (insn) || !SIBLING_CALL_P (insn))
04396483 2891 {
2892 cfun->all_throwers_are_sibcalls = 0;
2893 return;
2894 }
2895 }
da5038a3 2896}
df4b504c 2897
447a9eb9 2898\f
df4b504c 2899/* Various hooks for unwind library. */
447a9eb9 2900
2901/* Do any necessary initialization to access arbitrary stack frames.
2902 On the SPARC, this means flushing the register windows. */
2903
2904void
35cb5232 2905expand_builtin_unwind_init (void)
447a9eb9 2906{
2907 /* Set this so all the registers get saved in our frame; we need to be
6312a35e 2908 able to copy the saved values for any registers from frames we unwind. */
447a9eb9 2909 current_function_has_nonlocal_label = 1;
2910
2911#ifdef SETUP_FRAME_ADDRESSES
2912 SETUP_FRAME_ADDRESSES ();
2913#endif
2914}
2915
df4b504c 2916rtx
35cb5232 2917expand_builtin_eh_return_data_regno (tree arglist)
df4b504c 2918{
2919 tree which = TREE_VALUE (arglist);
2920 unsigned HOST_WIDE_INT iwhich;
2921
2922 if (TREE_CODE (which) != INTEGER_CST)
2923 {
2924 error ("argument of `__builtin_eh_return_regno' must be constant");
2925 return constm1_rtx;
2926 }
2927
2928 iwhich = tree_low_cst (which, 1);
2929 iwhich = EH_RETURN_DATA_REGNO (iwhich);
2930 if (iwhich == INVALID_REGNUM)
2931 return constm1_rtx;
2932
2933#ifdef DWARF_FRAME_REGNUM
2934 iwhich = DWARF_FRAME_REGNUM (iwhich);
2935#else
2936 iwhich = DBX_REGISTER_NUMBER (iwhich);
2937#endif
2938
1ed5443b 2939 return GEN_INT (iwhich);
df4b504c 2940}
2941
447a9eb9 2942/* Given a value extracted from the return address register or stack slot,
2943 return the actual address encoded in that value. */
2944
2945rtx
35cb5232 2946expand_builtin_extract_return_addr (tree addr_tree)
447a9eb9 2947{
2948 rtx addr = expand_expr (addr_tree, NULL_RTX, Pmode, 0);
df4b504c 2949
52a14611 2950 if (GET_MODE (addr) != Pmode
2951 && GET_MODE (addr) != VOIDmode)
2952 {
2953#ifdef POINTERS_EXTEND_UNSIGNED
2954 addr = convert_memory_address (Pmode, addr);
2955#else
2956 addr = convert_to_mode (Pmode, addr, 0);
2957#endif
2958 }
2959
df4b504c 2960 /* First mask out any unwanted bits. */
2961#ifdef MASK_RETURN_ADDR
6de9716c 2962 expand_and (Pmode, addr, MASK_RETURN_ADDR, addr);
df4b504c 2963#endif
2964
2965 /* Then adjust to find the real return address. */
2966#if defined (RETURN_ADDR_OFFSET)
2967 addr = plus_constant (addr, RETURN_ADDR_OFFSET);
2968#endif
2969
2970 return addr;
447a9eb9 2971}
2972
/* Given an actual address in addr_tree, do any necessary encoding
   and return the value to be stored in the return address register or
   stack slot so the epilogue will return to that address.  */

rtx
expand_builtin_frob_return_addr (tree addr_tree)
{
  rtx addr = expand_expr (addr_tree, NULL_RTX, ptr_mode, 0);

  addr = convert_memory_address (Pmode, addr);

#ifdef RETURN_ADDR_OFFSET
  /* Undo the adjustment applied by expand_builtin_extract_return_addr.  */
  addr = force_reg (Pmode, addr);
  addr = plus_constant (addr, -RETURN_ADDR_OFFSET);
#endif

  return addr;
}
2991
/* Set up the epilogue with the magic bits we'll need to return to the
   exception handler.  STACKADJ_TREE is the stack adjustment (only used
   when the target defines EH_RETURN_STACKADJ_RTX); HANDLER_TREE is the
   handler address.  Both are cached in cfun->eh and a jump to the
   shared ehr_label is emitted.  */

void
expand_builtin_eh_return (tree stackadj_tree ATTRIBUTE_UNUSED,
			  tree handler_tree)
{
  rtx tmp;

#ifdef EH_RETURN_STACKADJ_RTX
  tmp = expand_expr (stackadj_tree, cfun->eh->ehr_stackadj, VOIDmode, 0);
  tmp = convert_memory_address (Pmode, tmp);
  /* Create the pseudo on first use; thereafter copy into it only when
     expansion did not already target it.  */
  if (!cfun->eh->ehr_stackadj)
    cfun->eh->ehr_stackadj = copy_to_reg (tmp);
  else if (tmp != cfun->eh->ehr_stackadj)
    emit_move_insn (cfun->eh->ehr_stackadj, tmp);
#endif

  tmp = expand_expr (handler_tree, cfun->eh->ehr_handler, VOIDmode, 0);
  tmp = convert_memory_address (Pmode, tmp);
  if (!cfun->eh->ehr_handler)
    cfun->eh->ehr_handler = copy_to_reg (tmp);
  else if (tmp != cfun->eh->ehr_handler)
    emit_move_insn (cfun->eh->ehr_handler, tmp);

  /* All __builtin_eh_return expansions share one label, resolved later
     by expand_eh_return.  */
  if (!cfun->eh->ehr_label)
    cfun->eh->ehr_label = gen_label_rtx ();
  emit_jump (cfun->eh->ehr_label);
}
3021
/* Emit the epilogue-side code for __builtin_eh_return, if any expansion
   of it occurred in this function (i.e. cfun->eh->ehr_label is set).
   Placed behind a jump so normal return paths skip it.  */

void
expand_eh_return (void)
{
  rtx around_label;

  if (! cfun->eh->ehr_label)
    return;

  current_function_calls_eh_return = 1;

#ifdef EH_RETURN_STACKADJ_RTX
  /* Normal return path: no stack adjustment.  */
  emit_move_insn (EH_RETURN_STACKADJ_RTX, const0_rtx);
#endif

  around_label = gen_label_rtx ();
  emit_jump (around_label);

  emit_label (cfun->eh->ehr_label);
  clobber_return_register ();

#ifdef EH_RETURN_STACKADJ_RTX
  emit_move_insn (EH_RETURN_STACKADJ_RTX, cfun->eh->ehr_stackadj);
#endif

  /* Prefer the target's eh_return insn pattern; fall back to storing
     the handler address into EH_RETURN_HANDLER_RTX.  */
#ifdef HAVE_eh_return
  if (HAVE_eh_return)
    emit_insn (gen_eh_return (cfun->eh->ehr_handler));
  else
#endif
    {
#ifdef EH_RETURN_HANDLER_RTX
      emit_move_insn (EH_RETURN_HANDLER_RTX, cfun->eh->ehr_handler);
#else
      error ("__builtin_eh_return not supported on this target");
#endif
    }

  emit_label (around_label);
}
26093bf4 3061
/* Convert a ptr_mode address ADDR_TREE to a word_mode value, extending
   as controlled by POINTERS_EXTEND_UNSIGNED, and return it.
   (NOTE(review): the old comment said "Pmode", but the code below
   plainly converts to word_mode.)  */

rtx
expand_builtin_extend_pointer (tree addr_tree)
{
  rtx addr = expand_expr (addr_tree, NULL_RTX, ptr_mode, 0);
  int extend;

#ifdef POINTERS_EXTEND_UNSIGNED
  extend = POINTERS_EXTEND_UNSIGNED;
#else
  /* The previous EH code did an unsigned extend by default, so we do this also
     for consistency.  */
  extend = 1;
#endif

  return convert_modes (word_mode, ptr_mode, addr, extend);
}
cac66fd5 3081\f
/* In the following functions, we represent entries in the action table
   as 1-based indices.  Special cases are:

	 0:	null action record, non-null landing pad; implies cleanups
	-1:	null action record, null landing pad; implies no action
	-2:	no call-site entry; implies must_not_throw
	-3:	we have yet to process outer regions

   Further, no special cases apply to the "next" field of the record.
   For next, 0 means end of list.  */

struct action_record
{
  /* 1-based offset of this record within action_record_data.  */
  int offset;
  /* Type filter value; 0 denotes a cleanup.  */
  int filter;
  /* 1-based index of the next record in the chain; 0 ends the list.  */
  int next;
};
cac66fd5 3099
df4b504c 3100static int
35cb5232 3101action_record_eq (const void *pentry, const void *pdata)
df4b504c 3102{
3103 const struct action_record *entry = (const struct action_record *) pentry;
3104 const struct action_record *data = (const struct action_record *) pdata;
3105 return entry->filter == data->filter && entry->next == data->next;
3106}
cac66fd5 3107
df4b504c 3108static hashval_t
35cb5232 3109action_record_hash (const void *pentry)
df4b504c 3110{
3111 const struct action_record *entry = (const struct action_record *) pentry;
3112 return entry->next * 1009 + entry->filter;
3113}
cac66fd5 3114
/* Return the 1-based offset of an action record with the given FILTER
   and NEXT link, creating and emitting it into action_record_data if no
   identical record already exists in AR_HASH.  */

static int
add_action_record (htab_t ar_hash, int filter, int next)
{
  struct action_record **slot, *new, tmp;

  tmp.filter = filter;
  tmp.next = next;
  slot = (struct action_record **) htab_find_slot (ar_hash, &tmp, INSERT);

  if ((new = *slot) == NULL)
    {
      new = xmalloc (sizeof (*new));
      new->offset = VARRAY_ACTIVE_SIZE (cfun->eh->action_record_data) + 1;
      new->filter = filter;
      new->next = next;
      *slot = new;

      /* The filter value goes in untouched.  The link to the next
	 record is a "self-relative" byte offset, or zero to indicate
	 that there is no next record.  So convert the absolute 1 based
	 indices we've been carrying around into a displacement.  */

      push_sleb128 (&cfun->eh->action_record_data, filter);
      if (next)
	next -= VARRAY_ACTIVE_SIZE (cfun->eh->action_record_data) + 1;
      push_sleb128 (&cfun->eh->action_record_data, next);
    }

  return new->offset;
}
cac66fd5 3145
/* Build the chain of action records describing REGION and everything
   outer to it, interning records through AR_HASH.  Returns the 1-based
   index of the first record of the chain, or one of the special values
   documented above (0 cleanup-only, -1 no action, -2 must-not-throw).  */

static int
collect_one_action_chain (htab_t ar_hash, struct eh_region *region)
{
  struct eh_region *c;
  int next;

  /* If we've reached the top of the region chain, then we have
     no actions, and require no landing pad.  */
  if (region == NULL)
    return -1;

  switch (region->type)
    {
    case ERT_CLEANUP:
      /* A cleanup adds a zero filter to the beginning of the chain, but
	 there are special cases to look out for.  If there are *only*
	 cleanups along a path, then it compresses to a zero action.
	 Further, if there are multiple cleanups along a path, we only
	 need to represent one of them, as that is enough to trigger
	 entry to the landing pad at runtime.  */
      next = collect_one_action_chain (ar_hash, region->outer);
      if (next <= 0)
	return 0;
      for (c = region->outer; c ; c = c->outer)
	if (c->type == ERT_CLEANUP)
	  return next;
      return add_action_record (ar_hash, 0, next);

    case ERT_TRY:
      /* Process the associated catch regions in reverse order.
	 If there's a catch-all handler, then we don't need to
	 search outer regions.  Use a magic -3 value to record
	 that we haven't done the outer search.  */
      next = -3;
      for (c = region->u.try.last_catch; c ; c = c->u.catch.prev_catch)
	{
	  if (c->u.catch.type_list == NULL)
	    {
	      /* Catch-all handler (empty type list).  Retrieve the
		 filter from the head of the filter list where we have
		 stored it (see assign_filter_values).  */
	      int filter
		= TREE_INT_CST_LOW (TREE_VALUE (c->u.catch.filter_list));

	      next = add_action_record (ar_hash, filter, 0);
	    }
	  else
	    {
	      /* Once the outer search is done, trigger an action record for
		 each filter we have.  */
	      tree flt_node;

	      if (next == -3)
		{
		  next = collect_one_action_chain (ar_hash, region->outer);

		  /* If there is no next action, terminate the chain.  */
		  if (next == -1)
		    next = 0;
		  /* If all outer actions are cleanups or must_not_throw,
		     we'll have no action record for it, since we had wanted
		     to encode these states in the call-site record directly.
		     Add a cleanup action to the chain to catch these.  */
		  else if (next <= 0)
		    next = add_action_record (ar_hash, 0, 0);
		}

	      flt_node = c->u.catch.filter_list;
	      for (; flt_node; flt_node = TREE_CHAIN (flt_node))
		{
		  int filter = TREE_INT_CST_LOW (TREE_VALUE (flt_node));
		  next = add_action_record (ar_hash, filter, next);
		}
	    }
	}
      return next;

    case ERT_ALLOWED_EXCEPTIONS:
      /* An exception specification adds its filter to the
	 beginning of the chain.  */
      next = collect_one_action_chain (ar_hash, region->outer);

      /* If there is no next action, terminate the chain.  */
      if (next == -1)
	next = 0;
      /* If all outer actions are cleanups or must_not_throw,
	 we'll have no action record for it, since we had wanted
	 to encode these states in the call-site record directly.
	 Add a cleanup action to the chain to catch these.  */
      else if (next <= 0)
	next = add_action_record (ar_hash, 0, 0);

      return add_action_record (ar_hash, region->u.allowed.filter, next);

    case ERT_MUST_NOT_THROW:
      /* A must-not-throw region with no inner handlers or cleanups
	 requires no call-site entry.  Note that this differs from
	 the no handler or cleanup case in that we do require an lsda
	 to be generated.  Return a magic -2 value to record this.  */
      return -2;

    case ERT_CATCH:
    case ERT_THROW:
      /* CATCH regions are handled in TRY above.  THROW regions are
	 for optimization information only and produce no output.  */
      return collect_one_action_chain (ar_hash, region->outer);

    default:
      gcc_unreachable ();
    }
}
3256
/* Append a (LANDING_PAD, ACTION) entry to the function's call-site
   table, doubling the GC-allocated array as needed.  Returns the
   call-site number, biased by call_site_base.  */

static int
add_call_site (rtx landing_pad, int action)
{
  struct call_site_record *data = cfun->eh->call_site_data;
  int used = cfun->eh->call_site_data_used;
  int size = cfun->eh->call_site_data_size;

  if (used >= size)
    {
      /* Start at 64 entries; double thereafter.  */
      size = (size ? size * 2 : 64);
      data = ggc_realloc (data, sizeof (*data) * size);
      cfun->eh->call_site_data = data;
      cfun->eh->call_site_data_size = size;
    }

  data[used].landing_pad = landing_pad;
  data[used].action = action;

  cfun->eh->call_site_data_used = used + 1;

  return used + call_site_base;
}
3279
/* Turn REG_EH_REGION notes back into NOTE_INSN_EH_REGION notes.
   The new note numbers will not refer to region numbers, but
   instead to call site entries.

   Walks the insn stream once, coalescing consecutive insns with the
   same (action chain, landing pad) pair into a single call-site range
   bracketed by region-begin/end notes.  Dwarf2 exceptions only; a
   no-op for SJLJ or when the function has no EH regions.  */

void
convert_to_eh_region_ranges (void)
{
  rtx insn, iter, note;
  htab_t ar_hash;
  int last_action = -3;
  rtx last_action_insn = NULL_RTX;
  rtx last_landing_pad = NULL_RTX;
  rtx first_no_action_insn = NULL_RTX;
  int call_site = 0;

  if (USING_SJLJ_EXCEPTIONS || cfun->eh->region_tree == NULL)
    return;

  VARRAY_UCHAR_INIT (cfun->eh->action_record_data, 64, "action_record_data");

  ar_hash = htab_create (31, action_record_hash, action_record_eq, free);

  for (iter = get_insns (); iter ; iter = NEXT_INSN (iter))
    if (INSN_P (iter))
      {
	struct eh_region *region;
	int this_action;
	rtx this_landing_pad;

	/* For a SEQUENCE (delay slots), look at the first real insn.  */
	insn = iter;
	if (NONJUMP_INSN_P (insn)
	    && GET_CODE (PATTERN (insn)) == SEQUENCE)
	  insn = XVECEXP (PATTERN (insn), 0, 0);

	note = find_reg_note (insn, REG_EH_REGION, NULL_RTX);
	if (!note)
	  {
	    /* Only calls (and, with -fnon-call-exceptions, trapping
	       insns) can throw without an explicit region note.  */
	    if (! (CALL_P (insn)
		   || (flag_non_call_exceptions
		       && may_trap_p (PATTERN (insn)))))
	      continue;
	    this_action = -1;
	    region = NULL;
	  }
	else
	  {
	    if (INTVAL (XEXP (note, 0)) <= 0)
	      continue;
	    region = cfun->eh->region_array[INTVAL (XEXP (note, 0))];
	    this_action = collect_one_action_chain (ar_hash, region);
	  }

	/* Existence of catch handlers, or must-not-throw regions
	   implies that an lsda is needed (even if empty).  */
	if (this_action != -1)
	  cfun->uses_eh_lsda = 1;

	/* Delay creation of region notes for no-action regions
	   until we're sure that an lsda will be required.  */
	else if (last_action == -3)
	  {
	    first_no_action_insn = iter;
	    last_action = -1;
	  }

	/* Cleanups and handlers may share action chains but not
	   landing pads.  Collect the landing pad for this region.  */
	if (this_action >= 0)
	  {
	    struct eh_region *o;
	    for (o = region; ! o->landing_pad ; o = o->outer)
	      continue;
	    this_landing_pad = o->landing_pad;
	  }
	else
	  this_landing_pad = NULL_RTX;

	/* Differing actions or landing pads implies a change in call-site
	   info, which implies some EH_REGION note should be emitted.  */
	if (last_action != this_action
	    || last_landing_pad != this_landing_pad)
	  {
	    /* If we'd not seen a previous action (-3) or the previous
	       action was must-not-throw (-2), then we do not need an
	       end note.  */
	    if (last_action >= -1)
	      {
		/* If we delayed the creation of the begin, do it now.  */
		if (first_no_action_insn)
		  {
		    call_site = add_call_site (NULL_RTX, 0);
		    note = emit_note_before (NOTE_INSN_EH_REGION_BEG,
					     first_no_action_insn);
		    NOTE_EH_HANDLER (note) = call_site;
		    first_no_action_insn = NULL_RTX;
		  }

		note = emit_note_after (NOTE_INSN_EH_REGION_END,
					last_action_insn);
		NOTE_EH_HANDLER (note) = call_site;
	      }

	    /* If the new action is must-not-throw, then no region notes
	       are created.  */
	    if (this_action >= -1)
	      {
		call_site = add_call_site (this_landing_pad,
					   this_action < 0 ? 0 : this_action);
		note = emit_note_before (NOTE_INSN_EH_REGION_BEG, iter);
		NOTE_EH_HANDLER (note) = call_site;
	      }

	    last_action = this_action;
	    last_landing_pad = this_landing_pad;
	  }
	last_action_insn = iter;
      }

  /* Close the final open region, if any.  */
  if (last_action >= -1 && ! first_no_action_insn)
    {
      note = emit_note_after (NOTE_INSN_EH_REGION_END, last_action_insn);
      NOTE_EH_HANDLER (note) = call_site;
    }

  htab_delete (ar_hash);
}
da5038a3 3406
df4b504c 3407\f
3408static void
35cb5232 3409push_uleb128 (varray_type *data_area, unsigned int value)
df4b504c 3410{
3411 do
3412 {
3413 unsigned char byte = value & 0x7f;
3414 value >>= 7;
3415 if (value)
3416 byte |= 0x80;
3417 VARRAY_PUSH_UCHAR (*data_area, byte);
3418 }
3419 while (value);
3420}
da5038a3 3421
df4b504c 3422static void
35cb5232 3423push_sleb128 (varray_type *data_area, int value)
df4b504c 3424{
3425 unsigned char byte;
3426 int more;
da5038a3 3427
df4b504c 3428 do
da5038a3 3429 {
df4b504c 3430 byte = value & 0x7f;
3431 value >>= 7;
3432 more = ! ((value == 0 && (byte & 0x40) == 0)
3433 || (value == -1 && (byte & 0x40) != 0));
3434 if (more)
3435 byte |= 0x80;
3436 VARRAY_PUSH_UCHAR (*data_area, byte);
da5038a3 3437 }
df4b504c 3438 while (more);
3439}
da5038a3 3440
df4b504c 3441\f
df4b504c 3442#ifndef HAVE_AS_LEB128
3443static int
35cb5232 3444dw2_size_of_call_site_table (void)
da5038a3 3445{
df4b504c 3446 int n = cfun->eh->call_site_data_used;
3447 int size = n * (4 + 4 + 4);
3448 int i;
da5038a3 3449
df4b504c 3450 for (i = 0; i < n; ++i)
3451 {
3452 struct call_site_record *cs = &cfun->eh->call_site_data[i];
3453 size += size_of_uleb128 (cs->action);
3454 }
8b4f3d64 3455
df4b504c 3456 return size;
3457}
3458
3459static int
35cb5232 3460sjlj_size_of_call_site_table (void)
df4b504c 3461{
3462 int n = cfun->eh->call_site_data_used;
3463 int size = 0;
3464 int i;
cac66fd5 3465
df4b504c 3466 for (i = 0; i < n; ++i)
da5038a3 3467 {
df4b504c 3468 struct call_site_record *cs = &cfun->eh->call_site_data[i];
3469 size += size_of_uleb128 (INTVAL (cs->landing_pad));
3470 size += size_of_uleb128 (cs->action);
da5038a3 3471 }
df4b504c 3472
3473 return size;
3474}
3475#endif
3476
/* Emit the dwarf2 call-site table to the assembly file: per entry, the
   region start, length, landing pad (all relative to the function
   begin label) and the action record index.  Advances call_site_base
   for the next function.  */

static void
dw2_output_call_site_table (void)
{
  int n = cfun->eh->call_site_data_used;
  int i;

  for (i = 0; i < n; ++i)
    {
      struct call_site_record *cs = &cfun->eh->call_site_data[i];
      char reg_start_lab[32];
      char reg_end_lab[32];
      char landing_pad_lab[32];

      ASM_GENERATE_INTERNAL_LABEL (reg_start_lab, "LEHB", call_site_base + i);
      ASM_GENERATE_INTERNAL_LABEL (reg_end_lab, "LEHE", call_site_base + i);

      if (cs->landing_pad)
	ASM_GENERATE_INTERNAL_LABEL (landing_pad_lab, "L",
				     CODE_LABEL_NUMBER (cs->landing_pad));

      /* ??? Perhaps use insn length scaling if the assembler supports
	 generic arithmetic.  */
      /* ??? Perhaps use attr_length to choose data1 or data2 instead of
	 data4 if the function is small enough.  */
#ifdef HAVE_AS_LEB128
      dw2_asm_output_delta_uleb128 (reg_start_lab,
				    current_function_func_begin_label,
				    "region %d start", i);
      dw2_asm_output_delta_uleb128 (reg_end_lab, reg_start_lab,
				    "length");
      if (cs->landing_pad)
	dw2_asm_output_delta_uleb128 (landing_pad_lab,
				      current_function_func_begin_label,
				      "landing pad");
      else
	dw2_asm_output_data_uleb128 (0, "landing pad");
#else
      dw2_asm_output_delta (4, reg_start_lab,
			    current_function_func_begin_label,
			    "region %d start", i);
      dw2_asm_output_delta (4, reg_end_lab, reg_start_lab, "length");
      if (cs->landing_pad)
	dw2_asm_output_delta (4, landing_pad_lab,
			      current_function_func_begin_label,
			      "landing pad");
      else
	dw2_asm_output_data (4, 0, "landing pad");
#endif
      dw2_asm_output_data_uleb128 (cs->action, "action");
    }

  call_site_base += n;
}
3530
3531static void
35cb5232 3532sjlj_output_call_site_table (void)
df4b504c 3533{
3534 int n = cfun->eh->call_site_data_used;
3535 int i;
da5038a3 3536
df4b504c 3537 for (i = 0; i < n; ++i)
da5038a3 3538 {
df4b504c 3539 struct call_site_record *cs = &cfun->eh->call_site_data[i];
b9cf3f63 3540
df4b504c 3541 dw2_asm_output_data_uleb128 (INTVAL (cs->landing_pad),
3542 "region %d landing pad", i);
3543 dw2_asm_output_data_uleb128 (cs->action, "action");
3544 }
b9cf3f63 3545
df4b504c 3546 call_site_base += n;
da5038a3 3547}
3548
/* Tell assembler to switch to the section for the exception handling
   table.  */

void
default_exception_section (void)
{
  if (targetm.have_named_sections)
    {
      int flags;
#ifdef HAVE_LD_RO_RW_SECTION_MIXING
      int tt_format = ASM_PREFERRED_EH_DATA_FORMAT (/*code=*/0, /*global=*/1);

      /* The table can be read-only unless PIC forces runtime-relocated
	 (absolute or aligned) type pointers into it.  */
      flags = (! flag_pic
	       || ((tt_format & 0x70) != DW_EH_PE_absptr
		   && (tt_format & 0x70) != DW_EH_PE_aligned))
	      ? 0 : SECTION_WRITE;
#else
      flags = SECTION_WRITE;
#endif
      named_section_flags (".gcc_except_table", flags);
    }
  else if (flag_pic)
    data_section ();
  else
    readonly_data_section ();
}
3575
/* Output the language-specific data area (LSDA) for the current
   function: header, call-site table, action record table, @TType
   (runtime type) table and exception specification table.  Emits
   nothing unless cfun->uses_eh_lsda is set.  */

void
output_function_exception_table (void)
{
  int tt_format, cs_format, lp_format, i, n;
#ifdef HAVE_AS_LEB128
  char ttype_label[32];
  char cs_after_size_label[32];
  char cs_end_label[32];
#else
  int call_site_len;
#endif
  int have_tt_data;
  int tt_format_size = 0;

  /* Not all functions need anything.  */
  if (! cfun->uses_eh_lsda)
    return;

#ifdef TARGET_UNWIND_INFO
  /* TODO: Move this into target file.  */
  fputs ("\t.personality\t", asm_out_file);
  output_addr_const (asm_out_file, eh_personality_libfunc);
  fputs ("\n\t.handlerdata\n", asm_out_file);
  /* Note that varasm still thinks we're in the function's code section.
     The ".endp" directive that will immediately follow will take us back.  */
#else
  targetm.asm_out.exception_section ();
#endif

  have_tt_data = (VARRAY_ACTIVE_SIZE (cfun->eh->ttype_data) > 0
		  || VARRAY_ACTIVE_SIZE (cfun->eh->ehspec_data) > 0);

  /* Indicate the format of the @TType entries.  */
  if (! have_tt_data)
    tt_format = DW_EH_PE_omit;
  else
    {
      tt_format = ASM_PREFERRED_EH_DATA_FORMAT (/*code=*/0, /*global=*/1);
#ifdef HAVE_AS_LEB128
      ASM_GENERATE_INTERNAL_LABEL (ttype_label, "LLSDATT",
				   current_function_funcdef_no);
#endif
      tt_format_size = size_of_encoded_value (tt_format);

      assemble_align (tt_format_size * BITS_PER_UNIT);
    }

  targetm.asm_out.internal_label (asm_out_file, "LLSDA",
				  current_function_funcdef_no);

  /* The LSDA header.  */

  /* Indicate the format of the landing pad start pointer.  An omitted
     field implies @LPStart == @Start.  */
  /* Currently we always put @LPStart == @Start.  This field would
     be most useful in moving the landing pads completely out of
     line to another section, but it could also be used to minimize
     the size of uleb128 landing pad offsets.  */
  lp_format = DW_EH_PE_omit;
  dw2_asm_output_data (1, lp_format, "@LPStart format (%s)",
		       eh_data_format_name (lp_format));

  /* @LPStart pointer would go here.  */

  dw2_asm_output_data (1, tt_format, "@TType format (%s)",
		       eh_data_format_name (tt_format));

#ifndef HAVE_AS_LEB128
  if (USING_SJLJ_EXCEPTIONS)
    call_site_len = sjlj_size_of_call_site_table ();
  else
    call_site_len = dw2_size_of_call_site_table ();
#endif

  /* A pc-relative 4-byte displacement to the @TType data.  */
  if (have_tt_data)
    {
#ifdef HAVE_AS_LEB128
      char ttype_after_disp_label[32];
      ASM_GENERATE_INTERNAL_LABEL (ttype_after_disp_label, "LLSDATTD",
				   current_function_funcdef_no);
      dw2_asm_output_delta_uleb128 (ttype_label, ttype_after_disp_label,
				    "@TType base offset");
      ASM_OUTPUT_LABEL (asm_out_file, ttype_after_disp_label);
#else
      /* Ug.  Alignment queers things.  The displacement's own encoded
	 size changes the padding needed to align the @TType data, which
	 in turn can change the displacement -- iterate to a fixpoint.  */
      unsigned int before_disp, after_disp, last_disp, disp;

      before_disp = 1 + 1;
      after_disp = (1 + size_of_uleb128 (call_site_len)
		    + call_site_len
		    + VARRAY_ACTIVE_SIZE (cfun->eh->action_record_data)
		    + (VARRAY_ACTIVE_SIZE (cfun->eh->ttype_data)
		       * tt_format_size));

      disp = after_disp;
      do
	{
	  unsigned int disp_size, pad;

	  last_disp = disp;
	  disp_size = size_of_uleb128 (disp);
	  pad = before_disp + disp_size + after_disp;
	  if (pad % tt_format_size)
	    pad = tt_format_size - (pad % tt_format_size);
	  else
	    pad = 0;
	  disp = after_disp + pad;
	}
      while (disp != last_disp);

      dw2_asm_output_data_uleb128 (disp, "@TType base offset");
#endif
    }

  /* Indicate the format of the call-site offsets.  */
#ifdef HAVE_AS_LEB128
  cs_format = DW_EH_PE_uleb128;
#else
  cs_format = DW_EH_PE_udata4;
#endif
  dw2_asm_output_data (1, cs_format, "call-site format (%s)",
		       eh_data_format_name (cs_format));

#ifdef HAVE_AS_LEB128
  ASM_GENERATE_INTERNAL_LABEL (cs_after_size_label, "LLSDACSB",
			       current_function_funcdef_no);
  ASM_GENERATE_INTERNAL_LABEL (cs_end_label, "LLSDACSE",
			       current_function_funcdef_no);
  dw2_asm_output_delta_uleb128 (cs_end_label, cs_after_size_label,
				"Call-site table length");
  ASM_OUTPUT_LABEL (asm_out_file, cs_after_size_label);
  if (USING_SJLJ_EXCEPTIONS)
    sjlj_output_call_site_table ();
  else
    dw2_output_call_site_table ();
  ASM_OUTPUT_LABEL (asm_out_file, cs_end_label);
#else
  dw2_asm_output_data_uleb128 (call_site_len,"Call-site table length");
  if (USING_SJLJ_EXCEPTIONS)
    sjlj_output_call_site_table ();
  else
    dw2_output_call_site_table ();
#endif

  /* ??? Decode and interpret the data for flag_debug_asm.  */
  n = VARRAY_ACTIVE_SIZE (cfun->eh->action_record_data);
  for (i = 0; i < n; ++i)
    dw2_asm_output_data (1, VARRAY_UCHAR (cfun->eh->action_record_data, i),
			 (i ? NULL : "Action record table"));

  if (have_tt_data)
    assemble_align (tt_format_size * BITS_PER_UNIT);

  /* Emit the @TType table in reverse, so filter N indexes backward
     from the end of the table.  */
  i = VARRAY_ACTIVE_SIZE (cfun->eh->ttype_data);
  while (i-- > 0)
    {
      tree type = VARRAY_TREE (cfun->eh->ttype_data, i);
      rtx value;

      if (type == NULL_TREE)
	value = const0_rtx;
      else
	{
	  struct cgraph_varpool_node *node;

	  type = lookup_type_for_runtime (type);
	  value = expand_expr (type, NULL_RTX, VOIDmode, EXPAND_INITIALIZER);

	  /* Let cgraph know that the rtti decl is used.  Not all of the
	     paths below go through assemble_integer, which would take
	     care of this for us.  */
	  STRIP_NOPS (type);
	  if (TREE_CODE (type) == ADDR_EXPR)
	    {
	      type = TREE_OPERAND (type, 0);
	      if (TREE_CODE (type) == VAR_DECL)
		{
		  node = cgraph_varpool_node (type);
		  if (node)
		    cgraph_varpool_mark_needed_node (node);
		}
	    }
	  else
	    gcc_assert (TREE_CODE (type) == INTEGER_CST);
	}

      if (tt_format == DW_EH_PE_absptr || tt_format == DW_EH_PE_aligned)
	assemble_integer (value, tt_format_size,
			  tt_format_size * BITS_PER_UNIT, 1);
      else
	dw2_asm_output_encoded_addr_rtx (tt_format, value, NULL);
    }

#ifdef HAVE_AS_LEB128
  if (have_tt_data)
    ASM_OUTPUT_LABEL (asm_out_file, ttype_label);
#endif

  /* ??? Decode and interpret the data for flag_debug_asm.  */
  n = VARRAY_ACTIVE_SIZE (cfun->eh->ehspec_data);
  for (i = 0; i < n; ++i)
    dw2_asm_output_data (1, VARRAY_UCHAR (cfun->eh->ehspec_data, i),
			 (i ? NULL : "Exception specification table"));

  /* Return to the function's code section.  */
  function_section (current_function_decl);
}
1f3233d1 3783
3784#include "gt-except.h"