]> git.ipfire.org Git - thirdparty/gcc.git/blame - gcc/except.c
backport: basic-block.h: Include vec.h, errors.h.
[thirdparty/gcc.git] / gcc / except.c
CommitLineData
12670d88 1/* Implements exception handling.
3f2c5d1a 2 Copyright (C) 1989, 1992, 1993, 1994, 1995, 1996, 1997, 1998,
3897f229 3 1999, 2000, 2001, 2002, 2003, 2004 Free Software Foundation, Inc.
4956d07c
MS
4 Contributed by Mike Stump <mrs@cygnus.com>.
5
1322177d 6This file is part of GCC.
4956d07c 7
1322177d
LB
8GCC is free software; you can redistribute it and/or modify it under
9the terms of the GNU General Public License as published by the Free
10Software Foundation; either version 2, or (at your option) any later
11version.
4956d07c 12
1322177d
LB
13GCC is distributed in the hope that it will be useful, but WITHOUT ANY
14WARRANTY; without even the implied warranty of MERCHANTABILITY or
15FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
16for more details.
4956d07c
MS
17
18You should have received a copy of the GNU General Public License
1322177d
LB
19along with GCC; see the file COPYING. If not, write to the Free
20Software Foundation, 59 Temple Place - Suite 330, Boston, MA
2102111-1307, USA. */
4956d07c
MS
22
23
12670d88
RK
24/* An exception is an event that can be signaled from within a
25 function. This event can then be "caught" or "trapped" by the
26 callers of this function. This potentially allows program flow to
956d6950 27 be transferred to any arbitrary code associated with a function call
12670d88
RK
28 several levels up the stack.
29
30 The intended use for this mechanism is for signaling "exceptional
31 events" in an out-of-band fashion, hence its name. The C++ language
32 (and many other OO-styled or functional languages) practically
33 requires such a mechanism, as otherwise it becomes very difficult
34 or even impossible to signal failure conditions in complex
35 situations. The traditional C++ example is when an error occurs in
36 the process of constructing an object; without such a mechanism, it
37 is impossible to signal that the error occurs without adding global
38 state variables and error checks around every object construction.
39
40 The act of causing this event to occur is referred to as "throwing
41 an exception". (Alternate terms include "raising an exception" or
42 "signaling an exception".) The term "throw" is used because control
43 is returned to the callers of the function that is signaling the
44 exception, and thus there is the concept of "throwing" the
45 exception up the call stack.
46
52a11cbf 47 [ Add updated documentation on how to use this. ] */
4956d07c
MS
48
49
50#include "config.h"
670ee920 51#include "system.h"
4977bab6
ZW
52#include "coretypes.h"
53#include "tm.h"
4956d07c
MS
54#include "rtl.h"
55#include "tree.h"
56#include "flags.h"
4956d07c 57#include "function.h"
4956d07c 58#include "expr.h"
e78d8e51 59#include "libfuncs.h"
4956d07c 60#include "insn-config.h"
52a11cbf
RH
61#include "except.h"
62#include "integrate.h"
63#include "hard-reg-set.h"
64#include "basic-block.h"
4956d07c 65#include "output.h"
52a11cbf
RH
66#include "dwarf2asm.h"
67#include "dwarf2out.h"
2a1ee410 68#include "dwarf2.h"
10f0ad3d 69#include "toplev.h"
52a11cbf 70#include "hashtab.h"
2b12ffe0 71#include "intl.h"
87ff9c8e 72#include "ggc.h"
b1474bb7 73#include "tm_p.h"
07c9d2eb 74#include "target.h"
f1e639b1 75#include "langhooks.h"
dd07abd7 76#include "cgraph.h"
52a11cbf
RH
77
78/* Provide defaults for stuff that may not be defined when using
79 sjlj exceptions. */
52a11cbf
RH
80#ifndef EH_RETURN_DATA_REGNO
81#define EH_RETURN_DATA_REGNO(N) INVALID_REGNUM
461fc4de
RH
82#endif
83
27a36778 84
52a11cbf
RH
/* Language-specific callbacks installed by the front end.  */

/* Protect cleanup actions with must-not-throw regions, with a call
   to the given failure handler.  */
tree (*lang_protect_cleanup_actions) (void);

/* Return true if type A catches type B.  */
int (*lang_eh_type_covers) (tree a, tree b);

/* Map a type to a runtime object to match type.  */
tree (*lang_eh_runtime_type) (tree);
4956d07c 94
6a58eee9
RH
/* A hash table of label to region number.  */

struct ehl_map_entry GTY(())
{
  /* The exception handler label (see add_ehl_entry).  */
  rtx label;
  /* The region whose handler the label belongs to; may be NULL for
     the sjlj return label.  */
  struct eh_region *region;
};
102
/* Base value added to call-site action indices.  */
static GTY(()) int call_site_base;

/* Map from a type to its runtime descriptor; populated by
   add_type_for_runtime, queried by lookup_type_for_runtime.  */
static GTY ((param_is (union tree_node)))
  htab_t type_to_runtime_map;

/* Describe the SjLj_Function_Context structure.  */
static GTY(()) tree sjlj_fc_type_node;

/* Byte offsets of the interesting fields of SjLj_Function_Context,
   cached by init_eh for easy access from rtl.  */
static int sjlj_fc_call_site_ofs;
static int sjlj_fc_data_ofs;
static int sjlj_fc_personality_ofs;
static int sjlj_fc_lsda_ofs;
static int sjlj_fc_jbuf_ofs;
114\f
/* Describes one exception region.  */
struct eh_region GTY(())
{
  /* The immediately surrounding region.  */
  struct eh_region *outer;

  /* The list of immediately contained regions.  */
  struct eh_region *inner;
  struct eh_region *next_peer;

  /* An identifier for this region.  */
  int region_number;

  /* When a region is deleted, its parents inherit the REG_EH_REGION
     numbers already assigned.  */
  bitmap aka;

  /* Each region does exactly one thing.  */
  enum eh_region_type
  {
    ERT_UNKNOWN = 0,
    ERT_CLEANUP,
    ERT_TRY,
    ERT_CATCH,
    ERT_ALLOWED_EXCEPTIONS,
    ERT_MUST_NOT_THROW,
    ERT_THROW,
    ERT_FIXUP
  } type;

  /* Holds the action to perform based on the preceding type.  */
  union eh_region_u {
    /* A list of catch blocks, a surrounding try block,
       and the label for continuing after a catch.  */
    struct eh_region_u_try {
      struct eh_region *catch;
      struct eh_region *last_catch;
      struct eh_region *prev_try;
      rtx continue_label;
    } GTY ((tag ("ERT_TRY"))) try;

    /* The list through the catch handlers, the list of type objects
       matched, and the list of associated filters.  */
    struct eh_region_u_catch {
      struct eh_region *next_catch;
      struct eh_region *prev_catch;
      tree type_list;
      tree filter_list;
    } GTY ((tag ("ERT_CATCH"))) catch;

    /* A tree_list of allowed types.  */
    struct eh_region_u_allowed {
      tree type_list;
      int filter;
    } GTY ((tag ("ERT_ALLOWED_EXCEPTIONS"))) allowed;

    /* The type given by a call to "throw foo();", or discovered
       for a throw.  */
    struct eh_region_u_throw {
      tree type;
    } GTY ((tag ("ERT_THROW"))) throw;

    /* Retain the cleanup expression even after expansion so that
       we can match up fixup regions.  */
    struct eh_region_u_cleanup {
      tree exp;
      struct eh_region *prev_try;
    } GTY ((tag ("ERT_CLEANUP"))) cleanup;

    /* The real region (by expression and by pointer) that fixup code
       should live in.  */
    struct eh_region_u_fixup {
      tree cleanup_exp;
      struct eh_region *real_region;
      bool resolved;
    } GTY ((tag ("ERT_FIXUP"))) fixup;
  } GTY ((desc ("%0.type"))) u;

  /* Entry point for this region's handler before landing pads are built.  */
  rtx label;
  tree tree_label;

  /* Entry point for this region's handler from the runtime eh library.  */
  rtx landing_pad;

  /* Entry point for this region's handler from an inner region.  */
  rtx post_landing_pad;

  /* The RESX insn for handing off control to the next outermost handler,
     if appropriate.  */
  rtx resume;

  /* True if something in this region may throw.  */
  unsigned may_contain_throw : 1;
};
71038426 210
e2500fed
GK
/* One entry of the call-site table: the landing pad for a call site
   and the action to take there.  */
struct call_site_record GTY(())
{
  rtx landing_pad;
  int action;
};
216
/* Used to save exception status for each function.  */
struct eh_status GTY(())
{
  /* The tree of all regions for this function.  */
  struct eh_region *region_tree;

  /* The same information as an indexable array.  */
  struct eh_region ** GTY ((length ("%h.last_region_number"))) region_array;

  /* The most recently open region.  */
  struct eh_region *cur_region;

  /* This is the region for which we are processing catch blocks.  */
  struct eh_region *try_region;

  /* Pseudo-registers for the dispatch filter value and the exception
     object pointer, created lazily (see get_exception_filter and
     get_exception_pointer).  */
  rtx filter;
  rtx exc_ptr;

  int built_landing_pads;
  int last_region_number;

  varray_type ttype_data;
  varray_type ehspec_data;
  varray_type action_record_data;

  htab_t GTY ((param_is (struct ehl_map_entry))) exception_handler_label_map;

  struct call_site_record * GTY ((length ("%h.call_site_data_used")))
    call_site_data;
  int call_site_data_used;
  int call_site_data_size;

  rtx ehr_stackadj;
  rtx ehr_handler;
  rtx ehr_label;

  rtx sjlj_fc;
  rtx sjlj_exit_after;
};
e6cfb550 256
52a11cbf 257\f
502b8322
AJ
258static int t2r_eq (const void *, const void *);
259static hashval_t t2r_hash (const void *);
260static void add_type_for_runtime (tree);
261static tree lookup_type_for_runtime (tree);
262
502b8322
AJ
263static void resolve_fixup_regions (void);
264static void remove_fixup_regions (void);
265static void remove_unreachable_regions (rtx);
266static void convert_from_eh_region_ranges_1 (rtx *, int *, int);
267
268static struct eh_region *duplicate_eh_region_1 (struct eh_region *,
269 struct inline_remap *);
270static void duplicate_eh_region_2 (struct eh_region *, struct eh_region **);
271static int ttypes_filter_eq (const void *, const void *);
272static hashval_t ttypes_filter_hash (const void *);
273static int ehspec_filter_eq (const void *, const void *);
274static hashval_t ehspec_filter_hash (const void *);
275static int add_ttypes_entry (htab_t, tree);
276static int add_ehspec_entry (htab_t, htab_t, tree);
277static void assign_filter_values (void);
278static void build_post_landing_pads (void);
279static void connect_post_landing_pads (void);
280static void dw2_build_landing_pads (void);
52a11cbf
RH
281
282struct sjlj_lp_info;
502b8322
AJ
283static bool sjlj_find_directly_reachable_regions (struct sjlj_lp_info *);
284static void sjlj_assign_call_site_values (rtx, struct sjlj_lp_info *);
285static void sjlj_mark_call_sites (struct sjlj_lp_info *);
286static void sjlj_emit_function_enter (rtx);
287static void sjlj_emit_function_exit (void);
288static void sjlj_emit_dispatch_table (rtx, struct sjlj_lp_info *);
289static void sjlj_build_landing_pads (void);
290
291static hashval_t ehl_hash (const void *);
292static int ehl_eq (const void *, const void *);
293static void add_ehl_entry (rtx, struct eh_region *);
294static void remove_exception_handler_label (rtx);
295static void remove_eh_handler (struct eh_region *);
296static int for_each_eh_label_1 (void **, void *);
52a11cbf 297
52a11cbf
RH
/* The return value of reachable_next_level.  */
enum reachable_code
{
  /* The given exception is not processed by the given region.  */
  RNL_NOT_CAUGHT,
  /* The given exception may need processing by the given region.  */
  RNL_MAYBE_CAUGHT,
  /* The given exception is completely processed by the given region.  */
  RNL_CAUGHT,
  /* The given exception is completely processed by the runtime.  */
  RNL_BLOCKED
};
e6cfb550 310
6de9cd9a 311struct reachable_info;
502b8322
AJ
312static enum reachable_code reachable_next_level (struct eh_region *, tree,
313 struct reachable_info *);
314
315static int action_record_eq (const void *, const void *);
316static hashval_t action_record_hash (const void *);
317static int add_action_record (htab_t, int, int);
318static int collect_one_action_chain (htab_t, struct eh_region *);
319static int add_call_site (rtx, int);
320
321static void push_uleb128 (varray_type *, unsigned int);
322static void push_sleb128 (varray_type *, int);
52a11cbf 323#ifndef HAVE_AS_LEB128
502b8322
AJ
324static int dw2_size_of_call_site_table (void);
325static int sjlj_size_of_call_site_table (void);
52a11cbf 326#endif
502b8322
AJ
327static void dw2_output_call_site_table (void);
328static void sjlj_output_call_site_table (void);
e6cfb550 329
52a11cbf
RH
330\f
331/* Routine to see if exception handling is turned on.
cc2902df 332 DO_WARN is nonzero if we want to inform the user that exception
3f2c5d1a 333 handling is turned off.
4956d07c 334
52a11cbf
RH
335 This is used to ensure that -fexceptions has been specified if the
336 compiler tries to use any exception-specific functions. */
4956d07c 337
52a11cbf 338int
502b8322 339doing_eh (int do_warn)
52a11cbf
RH
340{
341 if (! flag_exceptions)
342 {
343 static int warned = 0;
344 if (! warned && do_warn)
345 {
346 error ("exception handling disabled, use -fexceptions to enable");
347 warned = 1;
348 }
349 return 0;
350 }
351 return 1;
4956d07c
MS
352}
353
52a11cbf
RH
354\f
355void
502b8322 356init_eh (void)
4956d07c 357{
52a11cbf
RH
358 if (! flag_exceptions)
359 return;
4956d07c 360
e2500fed 361 type_to_runtime_map = htab_create_ggc (31, t2r_hash, t2r_eq, NULL);
4956d07c 362
52a11cbf
RH
363 /* Create the SjLj_Function_Context structure. This should match
364 the definition in unwind-sjlj.c. */
365 if (USING_SJLJ_EXCEPTIONS)
366 {
367 tree f_jbuf, f_per, f_lsda, f_prev, f_cs, f_data, tmp;
4956d07c 368
ae2bcd98 369 sjlj_fc_type_node = lang_hooks.types.make_type (RECORD_TYPE);
9a0d1e1b 370
52a11cbf
RH
371 f_prev = build_decl (FIELD_DECL, get_identifier ("__prev"),
372 build_pointer_type (sjlj_fc_type_node));
373 DECL_FIELD_CONTEXT (f_prev) = sjlj_fc_type_node;
9a0d1e1b 374
52a11cbf
RH
375 f_cs = build_decl (FIELD_DECL, get_identifier ("__call_site"),
376 integer_type_node);
377 DECL_FIELD_CONTEXT (f_cs) = sjlj_fc_type_node;
4956d07c 378
7d60be94 379 tmp = build_index_type (build_int_cst (NULL_TREE, 4 - 1));
ae2bcd98 380 tmp = build_array_type (lang_hooks.types.type_for_mode (word_mode, 1),
b0c48229 381 tmp);
52a11cbf
RH
382 f_data = build_decl (FIELD_DECL, get_identifier ("__data"), tmp);
383 DECL_FIELD_CONTEXT (f_data) = sjlj_fc_type_node;
9a0d1e1b 384
52a11cbf
RH
385 f_per = build_decl (FIELD_DECL, get_identifier ("__personality"),
386 ptr_type_node);
387 DECL_FIELD_CONTEXT (f_per) = sjlj_fc_type_node;
4956d07c 388
52a11cbf
RH
389 f_lsda = build_decl (FIELD_DECL, get_identifier ("__lsda"),
390 ptr_type_node);
391 DECL_FIELD_CONTEXT (f_lsda) = sjlj_fc_type_node;
6814a8a0 392
52a11cbf
RH
393#ifdef DONT_USE_BUILTIN_SETJMP
394#ifdef JMP_BUF_SIZE
7d60be94 395 tmp = build_int_cst (NULL_TREE, JMP_BUF_SIZE - 1);
52a11cbf
RH
396#else
397 /* Should be large enough for most systems, if it is not,
398 JMP_BUF_SIZE should be defined with the proper value. It will
399 also tend to be larger than necessary for most systems, a more
400 optimal port will define JMP_BUF_SIZE. */
7d60be94 401 tmp = build_int_cst (NULL_TREE, FIRST_PSEUDO_REGISTER + 2 - 1);
52a11cbf
RH
402#endif
403#else
83810fcb 404 /* builtin_setjmp takes a pointer to 5 words. */
7d60be94 405 tmp = build_int_cst (NULL_TREE, 5 * BITS_PER_WORD / POINTER_SIZE - 1);
52a11cbf
RH
406#endif
407 tmp = build_index_type (tmp);
408 tmp = build_array_type (ptr_type_node, tmp);
409 f_jbuf = build_decl (FIELD_DECL, get_identifier ("__jbuf"), tmp);
410#ifdef DONT_USE_BUILTIN_SETJMP
411 /* We don't know what the alignment requirements of the
412 runtime's jmp_buf has. Overestimate. */
413 DECL_ALIGN (f_jbuf) = BIGGEST_ALIGNMENT;
414 DECL_USER_ALIGN (f_jbuf) = 1;
415#endif
416 DECL_FIELD_CONTEXT (f_jbuf) = sjlj_fc_type_node;
417
418 TYPE_FIELDS (sjlj_fc_type_node) = f_prev;
419 TREE_CHAIN (f_prev) = f_cs;
420 TREE_CHAIN (f_cs) = f_data;
421 TREE_CHAIN (f_data) = f_per;
422 TREE_CHAIN (f_per) = f_lsda;
423 TREE_CHAIN (f_lsda) = f_jbuf;
424
425 layout_type (sjlj_fc_type_node);
426
427 /* Cache the interesting field offsets so that we have
428 easy access from rtl. */
429 sjlj_fc_call_site_ofs
430 = (tree_low_cst (DECL_FIELD_OFFSET (f_cs), 1)
431 + tree_low_cst (DECL_FIELD_BIT_OFFSET (f_cs), 1) / BITS_PER_UNIT);
432 sjlj_fc_data_ofs
433 = (tree_low_cst (DECL_FIELD_OFFSET (f_data), 1)
434 + tree_low_cst (DECL_FIELD_BIT_OFFSET (f_data), 1) / BITS_PER_UNIT);
435 sjlj_fc_personality_ofs
436 = (tree_low_cst (DECL_FIELD_OFFSET (f_per), 1)
437 + tree_low_cst (DECL_FIELD_BIT_OFFSET (f_per), 1) / BITS_PER_UNIT);
438 sjlj_fc_lsda_ofs
439 = (tree_low_cst (DECL_FIELD_OFFSET (f_lsda), 1)
440 + tree_low_cst (DECL_FIELD_BIT_OFFSET (f_lsda), 1) / BITS_PER_UNIT);
441 sjlj_fc_jbuf_ofs
442 = (tree_low_cst (DECL_FIELD_OFFSET (f_jbuf), 1)
443 + tree_low_cst (DECL_FIELD_BIT_OFFSET (f_jbuf), 1) / BITS_PER_UNIT);
444 }
4956d07c
MS
445}
446
52a11cbf 447void
502b8322 448init_eh_for_function (void)
4956d07c 449{
703ad42b 450 cfun->eh = ggc_alloc_cleared (sizeof (struct eh_status));
6a58eee9 451}
52a11cbf 452\f
6de9cd9a
DN
453/* Routines to generate the exception tree somewhat directly.
454 These are used from tree-eh.c when processing exception related
455 nodes during tree optimization. */
456
457static struct eh_region *
458gen_eh_region (enum eh_region_type type, struct eh_region *outer)
459{
460 struct eh_region *new;
461
462#ifdef ENABLE_CHECKING
5b0264cb 463 gcc_assert (doing_eh (0));
6de9cd9a
DN
464#endif
465
466 /* Insert a new blank region as a leaf in the tree. */
467 new = ggc_alloc_cleared (sizeof (*new));
468 new->type = type;
469 new->outer = outer;
470 if (outer)
471 {
472 new->next_peer = outer->inner;
473 outer->inner = new;
474 }
475 else
476 {
477 new->next_peer = cfun->eh->region_tree;
478 cfun->eh->region_tree = new;
479 }
480
481 new->region_number = ++cfun->eh->last_region_number;
482
483 return new;
484}
485
486struct eh_region *
487gen_eh_region_cleanup (struct eh_region *outer, struct eh_region *prev_try)
488{
489 struct eh_region *cleanup = gen_eh_region (ERT_CLEANUP, outer);
490 cleanup->u.cleanup.prev_try = prev_try;
491 return cleanup;
492}
493
494struct eh_region *
495gen_eh_region_try (struct eh_region *outer)
496{
497 return gen_eh_region (ERT_TRY, outer);
498}
499
500struct eh_region *
501gen_eh_region_catch (struct eh_region *t, tree type_or_list)
502{
503 struct eh_region *c, *l;
504 tree type_list, type_node;
505
506 /* Ensure to always end up with a type list to normalize further
507 processing, then register each type against the runtime types map. */
508 type_list = type_or_list;
509 if (type_or_list)
510 {
511 if (TREE_CODE (type_or_list) != TREE_LIST)
512 type_list = tree_cons (NULL_TREE, type_or_list, NULL_TREE);
513
514 type_node = type_list;
515 for (; type_node; type_node = TREE_CHAIN (type_node))
516 add_type_for_runtime (TREE_VALUE (type_node));
517 }
518
519 c = gen_eh_region (ERT_CATCH, t->outer);
520 c->u.catch.type_list = type_list;
521 l = t->u.try.last_catch;
522 c->u.catch.prev_catch = l;
523 if (l)
524 l->u.catch.next_catch = c;
525 else
526 t->u.try.catch = c;
527 t->u.try.last_catch = c;
528
529 return c;
530}
531
532struct eh_region *
533gen_eh_region_allowed (struct eh_region *outer, tree allowed)
534{
535 struct eh_region *region = gen_eh_region (ERT_ALLOWED_EXCEPTIONS, outer);
536 region->u.allowed.type_list = allowed;
537
538 for (; allowed ; allowed = TREE_CHAIN (allowed))
539 add_type_for_runtime (TREE_VALUE (allowed));
540
541 return region;
542}
543
544struct eh_region *
545gen_eh_region_must_not_throw (struct eh_region *outer)
546{
547 return gen_eh_region (ERT_MUST_NOT_THROW, outer);
548}
549
550int
551get_eh_region_number (struct eh_region *region)
552{
553 return region->region_number;
554}
555
556bool
557get_eh_region_may_contain_throw (struct eh_region *region)
558{
559 return region->may_contain_throw;
560}
561
562tree
563get_eh_region_tree_label (struct eh_region *region)
564{
565 return region->tree_label;
566}
567
568void
569set_eh_region_tree_label (struct eh_region *region, tree lab)
570{
571 region->tree_label = lab;
572}
573\f
6de9cd9a
DN
574void
575expand_resx_expr (tree exp)
576{
577 int region_nr = TREE_INT_CST_LOW (TREE_OPERAND (exp, 0));
578 struct eh_region *reg = cfun->eh->region_array[region_nr];
579
580 reg->resume = emit_jump_insn (gen_rtx_RESX (VOIDmode, region_nr));
581 emit_barrier ();
582}
583
b2dd096b
MM
584/* Note that the current EH region (if any) may contain a throw, or a
585 call to a function which itself may contain a throw. */
586
587void
6de9cd9a 588note_eh_region_may_contain_throw (struct eh_region *region)
b2dd096b 589{
b2dd096b
MM
590 while (region && !region->may_contain_throw)
591 {
592 region->may_contain_throw = 1;
593 region = region->outer;
594 }
595}
596
6de9cd9a
DN
597void
598note_current_region_may_contain_throw (void)
599{
600 note_eh_region_may_contain_throw (cfun->eh->cur_region);
601}
602
603
47c84870 604/* Return an rtl expression for a pointer to the exception object
52a11cbf 605 within a handler. */
4956d07c
MS
606
607rtx
502b8322 608get_exception_pointer (struct function *fun)
4956d07c 609{
86c99549
RH
610 rtx exc_ptr = fun->eh->exc_ptr;
611 if (fun == cfun && ! exc_ptr)
52a11cbf 612 {
26b10ae0 613 exc_ptr = gen_reg_rtx (ptr_mode);
86c99549 614 fun->eh->exc_ptr = exc_ptr;
52a11cbf
RH
615 }
616 return exc_ptr;
617}
4956d07c 618
47c84870
JM
619/* Return an rtl expression for the exception dispatch filter
620 within a handler. */
621
6de9cd9a 622rtx
502b8322 623get_exception_filter (struct function *fun)
47c84870 624{
86c99549
RH
625 rtx filter = fun->eh->filter;
626 if (fun == cfun && ! filter)
47c84870 627 {
93f90be6 628 filter = gen_reg_rtx (targetm.eh_return_filter_mode ());
86c99549 629 fun->eh->filter = filter;
47c84870
JM
630 }
631 return filter;
632}
52a11cbf
RH
633\f
634/* This section is for the exception handling specific optimization pass. */
154bba13 635
ac45df5d 636/* Random access the exception region tree. */
154bba13 637
6de9cd9a 638void
502b8322 639collect_eh_region_array (void)
154bba13 640{
52a11cbf 641 struct eh_region **array, *i;
154bba13 642
52a11cbf
RH
643 i = cfun->eh->region_tree;
644 if (! i)
645 return;
154bba13 646
e2500fed
GK
647 array = ggc_alloc_cleared ((cfun->eh->last_region_number + 1)
648 * sizeof (*array));
52a11cbf 649 cfun->eh->region_array = array;
154bba13 650
52a11cbf
RH
651 while (1)
652 {
653 array[i->region_number] = i;
654
655 /* If there are sub-regions, process them. */
656 if (i->inner)
657 i = i->inner;
658 /* If there are peers, process them. */
659 else if (i->next_peer)
660 i = i->next_peer;
661 /* Otherwise, step back up the tree to the next peer. */
662 else
663 {
664 do {
665 i = i->outer;
666 if (i == NULL)
667 return;
668 } while (i->next_peer == NULL);
669 i = i->next_peer;
670 }
671 }
27a36778
MS
672}
673
1bddbeb4
RH
674static void
675resolve_one_fixup_region (struct eh_region *fixup)
676{
677 struct eh_region *cleanup, *real;
678 int j, n;
679
680 n = cfun->eh->last_region_number;
681 cleanup = 0;
682
683 for (j = 1; j <= n; ++j)
684 {
685 cleanup = cfun->eh->region_array[j];
686 if (cleanup && cleanup->type == ERT_CLEANUP
687 && cleanup->u.cleanup.exp == fixup->u.fixup.cleanup_exp)
688 break;
689 }
5b0264cb 690 gcc_assert (j <= n);
1bddbeb4
RH
691
692 real = cleanup->outer;
693 if (real && real->type == ERT_FIXUP)
694 {
695 if (!real->u.fixup.resolved)
696 resolve_one_fixup_region (real);
697 real = real->u.fixup.real_region;
698 }
699
700 fixup->u.fixup.real_region = real;
701 fixup->u.fixup.resolved = true;
702}
703
52a11cbf 704static void
502b8322 705resolve_fixup_regions (void)
27a36778 706{
1bddbeb4 707 int i, n = cfun->eh->last_region_number;
27a36778 708
52a11cbf
RH
709 for (i = 1; i <= n; ++i)
710 {
711 struct eh_region *fixup = cfun->eh->region_array[i];
27a36778 712
1bddbeb4 713 if (!fixup || fixup->type != ERT_FIXUP || fixup->u.fixup.resolved)
52a11cbf 714 continue;
27a36778 715
1bddbeb4 716 resolve_one_fixup_region (fixup);
52a11cbf 717 }
27a36778 718}
27a36778 719
52a11cbf
RH
720/* Now that we've discovered what region actually encloses a fixup,
721 we can shuffle pointers and remove them from the tree. */
27a36778
MS
722
723static void
502b8322 724remove_fixup_regions (void)
27a36778 725{
52a11cbf 726 int i;
45053eaf
RH
727 rtx insn, note;
728 struct eh_region *fixup;
27a36778 729
45053eaf
RH
730 /* Walk the insn chain and adjust the REG_EH_REGION numbers
731 for instructions referencing fixup regions. This is only
732 strictly necessary for fixup regions with no parent, but
733 doesn't hurt to do it for all regions. */
734 for (insn = get_insns(); insn ; insn = NEXT_INSN (insn))
735 if (INSN_P (insn)
736 && (note = find_reg_note (insn, REG_EH_REGION, NULL))
737 && INTVAL (XEXP (note, 0)) > 0
738 && (fixup = cfun->eh->region_array[INTVAL (XEXP (note, 0))])
739 && fixup->type == ERT_FIXUP)
740 {
741 if (fixup->u.fixup.real_region)
2b1e2382 742 XEXP (note, 0) = GEN_INT (fixup->u.fixup.real_region->region_number);
45053eaf
RH
743 else
744 remove_note (insn, note);
745 }
746
747 /* Remove the fixup regions from the tree. */
52a11cbf
RH
748 for (i = cfun->eh->last_region_number; i > 0; --i)
749 {
45053eaf 750 fixup = cfun->eh->region_array[i];
52a11cbf
RH
751 if (! fixup)
752 continue;
27a36778 753
52a11cbf
RH
754 /* Allow GC to maybe free some memory. */
755 if (fixup->type == ERT_CLEANUP)
0fb7aeda 756 fixup->u.cleanup.exp = NULL_TREE;
27a36778 757
52a11cbf
RH
758 if (fixup->type != ERT_FIXUP)
759 continue;
27a36778 760
52a11cbf
RH
761 if (fixup->inner)
762 {
763 struct eh_region *parent, *p, **pp;
27a36778 764
52a11cbf 765 parent = fixup->u.fixup.real_region;
27a36778 766
52a11cbf
RH
767 /* Fix up the children's parent pointers; find the end of
768 the list. */
769 for (p = fixup->inner; ; p = p->next_peer)
770 {
771 p->outer = parent;
772 if (! p->next_peer)
773 break;
774 }
27a36778 775
52a11cbf
RH
776 /* In the tree of cleanups, only outer-inner ordering matters.
777 So link the children back in anywhere at the correct level. */
778 if (parent)
779 pp = &parent->inner;
780 else
781 pp = &cfun->eh->region_tree;
782 p->next_peer = *pp;
783 *pp = fixup->inner;
784 fixup->inner = NULL;
785 }
27a36778 786
52a11cbf
RH
787 remove_eh_handler (fixup);
788 }
27a36778
MS
789}
790
655dd289
JJ
791/* Remove all regions whose labels are not reachable from insns. */
792
793static void
502b8322 794remove_unreachable_regions (rtx insns)
655dd289
JJ
795{
796 int i, *uid_region_num;
797 bool *reachable;
798 struct eh_region *r;
799 rtx insn;
800
801 uid_region_num = xcalloc (get_max_uid (), sizeof(int));
802 reachable = xcalloc (cfun->eh->last_region_number + 1, sizeof(bool));
803
804 for (i = cfun->eh->last_region_number; i > 0; --i)
805 {
806 r = cfun->eh->region_array[i];
807 if (!r || r->region_number != i)
808 continue;
809
810 if (r->resume)
0fb7aeda 811 {
5b0264cb 812 gcc_assert (!uid_region_num[INSN_UID (r->resume)]);
655dd289 813 uid_region_num[INSN_UID (r->resume)] = i;
0fb7aeda 814 }
655dd289 815 if (r->label)
0fb7aeda 816 {
5b0264cb 817 gcc_assert (!uid_region_num[INSN_UID (r->label)]);
655dd289 818 uid_region_num[INSN_UID (r->label)] = i;
0fb7aeda 819 }
655dd289
JJ
820 }
821
822 for (insn = insns; insn; insn = NEXT_INSN (insn))
6ce2bcb7 823 reachable[uid_region_num[INSN_UID (insn)]] = true;
655dd289
JJ
824
825 for (i = cfun->eh->last_region_number; i > 0; --i)
826 {
827 r = cfun->eh->region_array[i];
828 if (r && r->region_number == i && !reachable[i])
829 {
6de9cd9a
DN
830 bool kill_it = true;
831 switch (r->type)
832 {
833 case ERT_THROW:
834 /* Don't remove ERT_THROW regions if their outer region
835 is reachable. */
836 if (r->outer && reachable[r->outer->region_number])
837 kill_it = false;
838 break;
839
840 case ERT_MUST_NOT_THROW:
841 /* MUST_NOT_THROW regions are implementable solely in the
1ea7e6ad 842 runtime, but their existence continues to affect calls
6de9cd9a
DN
843 within that region. Never delete them here. */
844 kill_it = false;
845 break;
846
847 case ERT_TRY:
848 {
849 /* TRY regions are reachable if any of its CATCH regions
850 are reachable. */
851 struct eh_region *c;
852 for (c = r->u.try.catch; c ; c = c->u.catch.next_catch)
853 if (reachable[c->region_number])
854 {
855 kill_it = false;
856 break;
857 }
858 break;
859 }
655dd289 860
6de9cd9a
DN
861 default:
862 break;
863 }
864
865 if (kill_it)
866 remove_eh_handler (r);
655dd289
JJ
867 }
868 }
869
870 free (reachable);
871 free (uid_region_num);
872}
873
52a11cbf
RH
874/* Turn NOTE_INSN_EH_REGION notes into REG_EH_REGION notes for each
875 can_throw instruction in the region. */
27a36778
MS
876
877static void
502b8322 878convert_from_eh_region_ranges_1 (rtx *pinsns, int *orig_sp, int cur)
27a36778 879{
52a11cbf
RH
880 int *sp = orig_sp;
881 rtx insn, next;
27a36778 882
52a11cbf
RH
883 for (insn = *pinsns; insn ; insn = next)
884 {
885 next = NEXT_INSN (insn);
4b4bf941 886 if (NOTE_P (insn))
52a11cbf
RH
887 {
888 int kind = NOTE_LINE_NUMBER (insn);
889 if (kind == NOTE_INSN_EH_REGION_BEG
890 || kind == NOTE_INSN_EH_REGION_END)
891 {
892 if (kind == NOTE_INSN_EH_REGION_BEG)
893 {
894 struct eh_region *r;
27a36778 895
52a11cbf
RH
896 *sp++ = cur;
897 cur = NOTE_EH_HANDLER (insn);
27a36778 898
52a11cbf
RH
899 r = cfun->eh->region_array[cur];
900 if (r->type == ERT_FIXUP)
901 {
902 r = r->u.fixup.real_region;
903 cur = r ? r->region_number : 0;
904 }
905 else if (r->type == ERT_CATCH)
906 {
907 r = r->outer;
908 cur = r ? r->region_number : 0;
909 }
910 }
911 else
912 cur = *--sp;
913
52a11cbf
RH
914 if (insn == *pinsns)
915 *pinsns = next;
916 remove_insn (insn);
917 continue;
918 }
919 }
920 else if (INSN_P (insn))
921 {
a944ceb9
RH
922 if (cur > 0
923 && ! find_reg_note (insn, REG_EH_REGION, NULL_RTX)
924 /* Calls can always potentially throw exceptions, unless
925 they have a REG_EH_REGION note with a value of 0 or less.
926 Which should be the only possible kind so far. */
4b4bf941 927 && (CALL_P (insn)
a944ceb9
RH
928 /* If we wanted exceptions for non-call insns, then
929 any may_trap_p instruction could throw. */
930 || (flag_non_call_exceptions
931 && GET_CODE (PATTERN (insn)) != CLOBBER
932 && GET_CODE (PATTERN (insn)) != USE
933 && may_trap_p (PATTERN (insn)))))
52a11cbf 934 {
a944ceb9 935 REG_NOTES (insn) = alloc_EXPR_LIST (REG_EH_REGION, GEN_INT (cur),
52a11cbf
RH
936 REG_NOTES (insn));
937 }
52a11cbf
RH
938 }
939 }
27a36778 940
5b0264cb 941 gcc_assert (sp == orig_sp);
52a11cbf 942}
27a36778 943
6de9cd9a
DN
944static void
945collect_rtl_labels_from_trees (void)
946{
947 int i, n = cfun->eh->last_region_number;
948 for (i = 1; i <= n; ++i)
949 {
950 struct eh_region *reg = cfun->eh->region_array[i];
951 if (reg && reg->tree_label)
952 reg->label = DECL_RTL_IF_SET (reg->tree_label);
953 }
954}
955
52a11cbf 956void
502b8322 957convert_from_eh_region_ranges (void)
52a11cbf 958{
6de9cd9a
DN
959 rtx insns = get_insns ();
960
961 if (cfun->eh->region_array)
962 {
963 /* If the region array already exists, assume we're coming from
964 optimize_function_tree. In this case all we need to do is
965 collect the rtl labels that correspond to the tree labels
966 that we allocated earlier. */
967 collect_rtl_labels_from_trees ();
968 }
969 else
970 {
971 int *stack;
27a36778 972
6de9cd9a
DN
973 collect_eh_region_array ();
974 resolve_fixup_regions ();
27a36778 975
6de9cd9a
DN
976 stack = xmalloc (sizeof (int) * (cfun->eh->last_region_number + 1));
977 convert_from_eh_region_ranges_1 (&insns, stack, 0);
978 free (stack);
979
980 remove_fixup_regions ();
981 }
27a36778 982
655dd289 983 remove_unreachable_regions (insns);
27a36778
MS
984}
985
6a58eee9 986static void
502b8322 987add_ehl_entry (rtx label, struct eh_region *region)
6a58eee9
RH
988{
989 struct ehl_map_entry **slot, *entry;
990
991 LABEL_PRESERVE_P (label) = 1;
992
703ad42b 993 entry = ggc_alloc (sizeof (*entry));
6a58eee9
RH
994 entry->label = label;
995 entry->region = region;
996
997 slot = (struct ehl_map_entry **)
e2500fed 998 htab_find_slot (cfun->eh->exception_handler_label_map, entry, INSERT);
6f3d0447
RH
999
1000 /* Before landing pad creation, each exception handler has its own
1001 label. After landing pad creation, the exception handlers may
1002 share landing pads. This is ok, since maybe_remove_eh_handler
1003 only requires the 1-1 mapping before landing pad creation. */
5b0264cb 1004 gcc_assert (!*slot || cfun->eh->built_landing_pads);
6f3d0447 1005
6a58eee9
RH
1006 *slot = entry;
1007}
1008
52a11cbf 1009void
502b8322 1010find_exception_handler_labels (void)
27a36778 1011{
52a11cbf 1012 int i;
27a36778 1013
e2500fed
GK
1014 if (cfun->eh->exception_handler_label_map)
1015 htab_empty (cfun->eh->exception_handler_label_map);
6a58eee9
RH
1016 else
1017 {
1018 /* ??? The expansion factor here (3/2) must be greater than the htab
1019 occupancy factor (4/3) to avoid unnecessary resizing. */
e2500fed
GK
1020 cfun->eh->exception_handler_label_map
1021 = htab_create_ggc (cfun->eh->last_region_number * 3 / 2,
1022 ehl_hash, ehl_eq, NULL);
6a58eee9 1023 }
27a36778 1024
52a11cbf
RH
1025 if (cfun->eh->region_tree == NULL)
1026 return;
27a36778 1027
52a11cbf
RH
1028 for (i = cfun->eh->last_region_number; i > 0; --i)
1029 {
1030 struct eh_region *region = cfun->eh->region_array[i];
1031 rtx lab;
27a36778 1032
655dd289 1033 if (! region || region->region_number != i)
52a11cbf
RH
1034 continue;
1035 if (cfun->eh->built_landing_pads)
1036 lab = region->landing_pad;
1037 else
1038 lab = region->label;
27a36778 1039
52a11cbf 1040 if (lab)
6a58eee9 1041 add_ehl_entry (lab, region);
27a36778
MS
1042 }
1043
52a11cbf
RH
1044 /* For sjlj exceptions, need the return label to remain live until
1045 after landing pad generation. */
1046 if (USING_SJLJ_EXCEPTIONS && ! cfun->eh->built_landing_pads)
6a58eee9 1047 add_ehl_entry (return_label, NULL);
27a36778
MS
1048}
1049
93f82d60 1050bool
502b8322 1051current_function_has_exception_handlers (void)
93f82d60
RH
1052{
1053 int i;
1054
1055 for (i = cfun->eh->last_region_number; i > 0; --i)
1056 {
1057 struct eh_region *region = cfun->eh->region_array[i];
1058
1059 if (! region || region->region_number != i)
1060 continue;
1061 if (region->type != ERT_THROW)
1062 return true;
1063 }
1064
1065 return false;
1066}
52a11cbf
RH
1067\f
1068static struct eh_region *
502b8322 1069duplicate_eh_region_1 (struct eh_region *o, struct inline_remap *map)
4956d07c 1070{
703ad42b 1071 struct eh_region *n = ggc_alloc_cleared (sizeof (struct eh_region));
4956d07c 1072
52a11cbf
RH
1073 n->region_number = o->region_number + cfun->eh->last_region_number;
1074 n->type = o->type;
4956d07c 1075
52a11cbf
RH
1076 switch (n->type)
1077 {
1078 case ERT_CLEANUP:
1079 case ERT_MUST_NOT_THROW:
1080 break;
27a36778 1081
52a11cbf
RH
1082 case ERT_TRY:
1083 if (o->u.try.continue_label)
1084 n->u.try.continue_label
1085 = get_label_from_map (map,
1086 CODE_LABEL_NUMBER (o->u.try.continue_label));
1087 break;
27a36778 1088
52a11cbf 1089 case ERT_CATCH:
6d41a92f 1090 n->u.catch.type_list = o->u.catch.type_list;
52a11cbf 1091 break;
27a36778 1092
52a11cbf
RH
1093 case ERT_ALLOWED_EXCEPTIONS:
1094 n->u.allowed.type_list = o->u.allowed.type_list;
1095 break;
1096
1097 case ERT_THROW:
1098 n->u.throw.type = o->u.throw.type;
3f2c5d1a 1099
52a11cbf 1100 default:
5b0264cb 1101 gcc_unreachable ();
52a11cbf
RH
1102 }
1103
1104 if (o->label)
1105 n->label = get_label_from_map (map, CODE_LABEL_NUMBER (o->label));
47c84870 1106 if (o->resume)
e7b9b18e 1107 {
47c84870 1108 n->resume = map->insn_map[INSN_UID (o->resume)];
5b0264cb 1109 gcc_assert (n->resume);
27a36778 1110 }
4956d07c 1111
52a11cbf 1112 return n;
4956d07c
MS
1113}
1114
52a11cbf 1115static void
502b8322 1116duplicate_eh_region_2 (struct eh_region *o, struct eh_region **n_array)
4c581243 1117{
52a11cbf 1118 struct eh_region *n = n_array[o->region_number];
4c581243 1119
52a11cbf
RH
1120 switch (n->type)
1121 {
1122 case ERT_TRY:
1123 n->u.try.catch = n_array[o->u.try.catch->region_number];
1124 n->u.try.last_catch = n_array[o->u.try.last_catch->region_number];
1125 break;
12670d88 1126
52a11cbf
RH
1127 case ERT_CATCH:
1128 if (o->u.catch.next_catch)
0fb7aeda 1129 n->u.catch.next_catch = n_array[o->u.catch.next_catch->region_number];
52a11cbf 1130 if (o->u.catch.prev_catch)
0fb7aeda 1131 n->u.catch.prev_catch = n_array[o->u.catch.prev_catch->region_number];
52a11cbf 1132 break;
12670d88 1133
52a11cbf
RH
1134 default:
1135 break;
1136 }
4956d07c 1137
52a11cbf
RH
1138 if (o->outer)
1139 n->outer = n_array[o->outer->region_number];
1140 if (o->inner)
1141 n->inner = n_array[o->inner->region_number];
1142 if (o->next_peer)
1143 n->next_peer = n_array[o->next_peer->region_number];
3f2c5d1a 1144}
52a11cbf
RH
1145
1146int
502b8322 1147duplicate_eh_regions (struct function *ifun, struct inline_remap *map)
4956d07c 1148{
52a11cbf
RH
1149 int ifun_last_region_number = ifun->eh->last_region_number;
1150 struct eh_region **n_array, *root, *cur;
1151 int i;
4956d07c 1152
52a11cbf
RH
1153 if (ifun_last_region_number == 0)
1154 return 0;
4956d07c 1155
52a11cbf 1156 n_array = xcalloc (ifun_last_region_number + 1, sizeof (*n_array));
4956d07c 1157
52a11cbf 1158 for (i = 1; i <= ifun_last_region_number; ++i)
27a36778 1159 {
52a11cbf
RH
1160 cur = ifun->eh->region_array[i];
1161 if (!cur || cur->region_number != i)
1162 continue;
1163 n_array[i] = duplicate_eh_region_1 (cur, map);
27a36778 1164 }
52a11cbf 1165 for (i = 1; i <= ifun_last_region_number; ++i)
27a36778 1166 {
52a11cbf
RH
1167 cur = ifun->eh->region_array[i];
1168 if (!cur || cur->region_number != i)
1169 continue;
1170 duplicate_eh_region_2 (cur, n_array);
1171 }
27a36778 1172
52a11cbf
RH
1173 root = n_array[ifun->eh->region_tree->region_number];
1174 cur = cfun->eh->cur_region;
1175 if (cur)
1176 {
1177 struct eh_region *p = cur->inner;
1178 if (p)
1179 {
1180 while (p->next_peer)
1181 p = p->next_peer;
1182 p->next_peer = root;
1183 }
1184 else
1185 cur->inner = root;
27a36778 1186
52a11cbf 1187 for (i = 1; i <= ifun_last_region_number; ++i)
b24a9e88 1188 if (n_array[i] && n_array[i]->outer == NULL)
52a11cbf
RH
1189 n_array[i]->outer = cur;
1190 }
1191 else
1192 {
1193 struct eh_region *p = cfun->eh->region_tree;
1194 if (p)
1195 {
1196 while (p->next_peer)
1197 p = p->next_peer;
1198 p->next_peer = root;
1199 }
1200 else
1201 cfun->eh->region_tree = root;
27a36778 1202 }
1e4ceb6f 1203
52a11cbf 1204 free (n_array);
1e4ceb6f 1205
52a11cbf
RH
1206 i = cfun->eh->last_region_number;
1207 cfun->eh->last_region_number = i + ifun_last_region_number;
1208 return i;
4956d07c
MS
1209}
1210
52a11cbf 1211\f
52a11cbf 1212static int
502b8322 1213t2r_eq (const void *pentry, const void *pdata)
9762d48d 1214{
52a11cbf
RH
1215 tree entry = (tree) pentry;
1216 tree data = (tree) pdata;
9762d48d 1217
52a11cbf 1218 return TREE_PURPOSE (entry) == data;
9762d48d
JM
1219}
1220
52a11cbf 1221static hashval_t
502b8322 1222t2r_hash (const void *pentry)
52a11cbf
RH
1223{
1224 tree entry = (tree) pentry;
fd917e0d 1225 return TREE_HASH (TREE_PURPOSE (entry));
52a11cbf 1226}
9762d48d 1227
52a11cbf 1228static void
502b8322 1229add_type_for_runtime (tree type)
52a11cbf
RH
1230{
1231 tree *slot;
9762d48d 1232
52a11cbf 1233 slot = (tree *) htab_find_slot_with_hash (type_to_runtime_map, type,
fd917e0d 1234 TREE_HASH (type), INSERT);
52a11cbf
RH
1235 if (*slot == NULL)
1236 {
1237 tree runtime = (*lang_eh_runtime_type) (type);
1238 *slot = tree_cons (type, runtime, NULL_TREE);
1239 }
1240}
3f2c5d1a 1241
52a11cbf 1242static tree
502b8322 1243lookup_type_for_runtime (tree type)
52a11cbf
RH
1244{
1245 tree *slot;
b37f006b 1246
52a11cbf 1247 slot = (tree *) htab_find_slot_with_hash (type_to_runtime_map, type,
fd917e0d 1248 TREE_HASH (type), NO_INSERT);
b37f006b 1249
a1f300c0 1250 /* We should have always inserted the data earlier. */
52a11cbf
RH
1251 return TREE_VALUE (*slot);
1252}
9762d48d 1253
52a11cbf
RH
1254\f
1255/* Represent an entry in @TTypes for either catch actions
1256 or exception filter actions. */
e2500fed 1257struct ttypes_filter GTY(())
52a11cbf
RH
1258{
1259 tree t;
1260 int filter;
1261};
b37f006b 1262
52a11cbf
RH
1263/* Compare ENTRY (a ttypes_filter entry in the hash table) with DATA
1264 (a tree) for a @TTypes type node we are thinking about adding. */
b37f006b 1265
52a11cbf 1266static int
502b8322 1267ttypes_filter_eq (const void *pentry, const void *pdata)
52a11cbf
RH
1268{
1269 const struct ttypes_filter *entry = (const struct ttypes_filter *) pentry;
1270 tree data = (tree) pdata;
b37f006b 1271
52a11cbf 1272 return entry->t == data;
9762d48d
JM
1273}
1274
52a11cbf 1275static hashval_t
502b8322 1276ttypes_filter_hash (const void *pentry)
52a11cbf
RH
1277{
1278 const struct ttypes_filter *entry = (const struct ttypes_filter *) pentry;
fd917e0d 1279 return TREE_HASH (entry->t);
52a11cbf 1280}
4956d07c 1281
52a11cbf
RH
1282/* Compare ENTRY with DATA (both struct ttypes_filter) for a @TTypes
1283 exception specification list we are thinking about adding. */
1284/* ??? Currently we use the type lists in the order given. Someone
1285 should put these in some canonical order. */
1286
1287static int
502b8322 1288ehspec_filter_eq (const void *pentry, const void *pdata)
4956d07c 1289{
52a11cbf
RH
1290 const struct ttypes_filter *entry = (const struct ttypes_filter *) pentry;
1291 const struct ttypes_filter *data = (const struct ttypes_filter *) pdata;
1292
1293 return type_list_equal (entry->t, data->t);
4956d07c
MS
1294}
1295
52a11cbf 1296/* Hash function for exception specification lists. */
4956d07c 1297
52a11cbf 1298static hashval_t
502b8322 1299ehspec_filter_hash (const void *pentry)
4956d07c 1300{
52a11cbf
RH
1301 const struct ttypes_filter *entry = (const struct ttypes_filter *) pentry;
1302 hashval_t h = 0;
1303 tree list;
1304
1305 for (list = entry->t; list ; list = TREE_CHAIN (list))
fd917e0d 1306 h = (h << 5) + (h >> 27) + TREE_HASH (TREE_VALUE (list));
52a11cbf 1307 return h;
4956d07c
MS
1308}
1309
fd917e0d
JM
1310/* Add TYPE (which may be NULL) to cfun->eh->ttype_data, using TYPES_HASH
1311 to speed up the search. Return the filter value to be used. */
4956d07c 1312
52a11cbf 1313static int
502b8322 1314add_ttypes_entry (htab_t ttypes_hash, tree type)
4956d07c 1315{
52a11cbf 1316 struct ttypes_filter **slot, *n;
4956d07c 1317
52a11cbf 1318 slot = (struct ttypes_filter **)
fd917e0d 1319 htab_find_slot_with_hash (ttypes_hash, type, TREE_HASH (type), INSERT);
52a11cbf
RH
1320
1321 if ((n = *slot) == NULL)
4956d07c 1322 {
52a11cbf 1323 /* Filter value is a 1 based table index. */
12670d88 1324
703ad42b 1325 n = xmalloc (sizeof (*n));
52a11cbf
RH
1326 n->t = type;
1327 n->filter = VARRAY_ACTIVE_SIZE (cfun->eh->ttype_data) + 1;
1328 *slot = n;
1329
1330 VARRAY_PUSH_TREE (cfun->eh->ttype_data, type);
4956d07c 1331 }
52a11cbf
RH
1332
1333 return n->filter;
4956d07c
MS
1334}
1335
52a11cbf
RH
1336/* Add LIST to cfun->eh->ehspec_data, using EHSPEC_HASH and TYPES_HASH
1337 to speed up the search. Return the filter value to be used. */
1338
1339static int
502b8322 1340add_ehspec_entry (htab_t ehspec_hash, htab_t ttypes_hash, tree list)
12670d88 1341{
52a11cbf
RH
1342 struct ttypes_filter **slot, *n;
1343 struct ttypes_filter dummy;
12670d88 1344
52a11cbf
RH
1345 dummy.t = list;
1346 slot = (struct ttypes_filter **)
1347 htab_find_slot (ehspec_hash, &dummy, INSERT);
1348
1349 if ((n = *slot) == NULL)
1350 {
1351 /* Filter value is a -1 based byte index into a uleb128 buffer. */
1352
703ad42b 1353 n = xmalloc (sizeof (*n));
52a11cbf
RH
1354 n->t = list;
1355 n->filter = -(VARRAY_ACTIVE_SIZE (cfun->eh->ehspec_data) + 1);
1356 *slot = n;
1357
1358 /* Look up each type in the list and encode its filter
1359 value as a uleb128. Terminate the list with 0. */
1360 for (; list ; list = TREE_CHAIN (list))
3f2c5d1a 1361 push_uleb128 (&cfun->eh->ehspec_data,
52a11cbf
RH
1362 add_ttypes_entry (ttypes_hash, TREE_VALUE (list)));
1363 VARRAY_PUSH_UCHAR (cfun->eh->ehspec_data, 0);
1364 }
1365
1366 return n->filter;
12670d88
RK
1367}
1368
52a11cbf
RH
1369/* Generate the action filter values to be used for CATCH and
1370 ALLOWED_EXCEPTIONS regions. When using dwarf2 exception regions,
1371 we use lots of landing pads, and so every type or list can share
1372 the same filter value, which saves table space. */
1373
1374static void
502b8322 1375assign_filter_values (void)
9a0d1e1b 1376{
52a11cbf
RH
1377 int i;
1378 htab_t ttypes, ehspec;
9a9deafc 1379
52a11cbf
RH
1380 VARRAY_TREE_INIT (cfun->eh->ttype_data, 16, "ttype_data");
1381 VARRAY_UCHAR_INIT (cfun->eh->ehspec_data, 64, "ehspec_data");
9a9deafc 1382
52a11cbf
RH
1383 ttypes = htab_create (31, ttypes_filter_hash, ttypes_filter_eq, free);
1384 ehspec = htab_create (31, ehspec_filter_hash, ehspec_filter_eq, free);
9a0d1e1b 1385
52a11cbf
RH
1386 for (i = cfun->eh->last_region_number; i > 0; --i)
1387 {
1388 struct eh_region *r = cfun->eh->region_array[i];
9a0d1e1b 1389
52a11cbf
RH
1390 /* Mind we don't process a region more than once. */
1391 if (!r || r->region_number != i)
1392 continue;
9a0d1e1b 1393
52a11cbf
RH
1394 switch (r->type)
1395 {
1396 case ERT_CATCH:
6d41a92f
OH
1397 /* Whatever type_list is (NULL or true list), we build a list
1398 of filters for the region. */
1399 r->u.catch.filter_list = NULL_TREE;
1400
1401 if (r->u.catch.type_list != NULL)
1402 {
1403 /* Get a filter value for each of the types caught and store
1404 them in the region's dedicated list. */
1405 tree tp_node = r->u.catch.type_list;
1406
1407 for (;tp_node; tp_node = TREE_CHAIN (tp_node))
1408 {
1409 int flt = add_ttypes_entry (ttypes, TREE_VALUE (tp_node));
7d60be94 1410 tree flt_node = build_int_cst (NULL_TREE, flt);
3f2c5d1a
RS
1411
1412 r->u.catch.filter_list
6d41a92f
OH
1413 = tree_cons (NULL_TREE, flt_node, r->u.catch.filter_list);
1414 }
1415 }
1416 else
1417 {
1418 /* Get a filter value for the NULL list also since it will need
1419 an action record anyway. */
1420 int flt = add_ttypes_entry (ttypes, NULL);
7d60be94 1421 tree flt_node = build_int_cst (NULL_TREE, flt);
3f2c5d1a
RS
1422
1423 r->u.catch.filter_list
6d41a92f
OH
1424 = tree_cons (NULL_TREE, flt_node, r->u.catch.filter_list);
1425 }
3f2c5d1a 1426
52a11cbf 1427 break;
bf71cd2e 1428
52a11cbf
RH
1429 case ERT_ALLOWED_EXCEPTIONS:
1430 r->u.allowed.filter
1431 = add_ehspec_entry (ehspec, ttypes, r->u.allowed.type_list);
1432 break;
bf71cd2e 1433
52a11cbf
RH
1434 default:
1435 break;
1436 }
1437 }
1438
1439 htab_delete (ttypes);
1440 htab_delete (ehspec);
1441}
1442
12c3874e
JH
1443/* Emit SEQ into basic block just before INSN (that is assumed to be
1444 first instruction of some existing BB and return the newly
1445 produced block. */
1446static basic_block
1447emit_to_new_bb_before (rtx seq, rtx insn)
1448{
1449 rtx last;
1450 basic_block bb;
a61bf177 1451 edge e;
628f6a4e 1452 edge_iterator ei;
a61bf177
JH
1453
1454 /* If there happens to be an fallthru edge (possibly created by cleanup_cfg
1455 call), we don't want it to go into newly created landing pad or other EH
1456 construct. */
628f6a4e 1457 for (ei = ei_start (BLOCK_FOR_INSN (insn)->preds); (e = ei_safe_edge (ei)); )
a61bf177
JH
1458 if (e->flags & EDGE_FALLTHRU)
1459 force_nonfallthru (e);
628f6a4e
BE
1460 else
1461 ei_next (&ei);
12c3874e 1462 last = emit_insn_before (seq, insn);
4b4bf941 1463 if (BARRIER_P (last))
12c3874e
JH
1464 last = PREV_INSN (last);
1465 bb = create_basic_block (seq, last, BLOCK_FOR_INSN (insn)->prev_bb);
1466 update_bb_for_insn (bb);
1467 bb->flags |= BB_SUPERBLOCK;
1468 return bb;
1469}
1470
ac850948
JM
1471/* Generate the code to actually handle exceptions, which will follow the
1472 landing pads. */
1473
52a11cbf 1474static void
502b8322 1475build_post_landing_pads (void)
52a11cbf
RH
1476{
1477 int i;
bf71cd2e 1478
52a11cbf 1479 for (i = cfun->eh->last_region_number; i > 0; --i)
bf71cd2e 1480 {
52a11cbf
RH
1481 struct eh_region *region = cfun->eh->region_array[i];
1482 rtx seq;
bf71cd2e 1483
52a11cbf
RH
1484 /* Mind we don't process a region more than once. */
1485 if (!region || region->region_number != i)
1486 continue;
1487
1488 switch (region->type)
987009bf 1489 {
52a11cbf
RH
1490 case ERT_TRY:
1491 /* ??? Collect the set of all non-overlapping catch handlers
1492 all the way up the chain until blocked by a cleanup. */
1493 /* ??? Outer try regions can share landing pads with inner
1494 try regions if the types are completely non-overlapping,
a1f300c0 1495 and there are no intervening cleanups. */
bf71cd2e 1496
52a11cbf 1497 region->post_landing_pad = gen_label_rtx ();
bf71cd2e 1498
52a11cbf 1499 start_sequence ();
bf71cd2e 1500
52a11cbf 1501 emit_label (region->post_landing_pad);
bf71cd2e 1502
52a11cbf
RH
1503 /* ??? It is mighty inconvenient to call back into the
1504 switch statement generation code in expand_end_case.
1505 Rapid prototyping sez a sequence of ifs. */
1506 {
1507 struct eh_region *c;
1508 for (c = region->u.try.catch; c ; c = c->u.catch.next_catch)
1509 {
6d41a92f 1510 if (c->u.catch.type_list == NULL)
a944ceb9 1511 emit_jump (c->label);
52a11cbf 1512 else
6d41a92f
OH
1513 {
1514 /* Need for one cmp/jump per type caught. Each type
1515 list entry has a matching entry in the filter list
1516 (see assign_filter_values). */
1517 tree tp_node = c->u.catch.type_list;
1518 tree flt_node = c->u.catch.filter_list;
1519
1520 for (; tp_node; )
1521 {
1522 emit_cmp_and_jump_insns
1523 (cfun->eh->filter,
1524 GEN_INT (tree_low_cst (TREE_VALUE (flt_node), 0)),
93f90be6
FJ
1525 EQ, NULL_RTX,
1526 targetm.eh_return_filter_mode (), 0, c->label);
6d41a92f
OH
1527
1528 tp_node = TREE_CHAIN (tp_node);
1529 flt_node = TREE_CHAIN (flt_node);
1530 }
1531 }
52a11cbf
RH
1532 }
1533 }
bf71cd2e 1534
47c84870
JM
1535 /* We delay the generation of the _Unwind_Resume until we generate
1536 landing pads. We emit a marker here so as to get good control
1537 flow data in the meantime. */
1538 region->resume
1539 = emit_jump_insn (gen_rtx_RESX (VOIDmode, region->region_number));
1540 emit_barrier ();
1541
52a11cbf
RH
1542 seq = get_insns ();
1543 end_sequence ();
e6cfb550 1544
12c3874e
JH
1545 emit_to_new_bb_before (seq, region->u.try.catch->label);
1546
52a11cbf 1547 break;
bf71cd2e 1548
52a11cbf
RH
1549 case ERT_ALLOWED_EXCEPTIONS:
1550 region->post_landing_pad = gen_label_rtx ();
9a0d1e1b 1551
52a11cbf 1552 start_sequence ();
f54a7f6f 1553
52a11cbf 1554 emit_label (region->post_landing_pad);
f54a7f6f 1555
52a11cbf
RH
1556 emit_cmp_and_jump_insns (cfun->eh->filter,
1557 GEN_INT (region->u.allowed.filter),
93f90be6
FJ
1558 EQ, NULL_RTX,
1559 targetm.eh_return_filter_mode (), 0, region->label);
f54a7f6f 1560
47c84870
JM
1561 /* We delay the generation of the _Unwind_Resume until we generate
1562 landing pads. We emit a marker here so as to get good control
1563 flow data in the meantime. */
1564 region->resume
1565 = emit_jump_insn (gen_rtx_RESX (VOIDmode, region->region_number));
1566 emit_barrier ();
1567
52a11cbf
RH
1568 seq = get_insns ();
1569 end_sequence ();
1570
12c3874e 1571 emit_to_new_bb_before (seq, region->label);
52a11cbf 1572 break;
f54a7f6f 1573
52a11cbf 1574 case ERT_CLEANUP:
125ca8fd 1575 case ERT_MUST_NOT_THROW:
a944ceb9 1576 region->post_landing_pad = region->label;
125ca8fd
RH
1577 break;
1578
52a11cbf
RH
1579 case ERT_CATCH:
1580 case ERT_THROW:
1581 /* Nothing to do. */
1582 break;
1583
1584 default:
5b0264cb 1585 gcc_unreachable ();
52a11cbf
RH
1586 }
1587 }
1588}
1e4ceb6f 1589
47c84870
JM
1590/* Replace RESX patterns with jumps to the next handler if any, or calls to
1591 _Unwind_Resume otherwise. */
1592
1e4ceb6f 1593static void
502b8322 1594connect_post_landing_pads (void)
1e4ceb6f 1595{
52a11cbf 1596 int i;
76fc91c7 1597
52a11cbf
RH
1598 for (i = cfun->eh->last_region_number; i > 0; --i)
1599 {
1600 struct eh_region *region = cfun->eh->region_array[i];
1601 struct eh_region *outer;
47c84870 1602 rtx seq;
12c3874e 1603 rtx barrier;
1e4ceb6f 1604
52a11cbf
RH
1605 /* Mind we don't process a region more than once. */
1606 if (!region || region->region_number != i)
1607 continue;
1e4ceb6f 1608
47c84870
JM
1609 /* If there is no RESX, or it has been deleted by flow, there's
1610 nothing to fix up. */
1611 if (! region->resume || INSN_DELETED_P (region->resume))
52a11cbf 1612 continue;
76fc91c7 1613
52a11cbf
RH
1614 /* Search for another landing pad in this function. */
1615 for (outer = region->outer; outer ; outer = outer->outer)
1616 if (outer->post_landing_pad)
1617 break;
1e4ceb6f 1618
52a11cbf 1619 start_sequence ();
12670d88 1620
52a11cbf 1621 if (outer)
12c3874e
JH
1622 {
1623 edge e;
1624 basic_block src, dest;
1625
1626 emit_jump (outer->post_landing_pad);
1627 src = BLOCK_FOR_INSN (region->resume);
1628 dest = BLOCK_FOR_INSN (outer->post_landing_pad);
628f6a4e
BE
1629 while (EDGE_COUNT (src->succs) > 0)
1630 remove_edge (EDGE_SUCC (src, 0));
12c3874e
JH
1631 e = make_edge (src, dest, 0);
1632 e->probability = REG_BR_PROB_BASE;
1633 e->count = src->count;
1634 }
52a11cbf 1635 else
29c246a7
HPN
1636 {
1637 emit_library_call (unwind_resume_libfunc, LCT_THROW,
1638 VOIDmode, 1, cfun->eh->exc_ptr, ptr_mode);
1639
1640 /* What we just emitted was a throwing libcall, so it got a
1641 barrier automatically added after it. If the last insn in
1642 the libcall sequence isn't the barrier, it's because the
1643 target emits multiple insns for a call, and there are insns
1644 after the actual call insn (which are redundant and would be
1645 optimized away). The barrier is inserted exactly after the
1646 call insn, so let's go get that and delete the insns after
1647 it, because below we need the barrier to be the last insn in
1648 the sequence. */
1649 delete_insns_since (NEXT_INSN (last_call_insn ()));
1650 }
4956d07c 1651
52a11cbf
RH
1652 seq = get_insns ();
1653 end_sequence ();
12c3874e
JH
1654 barrier = emit_insn_before (seq, region->resume);
1655 /* Avoid duplicate barrier. */
5b0264cb 1656 gcc_assert (BARRIER_P (barrier));
12c3874e 1657 delete_insn (barrier);
53c17031 1658 delete_insn (region->resume);
6de9cd9a
DN
1659
1660 /* ??? From tree-ssa we can wind up with catch regions whose
1661 label is not instantiated, but whose resx is present. Now
1662 that we've dealt with the resx, kill the region. */
1663 if (region->label == NULL && region->type == ERT_CLEANUP)
1664 remove_eh_handler (region);
52a11cbf
RH
1665 }
1666}
1667
1668\f
1669static void
502b8322 1670dw2_build_landing_pads (void)
4956d07c 1671{
ae0ed63a
JM
1672 int i;
1673 unsigned int j;
4956d07c 1674
52a11cbf
RH
1675 for (i = cfun->eh->last_region_number; i > 0; --i)
1676 {
1677 struct eh_region *region = cfun->eh->region_array[i];
1678 rtx seq;
12c3874e 1679 basic_block bb;
5c701bb1 1680 bool clobbers_hard_regs = false;
12c3874e 1681 edge e;
4956d07c 1682
52a11cbf
RH
1683 /* Mind we don't process a region more than once. */
1684 if (!region || region->region_number != i)
1685 continue;
1418bb67 1686
52a11cbf
RH
1687 if (region->type != ERT_CLEANUP
1688 && region->type != ERT_TRY
1689 && region->type != ERT_ALLOWED_EXCEPTIONS)
1690 continue;
12670d88 1691
52a11cbf 1692 start_sequence ();
4956d07c 1693
52a11cbf
RH
1694 region->landing_pad = gen_label_rtx ();
1695 emit_label (region->landing_pad);
4956d07c 1696
52a11cbf
RH
1697#ifdef HAVE_exception_receiver
1698 if (HAVE_exception_receiver)
1699 emit_insn (gen_exception_receiver ());
1700 else
1701#endif
1702#ifdef HAVE_nonlocal_goto_receiver
1703 if (HAVE_nonlocal_goto_receiver)
1704 emit_insn (gen_nonlocal_goto_receiver ());
1705 else
1706#endif
1707 { /* Nothing */ }
4956d07c 1708
52a11cbf
RH
1709 /* If the eh_return data registers are call-saved, then we
1710 won't have considered them clobbered from the call that
1711 threw. Kill them now. */
1712 for (j = 0; ; ++j)
1713 {
1714 unsigned r = EH_RETURN_DATA_REGNO (j);
1715 if (r == INVALID_REGNUM)
1716 break;
1717 if (! call_used_regs[r])
5c701bb1
JS
1718 {
1719 emit_insn (gen_rtx_CLOBBER (VOIDmode, gen_rtx_REG (Pmode, r)));
1720 clobbers_hard_regs = true;
1721 }
1722 }
1723
1724 if (clobbers_hard_regs)
1725 {
1726 /* @@@ This is a kludge. Not all machine descriptions define a
1727 blockage insn, but we must not allow the code we just generated
1728 to be reordered by scheduling. So emit an ASM_INPUT to act as
2ba84f36 1729 blockage insn. */
5c701bb1 1730 emit_insn (gen_rtx_ASM_INPUT (VOIDmode, ""));
52a11cbf 1731 }
e701eb4d 1732
52a11cbf 1733 emit_move_insn (cfun->eh->exc_ptr,
26b10ae0 1734 gen_rtx_REG (ptr_mode, EH_RETURN_DATA_REGNO (0)));
52a11cbf 1735 emit_move_insn (cfun->eh->filter,
93f90be6
FJ
1736 gen_rtx_REG (targetm.eh_return_filter_mode (),
1737 EH_RETURN_DATA_REGNO (1)));
9a0d1e1b 1738
52a11cbf
RH
1739 seq = get_insns ();
1740 end_sequence ();
5816cb14 1741
12c3874e
JH
1742 bb = emit_to_new_bb_before (seq, region->post_landing_pad);
1743 e = make_edge (bb, bb->next_bb, EDGE_FALLTHRU);
1744 e->count = bb->count;
1745 e->probability = REG_BR_PROB_BASE;
52a11cbf 1746 }
4956d07c
MS
1747}
1748
52a11cbf
RH
1749\f
1750struct sjlj_lp_info
1751{
1752 int directly_reachable;
1753 int action_index;
1754 int dispatch_index;
1755 int call_site_index;
1756};
4956d07c 1757
52a11cbf 1758static bool
502b8322 1759sjlj_find_directly_reachable_regions (struct sjlj_lp_info *lp_info)
4956d07c 1760{
52a11cbf
RH
1761 rtx insn;
1762 bool found_one = false;
4956d07c 1763
52a11cbf
RH
1764 for (insn = get_insns (); insn ; insn = NEXT_INSN (insn))
1765 {
1766 struct eh_region *region;
98ce21b3 1767 enum reachable_code rc;
52a11cbf
RH
1768 tree type_thrown;
1769 rtx note;
4956d07c 1770
52a11cbf
RH
1771 if (! INSN_P (insn))
1772 continue;
0d3453df 1773
52a11cbf
RH
1774 note = find_reg_note (insn, REG_EH_REGION, NULL_RTX);
1775 if (!note || INTVAL (XEXP (note, 0)) <= 0)
1776 continue;
5dfa7520 1777
52a11cbf 1778 region = cfun->eh->region_array[INTVAL (XEXP (note, 0))];
5dfa7520 1779
52a11cbf
RH
1780 type_thrown = NULL_TREE;
1781 if (region->type == ERT_THROW)
1782 {
1783 type_thrown = region->u.throw.type;
1784 region = region->outer;
1785 }
12670d88 1786
52a11cbf
RH
1787 /* Find the first containing region that might handle the exception.
1788 That's the landing pad to which we will transfer control. */
98ce21b3 1789 rc = RNL_NOT_CAUGHT;
52a11cbf 1790 for (; region; region = region->outer)
98ce21b3 1791 {
6de9cd9a 1792 rc = reachable_next_level (region, type_thrown, NULL);
98ce21b3
RH
1793 if (rc != RNL_NOT_CAUGHT)
1794 break;
1795 }
1796 if (rc == RNL_MAYBE_CAUGHT || rc == RNL_CAUGHT)
52a11cbf
RH
1797 {
1798 lp_info[region->region_number].directly_reachable = 1;
1799 found_one = true;
1800 }
1801 }
4956d07c 1802
52a11cbf
RH
1803 return found_one;
1804}
e701eb4d
JM
1805
1806static void
502b8322 1807sjlj_assign_call_site_values (rtx dispatch_label, struct sjlj_lp_info *lp_info)
e701eb4d 1808{
52a11cbf
RH
1809 htab_t ar_hash;
1810 int i, index;
1811
1812 /* First task: build the action table. */
1813
1814 VARRAY_UCHAR_INIT (cfun->eh->action_record_data, 64, "action_record_data");
1815 ar_hash = htab_create (31, action_record_hash, action_record_eq, free);
1816
1817 for (i = cfun->eh->last_region_number; i > 0; --i)
1818 if (lp_info[i].directly_reachable)
e6cfb550 1819 {
52a11cbf
RH
1820 struct eh_region *r = cfun->eh->region_array[i];
1821 r->landing_pad = dispatch_label;
1822 lp_info[i].action_index = collect_one_action_chain (ar_hash, r);
1823 if (lp_info[i].action_index != -1)
1824 cfun->uses_eh_lsda = 1;
e6cfb550 1825 }
e701eb4d 1826
52a11cbf 1827 htab_delete (ar_hash);
76fc91c7 1828
52a11cbf
RH
1829 /* Next: assign dispatch values. In dwarf2 terms, this would be the
1830 landing pad label for the region. For sjlj though, there is one
1831 common landing pad from which we dispatch to the post-landing pads.
76fc91c7 1832
52a11cbf
RH
1833 A region receives a dispatch index if it is directly reachable
1834 and requires in-function processing. Regions that share post-landing
eaec9b3d 1835 pads may share dispatch indices. */
52a11cbf
RH
1836 /* ??? Post-landing pad sharing doesn't actually happen at the moment
1837 (see build_post_landing_pads) so we don't bother checking for it. */
4956d07c 1838
52a11cbf
RH
1839 index = 0;
1840 for (i = cfun->eh->last_region_number; i > 0; --i)
98ce21b3 1841 if (lp_info[i].directly_reachable)
52a11cbf 1842 lp_info[i].dispatch_index = index++;
76fc91c7 1843
52a11cbf
RH
1844 /* Finally: assign call-site values. If dwarf2 terms, this would be
1845 the region number assigned by convert_to_eh_region_ranges, but
1846 handles no-action and must-not-throw differently. */
76fc91c7 1847
52a11cbf
RH
1848 call_site_base = 1;
1849 for (i = cfun->eh->last_region_number; i > 0; --i)
1850 if (lp_info[i].directly_reachable)
1851 {
1852 int action = lp_info[i].action_index;
1853
1854 /* Map must-not-throw to otherwise unused call-site index 0. */
1855 if (action == -2)
1856 index = 0;
1857 /* Map no-action to otherwise unused call-site index -1. */
1858 else if (action == -1)
1859 index = -1;
1860 /* Otherwise, look it up in the table. */
1861 else
1862 index = add_call_site (GEN_INT (lp_info[i].dispatch_index), action);
1863
1864 lp_info[i].call_site_index = index;
1865 }
4956d07c 1866}
27a36778 1867
52a11cbf 1868static void
502b8322 1869sjlj_mark_call_sites (struct sjlj_lp_info *lp_info)
27a36778 1870{
52a11cbf
RH
1871 int last_call_site = -2;
1872 rtx insn, mem;
1873
52a11cbf 1874 for (insn = get_insns (); insn ; insn = NEXT_INSN (insn))
27a36778 1875 {
52a11cbf
RH
1876 struct eh_region *region;
1877 int this_call_site;
1878 rtx note, before, p;
27a36778 1879
52a11cbf 1880 /* Reset value tracking at extended basic block boundaries. */
4b4bf941 1881 if (LABEL_P (insn))
52a11cbf 1882 last_call_site = -2;
27a36778 1883
52a11cbf
RH
1884 if (! INSN_P (insn))
1885 continue;
27a36778 1886
52a11cbf
RH
1887 note = find_reg_note (insn, REG_EH_REGION, NULL_RTX);
1888 if (!note)
1889 {
1890 /* Calls (and trapping insns) without notes are outside any
1891 exception handling region in this function. Mark them as
1892 no action. */
4b4bf941 1893 if (CALL_P (insn)
52a11cbf
RH
1894 || (flag_non_call_exceptions
1895 && may_trap_p (PATTERN (insn))))
1896 this_call_site = -1;
1897 else
1898 continue;
1899 }
1900 else
1901 {
1902 /* Calls that are known to not throw need not be marked. */
1903 if (INTVAL (XEXP (note, 0)) <= 0)
1904 continue;
27a36778 1905
52a11cbf
RH
1906 region = cfun->eh->region_array[INTVAL (XEXP (note, 0))];
1907 this_call_site = lp_info[region->region_number].call_site_index;
1908 }
27a36778 1909
52a11cbf
RH
1910 if (this_call_site == last_call_site)
1911 continue;
1912
1913 /* Don't separate a call from it's argument loads. */
1914 before = insn;
4b4bf941 1915 if (CALL_P (insn))
0fb7aeda 1916 before = find_first_parameter_load (insn, NULL_RTX);
4956d07c 1917
52a11cbf 1918 start_sequence ();
fd2c57a9
AH
1919 mem = adjust_address (cfun->eh->sjlj_fc, TYPE_MODE (integer_type_node),
1920 sjlj_fc_call_site_ofs);
52a11cbf
RH
1921 emit_move_insn (mem, GEN_INT (this_call_site));
1922 p = get_insns ();
1923 end_sequence ();
12670d88 1924
2f937369 1925 emit_insn_before (p, before);
52a11cbf
RH
1926 last_call_site = this_call_site;
1927 }
1928}
4956d07c 1929
52a11cbf
RH
1930/* Construct the SjLj_Function_Context. */
1931
1932static void
502b8322 1933sjlj_emit_function_enter (rtx dispatch_label)
4956d07c 1934{
52a11cbf 1935 rtx fn_begin, fc, mem, seq;
4956d07c 1936
52a11cbf 1937 fc = cfun->eh->sjlj_fc;
4956d07c 1938
52a11cbf 1939 start_sequence ();
8a4451aa 1940
8979edec
JL
1941 /* We're storing this libcall's address into memory instead of
1942 calling it directly. Thus, we must call assemble_external_libcall
1943 here, as we can not depend on emit_library_call to do it for us. */
1944 assemble_external_libcall (eh_personality_libfunc);
f4ef873c 1945 mem = adjust_address (fc, Pmode, sjlj_fc_personality_ofs);
52a11cbf
RH
1946 emit_move_insn (mem, eh_personality_libfunc);
1947
f4ef873c 1948 mem = adjust_address (fc, Pmode, sjlj_fc_lsda_ofs);
52a11cbf
RH
1949 if (cfun->uses_eh_lsda)
1950 {
1951 char buf[20];
86bdf071
RE
1952 rtx sym;
1953
df696a75 1954 ASM_GENERATE_INTERNAL_LABEL (buf, "LLSDA", current_function_funcdef_no);
86bdf071
RE
1955 sym = gen_rtx_SYMBOL_REF (Pmode, ggc_strdup (buf));
1956 SYMBOL_REF_FLAGS (sym) = SYMBOL_FLAG_LOCAL;
1957 emit_move_insn (mem, sym);
8a4451aa 1958 }
52a11cbf
RH
1959 else
1960 emit_move_insn (mem, const0_rtx);
3f2c5d1a 1961
52a11cbf
RH
1962#ifdef DONT_USE_BUILTIN_SETJMP
1963 {
1964 rtx x, note;
9defc9b7 1965 x = emit_library_call_value (setjmp_libfunc, NULL_RTX, LCT_RETURNS_TWICE,
52a11cbf
RH
1966 TYPE_MODE (integer_type_node), 1,
1967 plus_constant (XEXP (fc, 0),
1968 sjlj_fc_jbuf_ofs), Pmode);
1969
2e040219 1970 note = emit_note (NOTE_INSN_EXPECTED_VALUE);
52a11cbf
RH
1971 NOTE_EXPECTED_VALUE (note) = gen_rtx_EQ (VOIDmode, x, const0_rtx);
1972
1973 emit_cmp_and_jump_insns (x, const0_rtx, NE, 0,
a06ef755 1974 TYPE_MODE (integer_type_node), 0, dispatch_label);
52a11cbf
RH
1975 }
1976#else
1977 expand_builtin_setjmp_setup (plus_constant (XEXP (fc, 0), sjlj_fc_jbuf_ofs),
1978 dispatch_label);
4956d07c 1979#endif
4956d07c 1980
52a11cbf
RH
1981 emit_library_call (unwind_sjlj_register_libfunc, LCT_NORMAL, VOIDmode,
1982 1, XEXP (fc, 0), Pmode);
12670d88 1983
52a11cbf
RH
1984 seq = get_insns ();
1985 end_sequence ();
4956d07c 1986
52a11cbf
RH
1987 /* ??? Instead of doing this at the beginning of the function,
1988 do this in a block that is at loop level 0 and dominates all
1989 can_throw_internal instructions. */
4956d07c 1990
52a11cbf 1991 for (fn_begin = get_insns (); ; fn_begin = NEXT_INSN (fn_begin))
4b4bf941 1992 if (NOTE_P (fn_begin)
12c3874e
JH
1993 && (NOTE_LINE_NUMBER (fn_begin) == NOTE_INSN_FUNCTION_BEG
1994 || NOTE_LINE_NUMBER (fn_begin) == NOTE_INSN_BASIC_BLOCK))
52a11cbf 1995 break;
12c3874e 1996 if (NOTE_LINE_NUMBER (fn_begin) == NOTE_INSN_FUNCTION_BEG)
628f6a4e 1997 insert_insn_on_edge (seq, EDGE_SUCC (ENTRY_BLOCK_PTR, 0));
12c3874e
JH
1998 else
1999 {
628f6a4e 2000 rtx last = BB_END (EDGE_SUCC (ENTRY_BLOCK_PTR, 0)->dest);
12c3874e 2001 for (; ; fn_begin = NEXT_INSN (fn_begin))
4b4bf941 2002 if ((NOTE_P (fn_begin)
12c3874e
JH
2003 && NOTE_LINE_NUMBER (fn_begin) == NOTE_INSN_FUNCTION_BEG)
2004 || fn_begin == last)
2005 break;
2006 emit_insn_after (seq, fn_begin);
2007 }
4956d07c
MS
2008}
2009
52a11cbf
RH
2010/* Call back from expand_function_end to know where we should put
2011 the call to unwind_sjlj_unregister_libfunc if needed. */
12670d88 2012
52a11cbf 2013void
502b8322 2014sjlj_emit_function_exit_after (rtx after)
52a11cbf
RH
2015{
2016 cfun->eh->sjlj_exit_after = after;
2017}
4956d07c
MS
2018
2019static void
502b8322 2020sjlj_emit_function_exit (void)
52a11cbf
RH
2021{
2022 rtx seq;
12c3874e 2023 edge e;
628f6a4e 2024 edge_iterator ei;
4956d07c 2025
52a11cbf 2026 start_sequence ();
ce152ef8 2027
52a11cbf
RH
2028 emit_library_call (unwind_sjlj_unregister_libfunc, LCT_NORMAL, VOIDmode,
2029 1, XEXP (cfun->eh->sjlj_fc, 0), Pmode);
e6cfb550 2030
52a11cbf
RH
2031 seq = get_insns ();
2032 end_sequence ();
4956d07c 2033
52a11cbf
RH
2034 /* ??? Really this can be done in any block at loop level 0 that
2035 post-dominates all can_throw_internal instructions. This is
2036 the last possible moment. */
9a0d1e1b 2037
628f6a4e 2038 FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR->preds)
12c3874e
JH
2039 if (e->flags & EDGE_FALLTHRU)
2040 break;
2041 if (e)
2042 {
2043 rtx insn;
2044
2045 /* Figure out whether the place we are supposed to insert libcall
2046 is inside the last basic block or after it. In the other case
2047 we need to emit to edge. */
5b0264cb 2048 gcc_assert (e->src->next_bb == EXIT_BLOCK_PTR);
f6a41d17 2049 for (insn = BB_HEAD (e->src); ; insn = NEXT_INSN (insn))
12c3874e 2050 {
f6a41d17
RH
2051 if (insn == cfun->eh->sjlj_exit_after)
2052 {
2053 if (LABEL_P (insn))
2054 insn = NEXT_INSN (insn);
2055 emit_insn_after (seq, insn);
2056 return;
2057 }
2058 if (insn == BB_END (e->src))
2059 break;
12c3874e 2060 }
f6a41d17 2061 insert_insn_on_edge (seq, e);
12c3874e 2062 }
9a0d1e1b
AM
2063}
2064
52a11cbf 2065static void
502b8322 2066sjlj_emit_dispatch_table (rtx dispatch_label, struct sjlj_lp_info *lp_info)
ce152ef8 2067{
52a11cbf
RH
2068 int i, first_reachable;
2069 rtx mem, dispatch, seq, fc;
12c3874e
JH
2070 rtx before;
2071 basic_block bb;
2072 edge e;
52a11cbf
RH
2073
2074 fc = cfun->eh->sjlj_fc;
2075
2076 start_sequence ();
2077
2078 emit_label (dispatch_label);
3f2c5d1a 2079
52a11cbf
RH
2080#ifndef DONT_USE_BUILTIN_SETJMP
2081 expand_builtin_setjmp_receiver (dispatch_label);
2082#endif
2083
2084 /* Load up dispatch index, exc_ptr and filter values from the
2085 function context. */
f4ef873c
RK
2086 mem = adjust_address (fc, TYPE_MODE (integer_type_node),
2087 sjlj_fc_call_site_ofs);
52a11cbf
RH
2088 dispatch = copy_to_reg (mem);
2089
f4ef873c 2090 mem = adjust_address (fc, word_mode, sjlj_fc_data_ofs);
f920765d 2091 if (word_mode != ptr_mode)
52a11cbf
RH
2092 {
2093#ifdef POINTERS_EXTEND_UNSIGNED
f920765d 2094 mem = convert_memory_address (ptr_mode, mem);
52a11cbf 2095#else
f920765d 2096 mem = convert_to_mode (ptr_mode, mem, 0);
52a11cbf
RH
2097#endif
2098 }
2099 emit_move_insn (cfun->eh->exc_ptr, mem);
2100
f4ef873c 2101 mem = adjust_address (fc, word_mode, sjlj_fc_data_ofs + UNITS_PER_WORD);
52a11cbf 2102 emit_move_insn (cfun->eh->filter, mem);
4956d07c 2103
52a11cbf
RH
2104 /* Jump to one of the directly reachable regions. */
2105 /* ??? This really ought to be using a switch statement. */
2106
2107 first_reachable = 0;
2108 for (i = cfun->eh->last_region_number; i > 0; --i)
a1622f83 2109 {
98ce21b3 2110 if (! lp_info[i].directly_reachable)
52a11cbf 2111 continue;
a1622f83 2112
52a11cbf
RH
2113 if (! first_reachable)
2114 {
2115 first_reachable = i;
2116 continue;
2117 }
e6cfb550 2118
a06ef755
RK
2119 emit_cmp_and_jump_insns (dispatch, GEN_INT (lp_info[i].dispatch_index),
2120 EQ, NULL_RTX, TYPE_MODE (integer_type_node), 0,
52a11cbf 2121 cfun->eh->region_array[i]->post_landing_pad);
a1622f83 2122 }
9a0d1e1b 2123
52a11cbf
RH
2124 seq = get_insns ();
2125 end_sequence ();
4956d07c 2126
12c3874e
JH
2127 before = cfun->eh->region_array[first_reachable]->post_landing_pad;
2128
2129 bb = emit_to_new_bb_before (seq, before);
2130 e = make_edge (bb, bb->next_bb, EDGE_FALLTHRU);
2131 e->count = bb->count;
2132 e->probability = REG_BR_PROB_BASE;
ce152ef8
AM
2133}
2134
52a11cbf 2135static void
502b8322 2136sjlj_build_landing_pads (void)
ce152ef8 2137{
52a11cbf 2138 struct sjlj_lp_info *lp_info;
ce152ef8 2139
703ad42b
KG
2140 lp_info = xcalloc (cfun->eh->last_region_number + 1,
2141 sizeof (struct sjlj_lp_info));
ce152ef8 2142
52a11cbf
RH
2143 if (sjlj_find_directly_reachable_regions (lp_info))
2144 {
2145 rtx dispatch_label = gen_label_rtx ();
ce152ef8 2146
52a11cbf
RH
2147 cfun->eh->sjlj_fc
2148 = assign_stack_local (TYPE_MODE (sjlj_fc_type_node),
2149 int_size_in_bytes (sjlj_fc_type_node),
2150 TYPE_ALIGN (sjlj_fc_type_node));
4956d07c 2151
52a11cbf
RH
2152 sjlj_assign_call_site_values (dispatch_label, lp_info);
2153 sjlj_mark_call_sites (lp_info);
a1622f83 2154
52a11cbf
RH
2155 sjlj_emit_function_enter (dispatch_label);
2156 sjlj_emit_dispatch_table (dispatch_label, lp_info);
2157 sjlj_emit_function_exit ();
2158 }
a1622f83 2159
52a11cbf 2160 free (lp_info);
4956d07c 2161}
ce152ef8 2162
ce152ef8 2163void
502b8322 2164finish_eh_generation (void)
ce152ef8 2165{
12c3874e
JH
2166 basic_block bb;
2167
52a11cbf
RH
2168 /* Nothing to do if no regions created. */
2169 if (cfun->eh->region_tree == NULL)
ce152ef8
AM
2170 return;
2171
52a11cbf
RH
2172 /* The object here is to provide find_basic_blocks with detailed
2173 information (via reachable_handlers) on how exception control
2174 flows within the function. In this first pass, we can include
2175 type information garnered from ERT_THROW and ERT_ALLOWED_EXCEPTIONS
2176 regions, and hope that it will be useful in deleting unreachable
2177 handlers. Subsequently, we will generate landing pads which will
2178 connect many of the handlers, and then type information will not
2179 be effective. Still, this is a win over previous implementations. */
2180
52a11cbf
RH
2181 /* These registers are used by the landing pads. Make sure they
2182 have been generated. */
86c99549
RH
2183 get_exception_pointer (cfun);
2184 get_exception_filter (cfun);
52a11cbf
RH
2185
2186 /* Construct the landing pads. */
2187
2188 assign_filter_values ();
2189 build_post_landing_pads ();
2190 connect_post_landing_pads ();
2191 if (USING_SJLJ_EXCEPTIONS)
2192 sjlj_build_landing_pads ();
2193 else
2194 dw2_build_landing_pads ();
ce152ef8 2195
52a11cbf 2196 cfun->eh->built_landing_pads = 1;
ce152ef8 2197
52a11cbf
RH
2198 /* We've totally changed the CFG. Start over. */
2199 find_exception_handler_labels ();
12c3874e
JH
2200 break_superblocks ();
2201 if (USING_SJLJ_EXCEPTIONS)
2202 commit_edge_insertions ();
2203 FOR_EACH_BB (bb)
2204 {
628f6a4e
BE
2205 edge e;
2206 edge_iterator ei;
12c3874e 2207 bool eh = false;
628f6a4e 2208 for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
12c3874e 2209 {
12c3874e
JH
2210 if (e->flags & EDGE_EH)
2211 {
2212 remove_edge (e);
2213 eh = true;
2214 }
628f6a4e
BE
2215 else
2216 ei_next (&ei);
12c3874e
JH
2217 }
2218 if (eh)
6de9cd9a 2219 rtl_make_eh_edge (NULL, bb, BB_END (bb));
12c3874e 2220 }
ce152ef8 2221}
4956d07c 2222\f
6a58eee9 2223static hashval_t
502b8322 2224ehl_hash (const void *pentry)
6a58eee9
RH
2225{
2226 struct ehl_map_entry *entry = (struct ehl_map_entry *) pentry;
2227
2228 /* 2^32 * ((sqrt(5) - 1) / 2) */
2229 const hashval_t scaled_golden_ratio = 0x9e3779b9;
2230 return CODE_LABEL_NUMBER (entry->label) * scaled_golden_ratio;
2231}
2232
2233static int
502b8322 2234ehl_eq (const void *pentry, const void *pdata)
6a58eee9
RH
2235{
2236 struct ehl_map_entry *entry = (struct ehl_map_entry *) pentry;
2237 struct ehl_map_entry *data = (struct ehl_map_entry *) pdata;
2238
2239 return entry->label == data->label;
2240}
2241
52a11cbf 2242/* This section handles removing dead code for flow. */
154bba13 2243
6a58eee9 2244/* Remove LABEL from exception_handler_label_map. */
154bba13 2245
52a11cbf 2246static void
502b8322 2247remove_exception_handler_label (rtx label)
154bba13 2248{
6a58eee9 2249 struct ehl_map_entry **slot, tmp;
100d81d4 2250
6a58eee9 2251 /* If exception_handler_label_map was not built yet,
655dd289 2252 there is nothing to do. */
e2500fed 2253 if (cfun->eh->exception_handler_label_map == NULL)
655dd289
JJ
2254 return;
2255
6a58eee9
RH
2256 tmp.label = label;
2257 slot = (struct ehl_map_entry **)
e2500fed 2258 htab_find_slot (cfun->eh->exception_handler_label_map, &tmp, NO_INSERT);
5b0264cb 2259 gcc_assert (slot);
154bba13 2260
e2500fed 2261 htab_clear_slot (cfun->eh->exception_handler_label_map, (void **) slot);
154bba13
TT
2262}
2263
52a11cbf 2264/* Splice REGION from the region tree etc. */
12670d88 2265
f19c9228 2266static void
502b8322 2267remove_eh_handler (struct eh_region *region)
4956d07c 2268{
ff2c46ac 2269 struct eh_region **pp, **pp_start, *p, *outer, *inner;
52a11cbf 2270 rtx lab;
4956d07c 2271
52a11cbf
RH
2272 /* For the benefit of efficiently handling REG_EH_REGION notes,
2273 replace this region in the region array with its containing
2274 region. Note that previous region deletions may result in
6a58eee9
RH
2275 multiple copies of this region in the array, so we have a
2276 list of alternate numbers by which we are known. */
2277
ff2c46ac
RH
2278 outer = region->outer;
2279 cfun->eh->region_array[region->region_number] = outer;
6a58eee9
RH
2280 if (region->aka)
2281 {
2282 int i;
87c476a2
ZD
2283 bitmap_iterator bi;
2284
2285 EXECUTE_IF_SET_IN_BITMAP (region->aka, 0, i, bi)
2286 {
2287 cfun->eh->region_array[i] = outer;
2288 }
6a58eee9
RH
2289 }
2290
ff2c46ac 2291 if (outer)
6a58eee9 2292 {
ff2c46ac 2293 if (!outer->aka)
e2500fed 2294 outer->aka = BITMAP_GGC_ALLOC ();
6a58eee9 2295 if (region->aka)
ff2c46ac
RH
2296 bitmap_a_or_b (outer->aka, outer->aka, region->aka);
2297 bitmap_set_bit (outer->aka, region->region_number);
6a58eee9 2298 }
52a11cbf
RH
2299
2300 if (cfun->eh->built_landing_pads)
2301 lab = region->landing_pad;
2302 else
2303 lab = region->label;
2304 if (lab)
2305 remove_exception_handler_label (lab);
2306
ff2c46ac
RH
2307 if (outer)
2308 pp_start = &outer->inner;
52a11cbf 2309 else
ff2c46ac
RH
2310 pp_start = &cfun->eh->region_tree;
2311 for (pp = pp_start, p = *pp; p != region; pp = &p->next_peer, p = *pp)
52a11cbf 2312 continue;
ff2c46ac 2313 *pp = region->next_peer;
12670d88 2314
ff2c46ac
RH
2315 inner = region->inner;
2316 if (inner)
4956d07c 2317 {
ff2c46ac
RH
2318 for (p = inner; p->next_peer ; p = p->next_peer)
2319 p->outer = outer;
2320 p->outer = outer;
2321
2322 p->next_peer = *pp_start;
2323 *pp_start = inner;
4956d07c 2324 }
f19c9228 2325
52a11cbf
RH
2326 if (region->type == ERT_CATCH)
2327 {
2328 struct eh_region *try, *next, *prev;
f19c9228 2329
52a11cbf
RH
2330 for (try = region->next_peer;
2331 try->type == ERT_CATCH;
2332 try = try->next_peer)
2333 continue;
5b0264cb 2334 gcc_assert (try->type == ERT_TRY);
f19c9228 2335
52a11cbf
RH
2336 next = region->u.catch.next_catch;
2337 prev = region->u.catch.prev_catch;
f19c9228 2338
52a11cbf
RH
2339 if (next)
2340 next->u.catch.prev_catch = prev;
2341 else
2342 try->u.try.last_catch = prev;
2343 if (prev)
2344 prev->u.catch.next_catch = next;
2345 else
2346 {
2347 try->u.try.catch = next;
2348 if (! next)
2349 remove_eh_handler (try);
2350 }
2351 }
4956d07c
MS
2352}
2353
52a11cbf
RH
2354/* LABEL heads a basic block that is about to be deleted. If this
2355 label corresponds to an exception region, we may be able to
2356 delete the region. */
4956d07c
MS
2357
2358void
502b8322 2359maybe_remove_eh_handler (rtx label)
4956d07c 2360{
6a58eee9
RH
2361 struct ehl_map_entry **slot, tmp;
2362 struct eh_region *region;
4956d07c 2363
52a11cbf
RH
2364 /* ??? After generating landing pads, it's not so simple to determine
2365 if the region data is completely unused. One must examine the
2366 landing pad and the post landing pad, and whether an inner try block
2367 is referencing the catch handlers directly. */
2368 if (cfun->eh->built_landing_pads)
4956d07c
MS
2369 return;
2370
6a58eee9
RH
2371 tmp.label = label;
2372 slot = (struct ehl_map_entry **)
e2500fed 2373 htab_find_slot (cfun->eh->exception_handler_label_map, &tmp, NO_INSERT);
6a58eee9
RH
2374 if (! slot)
2375 return;
2376 region = (*slot)->region;
2377 if (! region)
2378 return;
2379
2380 /* Flow will want to remove MUST_NOT_THROW regions as unreachable
2381 because there is no path to the fallback call to terminate.
2382 But the region continues to affect call-site data until there
2383 are no more contained calls, which we don't see here. */
2384 if (region->type == ERT_MUST_NOT_THROW)
87ff9c8e 2385 {
e2500fed 2386 htab_clear_slot (cfun->eh->exception_handler_label_map, (void **) slot);
6a58eee9 2387 region->label = NULL_RTX;
87ff9c8e 2388 }
6a58eee9
RH
2389 else
2390 remove_eh_handler (region);
2391}
2392
2393/* Invokes CALLBACK for every exception handler label. Only used by old
2394 loop hackery; should not be used by new code. */
2395
2396void
502b8322 2397for_each_eh_label (void (*callback) (rtx))
6a58eee9 2398{
e2500fed 2399 htab_traverse (cfun->eh->exception_handler_label_map, for_each_eh_label_1,
3897f229 2400 (void *) &callback);
87ff9c8e
RH
2401}
2402
6a58eee9 2403static int
502b8322 2404for_each_eh_label_1 (void **pentry, void *data)
6a58eee9
RH
2405{
2406 struct ehl_map_entry *entry = *(struct ehl_map_entry **)pentry;
3897f229 2407 void (*callback) (rtx) = *(void (**) (rtx)) data;
6a58eee9
RH
2408
2409 (*callback) (entry->label);
2410 return 1;
2411}
f698d217
SB
2412
2413/* Invoke CALLBACK for every exception region in the current function. */
2414
2415void
2416for_each_eh_region (void (*callback) (struct eh_region *))
2417{
2418 int i, n = cfun->eh->last_region_number;
2419 for (i = 1; i <= n; ++i)
2420 {
2421 struct eh_region *region = cfun->eh->region_array[i];
2422 if (region)
2423 (*callback) (region);
2424 }
2425}
52a11cbf
RH
2426\f
2427/* This section describes CFG exception edges for flow. */
87ff9c8e 2428
52a11cbf 2429/* For communicating between calls to reachable_next_level. */
6de9cd9a 2430struct reachable_info
87ff9c8e 2431{
52a11cbf
RH
2432 tree types_caught;
2433 tree types_allowed;
6de9cd9a
DN
2434 void (*callback) (struct eh_region *, void *);
2435 void *callback_data;
2436 bool saw_any_handlers;
52a11cbf 2437};
87ff9c8e 2438
52a11cbf
RH
2439/* A subroutine of reachable_next_level. Return true if TYPE, or a
2440 base class of TYPE, is in HANDLED. */
87ff9c8e 2441
6de9cd9a 2442int
502b8322 2443check_handled (tree handled, tree type)
87ff9c8e 2444{
52a11cbf
RH
2445 tree t;
2446
2447 /* We can check for exact matches without front-end help. */
2448 if (! lang_eh_type_covers)
f54a7f6f 2449 {
52a11cbf
RH
2450 for (t = handled; t ; t = TREE_CHAIN (t))
2451 if (TREE_VALUE (t) == type)
2452 return 1;
2453 }
2454 else
2455 {
2456 for (t = handled; t ; t = TREE_CHAIN (t))
2457 if ((*lang_eh_type_covers) (TREE_VALUE (t), type))
2458 return 1;
f54a7f6f 2459 }
52a11cbf
RH
2460
2461 return 0;
87ff9c8e
RH
2462}
2463
52a11cbf
RH
2464/* A subroutine of reachable_next_level. If we are collecting a list
2465 of handlers, add one. After landing pad generation, reference
2466 it instead of the handlers themselves. Further, the handlers are
3f2c5d1a 2467 all wired together, so by referencing one, we've got them all.
52a11cbf
RH
2468 Before landing pad generation we reference each handler individually.
2469
2470 LP_REGION contains the landing pad; REGION is the handler. */
87ff9c8e
RH
2471
2472static void
6de9cd9a
DN
2473add_reachable_handler (struct reachable_info *info,
2474 struct eh_region *lp_region, struct eh_region *region)
87ff9c8e 2475{
52a11cbf
RH
2476 if (! info)
2477 return;
2478
6de9cd9a
DN
2479 info->saw_any_handlers = true;
2480
52a11cbf 2481 if (cfun->eh->built_landing_pads)
6de9cd9a 2482 info->callback (lp_region, info->callback_data);
52a11cbf 2483 else
6de9cd9a 2484 info->callback (region, info->callback_data);
87ff9c8e
RH
2485}
2486
3f2c5d1a 2487/* Process one level of exception regions for reachability.
52a11cbf
RH
2488 If TYPE_THROWN is non-null, then it is the *exact* type being
2489 propagated. If INFO is non-null, then collect handler labels
2490 and caught/allowed type information between invocations. */
87ff9c8e 2491
52a11cbf 2492static enum reachable_code
502b8322
AJ
2493reachable_next_level (struct eh_region *region, tree type_thrown,
2494 struct reachable_info *info)
87ff9c8e 2495{
52a11cbf
RH
2496 switch (region->type)
2497 {
2498 case ERT_CLEANUP:
2499 /* Before landing-pad generation, we model control flow
2500 directly to the individual handlers. In this way we can
2501 see that catch handler types may shadow one another. */
2502 add_reachable_handler (info, region, region);
2503 return RNL_MAYBE_CAUGHT;
2504
2505 case ERT_TRY:
2506 {
2507 struct eh_region *c;
2508 enum reachable_code ret = RNL_NOT_CAUGHT;
fa51b01b 2509
52a11cbf
RH
2510 for (c = region->u.try.catch; c ; c = c->u.catch.next_catch)
2511 {
2512 /* A catch-all handler ends the search. */
6d41a92f 2513 if (c->u.catch.type_list == NULL)
52a11cbf
RH
2514 {
2515 add_reachable_handler (info, region, c);
2516 return RNL_CAUGHT;
2517 }
2518
2519 if (type_thrown)
2520 {
a8154559 2521 /* If we have at least one type match, end the search. */
6d41a92f 2522 tree tp_node = c->u.catch.type_list;
3f2c5d1a 2523
6d41a92f 2524 for (; tp_node; tp_node = TREE_CHAIN (tp_node))
52a11cbf 2525 {
6d41a92f
OH
2526 tree type = TREE_VALUE (tp_node);
2527
2528 if (type == type_thrown
2529 || (lang_eh_type_covers
2530 && (*lang_eh_type_covers) (type, type_thrown)))
2531 {
2532 add_reachable_handler (info, region, c);
2533 return RNL_CAUGHT;
2534 }
52a11cbf
RH
2535 }
2536
2537 /* If we have definitive information of a match failure,
2538 the catch won't trigger. */
2539 if (lang_eh_type_covers)
2540 return RNL_NOT_CAUGHT;
2541 }
2542
6d41a92f
OH
2543 /* At this point, we either don't know what type is thrown or
2544 don't have front-end assistance to help deciding if it is
2545 covered by one of the types in the list for this region.
3f2c5d1a 2546
6d41a92f
OH
2547 We'd then like to add this region to the list of reachable
2548 handlers since it is indeed potentially reachable based on the
3f2c5d1a
RS
2549 information we have.
2550
6d41a92f
OH
2551 Actually, this handler is for sure not reachable if all the
2552 types it matches have already been caught. That is, it is only
2553 potentially reachable if at least one of the types it catches
2554 has not been previously caught. */
2555
52a11cbf
RH
2556 if (! info)
2557 ret = RNL_MAYBE_CAUGHT;
6d41a92f 2558 else
52a11cbf 2559 {
6d41a92f
OH
2560 tree tp_node = c->u.catch.type_list;
2561 bool maybe_reachable = false;
52a11cbf 2562
6d41a92f
OH
2563 /* Compute the potential reachability of this handler and
2564 update the list of types caught at the same time. */
2565 for (; tp_node; tp_node = TREE_CHAIN (tp_node))
2566 {
2567 tree type = TREE_VALUE (tp_node);
2568
2569 if (! check_handled (info->types_caught, type))
2570 {
2571 info->types_caught
2572 = tree_cons (NULL, type, info->types_caught);
3f2c5d1a 2573
6d41a92f
OH
2574 maybe_reachable = true;
2575 }
2576 }
3f2c5d1a 2577
6d41a92f
OH
2578 if (maybe_reachable)
2579 {
2580 add_reachable_handler (info, region, c);
3f2c5d1a 2581
6d41a92f
OH
2582 /* ??? If the catch type is a base class of every allowed
2583 type, then we know we can stop the search. */
2584 ret = RNL_MAYBE_CAUGHT;
2585 }
52a11cbf
RH
2586 }
2587 }
87ff9c8e 2588
52a11cbf
RH
2589 return ret;
2590 }
87ff9c8e 2591
52a11cbf
RH
2592 case ERT_ALLOWED_EXCEPTIONS:
2593 /* An empty list of types definitely ends the search. */
2594 if (region->u.allowed.type_list == NULL_TREE)
2595 {
2596 add_reachable_handler (info, region, region);
2597 return RNL_CAUGHT;
2598 }
87ff9c8e 2599
52a11cbf
RH
2600 /* Collect a list of lists of allowed types for use in detecting
2601 when a catch may be transformed into a catch-all. */
2602 if (info)
2603 info->types_allowed = tree_cons (NULL_TREE,
2604 region->u.allowed.type_list,
2605 info->types_allowed);
3f2c5d1a 2606
684d9f3b 2607 /* If we have definitive information about the type hierarchy,
52a11cbf
RH
2608 then we can tell if the thrown type will pass through the
2609 filter. */
2610 if (type_thrown && lang_eh_type_covers)
2611 {
2612 if (check_handled (region->u.allowed.type_list, type_thrown))
2613 return RNL_NOT_CAUGHT;
2614 else
2615 {
2616 add_reachable_handler (info, region, region);
2617 return RNL_CAUGHT;
2618 }
2619 }
21cd906e 2620
52a11cbf
RH
2621 add_reachable_handler (info, region, region);
2622 return RNL_MAYBE_CAUGHT;
21cd906e 2623
52a11cbf 2624 case ERT_CATCH:
fbe5a4a6 2625 /* Catch regions are handled by their controlling try region. */
52a11cbf 2626 return RNL_NOT_CAUGHT;
21cd906e 2627
52a11cbf
RH
2628 case ERT_MUST_NOT_THROW:
2629 /* Here we end our search, since no exceptions may propagate.
2630 If we've touched down at some landing pad previous, then the
2631 explicit function call we generated may be used. Otherwise
2632 the call is made by the runtime. */
6de9cd9a 2633 if (info && info->saw_any_handlers)
21cd906e 2634 {
52a11cbf 2635 add_reachable_handler (info, region, region);
0fb7aeda 2636 return RNL_CAUGHT;
21cd906e 2637 }
52a11cbf
RH
2638 else
2639 return RNL_BLOCKED;
21cd906e 2640
52a11cbf
RH
2641 case ERT_THROW:
2642 case ERT_FIXUP:
3f2c5d1a 2643 case ERT_UNKNOWN:
52a11cbf 2644 /* Shouldn't see these here. */
5b0264cb 2645 gcc_unreachable ();
52a11cbf 2646 break;
5b0264cb
NS
2647 default:
2648 gcc_unreachable ();
21cd906e 2649 }
fa51b01b 2650}
4956d07c 2651
6de9cd9a 2652/* Invoke CALLBACK on each region reachable from REGION_NUMBER. */
4956d07c 2653
6de9cd9a
DN
2654void
2655foreach_reachable_handler (int region_number, bool is_resx,
2656 void (*callback) (struct eh_region *, void *),
2657 void *callback_data)
4956d07c 2658{
52a11cbf
RH
2659 struct reachable_info info;
2660 struct eh_region *region;
2661 tree type_thrown;
4956d07c 2662
52a11cbf 2663 memset (&info, 0, sizeof (info));
6de9cd9a
DN
2664 info.callback = callback;
2665 info.callback_data = callback_data;
4956d07c 2666
52a11cbf 2667 region = cfun->eh->region_array[region_number];
fb13d4d0 2668
52a11cbf 2669 type_thrown = NULL_TREE;
6de9cd9a 2670 if (is_resx)
7f206d8f
RH
2671 {
2672 /* A RESX leaves a region instead of entering it. Thus the
2673 region itself may have been deleted out from under us. */
2674 if (region == NULL)
6de9cd9a 2675 return;
7f206d8f
RH
2676 region = region->outer;
2677 }
2678 else if (region->type == ERT_THROW)
52a11cbf
RH
2679 {
2680 type_thrown = region->u.throw.type;
2681 region = region->outer;
2682 }
fac62ecf 2683
bafb714b
MM
2684 while (region)
2685 {
2686 if (reachable_next_level (region, type_thrown, &info) >= RNL_CAUGHT)
a944ceb9 2687 break;
bafb714b
MM
2688 /* If we have processed one cleanup, there is no point in
2689 processing any more of them. Each cleanup will have an edge
2690 to the next outer cleanup region, so the flow graph will be
2691 accurate. */
2692 if (region->type == ERT_CLEANUP)
2693 region = region->u.cleanup.prev_try;
2694 else
2695 region = region->outer;
2696 }
6de9cd9a
DN
2697}
2698
2699/* Retrieve a list of labels of exception handlers which can be
2700 reached by a given insn. */
2701
2702static void
2703arh_to_landing_pad (struct eh_region *region, void *data)
2704{
2705 rtx *p_handlers = data;
2706 if (! *p_handlers)
2707 *p_handlers = alloc_INSN_LIST (region->landing_pad, NULL_RTX);
2708}
2709
2710static void
2711arh_to_label (struct eh_region *region, void *data)
2712{
2713 rtx *p_handlers = data;
2714 *p_handlers = alloc_INSN_LIST (region->label, *p_handlers);
2715}
2716
2717rtx
2718reachable_handlers (rtx insn)
2719{
2720 bool is_resx = false;
2721 rtx handlers = NULL;
2722 int region_number;
2723
4b4bf941 2724 if (JUMP_P (insn)
6de9cd9a
DN
2725 && GET_CODE (PATTERN (insn)) == RESX)
2726 {
2727 region_number = XINT (PATTERN (insn), 0);
2728 is_resx = true;
2729 }
2730 else
2731 {
2732 rtx note = find_reg_note (insn, REG_EH_REGION, NULL_RTX);
2733 if (!note || INTVAL (XEXP (note, 0)) <= 0)
2734 return NULL;
2735 region_number = INTVAL (XEXP (note, 0));
2736 }
502b8322 2737
6de9cd9a
DN
2738 foreach_reachable_handler (region_number, is_resx,
2739 (cfun->eh->built_landing_pads
2740 ? arh_to_landing_pad
2741 : arh_to_label),
2742 &handlers);
2743
2744 return handlers;
fb13d4d0
JM
2745}
2746
52a11cbf
RH
2747/* Determine if the given INSN can throw an exception that is caught
2748 within the function. */
4956d07c 2749
52a11cbf 2750bool
6de9cd9a 2751can_throw_internal_1 (int region_number)
4956d07c 2752{
52a11cbf
RH
2753 struct eh_region *region;
2754 tree type_thrown;
6de9cd9a
DN
2755
2756 region = cfun->eh->region_array[region_number];
2757
2758 type_thrown = NULL_TREE;
2759 if (region->type == ERT_THROW)
2760 {
2761 type_thrown = region->u.throw.type;
2762 region = region->outer;
2763 }
2764
2765 /* If this exception is ignored by each and every containing region,
2766 then control passes straight out. The runtime may handle some
2767 regions, which also do not require processing internally. */
2768 for (; region; region = region->outer)
2769 {
2770 enum reachable_code how = reachable_next_level (region, type_thrown, 0);
2771 if (how == RNL_BLOCKED)
2772 return false;
2773 if (how != RNL_NOT_CAUGHT)
2774 return true;
2775 }
2776
2777 return false;
2778}
2779
2780bool
2781can_throw_internal (rtx insn)
2782{
52a11cbf 2783 rtx note;
e6cfb550 2784
52a11cbf
RH
2785 if (! INSN_P (insn))
2786 return false;
12670d88 2787
4b4bf941 2788 if (JUMP_P (insn)
0620be18
JH
2789 && GET_CODE (PATTERN (insn)) == RESX
2790 && XINT (PATTERN (insn), 0) > 0)
2791 return can_throw_internal_1 (XINT (PATTERN (insn), 0));
2792
4b4bf941 2793 if (NONJUMP_INSN_P (insn)
52a11cbf
RH
2794 && GET_CODE (PATTERN (insn)) == SEQUENCE)
2795 insn = XVECEXP (PATTERN (insn), 0, 0);
4956d07c 2796
52a11cbf
RH
2797 /* Every insn that might throw has an EH_REGION note. */
2798 note = find_reg_note (insn, REG_EH_REGION, NULL_RTX);
2799 if (!note || INTVAL (XEXP (note, 0)) <= 0)
2800 return false;
4956d07c 2801
6de9cd9a
DN
2802 return can_throw_internal_1 (INTVAL (XEXP (note, 0)));
2803}
2804
2805/* Determine if the given INSN can throw an exception that is
2806 visible outside the function. */
2807
2808bool
2809can_throw_external_1 (int region_number)
2810{
2811 struct eh_region *region;
2812 tree type_thrown;
2813
2814 region = cfun->eh->region_array[region_number];
4956d07c 2815
52a11cbf
RH
2816 type_thrown = NULL_TREE;
2817 if (region->type == ERT_THROW)
2818 {
2819 type_thrown = region->u.throw.type;
2820 region = region->outer;
2821 }
4956d07c 2822
6de9cd9a
DN
2823 /* If the exception is caught or blocked by any containing region,
2824 then it is not seen by any calling function. */
2825 for (; region ; region = region->outer)
2826 if (reachable_next_level (region, type_thrown, NULL) >= RNL_CAUGHT)
2827 return false;
4956d07c 2828
6de9cd9a 2829 return true;
52a11cbf 2830}
4956d07c 2831
52a11cbf 2832bool
502b8322 2833can_throw_external (rtx insn)
4956d07c 2834{
52a11cbf 2835 rtx note;
4956d07c 2836
52a11cbf
RH
2837 if (! INSN_P (insn))
2838 return false;
2839
4b4bf941 2840 if (NONJUMP_INSN_P (insn)
52a11cbf
RH
2841 && GET_CODE (PATTERN (insn)) == SEQUENCE)
2842 insn = XVECEXP (PATTERN (insn), 0, 0);
2843
52a11cbf
RH
2844 note = find_reg_note (insn, REG_EH_REGION, NULL_RTX);
2845 if (!note)
2846 {
2847 /* Calls (and trapping insns) without notes are outside any
2848 exception handling region in this function. We have to
2849 assume it might throw. Given that the front end and middle
2850 ends mark known NOTHROW functions, this isn't so wildly
2851 inaccurate. */
4b4bf941 2852 return (CALL_P (insn)
52a11cbf
RH
2853 || (flag_non_call_exceptions
2854 && may_trap_p (PATTERN (insn))));
2855 }
2856 if (INTVAL (XEXP (note, 0)) <= 0)
2857 return false;
2858
6de9cd9a 2859 return can_throw_external_1 (INTVAL (XEXP (note, 0)));
4956d07c 2860}
1ef1bf06 2861
97b0ade3 2862/* Set TREE_NOTHROW and cfun->all_throwers_are_sibcalls. */
6814a8a0 2863
b6128b8c 2864void
502b8322 2865set_nothrow_function_flags (void)
1ef1bf06
AM
2866{
2867 rtx insn;
502b8322 2868
97b0ade3 2869 TREE_NOTHROW (current_function_decl) = 1;
1ef1bf06 2870
b6128b8c
SH
2871 /* Assume cfun->all_throwers_are_sibcalls until we encounter
2872 something that can throw an exception. We specifically exempt
2873 CALL_INSNs that are SIBLING_CALL_P, as these are really jumps,
2874 and can't throw. Most CALL_INSNs are not SIBLING_CALL_P, so this
2875 is optimistic. */
1ef1bf06 2876
b6128b8c
SH
2877 cfun->all_throwers_are_sibcalls = 1;
2878
2879 if (! flag_exceptions)
2880 return;
502b8322 2881
1ef1bf06 2882 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
52a11cbf 2883 if (can_throw_external (insn))
b6128b8c 2884 {
97b0ade3 2885 TREE_NOTHROW (current_function_decl) = 0;
b6128b8c 2886
4b4bf941 2887 if (!CALL_P (insn) || !SIBLING_CALL_P (insn))
b6128b8c
SH
2888 {
2889 cfun->all_throwers_are_sibcalls = 0;
2890 return;
2891 }
2892 }
2893
52a11cbf
RH
2894 for (insn = current_function_epilogue_delay_list; insn;
2895 insn = XEXP (insn, 1))
b6128b8c
SH
2896 if (can_throw_external (insn))
2897 {
97b0ade3 2898 TREE_NOTHROW (current_function_decl) = 0;
4da896b2 2899
4b4bf941 2900 if (!CALL_P (insn) || !SIBLING_CALL_P (insn))
b6128b8c
SH
2901 {
2902 cfun->all_throwers_are_sibcalls = 0;
2903 return;
2904 }
2905 }
1ef1bf06 2906}
52a11cbf 2907
ca55abae 2908\f
52a11cbf 2909/* Various hooks for unwind library. */
ca55abae
JM
2910
/* Do any necessary initialization to access arbitrary stack frames.
   On the SPARC, this means flushing the register windows.  */

void
expand_builtin_unwind_init (void)
{
  /* Set this so all the registers get saved in our frame; we need to be
     able to copy the saved values for any registers from frames we unwind.  */
  current_function_has_nonlocal_label = 1;

  /* Give the target a chance to flush register windows or do other
     frame-setup work (macro supplied by the target, if any).  */
#ifdef SETUP_FRAME_ADDRESSES
  SETUP_FRAME_ADDRESSES ();
#endif
}
2925
/* Expand __builtin_eh_return_data_regno: map the constant index in
   ARGLIST onto the debug-info register number of the corresponding EH
   return data register.  Returns constm1_rtx (i.e. -1) when the
   argument is not a constant or the index is out of range.  */

rtx
expand_builtin_eh_return_data_regno (tree arglist)
{
  tree which = TREE_VALUE (arglist);
  unsigned HOST_WIDE_INT iwhich;

  if (TREE_CODE (which) != INTEGER_CST)
    {
      error ("argument of %<__builtin_eh_return_regno%> must be constant");
      return constm1_rtx;
    }

  iwhich = tree_low_cst (which, 1);
  iwhich = EH_RETURN_DATA_REGNO (iwhich);
  if (iwhich == INVALID_REGNUM)
    return constm1_rtx;

  /* Translate the hard register number into the numbering used by the
     unwinder's debug info.  */
#ifdef DWARF_FRAME_REGNUM
  iwhich = DWARF_FRAME_REGNUM (iwhich);
#else
  iwhich = DBX_REGISTER_NUMBER (iwhich);
#endif

  return GEN_INT (iwhich);
}
2951
ca55abae
JM
/* Given a value extracted from the return address register or stack slot,
   return the actual address encoded in that value.  */

rtx
expand_builtin_extract_return_addr (tree addr_tree)
{
  rtx addr = expand_expr (addr_tree, NULL_RTX, Pmode, 0);

  /* Bring the value to Pmode first.  NOTE(review): VOIDmode is skipped
     here, presumably because mode-less constants need no conversion —
     confirm against rtl.texi.  */
  if (GET_MODE (addr) != Pmode
      && GET_MODE (addr) != VOIDmode)
    {
#ifdef POINTERS_EXTEND_UNSIGNED
      addr = convert_memory_address (Pmode, addr);
#else
      addr = convert_to_mode (Pmode, addr, 0);
#endif
    }

  /* First mask out any unwanted bits.  */
#ifdef MASK_RETURN_ADDR
  expand_and (Pmode, addr, MASK_RETURN_ADDR, addr);
#endif

  /* Then adjust to find the real return address.  */
#if defined (RETURN_ADDR_OFFSET)
  addr = plus_constant (addr, RETURN_ADDR_OFFSET);
#endif

  return addr;
}
2982
/* Given an actual address in addr_tree, do any necessary encoding
   and return the value to be stored in the return address register or
   stack slot so the epilogue will return to that address.  */

rtx
expand_builtin_frob_return_addr (tree addr_tree)
{
  rtx addr = expand_expr (addr_tree, NULL_RTX, ptr_mode, 0);

  addr = convert_memory_address (Pmode, addr);

  /* Invert the adjustment applied by expand_builtin_extract_return_addr.  */
#ifdef RETURN_ADDR_OFFSET
  addr = force_reg (Pmode, addr);
  addr = plus_constant (addr, -RETURN_ADDR_OFFSET);
#endif

  return addr;
}
3001
52a11cbf
RH
/* Set up the epilogue with the magic bits we'll need to return to the
   exception handler.  Records the stack adjustment and handler address
   in per-function pseudos and jumps to a shared label that
   expand_eh_return will emit in the epilogue.  */

void
expand_builtin_eh_return (tree stackadj_tree ATTRIBUTE_UNUSED,
			  tree handler_tree)
{
  rtx tmp;

#ifdef EH_RETURN_STACKADJ_RTX
  /* Stash the stack adjustment; expand_eh_return will copy it into
     EH_RETURN_STACKADJ_RTX.  Reuse the existing pseudo when one was
     already created by an earlier __builtin_eh_return in this function.  */
  tmp = expand_expr (stackadj_tree, cfun->eh->ehr_stackadj, VOIDmode, 0);
  tmp = convert_memory_address (Pmode, tmp);
  if (!cfun->eh->ehr_stackadj)
    cfun->eh->ehr_stackadj = copy_to_reg (tmp);
  else if (tmp != cfun->eh->ehr_stackadj)
    emit_move_insn (cfun->eh->ehr_stackadj, tmp);
#endif

  /* Likewise stash the handler address.  */
  tmp = expand_expr (handler_tree, cfun->eh->ehr_handler, VOIDmode, 0);
  tmp = convert_memory_address (Pmode, tmp);
  if (!cfun->eh->ehr_handler)
    cfun->eh->ehr_handler = copy_to_reg (tmp);
  else if (tmp != cfun->eh->ehr_handler)
    emit_move_insn (cfun->eh->ehr_handler, tmp);

  /* All __builtin_eh_return uses in this function funnel through one
     label in the epilogue.  */
  if (!cfun->eh->ehr_label)
    cfun->eh->ehr_label = gen_label_rtx ();
  emit_jump (cfun->eh->ehr_label);
}
3031
/* Emit the epilogue-side half of __builtin_eh_return: the landing point
   that installs the stack adjustment and handler address recorded by
   expand_builtin_eh_return.  Does nothing if __builtin_eh_return was
   never used in this function.  */

void
expand_eh_return (void)
{
  rtx around_label;

  if (! cfun->eh->ehr_label)
    return;

  current_function_calls_eh_return = 1;

#ifdef EH_RETURN_STACKADJ_RTX
  /* The normal return path performs no stack adjustment.  */
  emit_move_insn (EH_RETURN_STACKADJ_RTX, const0_rtx);
#endif

  /* Normal returns jump around the eh-return sequence below.  */
  around_label = gen_label_rtx ();
  emit_jump (around_label);

  emit_label (cfun->eh->ehr_label);
  clobber_return_register ();

#ifdef EH_RETURN_STACKADJ_RTX
  emit_move_insn (EH_RETURN_STACKADJ_RTX, cfun->eh->ehr_stackadj);
#endif

  /* Prefer the target's eh_return insn pattern; otherwise fall back to
     a plain move into EH_RETURN_HANDLER_RTX, or report the target as
     unsupported.  */
#ifdef HAVE_eh_return
  if (HAVE_eh_return)
    emit_insn (gen_eh_return (cfun->eh->ehr_handler));
  else
#endif
    {
#ifdef EH_RETURN_HANDLER_RTX
      emit_move_insn (EH_RETURN_HANDLER_RTX, cfun->eh->ehr_handler);
#else
      error ("__builtin_eh_return not supported on this target");
#endif
    }

  emit_label (around_label);
}
c76362b4
JW
3071
3072/* Convert a ptr_mode address ADDR_TREE to a Pmode address controlled by
3073 POINTERS_EXTEND_UNSIGNED and return it. */
3074
3075rtx
3076expand_builtin_extend_pointer (tree addr_tree)
3077{
3078 rtx addr = expand_expr (addr_tree, NULL_RTX, ptr_mode, 0);
3079 int extend;
3080
3081#ifdef POINTERS_EXTEND_UNSIGNED
3082 extend = POINTERS_EXTEND_UNSIGNED;
3083#else
3084 /* The previous EH code did an unsigned extend by default, so we do this also
3085 for consistency. */
3086 extend = 1;
3087#endif
3088
3089 return convert_modes (word_mode, ptr_mode, addr, extend);
3090}
77d33a84 3091\f
949f197f 3092/* In the following functions, we represent entries in the action table
eaec9b3d 3093 as 1-based indices. Special cases are:
949f197f
RH
3094
3095 0: null action record, non-null landing pad; implies cleanups
3096 -1: null action record, null landing pad; implies no action
3097 -2: no call-site entry; implies must_not_throw
3098 -3: we have yet to process outer regions
3099
3100 Further, no special cases apply to the "next" field of the record.
3101 For next, 0 means end of list. */
3102
52a11cbf
RH
/* One entry in the hashed action table built while converting EH
   regions to call-site ranges.  */
struct action_record
{
  int offset;	/* 1-based offset of this record in action_record_data.  */
  int filter;	/* Type filter value; 0 denotes a cleanup.  */
  int next;	/* 1-based index of the next record in the chain, 0 if none.  */
};
77d33a84 3109
52a11cbf 3110static int
502b8322 3111action_record_eq (const void *pentry, const void *pdata)
52a11cbf
RH
3112{
3113 const struct action_record *entry = (const struct action_record *) pentry;
3114 const struct action_record *data = (const struct action_record *) pdata;
3115 return entry->filter == data->filter && entry->next == data->next;
3116}
77d33a84 3117
52a11cbf 3118static hashval_t
502b8322 3119action_record_hash (const void *pentry)
52a11cbf
RH
3120{
3121 const struct action_record *entry = (const struct action_record *) pentry;
3122 return entry->next * 1009 + entry->filter;
3123}
77d33a84 3124
/* Enter the pair (FILTER, NEXT) into the action record hash table
   AR_HASH, byte-encoding a new record into
   cfun->eh->action_record_data if an identical one is not already
   present.  Return the record's 1-based offset in the encoded data.  */

static int
add_action_record (htab_t ar_hash, int filter, int next)
{
  struct action_record **slot, *new, tmp;

  tmp.filter = filter;
  tmp.next = next;
  slot = (struct action_record **) htab_find_slot (ar_hash, &tmp, INSERT);

  if ((new = *slot) == NULL)
    {
      /* First time we see this pair: remember it and append the
	 encoded bytes.  The hash table owns the record (freed via the
	 'free' callback passed to htab_create).  */
      new = xmalloc (sizeof (*new));
      new->offset = VARRAY_ACTIVE_SIZE (cfun->eh->action_record_data) + 1;
      new->filter = filter;
      new->next = next;
      *slot = new;

      /* The filter value goes in untouched.  The link to the next
	 record is a "self-relative" byte offset, or zero to indicate
	 that there is no next record.  So convert the absolute 1 based
	 indices we've been carrying around into a displacement.  */

      push_sleb128 (&cfun->eh->action_record_data, filter);
      if (next)
	next -= VARRAY_ACTIVE_SIZE (cfun->eh->action_record_data) + 1;
      push_sleb128 (&cfun->eh->action_record_data, next);
    }

  return new->offset;
}
77d33a84 3155
/* Compute the chain of action records required on entry to REGION,
   adding records to AR_HASH as needed, and recursing through outer
   regions.  Returns the 1-based offset of the first record, or one of
   the special values documented above (0 cleanup-only, -1 no action,
   -2 must-not-throw).  */

static int
collect_one_action_chain (htab_t ar_hash, struct eh_region *region)
{
  struct eh_region *c;
  int next;

  /* If we've reached the top of the region chain, then we have
     no actions, and require no landing pad.  */
  if (region == NULL)
    return -1;

  switch (region->type)
    {
    case ERT_CLEANUP:
      /* A cleanup adds a zero filter to the beginning of the chain, but
	 there are special cases to look out for.  If there are *only*
	 cleanups along a path, then it compresses to a zero action.
	 Further, if there are multiple cleanups along a path, we only
	 need to represent one of them, as that is enough to trigger
	 entry to the landing pad at runtime.  */
      next = collect_one_action_chain (ar_hash, region->outer);
      if (next <= 0)
	return 0;
      for (c = region->outer; c ; c = c->outer)
	if (c->type == ERT_CLEANUP)
	  return next;
      return add_action_record (ar_hash, 0, next);

    case ERT_TRY:
      /* Process the associated catch regions in reverse order.
	 If there's a catch-all handler, then we don't need to
	 search outer regions.  Use a magic -3 value to record
	 that we haven't done the outer search.  */
      next = -3;
      for (c = region->u.try.last_catch; c ; c = c->u.catch.prev_catch)
	{
	  if (c->u.catch.type_list == NULL)
	    {
	      /* Retrieve the filter from the head of the filter list
		 where we have stored it (see assign_filter_values).  */
	      int filter
		= TREE_INT_CST_LOW (TREE_VALUE (c->u.catch.filter_list));

	      /* Catch-all: chain ends here; no outer search needed.  */
	      next = add_action_record (ar_hash, filter, 0);
	    }
	  else
	    {
	      /* Once the outer search is done, trigger an action record for
		 each filter we have.  */
	      tree flt_node;

	      if (next == -3)
		{
		  next = collect_one_action_chain (ar_hash, region->outer);

		  /* If there is no next action, terminate the chain.  */
		  if (next == -1)
		    next = 0;
		  /* If all outer actions are cleanups or must_not_throw,
		     we'll have no action record for it, since we had wanted
		     to encode these states in the call-site record directly.
		     Add a cleanup action to the chain to catch these.  */
		  else if (next <= 0)
		    next = add_action_record (ar_hash, 0, 0);
		}

	      flt_node = c->u.catch.filter_list;
	      for (; flt_node; flt_node = TREE_CHAIN (flt_node))
		{
		  int filter = TREE_INT_CST_LOW (TREE_VALUE (flt_node));
		  next = add_action_record (ar_hash, filter, next);
		}
	    }
	}
      return next;

    case ERT_ALLOWED_EXCEPTIONS:
      /* An exception specification adds its filter to the
	 beginning of the chain.  */
      next = collect_one_action_chain (ar_hash, region->outer);

      /* If there is no next action, terminate the chain.  */
      if (next == -1)
	next = 0;
      /* If all outer actions are cleanups or must_not_throw,
	 we'll have no action record for it, since we had wanted
	 to encode these states in the call-site record directly.
	 Add a cleanup action to the chain to catch these.  */
      else if (next <= 0)
	next = add_action_record (ar_hash, 0, 0);

      return add_action_record (ar_hash, region->u.allowed.filter, next);

    case ERT_MUST_NOT_THROW:
      /* A must-not-throw region with no inner handlers or cleanups
	 requires no call-site entry.  Note that this differs from
	 the no handler or cleanup case in that we do require an lsda
	 to be generated.  Return a magic -2 value to record this.  */
      return -2;

    case ERT_CATCH:
    case ERT_THROW:
      /* CATCH regions are handled in TRY above.  THROW regions are
	 for optimization information only and produce no output.  */
      return collect_one_action_chain (ar_hash, region->outer);

    default:
      gcc_unreachable ();
    }
}
3266
/* Append a call-site entry (LANDING_PAD, ACTION) to
   cfun->eh->call_site_data, doubling the GC-allocated array as needed.
   Return the entry's index biased by call_site_base.  */

static int
add_call_site (rtx landing_pad, int action)
{
  struct call_site_record *data = cfun->eh->call_site_data;
  int used = cfun->eh->call_site_data_used;
  int size = cfun->eh->call_site_data_size;

  if (used >= size)
    {
      /* Grow geometrically, starting from 64 entries.  */
      size = (size ? size * 2 : 64);
      data = ggc_realloc (data, sizeof (*data) * size);
      cfun->eh->call_site_data = data;
      cfun->eh->call_site_data_size = size;
    }

  data[used].landing_pad = landing_pad;
  data[used].action = action;

  cfun->eh->call_site_data_used = used + 1;

  return used + call_site_base;
}
3289
52a11cbf
RH
/* Turn REG_EH_REGION notes back into NOTE_INSN_EH_REGION notes.
   The new note numbers will not refer to region numbers, but
   instead to call site entries.  Walks the insn stream once, opening
   and closing region notes whenever the action chain or landing pad
   changes.  Only used for table-driven (non-SJLJ) exceptions.  */

void
convert_to_eh_region_ranges (void)
{
  rtx insn, iter, note;
  htab_t ar_hash;
  int last_action = -3;
  rtx last_action_insn = NULL_RTX;
  rtx last_landing_pad = NULL_RTX;
  rtx first_no_action_insn = NULL_RTX;
  int call_site = 0;

  if (USING_SJLJ_EXCEPTIONS || cfun->eh->region_tree == NULL)
    return;

  VARRAY_UCHAR_INIT (cfun->eh->action_record_data, 64, "action_record_data");

  ar_hash = htab_create (31, action_record_hash, action_record_eq, free);

  for (iter = get_insns (); iter ; iter = NEXT_INSN (iter))
    if (INSN_P (iter))
      {
	struct eh_region *region;
	int this_action;
	rtx this_landing_pad;

	/* For a delay-slot SEQUENCE, the EH note lives on the first
	   (branch) insn inside.  */
	insn = iter;
	if (NONJUMP_INSN_P (insn)
	    && GET_CODE (PATTERN (insn)) == SEQUENCE)
	  insn = XVECEXP (PATTERN (insn), 0, 0);

	note = find_reg_note (insn, REG_EH_REGION, NULL_RTX);
	if (!note)
	  {
	    /* Without a note, only calls and (with -fnon-call-exceptions)
	       trapping insns are interesting.  */
	    if (! (CALL_P (insn)
		   || (flag_non_call_exceptions
		       && may_trap_p (PATTERN (insn)))))
	      continue;
	    this_action = -1;
	    region = NULL;
	  }
	else
	  {
	    if (INTVAL (XEXP (note, 0)) <= 0)
	      continue;
	    region = cfun->eh->region_array[INTVAL (XEXP (note, 0))];
	    this_action = collect_one_action_chain (ar_hash, region);
	  }

	/* Existence of catch handlers, or must-not-throw regions
	   implies that an lsda is needed (even if empty).  */
	if (this_action != -1)
	  cfun->uses_eh_lsda = 1;

	/* Delay creation of region notes for no-action regions
	   until we're sure that an lsda will be required.  */
	else if (last_action == -3)
	  {
	    first_no_action_insn = iter;
	    last_action = -1;
	  }

	/* Cleanups and handlers may share action chains but not
	   landing pads.  Collect the landing pad for this region.  */
	if (this_action >= 0)
	  {
	    struct eh_region *o;
	    for (o = region; ! o->landing_pad ; o = o->outer)
	      continue;
	    this_landing_pad = o->landing_pad;
	  }
	else
	  this_landing_pad = NULL_RTX;

	/* Differing actions or landing pads implies a change in call-site
	   info, which implies some EH_REGION note should be emitted.  */
	if (last_action != this_action
	    || last_landing_pad != this_landing_pad)
	  {
	    /* If we'd not seen a previous action (-3) or the previous
	       action was must-not-throw (-2), then we do not need an
	       end note.  */
	    if (last_action >= -1)
	      {
		/* If we delayed the creation of the begin, do it now.  */
		if (first_no_action_insn)
		  {
		    call_site = add_call_site (NULL_RTX, 0);
		    note = emit_note_before (NOTE_INSN_EH_REGION_BEG,
					     first_no_action_insn);
		    NOTE_EH_HANDLER (note) = call_site;
		    first_no_action_insn = NULL_RTX;
		  }

		note = emit_note_after (NOTE_INSN_EH_REGION_END,
					last_action_insn);
		NOTE_EH_HANDLER (note) = call_site;
	      }

	    /* If the new action is must-not-throw, then no region notes
	       are created.  */
	    if (this_action >= -1)
	      {
		call_site = add_call_site (this_landing_pad,
					   this_action < 0 ? 0 : this_action);
		note = emit_note_before (NOTE_INSN_EH_REGION_BEG, iter);
		NOTE_EH_HANDLER (note) = call_site;
	      }

	    last_action = this_action;
	    last_landing_pad = this_landing_pad;
	  }
	last_action_insn = iter;
      }

  /* Close the region left open at the end of the insn stream, if any.  */
  if (last_action >= -1 && ! first_no_action_insn)
    {
      note = emit_note_after (NOTE_INSN_EH_REGION_END, last_action_insn);
      NOTE_EH_HANDLER (note) = call_site;
    }

  htab_delete (ar_hash);
}
1ef1bf06 3416
52a11cbf
RH
3417\f
3418static void
502b8322 3419push_uleb128 (varray_type *data_area, unsigned int value)
52a11cbf
RH
3420{
3421 do
3422 {
3423 unsigned char byte = value & 0x7f;
3424 value >>= 7;
3425 if (value)
3426 byte |= 0x80;
3427 VARRAY_PUSH_UCHAR (*data_area, byte);
3428 }
3429 while (value);
3430}
1ef1bf06 3431
/* Append VALUE to *DATA_AREA encoded as a signed LEB128 sequence.  */

static void
push_sleb128 (varray_type *data_area, int value)
{
  unsigned char byte;
  int more;

  do
    {
      byte = value & 0x7f;
      /* NOTE(review): relies on >> of a negative int being an
	 arithmetic (sign-propagating) shift, which is
	 implementation-defined in C but assumed throughout GCC.  */
      value >>= 7;
      /* Done once the remaining value is pure sign-extension of the
	 sign bit (bit 6) of the byte just produced.  */
      more = ! ((value == 0 && (byte & 0x40) == 0)
		|| (value == -1 && (byte & 0x40) != 0));
      if (more)
	byte |= 0x80;
      VARRAY_PUSH_UCHAR (*data_area, byte);
    }
  while (more);
}
1ef1bf06 3450
52a11cbf 3451\f
52a11cbf
RH
#ifndef HAVE_AS_LEB128
/* Return the byte size of the DWARF-2 style call-site table: three
   4-byte fields (start, length, landing pad) plus a uleb128 action
   offset per entry.  Only needed when the assembler cannot compute
   .uleb128 label differences itself.  */
static int
dw2_size_of_call_site_table (void)
{
  int n = cfun->eh->call_site_data_used;
  int size = n * (4 + 4 + 4);
  int i;

  for (i = 0; i < n; ++i)
    {
      struct call_site_record *cs = &cfun->eh->call_site_data[i];
      size += size_of_uleb128 (cs->action);
    }

  return size;
}
3468
/* Likewise for the SJLJ call-site table, whose entries are just two
   uleb128 values: the landing pad dispatch index and the action
   offset.  */
static int
sjlj_size_of_call_site_table (void)
{
  int n = cfun->eh->call_site_data_used;
  int size = 0;
  int i;

  for (i = 0; i < n; ++i)
    {
      struct call_site_record *cs = &cfun->eh->call_site_data[i];
      size += size_of_uleb128 (INTVAL (cs->landing_pad));
      size += size_of_uleb128 (cs->action);
    }

  return size;
}
#endif
3486
/* Emit the DWARF-2 style call-site table: per entry, a
   function-relative region start, region length, landing pad address,
   and uleb128 action offset.  */

static void
dw2_output_call_site_table (void)
{
  int n = cfun->eh->call_site_data_used;
  int i;

  for (i = 0; i < n; ++i)
    {
      struct call_site_record *cs = &cfun->eh->call_site_data[i];
      char reg_start_lab[32];
      char reg_end_lab[32];
      char landing_pad_lab[32];

      /* LEHB/LEHE labels bracketing this call-site region were emitted
	 with the insn stream.  */
      ASM_GENERATE_INTERNAL_LABEL (reg_start_lab, "LEHB", call_site_base + i);
      ASM_GENERATE_INTERNAL_LABEL (reg_end_lab, "LEHE", call_site_base + i);

      if (cs->landing_pad)
	ASM_GENERATE_INTERNAL_LABEL (landing_pad_lab, "L",
				     CODE_LABEL_NUMBER (cs->landing_pad));

      /* ??? Perhaps use insn length scaling if the assembler supports
	 generic arithmetic.  */
      /* ??? Perhaps use attr_length to choose data1 or data2 instead of
	 data4 if the function is small enough.  */
#ifdef HAVE_AS_LEB128
      dw2_asm_output_delta_uleb128 (reg_start_lab,
				    current_function_func_begin_label,
				    "region %d start", i);
      dw2_asm_output_delta_uleb128 (reg_end_lab, reg_start_lab,
				    "length");
      if (cs->landing_pad)
	dw2_asm_output_delta_uleb128 (landing_pad_lab,
				      current_function_func_begin_label,
				      "landing pad");
      else
	dw2_asm_output_data_uleb128 (0, "landing pad");
#else
      dw2_asm_output_delta (4, reg_start_lab,
			    current_function_func_begin_label,
			    "region %d start", i);
      dw2_asm_output_delta (4, reg_end_lab, reg_start_lab, "length");
      if (cs->landing_pad)
	dw2_asm_output_delta (4, landing_pad_lab,
			      current_function_func_begin_label,
			      "landing pad");
      else
	dw2_asm_output_data (4, 0, "landing pad");
#endif
      dw2_asm_output_data_uleb128 (cs->action, "action");
    }

  call_site_base += n;
}
3540
/* Emit the SJLJ call-site table: per entry, a uleb128 landing pad
   dispatch index and a uleb128 action offset.  */

static void
sjlj_output_call_site_table (void)
{
  int n = cfun->eh->call_site_data_used;
  int i;

  for (i = 0; i < n; ++i)
    {
      struct call_site_record *cs = &cfun->eh->call_site_data[i];

      dw2_asm_output_data_uleb128 (INTVAL (cs->landing_pad),
				   "region %d landing pad", i);
      dw2_asm_output_data_uleb128 (cs->action, "action");
    }

  call_site_base += n;
}
3558
96d0f4dc
JJ
/* Tell assembler to switch to the section for the exception handling
   table.  */

void
default_exception_section (void)
{
  if (targetm.have_named_sections)
    {
      int flags;
#ifdef HAVE_LD_RO_RW_SECTION_MIXING
      int tt_format = ASM_PREFERRED_EH_DATA_FORMAT (/*code=*/0, /*global=*/1);

      /* The table may live in a read-only section unless PIC forces
	 runtime relocation of absolute/aligned @TType pointers.  */
      flags = (! flag_pic
	       || ((tt_format & 0x70) != DW_EH_PE_absptr
		   && (tt_format & 0x70) != DW_EH_PE_aligned))
	      ? 0 : SECTION_WRITE;
#else
      flags = SECTION_WRITE;
#endif
      named_section_flags (".gcc_except_table", flags);
    }
  else if (flag_pic)
    data_section ();
  else
    readonly_data_section ();
}
3585
/* Output the language-specific data area (LSDA) for the current
   function: the header, call-site table, action record table, @TType
   table and exception-specification table, in that order.  Switches to
   the exception section and back to the function's section.  */

void
output_function_exception_table (void)
{
  int tt_format, cs_format, lp_format, i, n;
#ifdef HAVE_AS_LEB128
  char ttype_label[32];
  char cs_after_size_label[32];
  char cs_end_label[32];
#else
  int call_site_len;
#endif
  int have_tt_data;
  int tt_format_size = 0;

  /* Not all functions need anything.  */
  if (! cfun->uses_eh_lsda)
    return;

#ifdef TARGET_UNWIND_INFO
  /* TODO: Move this into target file.  */
  fputs ("\t.personality\t", asm_out_file);
  output_addr_const (asm_out_file, eh_personality_libfunc);
  fputs ("\n\t.handlerdata\n", asm_out_file);
  /* Note that varasm still thinks we're in the function's code section.
     The ".endp" directive that will immediately follow will take us back.  */
#else
  targetm.asm_out.exception_section ();
#endif

  have_tt_data = (VARRAY_ACTIVE_SIZE (cfun->eh->ttype_data) > 0
		  || VARRAY_ACTIVE_SIZE (cfun->eh->ehspec_data) > 0);

  /* Indicate the format of the @TType entries.  */
  if (! have_tt_data)
    tt_format = DW_EH_PE_omit;
  else
    {
      tt_format = ASM_PREFERRED_EH_DATA_FORMAT (/*code=*/0, /*global=*/1);
#ifdef HAVE_AS_LEB128
      ASM_GENERATE_INTERNAL_LABEL (ttype_label, "LLSDATT",
				   current_function_funcdef_no);
#endif
      tt_format_size = size_of_encoded_value (tt_format);

      assemble_align (tt_format_size * BITS_PER_UNIT);
    }

  targetm.asm_out.internal_label (asm_out_file, "LLSDA",
				  current_function_funcdef_no);

  /* The LSDA header.  */

  /* Indicate the format of the landing pad start pointer.  An omitted
     field implies @LPStart == @Start.  */
  /* Currently we always put @LPStart == @Start.  This field would
     be most useful in moving the landing pads completely out of
     line to another section, but it could also be used to minimize
     the size of uleb128 landing pad offsets.  */
  lp_format = DW_EH_PE_omit;
  dw2_asm_output_data (1, lp_format, "@LPStart format (%s)",
		       eh_data_format_name (lp_format));

  /* @LPStart pointer would go here.  */

  dw2_asm_output_data (1, tt_format, "@TType format (%s)",
		       eh_data_format_name (tt_format));

#ifndef HAVE_AS_LEB128
  if (USING_SJLJ_EXCEPTIONS)
    call_site_len = sjlj_size_of_call_site_table ();
  else
    call_site_len = dw2_size_of_call_site_table ();
#endif

  /* A pc-relative 4-byte displacement to the @TType data.  */
  if (have_tt_data)
    {
#ifdef HAVE_AS_LEB128
      char ttype_after_disp_label[32];
      ASM_GENERATE_INTERNAL_LABEL (ttype_after_disp_label, "LLSDATTD",
				   current_function_funcdef_no);
      dw2_asm_output_delta_uleb128 (ttype_label, ttype_after_disp_label,
				    "@TType base offset");
      ASM_OUTPUT_LABEL (asm_out_file, ttype_after_disp_label);
#else
      /* Ug.  Alignment queers things.  */
      unsigned int before_disp, after_disp, last_disp, disp;

      before_disp = 1 + 1;
      after_disp = (1 + size_of_uleb128 (call_site_len)
		    + call_site_len
		    + VARRAY_ACTIVE_SIZE (cfun->eh->action_record_data)
		    + (VARRAY_ACTIVE_SIZE (cfun->eh->ttype_data)
		       * tt_format_size));

      /* Iterate because the size of the uleb128 encoding of the
	 displacement can itself change the padding, and hence the
	 displacement; stop at the fixed point.  */
      disp = after_disp;
      do
	{
	  unsigned int disp_size, pad;

	  last_disp = disp;
	  disp_size = size_of_uleb128 (disp);
	  pad = before_disp + disp_size + after_disp;
	  if (pad % tt_format_size)
	    pad = tt_format_size - (pad % tt_format_size);
	  else
	    pad = 0;
	  disp = after_disp + pad;
	}
      while (disp != last_disp);

      dw2_asm_output_data_uleb128 (disp, "@TType base offset");
#endif
    }

  /* Indicate the format of the call-site offsets.  */
#ifdef HAVE_AS_LEB128
  cs_format = DW_EH_PE_uleb128;
#else
  cs_format = DW_EH_PE_udata4;
#endif
  dw2_asm_output_data (1, cs_format, "call-site format (%s)",
		       eh_data_format_name (cs_format));

#ifdef HAVE_AS_LEB128
  ASM_GENERATE_INTERNAL_LABEL (cs_after_size_label, "LLSDACSB",
			       current_function_funcdef_no);
  ASM_GENERATE_INTERNAL_LABEL (cs_end_label, "LLSDACSE",
			       current_function_funcdef_no);
  dw2_asm_output_delta_uleb128 (cs_end_label, cs_after_size_label,
				"Call-site table length");
  ASM_OUTPUT_LABEL (asm_out_file, cs_after_size_label);
  if (USING_SJLJ_EXCEPTIONS)
    sjlj_output_call_site_table ();
  else
    dw2_output_call_site_table ();
  ASM_OUTPUT_LABEL (asm_out_file, cs_end_label);
#else
  dw2_asm_output_data_uleb128 (call_site_len,"Call-site table length");
  if (USING_SJLJ_EXCEPTIONS)
    sjlj_output_call_site_table ();
  else
    dw2_output_call_site_table ();
#endif

  /* ??? Decode and interpret the data for flag_debug_asm.  */
  n = VARRAY_ACTIVE_SIZE (cfun->eh->action_record_data);
  for (i = 0; i < n; ++i)
    dw2_asm_output_data (1, VARRAY_UCHAR (cfun->eh->action_record_data, i),
			 (i ? NULL : "Action record table"));

  if (have_tt_data)
    assemble_align (tt_format_size * BITS_PER_UNIT);

  /* The @TType table is indexed from the end, so emit in reverse.  */
  i = VARRAY_ACTIVE_SIZE (cfun->eh->ttype_data);
  while (i-- > 0)
    {
      tree type = VARRAY_TREE (cfun->eh->ttype_data, i);
      rtx value;

      if (type == NULL_TREE)
	value = const0_rtx;
      else
	{
	  struct cgraph_varpool_node *node;

	  type = lookup_type_for_runtime (type);
	  value = expand_expr (type, NULL_RTX, VOIDmode, EXPAND_INITIALIZER);

	  /* Let cgraph know that the rtti decl is used.  Not all of the
	     paths below go through assemble_integer, which would take
	     care of this for us.  */
	  STRIP_NOPS (type);
	  if (TREE_CODE (type) == ADDR_EXPR)
	    {
	      type = TREE_OPERAND (type, 0);
	      if (TREE_CODE (type) == VAR_DECL)
		{
		  node = cgraph_varpool_node (type);
		  if (node)
		    cgraph_varpool_mark_needed_node (node);
		}
	    }
	  else
	    gcc_assert (TREE_CODE (type) == INTEGER_CST);
	}

      if (tt_format == DW_EH_PE_absptr || tt_format == DW_EH_PE_aligned)
	assemble_integer (value, tt_format_size,
			  tt_format_size * BITS_PER_UNIT, 1);
      else
	dw2_asm_output_encoded_addr_rtx (tt_format, value, NULL);
    }

#ifdef HAVE_AS_LEB128
  if (have_tt_data)
    ASM_OUTPUT_LABEL (asm_out_file, ttype_label);
#endif

  /* ??? Decode and interpret the data for flag_debug_asm.  */
  n = VARRAY_ACTIVE_SIZE (cfun->eh->ehspec_data);
  for (i = 0; i < n; ++i)
    dw2_asm_output_data (1, VARRAY_UCHAR (cfun->eh->ehspec_data, i),
			 (i ? NULL : "Exception specification table"));

  function_section (current_function_decl);
}
e2500fed
GK
3793
3794#include "gt-except.h"