]> git.ipfire.org Git - thirdparty/gcc.git/blame - gcc/except.c
cfgrtl.c (rtl_delete_block): Fix comment.
[thirdparty/gcc.git] / gcc / except.c
CommitLineData
12670d88 1/* Implements exception handling.
3f2c5d1a 2 Copyright (C) 1989, 1992, 1993, 1994, 1995, 1996, 1997, 1998,
3897f229 3 1999, 2000, 2001, 2002, 2003, 2004 Free Software Foundation, Inc.
4956d07c
MS
4 Contributed by Mike Stump <mrs@cygnus.com>.
5
1322177d 6This file is part of GCC.
4956d07c 7
1322177d
LB
8GCC is free software; you can redistribute it and/or modify it under
9the terms of the GNU General Public License as published by the Free
10Software Foundation; either version 2, or (at your option) any later
11version.
4956d07c 12
1322177d
LB
13GCC is distributed in the hope that it will be useful, but WITHOUT ANY
14WARRANTY; without even the implied warranty of MERCHANTABILITY or
15FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
16for more details.
4956d07c
MS
17
18You should have received a copy of the GNU General Public License
1322177d
LB
19along with GCC; see the file COPYING. If not, write to the Free
20Software Foundation, 59 Temple Place - Suite 330, Boston, MA
2102111-1307, USA. */
4956d07c
MS
22
23
12670d88
RK
24/* An exception is an event that can be signaled from within a
25 function. This event can then be "caught" or "trapped" by the
26 callers of this function. This potentially allows program flow to
956d6950 27 be transferred to any arbitrary code associated with a function call
12670d88
RK
28 several levels up the stack.
29
30 The intended use for this mechanism is for signaling "exceptional
31 events" in an out-of-band fashion, hence its name. The C++ language
32 (and many other OO-styled or functional languages) practically
33 requires such a mechanism, as otherwise it becomes very difficult
34 or even impossible to signal failure conditions in complex
35 situations. The traditional C++ example is when an error occurs in
36 the process of constructing an object; without such a mechanism, it
37 is impossible to signal that the error occurs without adding global
38 state variables and error checks around every object construction.
39
40 The act of causing this event to occur is referred to as "throwing
41 an exception". (Alternate terms include "raising an exception" or
42 "signaling an exception".) The term "throw" is used because control
43 is returned to the callers of the function that is signaling the
44 exception, and thus there is the concept of "throwing" the
45 exception up the call stack.
46
52a11cbf 47 [ Add updated documentation on how to use this. ] */
4956d07c
MS
48
49
50#include "config.h"
670ee920 51#include "system.h"
4977bab6
ZW
52#include "coretypes.h"
53#include "tm.h"
4956d07c
MS
54#include "rtl.h"
55#include "tree.h"
56#include "flags.h"
4956d07c 57#include "function.h"
4956d07c 58#include "expr.h"
e78d8e51 59#include "libfuncs.h"
4956d07c 60#include "insn-config.h"
52a11cbf
RH
61#include "except.h"
62#include "integrate.h"
63#include "hard-reg-set.h"
64#include "basic-block.h"
4956d07c 65#include "output.h"
52a11cbf
RH
66#include "dwarf2asm.h"
67#include "dwarf2out.h"
2a1ee410 68#include "dwarf2.h"
10f0ad3d 69#include "toplev.h"
52a11cbf 70#include "hashtab.h"
2b12ffe0 71#include "intl.h"
87ff9c8e 72#include "ggc.h"
b1474bb7 73#include "tm_p.h"
07c9d2eb 74#include "target.h"
f1e639b1 75#include "langhooks.h"
dd07abd7 76#include "cgraph.h"
52a11cbf
RH
77
78/* Provide defaults for stuff that may not be defined when using
79 sjlj exceptions. */
52a11cbf
RH
80#ifndef EH_RETURN_DATA_REGNO
81#define EH_RETURN_DATA_REGNO(N) INVALID_REGNUM
461fc4de
RH
82#endif
83
27a36778 84
52a11cbf
RH
85/* Protect cleanup actions with must-not-throw regions, with a call
86 to the given failure handler. */
502b8322 87tree (*lang_protect_cleanup_actions) (void);
27a36778 88
52a11cbf 89/* Return true if type A catches type B. */
502b8322 90int (*lang_eh_type_covers) (tree a, tree b);
27a36778 91
52a11cbf 92/* Map a type to a runtime object to match type. */
502b8322 93tree (*lang_eh_runtime_type) (tree);
4956d07c 94
6a58eee9
RH
95/* A hash table of label to region number. */
96
e2500fed 97struct ehl_map_entry GTY(())
6a58eee9
RH
98{
99 rtx label;
100 struct eh_region *region;
101};
102
21c157b4 103static GTY(()) int call_site_base;
e2500fed
GK
104static GTY ((param_is (union tree_node)))
105 htab_t type_to_runtime_map;
52a11cbf
RH
106
107/* Describe the SjLj_Function_Context structure. */
e2500fed 108static GTY(()) tree sjlj_fc_type_node;
52a11cbf
RH
109static int sjlj_fc_call_site_ofs;
110static int sjlj_fc_data_ofs;
111static int sjlj_fc_personality_ofs;
112static int sjlj_fc_lsda_ofs;
113static int sjlj_fc_jbuf_ofs;
114\f
115/* Describes one exception region. */
e2500fed 116struct eh_region GTY(())
52a11cbf
RH
117{
118 /* The immediately surrounding region. */
119 struct eh_region *outer;
956d6950 120
52a11cbf
RH
121 /* The list of immediately contained regions. */
122 struct eh_region *inner;
123 struct eh_region *next_peer;
956d6950 124
52a11cbf
RH
125 /* An identifier for this region. */
126 int region_number;
71038426 127
6a58eee9
RH
128 /* When a region is deleted, its parents inherit the REG_EH_REGION
129 numbers already assigned. */
130 bitmap aka;
131
52a11cbf
RH
132 /* Each region does exactly one thing. */
133 enum eh_region_type
6de9cd9a 134 {
572202a7
RK
135 ERT_UNKNOWN = 0,
136 ERT_CLEANUP,
52a11cbf
RH
137 ERT_TRY,
138 ERT_CATCH,
139 ERT_ALLOWED_EXCEPTIONS,
140 ERT_MUST_NOT_THROW,
141 ERT_THROW,
142 ERT_FIXUP
143 } type;
144
eaec9b3d 145 /* Holds the action to perform based on the preceding type. */
e2500fed 146 union eh_region_u {
52a11cbf
RH
147 /* A list of catch blocks, a surrounding try block,
148 and the label for continuing after a catch. */
e2500fed 149 struct eh_region_u_try {
52a11cbf
RH
150 struct eh_region *catch;
151 struct eh_region *last_catch;
152 struct eh_region *prev_try;
153 rtx continue_label;
e2500fed 154 } GTY ((tag ("ERT_TRY"))) try;
52a11cbf 155
6d41a92f
OH
156 /* The list through the catch handlers, the list of type objects
157 matched, and the list of associated filters. */
e2500fed 158 struct eh_region_u_catch {
52a11cbf
RH
159 struct eh_region *next_catch;
160 struct eh_region *prev_catch;
6d41a92f
OH
161 tree type_list;
162 tree filter_list;
e2500fed 163 } GTY ((tag ("ERT_CATCH"))) catch;
52a11cbf
RH
164
165 /* A tree_list of allowed types. */
e2500fed 166 struct eh_region_u_allowed {
52a11cbf
RH
167 tree type_list;
168 int filter;
e2500fed 169 } GTY ((tag ("ERT_ALLOWED_EXCEPTIONS"))) allowed;
52a11cbf 170
3f2c5d1a 171 /* The type given by a call to "throw foo();", or discovered
52a11cbf 172 for a throw. */
e2500fed 173 struct eh_region_u_throw {
52a11cbf 174 tree type;
e2500fed 175 } GTY ((tag ("ERT_THROW"))) throw;
52a11cbf
RH
176
177 /* Retain the cleanup expression even after expansion so that
178 we can match up fixup regions. */
e2500fed 179 struct eh_region_u_cleanup {
52a11cbf 180 tree exp;
bafb714b 181 struct eh_region *prev_try;
e2500fed 182 } GTY ((tag ("ERT_CLEANUP"))) cleanup;
52a11cbf
RH
183
184 /* The real region (by expression and by pointer) that fixup code
185 should live in. */
e2500fed 186 struct eh_region_u_fixup {
52a11cbf
RH
187 tree cleanup_exp;
188 struct eh_region *real_region;
1bddbeb4 189 bool resolved;
e2500fed
GK
190 } GTY ((tag ("ERT_FIXUP"))) fixup;
191 } GTY ((desc ("%0.type"))) u;
52a11cbf 192
47c84870
JM
193 /* Entry point for this region's handler before landing pads are built. */
194 rtx label;
6de9cd9a 195 tree tree_label;
52a11cbf 196
47c84870 197 /* Entry point for this region's handler from the runtime eh library. */
52a11cbf
RH
198 rtx landing_pad;
199
47c84870 200 /* Entry point for this region's handler from an inner region. */
52a11cbf 201 rtx post_landing_pad;
47c84870
JM
202
203 /* The RESX insn for handing off control to the next outermost handler,
204 if appropriate. */
205 rtx resume;
b2dd096b
MM
206
207 /* True if something in this region may throw. */
208 unsigned may_contain_throw : 1;
52a11cbf 209};
71038426 210
e2500fed
GK
211struct call_site_record GTY(())
212{
213 rtx landing_pad;
214 int action;
215};
216
52a11cbf 217/* Used to save exception status for each function. */
e2500fed 218struct eh_status GTY(())
52a11cbf
RH
219{
220 /* The tree of all regions for this function. */
221 struct eh_region *region_tree;
e6cfb550 222
52a11cbf 223 /* The same information as an indexable array. */
e2500fed 224 struct eh_region ** GTY ((length ("%h.last_region_number"))) region_array;
e6cfb550 225
52a11cbf
RH
226 /* The most recently open region. */
227 struct eh_region *cur_region;
e6cfb550 228
52a11cbf
RH
229 /* This is the region for which we are processing catch blocks. */
230 struct eh_region *try_region;
71038426 231
52a11cbf
RH
232 rtx filter;
233 rtx exc_ptr;
4956d07c 234
52a11cbf
RH
235 int built_landing_pads;
236 int last_region_number;
e6cfb550 237
52a11cbf
RH
238 varray_type ttype_data;
239 varray_type ehspec_data;
240 varray_type action_record_data;
6814a8a0 241
e2500fed
GK
242 htab_t GTY ((param_is (struct ehl_map_entry))) exception_handler_label_map;
243
502b8322 244 struct call_site_record * GTY ((length ("%h.call_site_data_used")))
e2500fed 245 call_site_data;
52a11cbf
RH
246 int call_site_data_used;
247 int call_site_data_size;
248
249 rtx ehr_stackadj;
250 rtx ehr_handler;
251 rtx ehr_label;
252
253 rtx sjlj_fc;
254 rtx sjlj_exit_after;
255};
e6cfb550 256
52a11cbf 257\f
502b8322
AJ
258static int t2r_eq (const void *, const void *);
259static hashval_t t2r_hash (const void *);
260static void add_type_for_runtime (tree);
261static tree lookup_type_for_runtime (tree);
262
502b8322 263static void remove_unreachable_regions (rtx);
502b8322 264
502b8322
AJ
265static int ttypes_filter_eq (const void *, const void *);
266static hashval_t ttypes_filter_hash (const void *);
267static int ehspec_filter_eq (const void *, const void *);
268static hashval_t ehspec_filter_hash (const void *);
269static int add_ttypes_entry (htab_t, tree);
270static int add_ehspec_entry (htab_t, htab_t, tree);
271static void assign_filter_values (void);
272static void build_post_landing_pads (void);
273static void connect_post_landing_pads (void);
274static void dw2_build_landing_pads (void);
52a11cbf
RH
275
276struct sjlj_lp_info;
502b8322
AJ
277static bool sjlj_find_directly_reachable_regions (struct sjlj_lp_info *);
278static void sjlj_assign_call_site_values (rtx, struct sjlj_lp_info *);
279static void sjlj_mark_call_sites (struct sjlj_lp_info *);
280static void sjlj_emit_function_enter (rtx);
281static void sjlj_emit_function_exit (void);
282static void sjlj_emit_dispatch_table (rtx, struct sjlj_lp_info *);
283static void sjlj_build_landing_pads (void);
284
285static hashval_t ehl_hash (const void *);
286static int ehl_eq (const void *, const void *);
287static void add_ehl_entry (rtx, struct eh_region *);
288static void remove_exception_handler_label (rtx);
289static void remove_eh_handler (struct eh_region *);
290static int for_each_eh_label_1 (void **, void *);
52a11cbf 291
52a11cbf
RH
292/* The return value of reachable_next_level. */
293enum reachable_code
294{
295 /* The given exception is not processed by the given region. */
296 RNL_NOT_CAUGHT,
297 /* The given exception may need processing by the given region. */
298 RNL_MAYBE_CAUGHT,
299 /* The given exception is completely processed by the given region. */
300 RNL_CAUGHT,
301 /* The given exception is completely processed by the runtime. */
302 RNL_BLOCKED
303};
e6cfb550 304
6de9cd9a 305struct reachable_info;
502b8322
AJ
306static enum reachable_code reachable_next_level (struct eh_region *, tree,
307 struct reachable_info *);
308
309static int action_record_eq (const void *, const void *);
310static hashval_t action_record_hash (const void *);
311static int add_action_record (htab_t, int, int);
312static int collect_one_action_chain (htab_t, struct eh_region *);
313static int add_call_site (rtx, int);
314
315static void push_uleb128 (varray_type *, unsigned int);
316static void push_sleb128 (varray_type *, int);
52a11cbf 317#ifndef HAVE_AS_LEB128
502b8322
AJ
318static int dw2_size_of_call_site_table (void);
319static int sjlj_size_of_call_site_table (void);
52a11cbf 320#endif
502b8322
AJ
321static void dw2_output_call_site_table (void);
322static void sjlj_output_call_site_table (void);
e6cfb550 323
52a11cbf
RH
324\f
325/* Routine to see if exception handling is turned on.
cc2902df 326 DO_WARN is nonzero if we want to inform the user that exception
3f2c5d1a 327 handling is turned off.
4956d07c 328
52a11cbf
RH
329 This is used to ensure that -fexceptions has been specified if the
330 compiler tries to use any exception-specific functions. */
4956d07c 331
52a11cbf 332int
502b8322 333doing_eh (int do_warn)
52a11cbf
RH
334{
335 if (! flag_exceptions)
336 {
337 static int warned = 0;
338 if (! warned && do_warn)
339 {
340 error ("exception handling disabled, use -fexceptions to enable");
341 warned = 1;
342 }
343 return 0;
344 }
345 return 1;
4956d07c
MS
346}
347
52a11cbf
RH
348\f
349void
502b8322 350init_eh (void)
4956d07c 351{
52a11cbf
RH
352 if (! flag_exceptions)
353 return;
4956d07c 354
e2500fed 355 type_to_runtime_map = htab_create_ggc (31, t2r_hash, t2r_eq, NULL);
4956d07c 356
52a11cbf
RH
357 /* Create the SjLj_Function_Context structure. This should match
358 the definition in unwind-sjlj.c. */
359 if (USING_SJLJ_EXCEPTIONS)
360 {
361 tree f_jbuf, f_per, f_lsda, f_prev, f_cs, f_data, tmp;
4956d07c 362
ae2bcd98 363 sjlj_fc_type_node = lang_hooks.types.make_type (RECORD_TYPE);
9a0d1e1b 364
52a11cbf
RH
365 f_prev = build_decl (FIELD_DECL, get_identifier ("__prev"),
366 build_pointer_type (sjlj_fc_type_node));
367 DECL_FIELD_CONTEXT (f_prev) = sjlj_fc_type_node;
9a0d1e1b 368
52a11cbf
RH
369 f_cs = build_decl (FIELD_DECL, get_identifier ("__call_site"),
370 integer_type_node);
371 DECL_FIELD_CONTEXT (f_cs) = sjlj_fc_type_node;
4956d07c 372
7d60be94 373 tmp = build_index_type (build_int_cst (NULL_TREE, 4 - 1));
ae2bcd98 374 tmp = build_array_type (lang_hooks.types.type_for_mode (word_mode, 1),
b0c48229 375 tmp);
52a11cbf
RH
376 f_data = build_decl (FIELD_DECL, get_identifier ("__data"), tmp);
377 DECL_FIELD_CONTEXT (f_data) = sjlj_fc_type_node;
9a0d1e1b 378
52a11cbf
RH
379 f_per = build_decl (FIELD_DECL, get_identifier ("__personality"),
380 ptr_type_node);
381 DECL_FIELD_CONTEXT (f_per) = sjlj_fc_type_node;
4956d07c 382
52a11cbf
RH
383 f_lsda = build_decl (FIELD_DECL, get_identifier ("__lsda"),
384 ptr_type_node);
385 DECL_FIELD_CONTEXT (f_lsda) = sjlj_fc_type_node;
6814a8a0 386
52a11cbf
RH
387#ifdef DONT_USE_BUILTIN_SETJMP
388#ifdef JMP_BUF_SIZE
7d60be94 389 tmp = build_int_cst (NULL_TREE, JMP_BUF_SIZE - 1);
52a11cbf
RH
390#else
391 /* Should be large enough for most systems, if it is not,
392 JMP_BUF_SIZE should be defined with the proper value. It will
393 also tend to be larger than necessary for most systems, a more
394 optimal port will define JMP_BUF_SIZE. */
7d60be94 395 tmp = build_int_cst (NULL_TREE, FIRST_PSEUDO_REGISTER + 2 - 1);
52a11cbf
RH
396#endif
397#else
83810fcb 398 /* builtin_setjmp takes a pointer to 5 words. */
7d60be94 399 tmp = build_int_cst (NULL_TREE, 5 * BITS_PER_WORD / POINTER_SIZE - 1);
52a11cbf
RH
400#endif
401 tmp = build_index_type (tmp);
402 tmp = build_array_type (ptr_type_node, tmp);
403 f_jbuf = build_decl (FIELD_DECL, get_identifier ("__jbuf"), tmp);
404#ifdef DONT_USE_BUILTIN_SETJMP
405 /* We don't know what the alignment requirements of the
406 runtime's jmp_buf has. Overestimate. */
407 DECL_ALIGN (f_jbuf) = BIGGEST_ALIGNMENT;
408 DECL_USER_ALIGN (f_jbuf) = 1;
409#endif
410 DECL_FIELD_CONTEXT (f_jbuf) = sjlj_fc_type_node;
411
412 TYPE_FIELDS (sjlj_fc_type_node) = f_prev;
413 TREE_CHAIN (f_prev) = f_cs;
414 TREE_CHAIN (f_cs) = f_data;
415 TREE_CHAIN (f_data) = f_per;
416 TREE_CHAIN (f_per) = f_lsda;
417 TREE_CHAIN (f_lsda) = f_jbuf;
418
419 layout_type (sjlj_fc_type_node);
420
421 /* Cache the interesting field offsets so that we have
422 easy access from rtl. */
423 sjlj_fc_call_site_ofs
424 = (tree_low_cst (DECL_FIELD_OFFSET (f_cs), 1)
425 + tree_low_cst (DECL_FIELD_BIT_OFFSET (f_cs), 1) / BITS_PER_UNIT);
426 sjlj_fc_data_ofs
427 = (tree_low_cst (DECL_FIELD_OFFSET (f_data), 1)
428 + tree_low_cst (DECL_FIELD_BIT_OFFSET (f_data), 1) / BITS_PER_UNIT);
429 sjlj_fc_personality_ofs
430 = (tree_low_cst (DECL_FIELD_OFFSET (f_per), 1)
431 + tree_low_cst (DECL_FIELD_BIT_OFFSET (f_per), 1) / BITS_PER_UNIT);
432 sjlj_fc_lsda_ofs
433 = (tree_low_cst (DECL_FIELD_OFFSET (f_lsda), 1)
434 + tree_low_cst (DECL_FIELD_BIT_OFFSET (f_lsda), 1) / BITS_PER_UNIT);
435 sjlj_fc_jbuf_ofs
436 = (tree_low_cst (DECL_FIELD_OFFSET (f_jbuf), 1)
437 + tree_low_cst (DECL_FIELD_BIT_OFFSET (f_jbuf), 1) / BITS_PER_UNIT);
438 }
4956d07c
MS
439}
440
52a11cbf 441void
502b8322 442init_eh_for_function (void)
4956d07c 443{
703ad42b 444 cfun->eh = ggc_alloc_cleared (sizeof (struct eh_status));
6a58eee9 445}
52a11cbf 446\f
6de9cd9a
DN
447/* Routines to generate the exception tree somewhat directly.
448 These are used from tree-eh.c when processing exception related
449 nodes during tree optimization. */
450
451static struct eh_region *
452gen_eh_region (enum eh_region_type type, struct eh_region *outer)
453{
454 struct eh_region *new;
455
456#ifdef ENABLE_CHECKING
5b0264cb 457 gcc_assert (doing_eh (0));
6de9cd9a
DN
458#endif
459
460 /* Insert a new blank region as a leaf in the tree. */
461 new = ggc_alloc_cleared (sizeof (*new));
462 new->type = type;
463 new->outer = outer;
464 if (outer)
465 {
466 new->next_peer = outer->inner;
467 outer->inner = new;
468 }
469 else
470 {
471 new->next_peer = cfun->eh->region_tree;
472 cfun->eh->region_tree = new;
473 }
474
475 new->region_number = ++cfun->eh->last_region_number;
476
477 return new;
478}
479
480struct eh_region *
481gen_eh_region_cleanup (struct eh_region *outer, struct eh_region *prev_try)
482{
483 struct eh_region *cleanup = gen_eh_region (ERT_CLEANUP, outer);
484 cleanup->u.cleanup.prev_try = prev_try;
485 return cleanup;
486}
487
488struct eh_region *
489gen_eh_region_try (struct eh_region *outer)
490{
491 return gen_eh_region (ERT_TRY, outer);
492}
493
494struct eh_region *
495gen_eh_region_catch (struct eh_region *t, tree type_or_list)
496{
497 struct eh_region *c, *l;
498 tree type_list, type_node;
499
500 /* Ensure to always end up with a type list to normalize further
501 processing, then register each type against the runtime types map. */
502 type_list = type_or_list;
503 if (type_or_list)
504 {
505 if (TREE_CODE (type_or_list) != TREE_LIST)
506 type_list = tree_cons (NULL_TREE, type_or_list, NULL_TREE);
507
508 type_node = type_list;
509 for (; type_node; type_node = TREE_CHAIN (type_node))
510 add_type_for_runtime (TREE_VALUE (type_node));
511 }
512
513 c = gen_eh_region (ERT_CATCH, t->outer);
514 c->u.catch.type_list = type_list;
515 l = t->u.try.last_catch;
516 c->u.catch.prev_catch = l;
517 if (l)
518 l->u.catch.next_catch = c;
519 else
520 t->u.try.catch = c;
521 t->u.try.last_catch = c;
522
523 return c;
524}
525
526struct eh_region *
527gen_eh_region_allowed (struct eh_region *outer, tree allowed)
528{
529 struct eh_region *region = gen_eh_region (ERT_ALLOWED_EXCEPTIONS, outer);
530 region->u.allowed.type_list = allowed;
531
532 for (; allowed ; allowed = TREE_CHAIN (allowed))
533 add_type_for_runtime (TREE_VALUE (allowed));
534
535 return region;
536}
537
538struct eh_region *
539gen_eh_region_must_not_throw (struct eh_region *outer)
540{
541 return gen_eh_region (ERT_MUST_NOT_THROW, outer);
542}
543
544int
545get_eh_region_number (struct eh_region *region)
546{
547 return region->region_number;
548}
549
550bool
551get_eh_region_may_contain_throw (struct eh_region *region)
552{
553 return region->may_contain_throw;
554}
555
556tree
557get_eh_region_tree_label (struct eh_region *region)
558{
559 return region->tree_label;
560}
561
562void
563set_eh_region_tree_label (struct eh_region *region, tree lab)
564{
565 region->tree_label = lab;
566}
567\f
6de9cd9a
DN
568void
569expand_resx_expr (tree exp)
570{
571 int region_nr = TREE_INT_CST_LOW (TREE_OPERAND (exp, 0));
572 struct eh_region *reg = cfun->eh->region_array[region_nr];
573
574 reg->resume = emit_jump_insn (gen_rtx_RESX (VOIDmode, region_nr));
575 emit_barrier ();
576}
577
b2dd096b
MM
578/* Note that the current EH region (if any) may contain a throw, or a
579 call to a function which itself may contain a throw. */
580
581void
6de9cd9a 582note_eh_region_may_contain_throw (struct eh_region *region)
b2dd096b 583{
b2dd096b
MM
584 while (region && !region->may_contain_throw)
585 {
586 region->may_contain_throw = 1;
587 region = region->outer;
588 }
589}
590
6de9cd9a
DN
591void
592note_current_region_may_contain_throw (void)
593{
594 note_eh_region_may_contain_throw (cfun->eh->cur_region);
595}
596
597
47c84870 598/* Return an rtl expression for a pointer to the exception object
52a11cbf 599 within a handler. */
4956d07c
MS
600
601rtx
502b8322 602get_exception_pointer (struct function *fun)
4956d07c 603{
86c99549
RH
604 rtx exc_ptr = fun->eh->exc_ptr;
605 if (fun == cfun && ! exc_ptr)
52a11cbf 606 {
26b10ae0 607 exc_ptr = gen_reg_rtx (ptr_mode);
86c99549 608 fun->eh->exc_ptr = exc_ptr;
52a11cbf
RH
609 }
610 return exc_ptr;
611}
4956d07c 612
47c84870
JM
613/* Return an rtl expression for the exception dispatch filter
614 within a handler. */
615
6de9cd9a 616rtx
502b8322 617get_exception_filter (struct function *fun)
47c84870 618{
86c99549
RH
619 rtx filter = fun->eh->filter;
620 if (fun == cfun && ! filter)
47c84870 621 {
93f90be6 622 filter = gen_reg_rtx (targetm.eh_return_filter_mode ());
86c99549 623 fun->eh->filter = filter;
47c84870
JM
624 }
625 return filter;
626}
52a11cbf
RH
627\f
628/* This section is for the exception handling specific optimization pass. */
154bba13 629
ac45df5d 630/* Random access the exception region tree. */
154bba13 631
6de9cd9a 632void
502b8322 633collect_eh_region_array (void)
154bba13 634{
52a11cbf 635 struct eh_region **array, *i;
154bba13 636
52a11cbf
RH
637 i = cfun->eh->region_tree;
638 if (! i)
639 return;
154bba13 640
e2500fed
GK
641 array = ggc_alloc_cleared ((cfun->eh->last_region_number + 1)
642 * sizeof (*array));
52a11cbf 643 cfun->eh->region_array = array;
154bba13 644
52a11cbf
RH
645 while (1)
646 {
647 array[i->region_number] = i;
648
649 /* If there are sub-regions, process them. */
650 if (i->inner)
651 i = i->inner;
652 /* If there are peers, process them. */
653 else if (i->next_peer)
654 i = i->next_peer;
655 /* Otherwise, step back up the tree to the next peer. */
656 else
657 {
658 do {
659 i = i->outer;
660 if (i == NULL)
661 return;
662 } while (i->next_peer == NULL);
663 i = i->next_peer;
664 }
665 }
27a36778
MS
666}
667
655dd289
JJ
668/* Remove all regions whose labels are not reachable from insns. */
669
670static void
502b8322 671remove_unreachable_regions (rtx insns)
655dd289
JJ
672{
673 int i, *uid_region_num;
674 bool *reachable;
675 struct eh_region *r;
676 rtx insn;
677
678 uid_region_num = xcalloc (get_max_uid (), sizeof(int));
679 reachable = xcalloc (cfun->eh->last_region_number + 1, sizeof(bool));
680
681 for (i = cfun->eh->last_region_number; i > 0; --i)
682 {
683 r = cfun->eh->region_array[i];
684 if (!r || r->region_number != i)
685 continue;
686
687 if (r->resume)
0fb7aeda 688 {
5b0264cb 689 gcc_assert (!uid_region_num[INSN_UID (r->resume)]);
655dd289 690 uid_region_num[INSN_UID (r->resume)] = i;
0fb7aeda 691 }
655dd289 692 if (r->label)
0fb7aeda 693 {
5b0264cb 694 gcc_assert (!uid_region_num[INSN_UID (r->label)]);
655dd289 695 uid_region_num[INSN_UID (r->label)] = i;
0fb7aeda 696 }
655dd289
JJ
697 }
698
699 for (insn = insns; insn; insn = NEXT_INSN (insn))
6ce2bcb7 700 reachable[uid_region_num[INSN_UID (insn)]] = true;
655dd289
JJ
701
702 for (i = cfun->eh->last_region_number; i > 0; --i)
703 {
704 r = cfun->eh->region_array[i];
705 if (r && r->region_number == i && !reachable[i])
706 {
6de9cd9a
DN
707 bool kill_it = true;
708 switch (r->type)
709 {
710 case ERT_THROW:
711 /* Don't remove ERT_THROW regions if their outer region
712 is reachable. */
713 if (r->outer && reachable[r->outer->region_number])
714 kill_it = false;
715 break;
716
717 case ERT_MUST_NOT_THROW:
718 /* MUST_NOT_THROW regions are implementable solely in the
1ea7e6ad 719 runtime, but their existence continues to affect calls
6de9cd9a
DN
720 within that region. Never delete them here. */
721 kill_it = false;
722 break;
723
724 case ERT_TRY:
725 {
726 /* TRY regions are reachable if any of its CATCH regions
727 are reachable. */
728 struct eh_region *c;
729 for (c = r->u.try.catch; c ; c = c->u.catch.next_catch)
730 if (reachable[c->region_number])
731 {
732 kill_it = false;
733 break;
734 }
735 break;
736 }
655dd289 737
6de9cd9a
DN
738 default:
739 break;
740 }
741
742 if (kill_it)
743 remove_eh_handler (r);
655dd289
JJ
744 }
745 }
746
747 free (reachable);
748 free (uid_region_num);
749}
750
f39e46ba 751/* Set up EH labels for RTL. */
6de9cd9a 752
52a11cbf 753void
502b8322 754convert_from_eh_region_ranges (void)
52a11cbf 755{
6de9cd9a 756 rtx insns = get_insns ();
f39e46ba 757 int i, n = cfun->eh->last_region_number;
6de9cd9a 758
f39e46ba
SB
759 /* Most of the work is already done at the tree level. All we need to
760 do is collect the rtl labels that correspond to the tree labels that
761 collect the rtl labels that correspond to the tree labels
762 we allocated earlier. */
763 for (i = 1; i <= n; ++i)
6de9cd9a 764 {
f39e46ba
SB
765 struct eh_region *region = cfun->eh->region_array[i];
766 if (region && region->tree_label)
767 region->label = DECL_RTL_IF_SET (region->tree_label);
6de9cd9a 768 }
27a36778 769
655dd289 770 remove_unreachable_regions (insns);
27a36778
MS
771}
772
6a58eee9 773static void
502b8322 774add_ehl_entry (rtx label, struct eh_region *region)
6a58eee9
RH
775{
776 struct ehl_map_entry **slot, *entry;
777
778 LABEL_PRESERVE_P (label) = 1;
779
703ad42b 780 entry = ggc_alloc (sizeof (*entry));
6a58eee9
RH
781 entry->label = label;
782 entry->region = region;
783
784 slot = (struct ehl_map_entry **)
e2500fed 785 htab_find_slot (cfun->eh->exception_handler_label_map, entry, INSERT);
6f3d0447
RH
786
787 /* Before landing pad creation, each exception handler has its own
788 label. After landing pad creation, the exception handlers may
789 share landing pads. This is ok, since maybe_remove_eh_handler
790 only requires the 1-1 mapping before landing pad creation. */
5b0264cb 791 gcc_assert (!*slot || cfun->eh->built_landing_pads);
6f3d0447 792
6a58eee9
RH
793 *slot = entry;
794}
795
52a11cbf 796void
502b8322 797find_exception_handler_labels (void)
27a36778 798{
52a11cbf 799 int i;
27a36778 800
e2500fed
GK
801 if (cfun->eh->exception_handler_label_map)
802 htab_empty (cfun->eh->exception_handler_label_map);
6a58eee9
RH
803 else
804 {
805 /* ??? The expansion factor here (3/2) must be greater than the htab
806 occupancy factor (4/3) to avoid unnecessary resizing. */
e2500fed
GK
807 cfun->eh->exception_handler_label_map
808 = htab_create_ggc (cfun->eh->last_region_number * 3 / 2,
809 ehl_hash, ehl_eq, NULL);
6a58eee9 810 }
27a36778 811
52a11cbf
RH
812 if (cfun->eh->region_tree == NULL)
813 return;
27a36778 814
52a11cbf
RH
815 for (i = cfun->eh->last_region_number; i > 0; --i)
816 {
817 struct eh_region *region = cfun->eh->region_array[i];
818 rtx lab;
27a36778 819
655dd289 820 if (! region || region->region_number != i)
52a11cbf
RH
821 continue;
822 if (cfun->eh->built_landing_pads)
823 lab = region->landing_pad;
824 else
825 lab = region->label;
27a36778 826
52a11cbf 827 if (lab)
6a58eee9 828 add_ehl_entry (lab, region);
27a36778
MS
829 }
830
52a11cbf
RH
831 /* For sjlj exceptions, need the return label to remain live until
832 after landing pad generation. */
833 if (USING_SJLJ_EXCEPTIONS && ! cfun->eh->built_landing_pads)
6a58eee9 834 add_ehl_entry (return_label, NULL);
27a36778
MS
835}
836
93f82d60 837bool
502b8322 838current_function_has_exception_handlers (void)
93f82d60
RH
839{
840 int i;
841
842 for (i = cfun->eh->last_region_number; i > 0; --i)
843 {
844 struct eh_region *region = cfun->eh->region_array[i];
845
846 if (! region || region->region_number != i)
847 continue;
848 if (region->type != ERT_THROW)
849 return true;
850 }
851
852 return false;
853}
52a11cbf 854\f
52a11cbf 855static int
502b8322 856t2r_eq (const void *pentry, const void *pdata)
9762d48d 857{
52a11cbf
RH
858 tree entry = (tree) pentry;
859 tree data = (tree) pdata;
9762d48d 860
52a11cbf 861 return TREE_PURPOSE (entry) == data;
9762d48d
JM
862}
863
52a11cbf 864static hashval_t
502b8322 865t2r_hash (const void *pentry)
52a11cbf
RH
866{
867 tree entry = (tree) pentry;
fd917e0d 868 return TREE_HASH (TREE_PURPOSE (entry));
52a11cbf 869}
9762d48d 870
52a11cbf 871static void
502b8322 872add_type_for_runtime (tree type)
52a11cbf
RH
873{
874 tree *slot;
9762d48d 875
52a11cbf 876 slot = (tree *) htab_find_slot_with_hash (type_to_runtime_map, type,
fd917e0d 877 TREE_HASH (type), INSERT);
52a11cbf
RH
878 if (*slot == NULL)
879 {
880 tree runtime = (*lang_eh_runtime_type) (type);
881 *slot = tree_cons (type, runtime, NULL_TREE);
882 }
883}
3f2c5d1a 884
52a11cbf 885static tree
502b8322 886lookup_type_for_runtime (tree type)
52a11cbf
RH
887{
888 tree *slot;
b37f006b 889
52a11cbf 890 slot = (tree *) htab_find_slot_with_hash (type_to_runtime_map, type,
fd917e0d 891 TREE_HASH (type), NO_INSERT);
b37f006b 892
a1f300c0 893 /* We should have always inserted the data earlier. */
52a11cbf
RH
894 return TREE_VALUE (*slot);
895}
9762d48d 896
52a11cbf
RH
897\f
898/* Represent an entry in @TTypes for either catch actions
899 or exception filter actions. */
e2500fed 900struct ttypes_filter GTY(())
52a11cbf
RH
901{
902 tree t;
903 int filter;
904};
b37f006b 905
52a11cbf
RH
906/* Compare ENTRY (a ttypes_filter entry in the hash table) with DATA
907 (a tree) for a @TTypes type node we are thinking about adding. */
b37f006b 908
52a11cbf 909static int
502b8322 910ttypes_filter_eq (const void *pentry, const void *pdata)
52a11cbf
RH
911{
912 const struct ttypes_filter *entry = (const struct ttypes_filter *) pentry;
913 tree data = (tree) pdata;
b37f006b 914
52a11cbf 915 return entry->t == data;
9762d48d
JM
916}
917
52a11cbf 918static hashval_t
502b8322 919ttypes_filter_hash (const void *pentry)
52a11cbf
RH
920{
921 const struct ttypes_filter *entry = (const struct ttypes_filter *) pentry;
fd917e0d 922 return TREE_HASH (entry->t);
52a11cbf 923}
4956d07c 924
52a11cbf
RH
925/* Compare ENTRY with DATA (both struct ttypes_filter) for a @TTypes
926 exception specification list we are thinking about adding. */
927/* ??? Currently we use the type lists in the order given. Someone
928 should put these in some canonical order. */
929
930static int
502b8322 931ehspec_filter_eq (const void *pentry, const void *pdata)
4956d07c 932{
52a11cbf
RH
933 const struct ttypes_filter *entry = (const struct ttypes_filter *) pentry;
934 const struct ttypes_filter *data = (const struct ttypes_filter *) pdata;
935
936 return type_list_equal (entry->t, data->t);
4956d07c
MS
937}
938
52a11cbf 939/* Hash function for exception specification lists. */
4956d07c 940
52a11cbf 941static hashval_t
502b8322 942ehspec_filter_hash (const void *pentry)
4956d07c 943{
52a11cbf
RH
944 const struct ttypes_filter *entry = (const struct ttypes_filter *) pentry;
945 hashval_t h = 0;
946 tree list;
947
948 for (list = entry->t; list ; list = TREE_CHAIN (list))
fd917e0d 949 h = (h << 5) + (h >> 27) + TREE_HASH (TREE_VALUE (list));
52a11cbf 950 return h;
4956d07c
MS
951}
952
fd917e0d
JM
953/* Add TYPE (which may be NULL) to cfun->eh->ttype_data, using TYPES_HASH
954 to speed up the search. Return the filter value to be used. */
4956d07c 955
52a11cbf 956static int
502b8322 957add_ttypes_entry (htab_t ttypes_hash, tree type)
4956d07c 958{
52a11cbf 959 struct ttypes_filter **slot, *n;
4956d07c 960
52a11cbf 961 slot = (struct ttypes_filter **)
fd917e0d 962 htab_find_slot_with_hash (ttypes_hash, type, TREE_HASH (type), INSERT);
52a11cbf
RH
963
964 if ((n = *slot) == NULL)
4956d07c 965 {
52a11cbf 966 /* Filter value is a 1 based table index. */
12670d88 967
703ad42b 968 n = xmalloc (sizeof (*n));
52a11cbf
RH
969 n->t = type;
970 n->filter = VARRAY_ACTIVE_SIZE (cfun->eh->ttype_data) + 1;
971 *slot = n;
972
973 VARRAY_PUSH_TREE (cfun->eh->ttype_data, type);
4956d07c 974 }
52a11cbf
RH
975
976 return n->filter;
4956d07c
MS
977}
978
52a11cbf
RH
979/* Add LIST to cfun->eh->ehspec_data, using EHSPEC_HASH and TYPES_HASH
980 to speed up the search. Return the filter value to be used. */
981
982static int
502b8322 983add_ehspec_entry (htab_t ehspec_hash, htab_t ttypes_hash, tree list)
12670d88 984{
52a11cbf
RH
985 struct ttypes_filter **slot, *n;
986 struct ttypes_filter dummy;
12670d88 987
52a11cbf
RH
988 dummy.t = list;
989 slot = (struct ttypes_filter **)
990 htab_find_slot (ehspec_hash, &dummy, INSERT);
991
992 if ((n = *slot) == NULL)
993 {
994 /* Filter value is a -1 based byte index into a uleb128 buffer. */
995
703ad42b 996 n = xmalloc (sizeof (*n));
52a11cbf
RH
997 n->t = list;
998 n->filter = -(VARRAY_ACTIVE_SIZE (cfun->eh->ehspec_data) + 1);
999 *slot = n;
1000
1001 /* Look up each type in the list and encode its filter
1002 value as a uleb128. Terminate the list with 0. */
1003 for (; list ; list = TREE_CHAIN (list))
3f2c5d1a 1004 push_uleb128 (&cfun->eh->ehspec_data,
52a11cbf
RH
1005 add_ttypes_entry (ttypes_hash, TREE_VALUE (list)));
1006 VARRAY_PUSH_UCHAR (cfun->eh->ehspec_data, 0);
1007 }
1008
1009 return n->filter;
12670d88
RK
1010}
1011
52a11cbf
RH
1012/* Generate the action filter values to be used for CATCH and
1013 ALLOWED_EXCEPTIONS regions. When using dwarf2 exception regions,
1014 we use lots of landing pads, and so every type or list can share
1015 the same filter value, which saves table space. */
1016
1017static void
502b8322 1018assign_filter_values (void)
9a0d1e1b 1019{
52a11cbf
RH
1020 int i;
1021 htab_t ttypes, ehspec;
9a9deafc 1022
52a11cbf
RH
1023 VARRAY_TREE_INIT (cfun->eh->ttype_data, 16, "ttype_data");
1024 VARRAY_UCHAR_INIT (cfun->eh->ehspec_data, 64, "ehspec_data");
9a9deafc 1025
52a11cbf
RH
1026 ttypes = htab_create (31, ttypes_filter_hash, ttypes_filter_eq, free);
1027 ehspec = htab_create (31, ehspec_filter_hash, ehspec_filter_eq, free);
9a0d1e1b 1028
52a11cbf
RH
1029 for (i = cfun->eh->last_region_number; i > 0; --i)
1030 {
1031 struct eh_region *r = cfun->eh->region_array[i];
9a0d1e1b 1032
52a11cbf
RH
1033 /* Mind we don't process a region more than once. */
1034 if (!r || r->region_number != i)
1035 continue;
9a0d1e1b 1036
52a11cbf
RH
1037 switch (r->type)
1038 {
1039 case ERT_CATCH:
6d41a92f
OH
1040 /* Whatever type_list is (NULL or true list), we build a list
1041 of filters for the region. */
1042 r->u.catch.filter_list = NULL_TREE;
1043
1044 if (r->u.catch.type_list != NULL)
1045 {
1046 /* Get a filter value for each of the types caught and store
1047 them in the region's dedicated list. */
1048 tree tp_node = r->u.catch.type_list;
1049
1050 for (;tp_node; tp_node = TREE_CHAIN (tp_node))
1051 {
1052 int flt = add_ttypes_entry (ttypes, TREE_VALUE (tp_node));
7d60be94 1053 tree flt_node = build_int_cst (NULL_TREE, flt);
3f2c5d1a
RS
1054
1055 r->u.catch.filter_list
6d41a92f
OH
1056 = tree_cons (NULL_TREE, flt_node, r->u.catch.filter_list);
1057 }
1058 }
1059 else
1060 {
1061 /* Get a filter value for the NULL list also since it will need
1062 an action record anyway. */
1063 int flt = add_ttypes_entry (ttypes, NULL);
7d60be94 1064 tree flt_node = build_int_cst (NULL_TREE, flt);
3f2c5d1a
RS
1065
1066 r->u.catch.filter_list
6d41a92f
OH
1067 = tree_cons (NULL_TREE, flt_node, r->u.catch.filter_list);
1068 }
3f2c5d1a 1069
52a11cbf 1070 break;
bf71cd2e 1071
52a11cbf
RH
1072 case ERT_ALLOWED_EXCEPTIONS:
1073 r->u.allowed.filter
1074 = add_ehspec_entry (ehspec, ttypes, r->u.allowed.type_list);
1075 break;
bf71cd2e 1076
52a11cbf
RH
1077 default:
1078 break;
1079 }
1080 }
1081
1082 htab_delete (ttypes);
1083 htab_delete (ehspec);
1084}
1085
12c3874e
JH
1086/* Emit SEQ into basic block just before INSN (that is assumed to be
1087 first instruction of some existing BB and return the newly
1088 produced block. */
1089static basic_block
1090emit_to_new_bb_before (rtx seq, rtx insn)
1091{
1092 rtx last;
1093 basic_block bb;
a61bf177 1094 edge e;
628f6a4e 1095 edge_iterator ei;
a61bf177 1096
1f838355 1097 /* If there happens to be a fallthru edge (possibly created by cleanup_cfg
a61bf177
JH
1098 call), we don't want it to go into newly created landing pad or other EH
1099 construct. */
628f6a4e 1100 for (ei = ei_start (BLOCK_FOR_INSN (insn)->preds); (e = ei_safe_edge (ei)); )
a61bf177
JH
1101 if (e->flags & EDGE_FALLTHRU)
1102 force_nonfallthru (e);
628f6a4e
BE
1103 else
1104 ei_next (&ei);
12c3874e 1105 last = emit_insn_before (seq, insn);
4b4bf941 1106 if (BARRIER_P (last))
12c3874e
JH
1107 last = PREV_INSN (last);
1108 bb = create_basic_block (seq, last, BLOCK_FOR_INSN (insn)->prev_bb);
1109 update_bb_for_insn (bb);
1110 bb->flags |= BB_SUPERBLOCK;
1111 return bb;
1112}
1113
ac850948
JM
1114/* Generate the code to actually handle exceptions, which will follow the
1115 landing pads. */
1116
52a11cbf 1117static void
502b8322 1118build_post_landing_pads (void)
52a11cbf
RH
1119{
1120 int i;
bf71cd2e 1121
52a11cbf 1122 for (i = cfun->eh->last_region_number; i > 0; --i)
bf71cd2e 1123 {
52a11cbf
RH
1124 struct eh_region *region = cfun->eh->region_array[i];
1125 rtx seq;
bf71cd2e 1126
52a11cbf
RH
1127 /* Mind we don't process a region more than once. */
1128 if (!region || region->region_number != i)
1129 continue;
1130
1131 switch (region->type)
987009bf 1132 {
52a11cbf
RH
1133 case ERT_TRY:
1134 /* ??? Collect the set of all non-overlapping catch handlers
1135 all the way up the chain until blocked by a cleanup. */
1136 /* ??? Outer try regions can share landing pads with inner
1137 try regions if the types are completely non-overlapping,
a1f300c0 1138 and there are no intervening cleanups. */
bf71cd2e 1139
52a11cbf 1140 region->post_landing_pad = gen_label_rtx ();
bf71cd2e 1141
52a11cbf 1142 start_sequence ();
bf71cd2e 1143
52a11cbf 1144 emit_label (region->post_landing_pad);
bf71cd2e 1145
52a11cbf
RH
1146 /* ??? It is mighty inconvenient to call back into the
1147 switch statement generation code in expand_end_case.
1148 Rapid prototyping sez a sequence of ifs. */
1149 {
1150 struct eh_region *c;
1151 for (c = region->u.try.catch; c ; c = c->u.catch.next_catch)
1152 {
6d41a92f 1153 if (c->u.catch.type_list == NULL)
a944ceb9 1154 emit_jump (c->label);
52a11cbf 1155 else
6d41a92f
OH
1156 {
1157 /* Need for one cmp/jump per type caught. Each type
1158 list entry has a matching entry in the filter list
1159 (see assign_filter_values). */
1160 tree tp_node = c->u.catch.type_list;
1161 tree flt_node = c->u.catch.filter_list;
1162
1163 for (; tp_node; )
1164 {
1165 emit_cmp_and_jump_insns
1166 (cfun->eh->filter,
1167 GEN_INT (tree_low_cst (TREE_VALUE (flt_node), 0)),
93f90be6
FJ
1168 EQ, NULL_RTX,
1169 targetm.eh_return_filter_mode (), 0, c->label);
6d41a92f
OH
1170
1171 tp_node = TREE_CHAIN (tp_node);
1172 flt_node = TREE_CHAIN (flt_node);
1173 }
1174 }
52a11cbf
RH
1175 }
1176 }
bf71cd2e 1177
47c84870
JM
1178 /* We delay the generation of the _Unwind_Resume until we generate
1179 landing pads. We emit a marker here so as to get good control
1180 flow data in the meantime. */
1181 region->resume
1182 = emit_jump_insn (gen_rtx_RESX (VOIDmode, region->region_number));
1183 emit_barrier ();
1184
52a11cbf
RH
1185 seq = get_insns ();
1186 end_sequence ();
e6cfb550 1187
12c3874e
JH
1188 emit_to_new_bb_before (seq, region->u.try.catch->label);
1189
52a11cbf 1190 break;
bf71cd2e 1191
52a11cbf
RH
1192 case ERT_ALLOWED_EXCEPTIONS:
1193 region->post_landing_pad = gen_label_rtx ();
9a0d1e1b 1194
52a11cbf 1195 start_sequence ();
f54a7f6f 1196
52a11cbf 1197 emit_label (region->post_landing_pad);
f54a7f6f 1198
52a11cbf
RH
1199 emit_cmp_and_jump_insns (cfun->eh->filter,
1200 GEN_INT (region->u.allowed.filter),
93f90be6
FJ
1201 EQ, NULL_RTX,
1202 targetm.eh_return_filter_mode (), 0, region->label);
f54a7f6f 1203
47c84870
JM
1204 /* We delay the generation of the _Unwind_Resume until we generate
1205 landing pads. We emit a marker here so as to get good control
1206 flow data in the meantime. */
1207 region->resume
1208 = emit_jump_insn (gen_rtx_RESX (VOIDmode, region->region_number));
1209 emit_barrier ();
1210
52a11cbf
RH
1211 seq = get_insns ();
1212 end_sequence ();
1213
12c3874e 1214 emit_to_new_bb_before (seq, region->label);
52a11cbf 1215 break;
f54a7f6f 1216
52a11cbf 1217 case ERT_CLEANUP:
125ca8fd 1218 case ERT_MUST_NOT_THROW:
a944ceb9 1219 region->post_landing_pad = region->label;
125ca8fd
RH
1220 break;
1221
52a11cbf
RH
1222 case ERT_CATCH:
1223 case ERT_THROW:
1224 /* Nothing to do. */
1225 break;
1226
1227 default:
5b0264cb 1228 gcc_unreachable ();
52a11cbf
RH
1229 }
1230 }
1231}
1e4ceb6f 1232
47c84870
JM
1233/* Replace RESX patterns with jumps to the next handler if any, or calls to
1234 _Unwind_Resume otherwise. */
1235
1e4ceb6f 1236static void
502b8322 1237connect_post_landing_pads (void)
1e4ceb6f 1238{
52a11cbf 1239 int i;
76fc91c7 1240
52a11cbf
RH
1241 for (i = cfun->eh->last_region_number; i > 0; --i)
1242 {
1243 struct eh_region *region = cfun->eh->region_array[i];
1244 struct eh_region *outer;
47c84870 1245 rtx seq;
12c3874e 1246 rtx barrier;
1e4ceb6f 1247
52a11cbf
RH
1248 /* Mind we don't process a region more than once. */
1249 if (!region || region->region_number != i)
1250 continue;
1e4ceb6f 1251
47c84870
JM
1252 /* If there is no RESX, or it has been deleted by flow, there's
1253 nothing to fix up. */
1254 if (! region->resume || INSN_DELETED_P (region->resume))
52a11cbf 1255 continue;
76fc91c7 1256
52a11cbf
RH
1257 /* Search for another landing pad in this function. */
1258 for (outer = region->outer; outer ; outer = outer->outer)
1259 if (outer->post_landing_pad)
1260 break;
1e4ceb6f 1261
52a11cbf 1262 start_sequence ();
12670d88 1263
52a11cbf 1264 if (outer)
12c3874e
JH
1265 {
1266 edge e;
1267 basic_block src, dest;
1268
1269 emit_jump (outer->post_landing_pad);
1270 src = BLOCK_FOR_INSN (region->resume);
1271 dest = BLOCK_FOR_INSN (outer->post_landing_pad);
628f6a4e
BE
1272 while (EDGE_COUNT (src->succs) > 0)
1273 remove_edge (EDGE_SUCC (src, 0));
12c3874e
JH
1274 e = make_edge (src, dest, 0);
1275 e->probability = REG_BR_PROB_BASE;
1276 e->count = src->count;
1277 }
52a11cbf 1278 else
29c246a7
HPN
1279 {
1280 emit_library_call (unwind_resume_libfunc, LCT_THROW,
1281 VOIDmode, 1, cfun->eh->exc_ptr, ptr_mode);
1282
1283 /* What we just emitted was a throwing libcall, so it got a
1284 barrier automatically added after it. If the last insn in
1285 the libcall sequence isn't the barrier, it's because the
1286 target emits multiple insns for a call, and there are insns
1287 after the actual call insn (which are redundant and would be
1288 optimized away). The barrier is inserted exactly after the
1289 call insn, so let's go get that and delete the insns after
1290 it, because below we need the barrier to be the last insn in
1291 the sequence. */
1292 delete_insns_since (NEXT_INSN (last_call_insn ()));
1293 }
4956d07c 1294
52a11cbf
RH
1295 seq = get_insns ();
1296 end_sequence ();
12c3874e
JH
1297 barrier = emit_insn_before (seq, region->resume);
1298 /* Avoid duplicate barrier. */
5b0264cb 1299 gcc_assert (BARRIER_P (barrier));
12c3874e 1300 delete_insn (barrier);
53c17031 1301 delete_insn (region->resume);
6de9cd9a
DN
1302
1303 /* ??? From tree-ssa we can wind up with catch regions whose
1304 label is not instantiated, but whose resx is present. Now
1305 that we've dealt with the resx, kill the region. */
1306 if (region->label == NULL && region->type == ERT_CLEANUP)
1307 remove_eh_handler (region);
52a11cbf
RH
1308 }
1309}
1310
1311\f
1312static void
502b8322 1313dw2_build_landing_pads (void)
4956d07c 1314{
ae0ed63a
JM
1315 int i;
1316 unsigned int j;
4956d07c 1317
52a11cbf
RH
1318 for (i = cfun->eh->last_region_number; i > 0; --i)
1319 {
1320 struct eh_region *region = cfun->eh->region_array[i];
1321 rtx seq;
12c3874e 1322 basic_block bb;
5c701bb1 1323 bool clobbers_hard_regs = false;
12c3874e 1324 edge e;
4956d07c 1325
52a11cbf
RH
1326 /* Mind we don't process a region more than once. */
1327 if (!region || region->region_number != i)
1328 continue;
1418bb67 1329
52a11cbf
RH
1330 if (region->type != ERT_CLEANUP
1331 && region->type != ERT_TRY
1332 && region->type != ERT_ALLOWED_EXCEPTIONS)
1333 continue;
12670d88 1334
52a11cbf 1335 start_sequence ();
4956d07c 1336
52a11cbf
RH
1337 region->landing_pad = gen_label_rtx ();
1338 emit_label (region->landing_pad);
4956d07c 1339
52a11cbf
RH
1340#ifdef HAVE_exception_receiver
1341 if (HAVE_exception_receiver)
1342 emit_insn (gen_exception_receiver ());
1343 else
1344#endif
1345#ifdef HAVE_nonlocal_goto_receiver
1346 if (HAVE_nonlocal_goto_receiver)
1347 emit_insn (gen_nonlocal_goto_receiver ());
1348 else
1349#endif
1350 { /* Nothing */ }
4956d07c 1351
52a11cbf
RH
1352 /* If the eh_return data registers are call-saved, then we
1353 won't have considered them clobbered from the call that
1354 threw. Kill them now. */
1355 for (j = 0; ; ++j)
1356 {
1357 unsigned r = EH_RETURN_DATA_REGNO (j);
1358 if (r == INVALID_REGNUM)
1359 break;
1360 if (! call_used_regs[r])
5c701bb1
JS
1361 {
1362 emit_insn (gen_rtx_CLOBBER (VOIDmode, gen_rtx_REG (Pmode, r)));
1363 clobbers_hard_regs = true;
1364 }
1365 }
1366
1367 if (clobbers_hard_regs)
1368 {
1369 /* @@@ This is a kludge. Not all machine descriptions define a
1370 blockage insn, but we must not allow the code we just generated
1371 to be reordered by scheduling. So emit an ASM_INPUT to act as
2ba84f36 1372 blockage insn. */
5c701bb1 1373 emit_insn (gen_rtx_ASM_INPUT (VOIDmode, ""));
52a11cbf 1374 }
e701eb4d 1375
52a11cbf 1376 emit_move_insn (cfun->eh->exc_ptr,
26b10ae0 1377 gen_rtx_REG (ptr_mode, EH_RETURN_DATA_REGNO (0)));
52a11cbf 1378 emit_move_insn (cfun->eh->filter,
93f90be6
FJ
1379 gen_rtx_REG (targetm.eh_return_filter_mode (),
1380 EH_RETURN_DATA_REGNO (1)));
9a0d1e1b 1381
52a11cbf
RH
1382 seq = get_insns ();
1383 end_sequence ();
5816cb14 1384
12c3874e
JH
1385 bb = emit_to_new_bb_before (seq, region->post_landing_pad);
1386 e = make_edge (bb, bb->next_bb, EDGE_FALLTHRU);
1387 e->count = bb->count;
1388 e->probability = REG_BR_PROB_BASE;
52a11cbf 1389 }
4956d07c
MS
1390}
1391
52a11cbf
RH
1392\f
1393struct sjlj_lp_info
1394{
1395 int directly_reachable;
1396 int action_index;
1397 int dispatch_index;
1398 int call_site_index;
1399};
4956d07c 1400
52a11cbf 1401static bool
502b8322 1402sjlj_find_directly_reachable_regions (struct sjlj_lp_info *lp_info)
4956d07c 1403{
52a11cbf
RH
1404 rtx insn;
1405 bool found_one = false;
4956d07c 1406
52a11cbf
RH
1407 for (insn = get_insns (); insn ; insn = NEXT_INSN (insn))
1408 {
1409 struct eh_region *region;
98ce21b3 1410 enum reachable_code rc;
52a11cbf
RH
1411 tree type_thrown;
1412 rtx note;
4956d07c 1413
52a11cbf
RH
1414 if (! INSN_P (insn))
1415 continue;
0d3453df 1416
52a11cbf
RH
1417 note = find_reg_note (insn, REG_EH_REGION, NULL_RTX);
1418 if (!note || INTVAL (XEXP (note, 0)) <= 0)
1419 continue;
5dfa7520 1420
52a11cbf 1421 region = cfun->eh->region_array[INTVAL (XEXP (note, 0))];
5dfa7520 1422
52a11cbf
RH
1423 type_thrown = NULL_TREE;
1424 if (region->type == ERT_THROW)
1425 {
1426 type_thrown = region->u.throw.type;
1427 region = region->outer;
1428 }
12670d88 1429
52a11cbf
RH
1430 /* Find the first containing region that might handle the exception.
1431 That's the landing pad to which we will transfer control. */
98ce21b3 1432 rc = RNL_NOT_CAUGHT;
52a11cbf 1433 for (; region; region = region->outer)
98ce21b3 1434 {
6de9cd9a 1435 rc = reachable_next_level (region, type_thrown, NULL);
98ce21b3
RH
1436 if (rc != RNL_NOT_CAUGHT)
1437 break;
1438 }
1439 if (rc == RNL_MAYBE_CAUGHT || rc == RNL_CAUGHT)
52a11cbf
RH
1440 {
1441 lp_info[region->region_number].directly_reachable = 1;
1442 found_one = true;
1443 }
1444 }
4956d07c 1445
52a11cbf
RH
1446 return found_one;
1447}
e701eb4d
JM
1448
1449static void
502b8322 1450sjlj_assign_call_site_values (rtx dispatch_label, struct sjlj_lp_info *lp_info)
e701eb4d 1451{
52a11cbf
RH
1452 htab_t ar_hash;
1453 int i, index;
1454
1455 /* First task: build the action table. */
1456
1457 VARRAY_UCHAR_INIT (cfun->eh->action_record_data, 64, "action_record_data");
1458 ar_hash = htab_create (31, action_record_hash, action_record_eq, free);
1459
1460 for (i = cfun->eh->last_region_number; i > 0; --i)
1461 if (lp_info[i].directly_reachable)
e6cfb550 1462 {
52a11cbf
RH
1463 struct eh_region *r = cfun->eh->region_array[i];
1464 r->landing_pad = dispatch_label;
1465 lp_info[i].action_index = collect_one_action_chain (ar_hash, r);
1466 if (lp_info[i].action_index != -1)
1467 cfun->uses_eh_lsda = 1;
e6cfb550 1468 }
e701eb4d 1469
52a11cbf 1470 htab_delete (ar_hash);
76fc91c7 1471
52a11cbf
RH
1472 /* Next: assign dispatch values. In dwarf2 terms, this would be the
1473 landing pad label for the region. For sjlj though, there is one
1474 common landing pad from which we dispatch to the post-landing pads.
76fc91c7 1475
52a11cbf
RH
1476 A region receives a dispatch index if it is directly reachable
1477 and requires in-function processing. Regions that share post-landing
eaec9b3d 1478 pads may share dispatch indices. */
52a11cbf
RH
1479 /* ??? Post-landing pad sharing doesn't actually happen at the moment
1480 (see build_post_landing_pads) so we don't bother checking for it. */
4956d07c 1481
52a11cbf
RH
1482 index = 0;
1483 for (i = cfun->eh->last_region_number; i > 0; --i)
98ce21b3 1484 if (lp_info[i].directly_reachable)
52a11cbf 1485 lp_info[i].dispatch_index = index++;
76fc91c7 1486
52a11cbf
RH
1487 /* Finally: assign call-site values. If dwarf2 terms, this would be
1488 the region number assigned by convert_to_eh_region_ranges, but
1489 handles no-action and must-not-throw differently. */
76fc91c7 1490
52a11cbf
RH
1491 call_site_base = 1;
1492 for (i = cfun->eh->last_region_number; i > 0; --i)
1493 if (lp_info[i].directly_reachable)
1494 {
1495 int action = lp_info[i].action_index;
1496
1497 /* Map must-not-throw to otherwise unused call-site index 0. */
1498 if (action == -2)
1499 index = 0;
1500 /* Map no-action to otherwise unused call-site index -1. */
1501 else if (action == -1)
1502 index = -1;
1503 /* Otherwise, look it up in the table. */
1504 else
1505 index = add_call_site (GEN_INT (lp_info[i].dispatch_index), action);
1506
1507 lp_info[i].call_site_index = index;
1508 }
4956d07c 1509}
27a36778 1510
52a11cbf 1511static void
502b8322 1512sjlj_mark_call_sites (struct sjlj_lp_info *lp_info)
27a36778 1513{
52a11cbf
RH
1514 int last_call_site = -2;
1515 rtx insn, mem;
1516
52a11cbf 1517 for (insn = get_insns (); insn ; insn = NEXT_INSN (insn))
27a36778 1518 {
52a11cbf
RH
1519 struct eh_region *region;
1520 int this_call_site;
1521 rtx note, before, p;
27a36778 1522
52a11cbf 1523 /* Reset value tracking at extended basic block boundaries. */
4b4bf941 1524 if (LABEL_P (insn))
52a11cbf 1525 last_call_site = -2;
27a36778 1526
52a11cbf
RH
1527 if (! INSN_P (insn))
1528 continue;
27a36778 1529
52a11cbf
RH
1530 note = find_reg_note (insn, REG_EH_REGION, NULL_RTX);
1531 if (!note)
1532 {
1533 /* Calls (and trapping insns) without notes are outside any
1534 exception handling region in this function. Mark them as
1535 no action. */
4b4bf941 1536 if (CALL_P (insn)
52a11cbf
RH
1537 || (flag_non_call_exceptions
1538 && may_trap_p (PATTERN (insn))))
1539 this_call_site = -1;
1540 else
1541 continue;
1542 }
1543 else
1544 {
1545 /* Calls that are known to not throw need not be marked. */
1546 if (INTVAL (XEXP (note, 0)) <= 0)
1547 continue;
27a36778 1548
52a11cbf
RH
1549 region = cfun->eh->region_array[INTVAL (XEXP (note, 0))];
1550 this_call_site = lp_info[region->region_number].call_site_index;
1551 }
27a36778 1552
52a11cbf
RH
1553 if (this_call_site == last_call_site)
1554 continue;
1555
1556 /* Don't separate a call from it's argument loads. */
1557 before = insn;
4b4bf941 1558 if (CALL_P (insn))
0fb7aeda 1559 before = find_first_parameter_load (insn, NULL_RTX);
4956d07c 1560
52a11cbf 1561 start_sequence ();
fd2c57a9
AH
1562 mem = adjust_address (cfun->eh->sjlj_fc, TYPE_MODE (integer_type_node),
1563 sjlj_fc_call_site_ofs);
52a11cbf
RH
1564 emit_move_insn (mem, GEN_INT (this_call_site));
1565 p = get_insns ();
1566 end_sequence ();
12670d88 1567
2f937369 1568 emit_insn_before (p, before);
52a11cbf
RH
1569 last_call_site = this_call_site;
1570 }
1571}
4956d07c 1572
52a11cbf
RH
1573/* Construct the SjLj_Function_Context. */
1574
1575static void
502b8322 1576sjlj_emit_function_enter (rtx dispatch_label)
4956d07c 1577{
52a11cbf 1578 rtx fn_begin, fc, mem, seq;
4956d07c 1579
52a11cbf 1580 fc = cfun->eh->sjlj_fc;
4956d07c 1581
52a11cbf 1582 start_sequence ();
8a4451aa 1583
8979edec
JL
1584 /* We're storing this libcall's address into memory instead of
1585 calling it directly. Thus, we must call assemble_external_libcall
1586 here, as we can not depend on emit_library_call to do it for us. */
1587 assemble_external_libcall (eh_personality_libfunc);
f4ef873c 1588 mem = adjust_address (fc, Pmode, sjlj_fc_personality_ofs);
52a11cbf
RH
1589 emit_move_insn (mem, eh_personality_libfunc);
1590
f4ef873c 1591 mem = adjust_address (fc, Pmode, sjlj_fc_lsda_ofs);
52a11cbf
RH
1592 if (cfun->uses_eh_lsda)
1593 {
1594 char buf[20];
86bdf071
RE
1595 rtx sym;
1596
df696a75 1597 ASM_GENERATE_INTERNAL_LABEL (buf, "LLSDA", current_function_funcdef_no);
86bdf071
RE
1598 sym = gen_rtx_SYMBOL_REF (Pmode, ggc_strdup (buf));
1599 SYMBOL_REF_FLAGS (sym) = SYMBOL_FLAG_LOCAL;
1600 emit_move_insn (mem, sym);
8a4451aa 1601 }
52a11cbf
RH
1602 else
1603 emit_move_insn (mem, const0_rtx);
3f2c5d1a 1604
52a11cbf
RH
1605#ifdef DONT_USE_BUILTIN_SETJMP
1606 {
1607 rtx x, note;
9defc9b7 1608 x = emit_library_call_value (setjmp_libfunc, NULL_RTX, LCT_RETURNS_TWICE,
52a11cbf
RH
1609 TYPE_MODE (integer_type_node), 1,
1610 plus_constant (XEXP (fc, 0),
1611 sjlj_fc_jbuf_ofs), Pmode);
1612
2e040219 1613 note = emit_note (NOTE_INSN_EXPECTED_VALUE);
52a11cbf
RH
1614 NOTE_EXPECTED_VALUE (note) = gen_rtx_EQ (VOIDmode, x, const0_rtx);
1615
1616 emit_cmp_and_jump_insns (x, const0_rtx, NE, 0,
a06ef755 1617 TYPE_MODE (integer_type_node), 0, dispatch_label);
52a11cbf
RH
1618 }
1619#else
1620 expand_builtin_setjmp_setup (plus_constant (XEXP (fc, 0), sjlj_fc_jbuf_ofs),
1621 dispatch_label);
4956d07c 1622#endif
4956d07c 1623
52a11cbf
RH
1624 emit_library_call (unwind_sjlj_register_libfunc, LCT_NORMAL, VOIDmode,
1625 1, XEXP (fc, 0), Pmode);
12670d88 1626
52a11cbf
RH
1627 seq = get_insns ();
1628 end_sequence ();
4956d07c 1629
52a11cbf
RH
1630 /* ??? Instead of doing this at the beginning of the function,
1631 do this in a block that is at loop level 0 and dominates all
1632 can_throw_internal instructions. */
4956d07c 1633
52a11cbf 1634 for (fn_begin = get_insns (); ; fn_begin = NEXT_INSN (fn_begin))
4b4bf941 1635 if (NOTE_P (fn_begin)
12c3874e
JH
1636 && (NOTE_LINE_NUMBER (fn_begin) == NOTE_INSN_FUNCTION_BEG
1637 || NOTE_LINE_NUMBER (fn_begin) == NOTE_INSN_BASIC_BLOCK))
52a11cbf 1638 break;
12c3874e 1639 if (NOTE_LINE_NUMBER (fn_begin) == NOTE_INSN_FUNCTION_BEG)
628f6a4e 1640 insert_insn_on_edge (seq, EDGE_SUCC (ENTRY_BLOCK_PTR, 0));
12c3874e
JH
1641 else
1642 {
628f6a4e 1643 rtx last = BB_END (EDGE_SUCC (ENTRY_BLOCK_PTR, 0)->dest);
12c3874e 1644 for (; ; fn_begin = NEXT_INSN (fn_begin))
4b4bf941 1645 if ((NOTE_P (fn_begin)
12c3874e
JH
1646 && NOTE_LINE_NUMBER (fn_begin) == NOTE_INSN_FUNCTION_BEG)
1647 || fn_begin == last)
1648 break;
1649 emit_insn_after (seq, fn_begin);
1650 }
4956d07c
MS
1651}
1652
52a11cbf
RH
1653/* Call back from expand_function_end to know where we should put
1654 the call to unwind_sjlj_unregister_libfunc if needed. */
12670d88 1655
52a11cbf 1656void
502b8322 1657sjlj_emit_function_exit_after (rtx after)
52a11cbf
RH
1658{
1659 cfun->eh->sjlj_exit_after = after;
1660}
4956d07c
MS
1661
1662static void
502b8322 1663sjlj_emit_function_exit (void)
52a11cbf
RH
1664{
1665 rtx seq;
12c3874e 1666 edge e;
628f6a4e 1667 edge_iterator ei;
4956d07c 1668
52a11cbf 1669 start_sequence ();
ce152ef8 1670
52a11cbf
RH
1671 emit_library_call (unwind_sjlj_unregister_libfunc, LCT_NORMAL, VOIDmode,
1672 1, XEXP (cfun->eh->sjlj_fc, 0), Pmode);
e6cfb550 1673
52a11cbf
RH
1674 seq = get_insns ();
1675 end_sequence ();
4956d07c 1676
52a11cbf
RH
1677 /* ??? Really this can be done in any block at loop level 0 that
1678 post-dominates all can_throw_internal instructions. This is
1679 the last possible moment. */
9a0d1e1b 1680
628f6a4e 1681 FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR->preds)
12c3874e
JH
1682 if (e->flags & EDGE_FALLTHRU)
1683 break;
1684 if (e)
1685 {
1686 rtx insn;
1687
 1688	  /* Figure out whether the place we are supposed to insert the libcall
 1689	     is inside the last basic block or after it.  In the latter case
 1690	     we need to insert it on the edge instead.  */
5b0264cb 1691 gcc_assert (e->src->next_bb == EXIT_BLOCK_PTR);
f6a41d17 1692 for (insn = BB_HEAD (e->src); ; insn = NEXT_INSN (insn))
12c3874e 1693 {
f6a41d17
RH
1694 if (insn == cfun->eh->sjlj_exit_after)
1695 {
1696 if (LABEL_P (insn))
1697 insn = NEXT_INSN (insn);
1698 emit_insn_after (seq, insn);
1699 return;
1700 }
1701 if (insn == BB_END (e->src))
1702 break;
12c3874e 1703 }
f6a41d17 1704 insert_insn_on_edge (seq, e);
12c3874e 1705 }
9a0d1e1b
AM
1706}
1707
52a11cbf 1708static void
502b8322 1709sjlj_emit_dispatch_table (rtx dispatch_label, struct sjlj_lp_info *lp_info)
ce152ef8 1710{
52a11cbf
RH
1711 int i, first_reachable;
1712 rtx mem, dispatch, seq, fc;
12c3874e
JH
1713 rtx before;
1714 basic_block bb;
1715 edge e;
52a11cbf
RH
1716
1717 fc = cfun->eh->sjlj_fc;
1718
1719 start_sequence ();
1720
1721 emit_label (dispatch_label);
3f2c5d1a 1722
52a11cbf
RH
1723#ifndef DONT_USE_BUILTIN_SETJMP
1724 expand_builtin_setjmp_receiver (dispatch_label);
1725#endif
1726
1727 /* Load up dispatch index, exc_ptr and filter values from the
1728 function context. */
f4ef873c
RK
1729 mem = adjust_address (fc, TYPE_MODE (integer_type_node),
1730 sjlj_fc_call_site_ofs);
52a11cbf
RH
1731 dispatch = copy_to_reg (mem);
1732
f4ef873c 1733 mem = adjust_address (fc, word_mode, sjlj_fc_data_ofs);
f920765d 1734 if (word_mode != ptr_mode)
52a11cbf
RH
1735 {
1736#ifdef POINTERS_EXTEND_UNSIGNED
f920765d 1737 mem = convert_memory_address (ptr_mode, mem);
52a11cbf 1738#else
f920765d 1739 mem = convert_to_mode (ptr_mode, mem, 0);
52a11cbf
RH
1740#endif
1741 }
1742 emit_move_insn (cfun->eh->exc_ptr, mem);
1743
f4ef873c 1744 mem = adjust_address (fc, word_mode, sjlj_fc_data_ofs + UNITS_PER_WORD);
52a11cbf 1745 emit_move_insn (cfun->eh->filter, mem);
4956d07c 1746
52a11cbf
RH
1747 /* Jump to one of the directly reachable regions. */
1748 /* ??? This really ought to be using a switch statement. */
1749
1750 first_reachable = 0;
1751 for (i = cfun->eh->last_region_number; i > 0; --i)
a1622f83 1752 {
98ce21b3 1753 if (! lp_info[i].directly_reachable)
52a11cbf 1754 continue;
a1622f83 1755
52a11cbf
RH
1756 if (! first_reachable)
1757 {
1758 first_reachable = i;
1759 continue;
1760 }
e6cfb550 1761
a06ef755
RK
1762 emit_cmp_and_jump_insns (dispatch, GEN_INT (lp_info[i].dispatch_index),
1763 EQ, NULL_RTX, TYPE_MODE (integer_type_node), 0,
52a11cbf 1764 cfun->eh->region_array[i]->post_landing_pad);
a1622f83 1765 }
9a0d1e1b 1766
52a11cbf
RH
1767 seq = get_insns ();
1768 end_sequence ();
4956d07c 1769
12c3874e
JH
1770 before = cfun->eh->region_array[first_reachable]->post_landing_pad;
1771
1772 bb = emit_to_new_bb_before (seq, before);
1773 e = make_edge (bb, bb->next_bb, EDGE_FALLTHRU);
1774 e->count = bb->count;
1775 e->probability = REG_BR_PROB_BASE;
ce152ef8
AM
1776}
1777
52a11cbf 1778static void
502b8322 1779sjlj_build_landing_pads (void)
ce152ef8 1780{
52a11cbf 1781 struct sjlj_lp_info *lp_info;
ce152ef8 1782
703ad42b
KG
1783 lp_info = xcalloc (cfun->eh->last_region_number + 1,
1784 sizeof (struct sjlj_lp_info));
ce152ef8 1785
52a11cbf
RH
1786 if (sjlj_find_directly_reachable_regions (lp_info))
1787 {
1788 rtx dispatch_label = gen_label_rtx ();
ce152ef8 1789
52a11cbf
RH
1790 cfun->eh->sjlj_fc
1791 = assign_stack_local (TYPE_MODE (sjlj_fc_type_node),
1792 int_size_in_bytes (sjlj_fc_type_node),
1793 TYPE_ALIGN (sjlj_fc_type_node));
4956d07c 1794
52a11cbf
RH
1795 sjlj_assign_call_site_values (dispatch_label, lp_info);
1796 sjlj_mark_call_sites (lp_info);
a1622f83 1797
52a11cbf
RH
1798 sjlj_emit_function_enter (dispatch_label);
1799 sjlj_emit_dispatch_table (dispatch_label, lp_info);
1800 sjlj_emit_function_exit ();
1801 }
a1622f83 1802
52a11cbf 1803 free (lp_info);
4956d07c 1804}
ce152ef8 1805
ce152ef8 1806void
502b8322 1807finish_eh_generation (void)
ce152ef8 1808{
12c3874e
JH
1809 basic_block bb;
1810
52a11cbf
RH
1811 /* Nothing to do if no regions created. */
1812 if (cfun->eh->region_tree == NULL)
ce152ef8
AM
1813 return;
1814
52a11cbf
RH
1815 /* The object here is to provide find_basic_blocks with detailed
1816 information (via reachable_handlers) on how exception control
1817 flows within the function. In this first pass, we can include
1818 type information garnered from ERT_THROW and ERT_ALLOWED_EXCEPTIONS
1819 regions, and hope that it will be useful in deleting unreachable
1820 handlers. Subsequently, we will generate landing pads which will
1821 connect many of the handlers, and then type information will not
1822 be effective. Still, this is a win over previous implementations. */
1823
52a11cbf
RH
1824 /* These registers are used by the landing pads. Make sure they
1825 have been generated. */
86c99549
RH
1826 get_exception_pointer (cfun);
1827 get_exception_filter (cfun);
52a11cbf
RH
1828
1829 /* Construct the landing pads. */
1830
1831 assign_filter_values ();
1832 build_post_landing_pads ();
1833 connect_post_landing_pads ();
1834 if (USING_SJLJ_EXCEPTIONS)
1835 sjlj_build_landing_pads ();
1836 else
1837 dw2_build_landing_pads ();
ce152ef8 1838
52a11cbf 1839 cfun->eh->built_landing_pads = 1;
ce152ef8 1840
52a11cbf
RH
1841 /* We've totally changed the CFG. Start over. */
1842 find_exception_handler_labels ();
12c3874e
JH
1843 break_superblocks ();
1844 if (USING_SJLJ_EXCEPTIONS)
1845 commit_edge_insertions ();
1846 FOR_EACH_BB (bb)
1847 {
628f6a4e
BE
1848 edge e;
1849 edge_iterator ei;
12c3874e 1850 bool eh = false;
628f6a4e 1851 for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
12c3874e 1852 {
12c3874e
JH
1853 if (e->flags & EDGE_EH)
1854 {
1855 remove_edge (e);
1856 eh = true;
1857 }
628f6a4e
BE
1858 else
1859 ei_next (&ei);
12c3874e
JH
1860 }
1861 if (eh)
6de9cd9a 1862 rtl_make_eh_edge (NULL, bb, BB_END (bb));
12c3874e 1863 }
ce152ef8 1864}
4956d07c 1865\f
6a58eee9 1866static hashval_t
502b8322 1867ehl_hash (const void *pentry)
6a58eee9
RH
1868{
1869 struct ehl_map_entry *entry = (struct ehl_map_entry *) pentry;
1870
1871 /* 2^32 * ((sqrt(5) - 1) / 2) */
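	 /* That is, 0x9e3779b9 == floor (2^32 / phi) == 2654435769, the usual
	    multiplicative ("Fibonacci") hashing constant: multiplying the
	    label number by a constant near 2^32 / phi scatters consecutive
	    CODE_LABEL_NUMBERs across the whole hashval_t range before htab
	    reduces the result modulo the table size.  */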
1872 const hashval_t scaled_golden_ratio = 0x9e3779b9;
1873 return CODE_LABEL_NUMBER (entry->label) * scaled_golden_ratio;
1874}
1875
1876static int
502b8322 1877ehl_eq (const void *pentry, const void *pdata)
6a58eee9
RH
1878{
1879 struct ehl_map_entry *entry = (struct ehl_map_entry *) pentry;
1880 struct ehl_map_entry *data = (struct ehl_map_entry *) pdata;
1881
1882 return entry->label == data->label;
1883}
1884
52a11cbf 1885/* This section handles removing dead code for flow. */
154bba13 1886
6a58eee9 1887/* Remove LABEL from exception_handler_label_map. */
154bba13 1888
52a11cbf 1889static void
502b8322 1890remove_exception_handler_label (rtx label)
154bba13 1891{
6a58eee9 1892 struct ehl_map_entry **slot, tmp;
100d81d4 1893
6a58eee9 1894 /* If exception_handler_label_map was not built yet,
655dd289 1895 there is nothing to do. */
e2500fed 1896 if (cfun->eh->exception_handler_label_map == NULL)
655dd289
JJ
1897 return;
1898
6a58eee9
RH
1899 tmp.label = label;
1900 slot = (struct ehl_map_entry **)
e2500fed 1901 htab_find_slot (cfun->eh->exception_handler_label_map, &tmp, NO_INSERT);
5b0264cb 1902 gcc_assert (slot);
154bba13 1903
e2500fed 1904 htab_clear_slot (cfun->eh->exception_handler_label_map, (void **) slot);
154bba13
TT
1905}
1906
52a11cbf 1907/* Splice REGION from the region tree etc. */
12670d88 1908
f19c9228 1909static void
502b8322 1910remove_eh_handler (struct eh_region *region)
4956d07c 1911{
ff2c46ac 1912 struct eh_region **pp, **pp_start, *p, *outer, *inner;
52a11cbf 1913 rtx lab;
4956d07c 1914
52a11cbf
RH
1915 /* For the benefit of efficiently handling REG_EH_REGION notes,
1916 replace this region in the region array with its containing
1917 region. Note that previous region deletions may result in
6a58eee9
RH
1918 multiple copies of this region in the array, so we have a
1919 list of alternate numbers by which we are known. */
1920
ff2c46ac
RH
1921 outer = region->outer;
1922 cfun->eh->region_array[region->region_number] = outer;
6a58eee9
RH
1923 if (region->aka)
1924 {
3cd8c58a 1925 unsigned i;
87c476a2
ZD
1926 bitmap_iterator bi;
1927
1928 EXECUTE_IF_SET_IN_BITMAP (region->aka, 0, i, bi)
1929 {
1930 cfun->eh->region_array[i] = outer;
1931 }
6a58eee9
RH
1932 }
1933
ff2c46ac 1934 if (outer)
6a58eee9 1935 {
ff2c46ac 1936 if (!outer->aka)
e2500fed 1937 outer->aka = BITMAP_GGC_ALLOC ();
6a58eee9 1938 if (region->aka)
67299d91 1939 bitmap_ior_into (outer->aka, region->aka);
ff2c46ac 1940 bitmap_set_bit (outer->aka, region->region_number);
6a58eee9 1941 }
52a11cbf
RH
1942
1943 if (cfun->eh->built_landing_pads)
1944 lab = region->landing_pad;
1945 else
1946 lab = region->label;
1947 if (lab)
1948 remove_exception_handler_label (lab);
1949
ff2c46ac
RH
1950 if (outer)
1951 pp_start = &outer->inner;
52a11cbf 1952 else
ff2c46ac
RH
1953 pp_start = &cfun->eh->region_tree;
1954 for (pp = pp_start, p = *pp; p != region; pp = &p->next_peer, p = *pp)
52a11cbf 1955 continue;
ff2c46ac 1956 *pp = region->next_peer;
12670d88 1957
ff2c46ac
RH
1958 inner = region->inner;
1959 if (inner)
4956d07c 1960 {
ff2c46ac
RH
1961 for (p = inner; p->next_peer ; p = p->next_peer)
1962 p->outer = outer;
1963 p->outer = outer;
1964
1965 p->next_peer = *pp_start;
1966 *pp_start = inner;
4956d07c 1967 }
f19c9228 1968
52a11cbf
RH
1969 if (region->type == ERT_CATCH)
1970 {
1971 struct eh_region *try, *next, *prev;
f19c9228 1972
52a11cbf
RH
1973 for (try = region->next_peer;
1974 try->type == ERT_CATCH;
1975 try = try->next_peer)
1976 continue;
5b0264cb 1977 gcc_assert (try->type == ERT_TRY);
f19c9228 1978
52a11cbf
RH
1979 next = region->u.catch.next_catch;
1980 prev = region->u.catch.prev_catch;
f19c9228 1981
52a11cbf
RH
1982 if (next)
1983 next->u.catch.prev_catch = prev;
1984 else
1985 try->u.try.last_catch = prev;
1986 if (prev)
1987 prev->u.catch.next_catch = next;
1988 else
1989 {
1990 try->u.try.catch = next;
1991 if (! next)
1992 remove_eh_handler (try);
1993 }
1994 }
4956d07c
MS
1995}
1996
52a11cbf
RH
1997/* LABEL heads a basic block that is about to be deleted. If this
1998 label corresponds to an exception region, we may be able to
1999 delete the region. */
4956d07c
MS
2000
2001void
502b8322 2002maybe_remove_eh_handler (rtx label)
4956d07c 2003{
6a58eee9
RH
2004 struct ehl_map_entry **slot, tmp;
2005 struct eh_region *region;
4956d07c 2006
52a11cbf
RH
2007 /* ??? After generating landing pads, it's not so simple to determine
2008 if the region data is completely unused. One must examine the
2009 landing pad and the post landing pad, and whether an inner try block
2010 is referencing the catch handlers directly. */
2011 if (cfun->eh->built_landing_pads)
4956d07c
MS
2012 return;
2013
6a58eee9
RH
2014 tmp.label = label;
2015 slot = (struct ehl_map_entry **)
e2500fed 2016 htab_find_slot (cfun->eh->exception_handler_label_map, &tmp, NO_INSERT);
6a58eee9
RH
2017 if (! slot)
2018 return;
2019 region = (*slot)->region;
2020 if (! region)
2021 return;
2022
2023 /* Flow will want to remove MUST_NOT_THROW regions as unreachable
2024 because there is no path to the fallback call to terminate.
2025 But the region continues to affect call-site data until there
2026 are no more contained calls, which we don't see here. */
2027 if (region->type == ERT_MUST_NOT_THROW)
87ff9c8e 2028 {
e2500fed 2029 htab_clear_slot (cfun->eh->exception_handler_label_map, (void **) slot);
6a58eee9 2030 region->label = NULL_RTX;
87ff9c8e 2031 }
6a58eee9
RH
2032 else
2033 remove_eh_handler (region);
2034}
2035
2036/* Invokes CALLBACK for every exception handler label. Only used by old
2037 loop hackery; should not be used by new code. */
2038
2039void
502b8322 2040for_each_eh_label (void (*callback) (rtx))
6a58eee9 2041{
e2500fed 2042 htab_traverse (cfun->eh->exception_handler_label_map, for_each_eh_label_1,
3897f229 2043 (void *) &callback);
87ff9c8e
RH
2044}
2045
6a58eee9 2046static int
502b8322 2047for_each_eh_label_1 (void **pentry, void *data)
6a58eee9
RH
2048{
2049 struct ehl_map_entry *entry = *(struct ehl_map_entry **)pentry;
3897f229 2050 void (*callback) (rtx) = *(void (**) (rtx)) data;
6a58eee9
RH
2051
2052 (*callback) (entry->label);
2053 return 1;
2054}
f698d217
SB
2055
2056/* Invoke CALLBACK for every exception region in the current function. */
2057
2058void
2059for_each_eh_region (void (*callback) (struct eh_region *))
2060{
2061 int i, n = cfun->eh->last_region_number;
2062 for (i = 1; i <= n; ++i)
2063 {
2064 struct eh_region *region = cfun->eh->region_array[i];
2065 if (region)
2066 (*callback) (region);
2067 }
2068}
52a11cbf
RH
2069\f
2070/* This section describes CFG exception edges for flow. */
87ff9c8e 2071
52a11cbf 2072/* For communicating between calls to reachable_next_level. */
6de9cd9a 2073struct reachable_info
87ff9c8e 2074{
52a11cbf
RH
2075 tree types_caught;
2076 tree types_allowed;
6de9cd9a
DN
2077 void (*callback) (struct eh_region *, void *);
2078 void *callback_data;
2079 bool saw_any_handlers;
52a11cbf 2080};
87ff9c8e 2081
52a11cbf
RH
2082/* A subroutine of reachable_next_level. Return true if TYPE, or a
2083 base class of TYPE, is in HANDLED. */
87ff9c8e 2084
6de9cd9a 2085int
502b8322 2086check_handled (tree handled, tree type)
87ff9c8e 2087{
52a11cbf
RH
2088 tree t;
2089
2090 /* We can check for exact matches without front-end help. */
2091 if (! lang_eh_type_covers)
f54a7f6f 2092 {
52a11cbf
RH
2093 for (t = handled; t ; t = TREE_CHAIN (t))
2094 if (TREE_VALUE (t) == type)
2095 return 1;
2096 }
2097 else
2098 {
2099 for (t = handled; t ; t = TREE_CHAIN (t))
2100 if ((*lang_eh_type_covers) (TREE_VALUE (t), type))
2101 return 1;
f54a7f6f 2102 }
52a11cbf
RH
2103
2104 return 0;
87ff9c8e
RH
2105}
2106
52a11cbf
RH
2107/* A subroutine of reachable_next_level. If we are collecting a list
2108 of handlers, add one. After landing pad generation, reference
2109 it instead of the handlers themselves. Further, the handlers are
3f2c5d1a 2110 all wired together, so by referencing one, we've got them all.
52a11cbf
RH
2111 Before landing pad generation we reference each handler individually.
2112
2113 LP_REGION contains the landing pad; REGION is the handler. */
87ff9c8e
RH
2114
2115static void
6de9cd9a
DN
2116add_reachable_handler (struct reachable_info *info,
2117 struct eh_region *lp_region, struct eh_region *region)
87ff9c8e 2118{
52a11cbf
RH
2119 if (! info)
2120 return;
2121
6de9cd9a
DN
2122 info->saw_any_handlers = true;
2123
52a11cbf 2124 if (cfun->eh->built_landing_pads)
6de9cd9a 2125 info->callback (lp_region, info->callback_data);
52a11cbf 2126 else
6de9cd9a 2127 info->callback (region, info->callback_data);
87ff9c8e
RH
2128}
2129
3f2c5d1a 2130/* Process one level of exception regions for reachability.
52a11cbf
RH
2131 If TYPE_THROWN is non-null, then it is the *exact* type being
2132 propagated. If INFO is non-null, then collect handler labels
2133 and caught/allowed type information between invocations. */
87ff9c8e 2134
52a11cbf 2135static enum reachable_code
502b8322
AJ
2136reachable_next_level (struct eh_region *region, tree type_thrown,
2137 struct reachable_info *info)
87ff9c8e 2138{
52a11cbf
RH
2139 switch (region->type)
2140 {
2141 case ERT_CLEANUP:
2142 /* Before landing-pad generation, we model control flow
2143 directly to the individual handlers. In this way we can
2144 see that catch handler types may shadow one another. */
2145 add_reachable_handler (info, region, region);
2146 return RNL_MAYBE_CAUGHT;
2147
2148 case ERT_TRY:
2149 {
2150 struct eh_region *c;
2151 enum reachable_code ret = RNL_NOT_CAUGHT;
fa51b01b 2152
52a11cbf
RH
2153 for (c = region->u.try.catch; c ; c = c->u.catch.next_catch)
2154 {
2155 /* A catch-all handler ends the search. */
6d41a92f 2156 if (c->u.catch.type_list == NULL)
52a11cbf
RH
2157 {
2158 add_reachable_handler (info, region, c);
2159 return RNL_CAUGHT;
2160 }
2161
2162 if (type_thrown)
2163 {
a8154559 2164 /* If we have at least one type match, end the search. */
6d41a92f 2165 tree tp_node = c->u.catch.type_list;
3f2c5d1a 2166
6d41a92f 2167 for (; tp_node; tp_node = TREE_CHAIN (tp_node))
52a11cbf 2168 {
6d41a92f
OH
2169 tree type = TREE_VALUE (tp_node);
2170
2171 if (type == type_thrown
2172 || (lang_eh_type_covers
2173 && (*lang_eh_type_covers) (type, type_thrown)))
2174 {
2175 add_reachable_handler (info, region, c);
2176 return RNL_CAUGHT;
2177 }
52a11cbf
RH
2178 }
2179
2180 /* If we have definitive information of a match failure,
2181 the catch won't trigger. */
2182 if (lang_eh_type_covers)
2183 return RNL_NOT_CAUGHT;
2184 }
2185
6d41a92f
OH
2186 /* At this point, we either don't know what type is thrown or
 2187	     don't have front-end assistance to help decide whether it is
2188 covered by one of the types in the list for this region.
3f2c5d1a 2189
6d41a92f
OH
2190 We'd then like to add this region to the list of reachable
2191 handlers since it is indeed potentially reachable based on the
3f2c5d1a
RS
2192 information we have.
2193
6d41a92f
OH
2194 Actually, this handler is for sure not reachable if all the
2195 types it matches have already been caught. That is, it is only
2196 potentially reachable if at least one of the types it catches
2197 has not been previously caught. */
2198
52a11cbf
RH
2199 if (! info)
2200 ret = RNL_MAYBE_CAUGHT;
6d41a92f 2201 else
52a11cbf 2202 {
6d41a92f
OH
2203 tree tp_node = c->u.catch.type_list;
2204 bool maybe_reachable = false;
52a11cbf 2205
6d41a92f
OH
2206 /* Compute the potential reachability of this handler and
2207 update the list of types caught at the same time. */
2208 for (; tp_node; tp_node = TREE_CHAIN (tp_node))
2209 {
2210 tree type = TREE_VALUE (tp_node);
2211
2212 if (! check_handled (info->types_caught, type))
2213 {
2214 info->types_caught
2215 = tree_cons (NULL, type, info->types_caught);
3f2c5d1a 2216
6d41a92f
OH
2217 maybe_reachable = true;
2218 }
2219 }
3f2c5d1a 2220
6d41a92f
OH
2221 if (maybe_reachable)
2222 {
2223 add_reachable_handler (info, region, c);
3f2c5d1a 2224
6d41a92f
OH
2225 /* ??? If the catch type is a base class of every allowed
2226 type, then we know we can stop the search. */
2227 ret = RNL_MAYBE_CAUGHT;
2228 }
52a11cbf
RH
2229 }
2230 }
87ff9c8e 2231
52a11cbf
RH
2232 return ret;
2233 }
87ff9c8e 2234
52a11cbf
RH
2235 case ERT_ALLOWED_EXCEPTIONS:
2236 /* An empty list of types definitely ends the search. */
2237 if (region->u.allowed.type_list == NULL_TREE)
2238 {
2239 add_reachable_handler (info, region, region);
2240 return RNL_CAUGHT;
2241 }
87ff9c8e 2242
52a11cbf
RH
2243 /* Collect a list of lists of allowed types for use in detecting
2244 when a catch may be transformed into a catch-all. */
2245 if (info)
2246 info->types_allowed = tree_cons (NULL_TREE,
2247 region->u.allowed.type_list,
2248 info->types_allowed);
3f2c5d1a 2249
684d9f3b 2250 /* If we have definitive information about the type hierarchy,
52a11cbf
RH
2251 then we can tell if the thrown type will pass through the
2252 filter. */
2253 if (type_thrown && lang_eh_type_covers)
2254 {
2255 if (check_handled (region->u.allowed.type_list, type_thrown))
2256 return RNL_NOT_CAUGHT;
2257 else
2258 {
2259 add_reachable_handler (info, region, region);
2260 return RNL_CAUGHT;
2261 }
2262 }
21cd906e 2263
52a11cbf
RH
2264 add_reachable_handler (info, region, region);
2265 return RNL_MAYBE_CAUGHT;
21cd906e 2266
52a11cbf 2267 case ERT_CATCH:
fbe5a4a6 2268 /* Catch regions are handled by their controlling try region. */
52a11cbf 2269 return RNL_NOT_CAUGHT;
21cd906e 2270
52a11cbf
RH
2271 case ERT_MUST_NOT_THROW:
2272 /* Here we end our search, since no exceptions may propagate.
 2273	 If we've touched down at some landing pad previously, then the
2274 explicit function call we generated may be used. Otherwise
2275 the call is made by the runtime. */
6de9cd9a 2276 if (info && info->saw_any_handlers)
21cd906e 2277 {
52a11cbf 2278 add_reachable_handler (info, region, region);
0fb7aeda 2279 return RNL_CAUGHT;
21cd906e 2280 }
52a11cbf
RH
2281 else
2282 return RNL_BLOCKED;
21cd906e 2283
52a11cbf
RH
2284 case ERT_THROW:
2285 case ERT_FIXUP:
3f2c5d1a 2286 case ERT_UNKNOWN:
52a11cbf 2287 /* Shouldn't see these here. */
5b0264cb 2288 gcc_unreachable ();
52a11cbf 2289 break;
5b0264cb
NS
2290 default:
2291 gcc_unreachable ();
21cd906e 2292 }
fa51b01b 2293}
4956d07c 2294
6de9cd9a 2295/* Invoke CALLBACK on each region reachable from REGION_NUMBER. */
4956d07c 2296
6de9cd9a
DN
2297void
2298foreach_reachable_handler (int region_number, bool is_resx,
2299 void (*callback) (struct eh_region *, void *),
2300 void *callback_data)
4956d07c 2301{
52a11cbf
RH
2302 struct reachable_info info;
2303 struct eh_region *region;
2304 tree type_thrown;
4956d07c 2305
52a11cbf 2306 memset (&info, 0, sizeof (info));
6de9cd9a
DN
2307 info.callback = callback;
2308 info.callback_data = callback_data;
4956d07c 2309
52a11cbf 2310 region = cfun->eh->region_array[region_number];
fb13d4d0 2311
52a11cbf 2312 type_thrown = NULL_TREE;
6de9cd9a 2313 if (is_resx)
7f206d8f
RH
2314 {
2315 /* A RESX leaves a region instead of entering it. Thus the
2316 region itself may have been deleted out from under us. */
2317 if (region == NULL)
6de9cd9a 2318 return;
7f206d8f
RH
2319 region = region->outer;
2320 }
2321 else if (region->type == ERT_THROW)
52a11cbf
RH
2322 {
2323 type_thrown = region->u.throw.type;
2324 region = region->outer;
2325 }
fac62ecf 2326
bafb714b
MM
2327 while (region)
2328 {
2329 if (reachable_next_level (region, type_thrown, &info) >= RNL_CAUGHT)
a944ceb9 2330 break;
bafb714b
MM
2331 /* If we have processed one cleanup, there is no point in
2332 processing any more of them. Each cleanup will have an edge
2333 to the next outer cleanup region, so the flow graph will be
2334 accurate. */
2335 if (region->type == ERT_CLEANUP)
2336 region = region->u.cleanup.prev_try;
2337 else
2338 region = region->outer;
2339 }
6de9cd9a
DN
2340}
2341
2342/* Retrieve a list of labels of exception handlers which can be
2343 reached by a given insn. */
2344
2345static void
2346arh_to_landing_pad (struct eh_region *region, void *data)
2347{
2348 rtx *p_handlers = data;
2349 if (! *p_handlers)
2350 *p_handlers = alloc_INSN_LIST (region->landing_pad, NULL_RTX);
2351}
2352
2353static void
2354arh_to_label (struct eh_region *region, void *data)
2355{
2356 rtx *p_handlers = data;
2357 *p_handlers = alloc_INSN_LIST (region->label, *p_handlers);
2358}
2359
2360rtx
2361reachable_handlers (rtx insn)
2362{
2363 bool is_resx = false;
2364 rtx handlers = NULL;
2365 int region_number;
2366
4b4bf941 2367 if (JUMP_P (insn)
6de9cd9a
DN
2368 && GET_CODE (PATTERN (insn)) == RESX)
2369 {
2370 region_number = XINT (PATTERN (insn), 0);
2371 is_resx = true;
2372 }
2373 else
2374 {
2375 rtx note = find_reg_note (insn, REG_EH_REGION, NULL_RTX);
2376 if (!note || INTVAL (XEXP (note, 0)) <= 0)
2377 return NULL;
2378 region_number = INTVAL (XEXP (note, 0));
2379 }
502b8322 2380
6de9cd9a
DN
2381 foreach_reachable_handler (region_number, is_resx,
2382 (cfun->eh->built_landing_pads
2383 ? arh_to_landing_pad
2384 : arh_to_label),
2385 &handlers);
2386
2387 return handlers;
fb13d4d0
JM
2388}
2389
52a11cbf
RH
2390/* Determine if the given INSN can throw an exception that is caught
2391 within the function. */
4956d07c 2392
52a11cbf 2393bool
6de9cd9a 2394can_throw_internal_1 (int region_number)
4956d07c 2395{
52a11cbf
RH
2396 struct eh_region *region;
2397 tree type_thrown;
6de9cd9a
DN
2398
2399 region = cfun->eh->region_array[region_number];
2400
2401 type_thrown = NULL_TREE;
2402 if (region->type == ERT_THROW)
2403 {
2404 type_thrown = region->u.throw.type;
2405 region = region->outer;
2406 }
2407
2408 /* If this exception is ignored by each and every containing region,
2409 then control passes straight out. The runtime may handle some
2410 regions, which also do not require processing internally. */
2411 for (; region; region = region->outer)
2412 {
2413 enum reachable_code how = reachable_next_level (region, type_thrown, 0);
2414 if (how == RNL_BLOCKED)
2415 return false;
2416 if (how != RNL_NOT_CAUGHT)
2417 return true;
2418 }
2419
2420 return false;
2421}
2422
2423bool
2424can_throw_internal (rtx insn)
2425{
52a11cbf 2426 rtx note;
e6cfb550 2427
52a11cbf
RH
2428 if (! INSN_P (insn))
2429 return false;
12670d88 2430
4b4bf941 2431 if (JUMP_P (insn)
0620be18
JH
2432 && GET_CODE (PATTERN (insn)) == RESX
2433 && XINT (PATTERN (insn), 0) > 0)
2434 return can_throw_internal_1 (XINT (PATTERN (insn), 0));
2435
4b4bf941 2436 if (NONJUMP_INSN_P (insn)
52a11cbf
RH
2437 && GET_CODE (PATTERN (insn)) == SEQUENCE)
2438 insn = XVECEXP (PATTERN (insn), 0, 0);
4956d07c 2439
52a11cbf
RH
2440 /* Every insn that might throw has an EH_REGION note. */
2441 note = find_reg_note (insn, REG_EH_REGION, NULL_RTX);
2442 if (!note || INTVAL (XEXP (note, 0)) <= 0)
2443 return false;
4956d07c 2444
6de9cd9a
DN
2445 return can_throw_internal_1 (INTVAL (XEXP (note, 0)));
2446}
2447
2448/* Determine if the given INSN can throw an exception that is
2449 visible outside the function. */
2450
2451bool
2452can_throw_external_1 (int region_number)
2453{
2454 struct eh_region *region;
2455 tree type_thrown;
2456
2457 region = cfun->eh->region_array[region_number];
4956d07c 2458
52a11cbf
RH
2459 type_thrown = NULL_TREE;
2460 if (region->type == ERT_THROW)
2461 {
2462 type_thrown = region->u.throw.type;
2463 region = region->outer;
2464 }
4956d07c 2465
6de9cd9a
DN
2466 /* If the exception is caught or blocked by any containing region,
2467 then it is not seen by any calling function. */
2468 for (; region ; region = region->outer)
2469 if (reachable_next_level (region, type_thrown, NULL) >= RNL_CAUGHT)
2470 return false;
4956d07c 2471
6de9cd9a 2472 return true;
52a11cbf 2473}
4956d07c 2474
52a11cbf 2475bool
502b8322 2476can_throw_external (rtx insn)
4956d07c 2477{
52a11cbf 2478 rtx note;
4956d07c 2479
52a11cbf
RH
2480 if (! INSN_P (insn))
2481 return false;
2482
4b4bf941 2483 if (NONJUMP_INSN_P (insn)
52a11cbf
RH
2484 && GET_CODE (PATTERN (insn)) == SEQUENCE)
2485 insn = XVECEXP (PATTERN (insn), 0, 0);
2486
52a11cbf
RH
2487 note = find_reg_note (insn, REG_EH_REGION, NULL_RTX);
2488 if (!note)
2489 {
2490 /* Calls (and trapping insns) without notes are outside any
2491 exception handling region in this function. We have to
 2492	 assume they might throw.  Given that the front end and middle
2493 ends mark known NOTHROW functions, this isn't so wildly
2494 inaccurate. */
4b4bf941 2495 return (CALL_P (insn)
52a11cbf
RH
2496 || (flag_non_call_exceptions
2497 && may_trap_p (PATTERN (insn))));
2498 }
2499 if (INTVAL (XEXP (note, 0)) <= 0)
2500 return false;
2501
6de9cd9a 2502 return can_throw_external_1 (INTVAL (XEXP (note, 0)));
4956d07c 2503}
1ef1bf06 2504
97b0ade3 2505/* Set TREE_NOTHROW and cfun->all_throwers_are_sibcalls. */
6814a8a0 2506
b6128b8c 2507void
502b8322 2508set_nothrow_function_flags (void)
1ef1bf06
AM
2509{
2510 rtx insn;
502b8322 2511
97b0ade3 2512 TREE_NOTHROW (current_function_decl) = 1;
1ef1bf06 2513
b6128b8c
SH
2514 /* Assume cfun->all_throwers_are_sibcalls until we encounter
2515 something that can throw an exception. We specifically exempt
2516 CALL_INSNs that are SIBLING_CALL_P, as these are really jumps,
2517 and can't throw. Most CALL_INSNs are not SIBLING_CALL_P, so this
2518 is optimistic. */
1ef1bf06 2519
b6128b8c
SH
2520 cfun->all_throwers_are_sibcalls = 1;
2521
2522 if (! flag_exceptions)
2523 return;
502b8322 2524
1ef1bf06 2525 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
52a11cbf 2526 if (can_throw_external (insn))
b6128b8c 2527 {
97b0ade3 2528 TREE_NOTHROW (current_function_decl) = 0;
b6128b8c 2529
4b4bf941 2530 if (!CALL_P (insn) || !SIBLING_CALL_P (insn))
b6128b8c
SH
2531 {
2532 cfun->all_throwers_are_sibcalls = 0;
2533 return;
2534 }
2535 }
2536
52a11cbf
RH
2537 for (insn = current_function_epilogue_delay_list; insn;
2538 insn = XEXP (insn, 1))
b6128b8c
SH
2539 if (can_throw_external (insn))
2540 {
97b0ade3 2541 TREE_NOTHROW (current_function_decl) = 0;
4da896b2 2542
4b4bf941 2543 if (!CALL_P (insn) || !SIBLING_CALL_P (insn))
b6128b8c
SH
2544 {
2545 cfun->all_throwers_are_sibcalls = 0;
2546 return;
2547 }
2548 }
1ef1bf06 2549}
52a11cbf 2550
ca55abae 2551\f
52a11cbf 2552/* Various hooks for unwind library. */
ca55abae
JM
2553
2554/* Do any necessary initialization to access arbitrary stack frames.
2555 On the SPARC, this means flushing the register windows. */
2556
2557void
502b8322 2558expand_builtin_unwind_init (void)
ca55abae
JM
2559{
2560 /* Set this so all the registers get saved in our frame; we need to be
30f7a378 2561 able to copy the saved values for any registers from frames we unwind. */
ca55abae
JM
2562 current_function_has_nonlocal_label = 1;
2563
2564#ifdef SETUP_FRAME_ADDRESSES
2565 SETUP_FRAME_ADDRESSES ();
2566#endif
2567}
2568
52a11cbf 2569rtx
502b8322 2570expand_builtin_eh_return_data_regno (tree arglist)
52a11cbf
RH
2571{
2572 tree which = TREE_VALUE (arglist);
2573 unsigned HOST_WIDE_INT iwhich;
2574
2575 if (TREE_CODE (which) != INTEGER_CST)
2576 {
971801ff 2577 error ("argument of %<__builtin_eh_return_regno%> must be constant");
52a11cbf
RH
2578 return constm1_rtx;
2579 }
2580
2581 iwhich = tree_low_cst (which, 1);
2582 iwhich = EH_RETURN_DATA_REGNO (iwhich);
2583 if (iwhich == INVALID_REGNUM)
2584 return constm1_rtx;
2585
2586#ifdef DWARF_FRAME_REGNUM
2587 iwhich = DWARF_FRAME_REGNUM (iwhich);
2588#else
2589 iwhich = DBX_REGISTER_NUMBER (iwhich);
2590#endif
2591
3f2c5d1a 2592 return GEN_INT (iwhich);
52a11cbf
RH
2593}
2594
ca55abae
JM
2595/* Given a value extracted from the return address register or stack slot,
2596 return the actual address encoded in that value. */
2597
2598rtx
502b8322 2599expand_builtin_extract_return_addr (tree addr_tree)
ca55abae
JM
2600{
2601 rtx addr = expand_expr (addr_tree, NULL_RTX, Pmode, 0);
52a11cbf 2602
0ab38418
EC
2603 if (GET_MODE (addr) != Pmode
2604 && GET_MODE (addr) != VOIDmode)
2605 {
2606#ifdef POINTERS_EXTEND_UNSIGNED
2607 addr = convert_memory_address (Pmode, addr);
2608#else
2609 addr = convert_to_mode (Pmode, addr, 0);
2610#endif
2611 }
2612
52a11cbf
RH
2613 /* First mask out any unwanted bits. */
2614#ifdef MASK_RETURN_ADDR
22273300 2615 expand_and (Pmode, addr, MASK_RETURN_ADDR, addr);
52a11cbf
RH
2616#endif
2617
2618 /* Then adjust to find the real return address. */
2619#if defined (RETURN_ADDR_OFFSET)
2620 addr = plus_constant (addr, RETURN_ADDR_OFFSET);
2621#endif
2622
2623 return addr;
ca55abae
JM
2624}
2625
2626/* Given an actual address in addr_tree, do any necessary encoding
2627 and return the value to be stored in the return address register or
2628 stack slot so the epilogue will return to that address. */
2629
2630rtx
502b8322 2631expand_builtin_frob_return_addr (tree addr_tree)
ca55abae 2632{
4b6c1672 2633 rtx addr = expand_expr (addr_tree, NULL_RTX, ptr_mode, 0);
52a11cbf 2634
5ae6cd0d 2635 addr = convert_memory_address (Pmode, addr);
be128cd9 2636
ca55abae 2637#ifdef RETURN_ADDR_OFFSET
52a11cbf 2638 addr = force_reg (Pmode, addr);
ca55abae
JM
2639 addr = plus_constant (addr, -RETURN_ADDR_OFFSET);
2640#endif
52a11cbf 2641
ca55abae
JM
2642 return addr;
2643}
2644
52a11cbf
RH
2645/* Set up the epilogue with the magic bits we'll need to return to the
2646 exception handler. */
ca55abae 2647
52a11cbf 2648void
502b8322
AJ
2649expand_builtin_eh_return (tree stackadj_tree ATTRIBUTE_UNUSED,
2650 tree handler_tree)
ca55abae 2651{
34dc173c 2652 rtx tmp;
ca55abae 2653
34dc173c
UW
2654#ifdef EH_RETURN_STACKADJ_RTX
2655 tmp = expand_expr (stackadj_tree, cfun->eh->ehr_stackadj, VOIDmode, 0);
5ae6cd0d 2656 tmp = convert_memory_address (Pmode, tmp);
34dc173c
UW
2657 if (!cfun->eh->ehr_stackadj)
2658 cfun->eh->ehr_stackadj = copy_to_reg (tmp);
2659 else if (tmp != cfun->eh->ehr_stackadj)
2660 emit_move_insn (cfun->eh->ehr_stackadj, tmp);
be128cd9
RK
2661#endif
2662
34dc173c 2663 tmp = expand_expr (handler_tree, cfun->eh->ehr_handler, VOIDmode, 0);
5ae6cd0d 2664 tmp = convert_memory_address (Pmode, tmp);
34dc173c
UW
2665 if (!cfun->eh->ehr_handler)
2666 cfun->eh->ehr_handler = copy_to_reg (tmp);
2667 else if (tmp != cfun->eh->ehr_handler)
2668 emit_move_insn (cfun->eh->ehr_handler, tmp);
ca55abae 2669
34dc173c
UW
2670 if (!cfun->eh->ehr_label)
2671 cfun->eh->ehr_label = gen_label_rtx ();
52a11cbf 2672 emit_jump (cfun->eh->ehr_label);
a1622f83
AM
2673}
2674
71038426 2675void
502b8322 2676expand_eh_return (void)
ca55abae 2677{
34dc173c 2678 rtx around_label;
ca55abae 2679
52a11cbf 2680 if (! cfun->eh->ehr_label)
71038426 2681 return;
ca55abae 2682
52a11cbf 2683 current_function_calls_eh_return = 1;
ca55abae 2684
34dc173c
UW
2685#ifdef EH_RETURN_STACKADJ_RTX
2686 emit_move_insn (EH_RETURN_STACKADJ_RTX, const0_rtx);
2687#endif
2688
52a11cbf 2689 around_label = gen_label_rtx ();
52a11cbf 2690 emit_jump (around_label);
ca55abae 2691
52a11cbf
RH
2692 emit_label (cfun->eh->ehr_label);
2693 clobber_return_register ();
ca55abae 2694
34dc173c
UW
2695#ifdef EH_RETURN_STACKADJ_RTX
2696 emit_move_insn (EH_RETURN_STACKADJ_RTX, cfun->eh->ehr_stackadj);
2697#endif
2698
52a11cbf
RH
2699#ifdef HAVE_eh_return
2700 if (HAVE_eh_return)
34dc173c 2701 emit_insn (gen_eh_return (cfun->eh->ehr_handler));
52a11cbf 2702 else
71038426 2703#endif
52a11cbf 2704 {
34dc173c
UW
2705#ifdef EH_RETURN_HANDLER_RTX
2706 emit_move_insn (EH_RETURN_HANDLER_RTX, cfun->eh->ehr_handler);
2707#else
2708 error ("__builtin_eh_return not supported on this target");
2709#endif
52a11cbf 2710 }
71038426 2711
52a11cbf 2712 emit_label (around_label);
71038426 2713}
c76362b4
JW
2714
2715/* Convert a ptr_mode address ADDR_TREE to a Pmode address controlled by
2716 POINTERS_EXTEND_UNSIGNED and return it. */
2717
2718rtx
2719expand_builtin_extend_pointer (tree addr_tree)
2720{
2721 rtx addr = expand_expr (addr_tree, NULL_RTX, ptr_mode, 0);
2722 int extend;
2723
2724#ifdef POINTERS_EXTEND_UNSIGNED
2725 extend = POINTERS_EXTEND_UNSIGNED;
2726#else
2727 /* The previous EH code did an unsigned extend by default, so we do this also
2728 for consistency. */
2729 extend = 1;
2730#endif
2731
2732 return convert_modes (word_mode, ptr_mode, addr, extend);
2733}
77d33a84 2734\f
949f197f 2735/* In the following functions, we represent entries in the action table
eaec9b3d 2736 as 1-based indices. Special cases are:
949f197f
RH
2737
2738 0: null action record, non-null landing pad; implies cleanups
2739 -1: null action record, null landing pad; implies no action
2740 -2: no call-site entry; implies must_not_throw
2741 -3: we have yet to process outer regions
2742
2743 Further, no special cases apply to the "next" field of the record.
2744 For next, 0 means end of list. */
2745
52a11cbf
RH
2746struct action_record
2747{
2748 int offset;
2749 int filter;
2750 int next;
2751};
77d33a84 2752
52a11cbf 2753static int
502b8322 2754action_record_eq (const void *pentry, const void *pdata)
52a11cbf
RH
2755{
2756 const struct action_record *entry = (const struct action_record *) pentry;
2757 const struct action_record *data = (const struct action_record *) pdata;
2758 return entry->filter == data->filter && entry->next == data->next;
2759}
77d33a84 2760
52a11cbf 2761static hashval_t
502b8322 2762action_record_hash (const void *pentry)
52a11cbf
RH
2763{
2764 const struct action_record *entry = (const struct action_record *) pentry;
2765 return entry->next * 1009 + entry->filter;
2766}
77d33a84 2767
52a11cbf 2768static int
502b8322 2769add_action_record (htab_t ar_hash, int filter, int next)
77d33a84 2770{
52a11cbf
RH
2771 struct action_record **slot, *new, tmp;
2772
2773 tmp.filter = filter;
2774 tmp.next = next;
2775 slot = (struct action_record **) htab_find_slot (ar_hash, &tmp, INSERT);
77d33a84 2776
52a11cbf 2777 if ((new = *slot) == NULL)
77d33a84 2778 {
703ad42b 2779 new = xmalloc (sizeof (*new));
52a11cbf
RH
2780 new->offset = VARRAY_ACTIVE_SIZE (cfun->eh->action_record_data) + 1;
2781 new->filter = filter;
2782 new->next = next;
2783 *slot = new;
2784
2785 /* The filter value goes in untouched. The link to the next
2786 record is a "self-relative" byte offset, or zero to indicate
 2787	 that there is no next record.  So convert the absolute 1-based
eaec9b3d 2788 indices we've been carrying around into a displacement. */
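	 /* An illustrative trace (not in the original source): starting from
	    an empty array, add_action_record (ar_hash, 1, 0) appends the
	    bytes {01 00} and returns offset 1; a subsequent
	    add_action_record (ar_hash, 2, 1) appends {02 7d} and returns
	    offset 3.  The 0x7d is sleb128 (-3): the "next" byte lands at
	    1-based offset 4, and 1 - 4 = -3 points back to the first
	    record.  */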
52a11cbf
RH
2789
2790 push_sleb128 (&cfun->eh->action_record_data, filter);
2791 if (next)
2792 next -= VARRAY_ACTIVE_SIZE (cfun->eh->action_record_data) + 1;
2793 push_sleb128 (&cfun->eh->action_record_data, next);
77d33a84 2794 }
77d33a84 2795
52a11cbf
RH
2796 return new->offset;
2797}
77d33a84 2798
52a11cbf 2799static int
502b8322 2800collect_one_action_chain (htab_t ar_hash, struct eh_region *region)
77d33a84 2801{
52a11cbf
RH
2802 struct eh_region *c;
2803 int next;
77d33a84 2804
52a11cbf
RH
2805 /* If we've reached the top of the region chain, then we have
2806 no actions, and require no landing pad. */
2807 if (region == NULL)
2808 return -1;
2809
2810 switch (region->type)
77d33a84 2811 {
52a11cbf
RH
2812 case ERT_CLEANUP:
2813 /* A cleanup adds a zero filter to the beginning of the chain, but
2814 there are special cases to look out for. If there are *only*
2815 cleanups along a path, then it compresses to a zero action.
2816 Further, if there are multiple cleanups along a path, we only
2817 need to represent one of them, as that is enough to trigger
2818 entry to the landing pad at runtime. */
2819 next = collect_one_action_chain (ar_hash, region->outer);
2820 if (next <= 0)
2821 return 0;
2822 for (c = region->outer; c ; c = c->outer)
2823 if (c->type == ERT_CLEANUP)
2824 return next;
2825 return add_action_record (ar_hash, 0, next);
2826
2827 case ERT_TRY:
2828 /* Process the associated catch regions in reverse order.
2829 If there's a catch-all handler, then we don't need to
2830 search outer regions. Use a magic -3 value to record
a1f300c0 2831 that we haven't done the outer search. */
52a11cbf
RH
2832 next = -3;
2833 for (c = region->u.try.last_catch; c ; c = c->u.catch.prev_catch)
2834 {
6d41a92f
OH
2835 if (c->u.catch.type_list == NULL)
2836 {
2837 /* Retrieve the filter from the head of the filter list
2838 where we have stored it (see assign_filter_values). */
a944ceb9
RH
2839 int filter
2840 = TREE_INT_CST_LOW (TREE_VALUE (c->u.catch.filter_list));
2841
2842 next = add_action_record (ar_hash, filter, 0);
6d41a92f 2843 }
52a11cbf
RH
2844 else
2845 {
6d41a92f
OH
2846 /* Once the outer search is done, trigger an action record for
2847 each filter we have. */
2848 tree flt_node;
2849
52a11cbf
RH
2850 if (next == -3)
2851 {
2852 next = collect_one_action_chain (ar_hash, region->outer);
949f197f
RH
2853
2854 /* If there is no next action, terminate the chain. */
2855 if (next == -1)
52a11cbf 2856 next = 0;
949f197f
RH
2857 /* If all outer actions are cleanups or must_not_throw,
2858 we'll have no action record for it, since we had wanted
2859 to encode these states in the call-site record directly.
2860 Add a cleanup action to the chain to catch these. */
2861 else if (next <= 0)
2862 next = add_action_record (ar_hash, 0, 0);
52a11cbf 2863 }
3f2c5d1a 2864
6d41a92f
OH
2865 flt_node = c->u.catch.filter_list;
2866 for (; flt_node; flt_node = TREE_CHAIN (flt_node))
2867 {
2868 int filter = TREE_INT_CST_LOW (TREE_VALUE (flt_node));
2869 next = add_action_record (ar_hash, filter, next);
2870 }
52a11cbf
RH
2871 }
2872 }
2873 return next;
2874
2875 case ERT_ALLOWED_EXCEPTIONS:
2876 /* An exception specification adds its filter to the
2877 beginning of the chain. */
2878 next = collect_one_action_chain (ar_hash, region->outer);
0977ab3a
RH
2879
2880 /* If there is no next action, terminate the chain. */
2881 if (next == -1)
2882 next = 0;
2883 /* If all outer actions are cleanups or must_not_throw,
2884 we'll have no action record for it, since we had wanted
2885 to encode these states in the call-site record directly.
2886 Add a cleanup action to the chain to catch these. */
2887 else if (next <= 0)
2888 next = add_action_record (ar_hash, 0, 0);
2889
2890 return add_action_record (ar_hash, region->u.allowed.filter, next);
52a11cbf
RH
2891
2892 case ERT_MUST_NOT_THROW:
2893 /* A must-not-throw region with no inner handlers or cleanups
2894 requires no call-site entry. Note that this differs from
2895 the no handler or cleanup case in that we do require an lsda
2896 to be generated. Return a magic -2 value to record this. */
2897 return -2;
2898
2899 case ERT_CATCH:
2900 case ERT_THROW:
2901 /* CATCH regions are handled in TRY above. THROW regions are
2902 for optimization information only and produce no output. */
2903 return collect_one_action_chain (ar_hash, region->outer);
2904
2905 default:
5b0264cb 2906 gcc_unreachable ();
77d33a84
AM
2907 }
2908}
2909
52a11cbf 2910static int
502b8322 2911add_call_site (rtx landing_pad, int action)
77d33a84 2912{
52a11cbf
RH
2913 struct call_site_record *data = cfun->eh->call_site_data;
2914 int used = cfun->eh->call_site_data_used;
2915 int size = cfun->eh->call_site_data_size;
77d33a84 2916
52a11cbf
RH
2917 if (used >= size)
2918 {
2919 size = (size ? size * 2 : 64);
703ad42b 2920 data = ggc_realloc (data, sizeof (*data) * size);
52a11cbf
RH
2921 cfun->eh->call_site_data = data;
2922 cfun->eh->call_site_data_size = size;
2923 }
77d33a84 2924
52a11cbf
RH
2925 data[used].landing_pad = landing_pad;
2926 data[used].action = action;
77d33a84 2927
52a11cbf 2928 cfun->eh->call_site_data_used = used + 1;
77d33a84 2929
52a11cbf 2930 return used + call_site_base;
77d33a84
AM
2931}
2932
52a11cbf
RH
2933/* Turn REG_EH_REGION notes back into NOTE_INSN_EH_REGION notes.
2934 The new note numbers will not refer to region numbers, but
2935 instead to call site entries. */
77d33a84 2936
52a11cbf 2937void
502b8322 2938convert_to_eh_region_ranges (void)
77d33a84 2939{
52a11cbf
RH
2940 rtx insn, iter, note;
2941 htab_t ar_hash;
2942 int last_action = -3;
2943 rtx last_action_insn = NULL_RTX;
2944 rtx last_landing_pad = NULL_RTX;
2945 rtx first_no_action_insn = NULL_RTX;
ae0ed63a 2946 int call_site = 0;
77d33a84 2947
52a11cbf
RH
2948 if (USING_SJLJ_EXCEPTIONS || cfun->eh->region_tree == NULL)
2949 return;
77d33a84 2950
52a11cbf 2951 VARRAY_UCHAR_INIT (cfun->eh->action_record_data, 64, "action_record_data");
77d33a84 2952
52a11cbf 2953 ar_hash = htab_create (31, action_record_hash, action_record_eq, free);
77d33a84 2954
52a11cbf
RH
2955 for (iter = get_insns (); iter ; iter = NEXT_INSN (iter))
2956 if (INSN_P (iter))
2957 {
2958 struct eh_region *region;
2959 int this_action;
2960 rtx this_landing_pad;
77d33a84 2961
52a11cbf 2962 insn = iter;
4b4bf941 2963 if (NONJUMP_INSN_P (insn)
52a11cbf
RH
2964 && GET_CODE (PATTERN (insn)) == SEQUENCE)
2965 insn = XVECEXP (PATTERN (insn), 0, 0);
1ef1bf06 2966
52a11cbf
RH
2967 note = find_reg_note (insn, REG_EH_REGION, NULL_RTX);
2968 if (!note)
2969 {
4b4bf941 2970 if (! (CALL_P (insn)
52a11cbf
RH
2971 || (flag_non_call_exceptions
2972 && may_trap_p (PATTERN (insn)))))
2973 continue;
2974 this_action = -1;
2975 region = NULL;
2976 }
2977 else
2978 {
2979 if (INTVAL (XEXP (note, 0)) <= 0)
2980 continue;
2981 region = cfun->eh->region_array[INTVAL (XEXP (note, 0))];
2982 this_action = collect_one_action_chain (ar_hash, region);
2983 }
2984
2985 /* Existence of catch handlers, or must-not-throw regions
2986 implies that an lsda is needed (even if empty). */
2987 if (this_action != -1)
2988 cfun->uses_eh_lsda = 1;
2989
2990 /* Delay creation of region notes for no-action regions
2991 until we're sure that an lsda will be required. */
2992 else if (last_action == -3)
2993 {
2994 first_no_action_insn = iter;
2995 last_action = -1;
2996 }
1ef1bf06 2997
52a11cbf
RH
2998 /* Cleanups and handlers may share action chains but not
2999 landing pads. Collect the landing pad for this region. */
3000 if (this_action >= 0)
3001 {
3002 struct eh_region *o;
3003 for (o = region; ! o->landing_pad ; o = o->outer)
3004 continue;
3005 this_landing_pad = o->landing_pad;
3006 }
3007 else
3008 this_landing_pad = NULL_RTX;
1ef1bf06 3009
52a11cbf
RH
3010 /* Differing actions or landing pads implies a change in call-site
3011 info, which implies some EH_REGION note should be emitted. */
3012 if (last_action != this_action
3013 || last_landing_pad != this_landing_pad)
3014 {
3015 /* If we'd not seen a previous action (-3) or the previous
3016 action was must-not-throw (-2), then we do not need an
3017 end note. */
3018 if (last_action >= -1)
3019 {
3020 /* If we delayed the creation of the begin, do it now. */
3021 if (first_no_action_insn)
3022 {
3023 call_site = add_call_site (NULL_RTX, 0);
3024 note = emit_note_before (NOTE_INSN_EH_REGION_BEG,
3025 first_no_action_insn);
3026 NOTE_EH_HANDLER (note) = call_site;
3027 first_no_action_insn = NULL_RTX;
3028 }
3029
3030 note = emit_note_after (NOTE_INSN_EH_REGION_END,
3031 last_action_insn);
3032 NOTE_EH_HANDLER (note) = call_site;
3033 }
3034
3035 /* If the new action is must-not-throw, then no region notes
3036 are created. */
3037 if (this_action >= -1)
3038 {
3f2c5d1a 3039 call_site = add_call_site (this_landing_pad,
52a11cbf
RH
3040 this_action < 0 ? 0 : this_action);
3041 note = emit_note_before (NOTE_INSN_EH_REGION_BEG, iter);
3042 NOTE_EH_HANDLER (note) = call_site;
3043 }
3044
3045 last_action = this_action;
3046 last_landing_pad = this_landing_pad;
3047 }
3048 last_action_insn = iter;
3049 }
1ef1bf06 3050
52a11cbf 3051 if (last_action >= -1 && ! first_no_action_insn)
1ef1bf06 3052 {
52a11cbf
RH
3053 note = emit_note_after (NOTE_INSN_EH_REGION_END, last_action_insn);
3054 NOTE_EH_HANDLER (note) = call_site;
1ef1bf06
AM
3055 }
3056
52a11cbf
RH
3057 htab_delete (ar_hash);
3058}
1ef1bf06 3059
52a11cbf
RH
3060\f
3061static void
502b8322 3062push_uleb128 (varray_type *data_area, unsigned int value)
52a11cbf
RH
3063{
3064 do
3065 {
3066 unsigned char byte = value & 0x7f;
3067 value >>= 7;
3068 if (value)
3069 byte |= 0x80;
3070 VARRAY_PUSH_UCHAR (*data_area, byte);
3071 }
3072 while (value);
3073}
1ef1bf06 3074
52a11cbf 3075static void
502b8322 3076push_sleb128 (varray_type *data_area, int value)
52a11cbf
RH
3077{
3078 unsigned char byte;
3079 int more;
1ef1bf06 3080
52a11cbf 3081 do
1ef1bf06 3082 {
52a11cbf
RH
3083 byte = value & 0x7f;
3084 value >>= 7;
3085 more = ! ((value == 0 && (byte & 0x40) == 0)
3086 || (value == -1 && (byte & 0x40) != 0));
3087 if (more)
3088 byte |= 0x80;
3089 VARRAY_PUSH_UCHAR (*data_area, byte);
1ef1bf06 3090 }
52a11cbf
RH
3091 while (more);
3092}
1ef1bf06 3093
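The two routines above emit DWARF LEB128: seven payload bits per byte, least
significant group first, with the high bit of each byte marking continuation;
push_sleb128 stops once the remaining value is all sign bits and bit 0x40 of
the last byte agrees with the sign.  Below is a minimal standalone sketch
(hypothetical test code, not part of except.c) of the same unsigned loop
writing into a plain buffer; encoding 624485 produces the bytes e5 8e 26, the
worked example used in the DWARF specification.

#include <stdio.h>

/* Same algorithm as push_uleb128 above, minus the varray plumbing.  */
static int
encode_uleb128 (unsigned int value, unsigned char *buf)
{
  int n = 0;
  do
    {
      unsigned char byte = value & 0x7f;
      value >>= 7;
      if (value)
	byte |= 0x80;		/* More bytes follow.  */
      buf[n++] = byte;
    }
  while (value);
  return n;
}

int
main (void)
{
  unsigned char buf[8];
  int i, n = encode_uleb128 (624485, buf);
  for (i = 0; i < n; ++i)
    printf ("%02x ", buf[i]);	/* Prints: e5 8e 26  */
  printf ("\n");
  return 0;
}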
52a11cbf 3094\f
52a11cbf
RH
3095#ifndef HAVE_AS_LEB128
3096static int
502b8322 3097dw2_size_of_call_site_table (void)
1ef1bf06 3098{
52a11cbf
RH
3099 int n = cfun->eh->call_site_data_used;
3100 int size = n * (4 + 4 + 4);
3101 int i;
1ef1bf06 3102
52a11cbf
RH
3103 for (i = 0; i < n; ++i)
3104 {
3105 struct call_site_record *cs = &cfun->eh->call_site_data[i];
3106 size += size_of_uleb128 (cs->action);
3107 }
fac62ecf 3108
52a11cbf
RH
3109 return size;
3110}
3111
3112static int
502b8322 3113sjlj_size_of_call_site_table (void)
52a11cbf
RH
3114{
3115 int n = cfun->eh->call_site_data_used;
3116 int size = 0;
3117 int i;
77d33a84 3118
52a11cbf 3119 for (i = 0; i < n; ++i)
1ef1bf06 3120 {
52a11cbf
RH
3121 struct call_site_record *cs = &cfun->eh->call_site_data[i];
3122 size += size_of_uleb128 (INTVAL (cs->landing_pad));
3123 size += size_of_uleb128 (cs->action);
1ef1bf06 3124 }
52a11cbf
RH
3125
3126 return size;
3127}
3128#endif
3129
3130static void
502b8322 3131dw2_output_call_site_table (void)
52a11cbf 3132{
52a11cbf
RH
3133 int n = cfun->eh->call_site_data_used;
3134 int i;
3135
3136 for (i = 0; i < n; ++i)
1ef1bf06 3137 {
52a11cbf
RH
3138 struct call_site_record *cs = &cfun->eh->call_site_data[i];
3139 char reg_start_lab[32];
3140 char reg_end_lab[32];
3141 char landing_pad_lab[32];
3142
3143 ASM_GENERATE_INTERNAL_LABEL (reg_start_lab, "LEHB", call_site_base + i);
3144 ASM_GENERATE_INTERNAL_LABEL (reg_end_lab, "LEHE", call_site_base + i);
3145
3146 if (cs->landing_pad)
3147 ASM_GENERATE_INTERNAL_LABEL (landing_pad_lab, "L",
3148 CODE_LABEL_NUMBER (cs->landing_pad));
3149
3150 /* ??? Perhaps use insn length scaling if the assembler supports
3151 generic arithmetic. */
3152 /* ??? Perhaps use attr_length to choose data1 or data2 instead of
3153 data4 if the function is small enough. */
3154#ifdef HAVE_AS_LEB128
375d2edc
GK
3155 dw2_asm_output_delta_uleb128 (reg_start_lab,
3156 current_function_func_begin_label,
52a11cbf
RH
3157 "region %d start", i);
3158 dw2_asm_output_delta_uleb128 (reg_end_lab, reg_start_lab,
3159 "length");
3160 if (cs->landing_pad)
375d2edc
GK
3161 dw2_asm_output_delta_uleb128 (landing_pad_lab,
3162 current_function_func_begin_label,
52a11cbf
RH
3163 "landing pad");
3164 else
3165 dw2_asm_output_data_uleb128 (0, "landing pad");
3166#else
375d2edc
GK
3167 dw2_asm_output_delta (4, reg_start_lab,
3168 current_function_func_begin_label,
52a11cbf
RH
3169 "region %d start", i);
3170 dw2_asm_output_delta (4, reg_end_lab, reg_start_lab, "length");
3171 if (cs->landing_pad)
375d2edc
GK
3172 dw2_asm_output_delta (4, landing_pad_lab,
3173 current_function_func_begin_label,
52a11cbf
RH
3174 "landing pad");
3175 else
3176 dw2_asm_output_data (4, 0, "landing pad");
3177#endif
3178 dw2_asm_output_data_uleb128 (cs->action, "action");
1ef1bf06
AM
3179 }
3180
52a11cbf
RH
3181 call_site_base += n;
3182}
3183
3184static void
502b8322 3185sjlj_output_call_site_table (void)
52a11cbf
RH
3186{
3187 int n = cfun->eh->call_site_data_used;
3188 int i;
1ef1bf06 3189
52a11cbf 3190 for (i = 0; i < n; ++i)
1ef1bf06 3191 {
52a11cbf 3192 struct call_site_record *cs = &cfun->eh->call_site_data[i];
4da896b2 3193
52a11cbf
RH
3194 dw2_asm_output_data_uleb128 (INTVAL (cs->landing_pad),
3195 "region %d landing pad", i);
3196 dw2_asm_output_data_uleb128 (cs->action, "action");
3197 }
4da896b2 3198
52a11cbf 3199 call_site_base += n;
1ef1bf06
AM
3200}
3201
96d0f4dc
JJ
3202/* Tell assembler to switch to the section for the exception handling
3203 table. */
3204
3205void
502b8322 3206default_exception_section (void)
96d0f4dc
JJ
3207{
3208 if (targetm.have_named_sections)
3209 {
96d0f4dc 3210 int flags;
96d0f4dc 3211#ifdef HAVE_LD_RO_RW_SECTION_MIXING
fe3f9515
KG
3212 int tt_format = ASM_PREFERRED_EH_DATA_FORMAT (/*code=*/0, /*global=*/1);
3213
96d0f4dc
JJ
3214 flags = (! flag_pic
3215 || ((tt_format & 0x70) != DW_EH_PE_absptr
3216 && (tt_format & 0x70) != DW_EH_PE_aligned))
3217 ? 0 : SECTION_WRITE;
3218#else
3219 flags = SECTION_WRITE;
3220#endif
3221 named_section_flags (".gcc_except_table", flags);
3222 }
3223 else if (flag_pic)
3224 data_section ();
3225 else
3226 readonly_data_section ();
3227}
3228
52a11cbf 3229void
502b8322 3230output_function_exception_table (void)
52a11cbf 3231{
2a1ee410 3232 int tt_format, cs_format, lp_format, i, n;
52a11cbf
RH
3233#ifdef HAVE_AS_LEB128
3234 char ttype_label[32];
3235 char cs_after_size_label[32];
3236 char cs_end_label[32];
3237#else
3238 int call_site_len;
3239#endif
3240 int have_tt_data;
ae0ed63a 3241 int tt_format_size = 0;
1ef1bf06 3242
52a11cbf
RH
3243 /* Not all functions need anything. */
3244 if (! cfun->uses_eh_lsda)
3245 return;
fac62ecf 3246
951120ea
PB
3247#ifdef TARGET_UNWIND_INFO
3248 /* TODO: Move this into target file. */
2a1ee410
RH
3249 fputs ("\t.personality\t", asm_out_file);
3250 output_addr_const (asm_out_file, eh_personality_libfunc);
3251 fputs ("\n\t.handlerdata\n", asm_out_file);
3252 /* Note that varasm still thinks we're in the function's code section.
3253 The ".endp" directive that will immediately follow will take us back. */
3254#else
5fd9b178 3255 targetm.asm_out.exception_section ();
2a1ee410 3256#endif
52a11cbf
RH
3257
3258 have_tt_data = (VARRAY_ACTIVE_SIZE (cfun->eh->ttype_data) > 0
3259 || VARRAY_ACTIVE_SIZE (cfun->eh->ehspec_data) > 0);
3260
b627d6fe
RH
3261 /* Indicate the format of the @TType entries. */
3262 if (! have_tt_data)
3263 tt_format = DW_EH_PE_omit;
3264 else
3265 {
3266 tt_format = ASM_PREFERRED_EH_DATA_FORMAT (/*code=*/0, /*global=*/1);
3267#ifdef HAVE_AS_LEB128
df696a75
RH
3268 ASM_GENERATE_INTERNAL_LABEL (ttype_label, "LLSDATT",
3269 current_function_funcdef_no);
b627d6fe
RH
3270#endif
3271 tt_format_size = size_of_encoded_value (tt_format);
3272
7a900ebc 3273 assemble_align (tt_format_size * BITS_PER_UNIT);
b627d6fe 3274 }
52a11cbf 3275
5fd9b178 3276 targetm.asm_out.internal_label (asm_out_file, "LLSDA",
df696a75 3277 current_function_funcdef_no);
52a11cbf
RH
3278
3279 /* The LSDA header. */
3280
3281 /* Indicate the format of the landing pad start pointer. An omitted
3282 field implies @LPStart == @Start. */
3283 /* Currently we always put @LPStart == @Start. This field would
3284 be most useful in moving the landing pads completely out of
3285 line to another section, but it could also be used to minimize
3286 the size of uleb128 landing pad offsets. */
2a1ee410
RH
3287 lp_format = DW_EH_PE_omit;
3288 dw2_asm_output_data (1, lp_format, "@LPStart format (%s)",
3289 eh_data_format_name (lp_format));
52a11cbf
RH
3290
3291 /* @LPStart pointer would go here. */
3292
2a1ee410
RH
3293 dw2_asm_output_data (1, tt_format, "@TType format (%s)",
3294 eh_data_format_name (tt_format));
52a11cbf
RH
3295
3296#ifndef HAVE_AS_LEB128
3297 if (USING_SJLJ_EXCEPTIONS)
3298 call_site_len = sjlj_size_of_call_site_table ();
3299 else
3300 call_site_len = dw2_size_of_call_site_table ();
3301#endif
3302
3303 /* A pc-relative 4-byte displacement to the @TType data. */
3304 if (have_tt_data)
3305 {
3306#ifdef HAVE_AS_LEB128
3307 char ttype_after_disp_label[32];
3f2c5d1a 3308 ASM_GENERATE_INTERNAL_LABEL (ttype_after_disp_label, "LLSDATTD",
df696a75 3309 current_function_funcdef_no);
52a11cbf
RH
3310 dw2_asm_output_delta_uleb128 (ttype_label, ttype_after_disp_label,
3311 "@TType base offset");
3312 ASM_OUTPUT_LABEL (asm_out_file, ttype_after_disp_label);
3313#else
3314 /* Ug. Alignment queers things. */
b627d6fe 3315 unsigned int before_disp, after_disp, last_disp, disp;
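	 /* The @TType base offset (disp) is itself emitted as a uleb128, so
	    the number of bytes it occupies feeds back into the padding
	    required to align the type table to tt_format_size, which in
	    turn changes disp.  The loop below therefore recomputes disp
	    until it reaches a fixed point; it converges quickly, since
	    size_of_uleb128 (disp) only changes when disp crosses a
	    power-of-128 boundary.  */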
52a11cbf 3316
52a11cbf
RH
3317 before_disp = 1 + 1;
3318 after_disp = (1 + size_of_uleb128 (call_site_len)
3319 + call_site_len
3320 + VARRAY_ACTIVE_SIZE (cfun->eh->action_record_data)
b627d6fe
RH
3321 + (VARRAY_ACTIVE_SIZE (cfun->eh->ttype_data)
3322 * tt_format_size));
52a11cbf
RH
3323
3324 disp = after_disp;
3325 do
1ef1bf06 3326 {
52a11cbf
RH
3327 unsigned int disp_size, pad;
3328
3329 last_disp = disp;
3330 disp_size = size_of_uleb128 (disp);
3331 pad = before_disp + disp_size + after_disp;
b627d6fe
RH
3332 if (pad % tt_format_size)
3333 pad = tt_format_size - (pad % tt_format_size);
52a11cbf
RH
3334 else
3335 pad = 0;
3336 disp = after_disp + pad;
1ef1bf06 3337 }
52a11cbf
RH
3338 while (disp != last_disp);
3339
3340 dw2_asm_output_data_uleb128 (disp, "@TType base offset");
3341#endif
1ef1bf06 3342 }
1ef1bf06 3343
52a11cbf
RH
3344 /* Indicate the format of the call-site offsets. */
3345#ifdef HAVE_AS_LEB128
2a1ee410 3346 cs_format = DW_EH_PE_uleb128;
52a11cbf 3347#else
2a1ee410 3348 cs_format = DW_EH_PE_udata4;
52a11cbf 3349#endif
2a1ee410
RH
3350 dw2_asm_output_data (1, cs_format, "call-site format (%s)",
3351 eh_data_format_name (cs_format));
52a11cbf
RH
3352
3353#ifdef HAVE_AS_LEB128
3354 ASM_GENERATE_INTERNAL_LABEL (cs_after_size_label, "LLSDACSB",
df696a75 3355 current_function_funcdef_no);
52a11cbf 3356 ASM_GENERATE_INTERNAL_LABEL (cs_end_label, "LLSDACSE",
df696a75 3357 current_function_funcdef_no);
52a11cbf
RH
3358 dw2_asm_output_delta_uleb128 (cs_end_label, cs_after_size_label,
3359 "Call-site table length");
3360 ASM_OUTPUT_LABEL (asm_out_file, cs_after_size_label);
3361 if (USING_SJLJ_EXCEPTIONS)
3362 sjlj_output_call_site_table ();
3363 else
3364 dw2_output_call_site_table ();
3365 ASM_OUTPUT_LABEL (asm_out_file, cs_end_label);
3366#else
3367 dw2_asm_output_data_uleb128 (call_site_len,"Call-site table length");
3368 if (USING_SJLJ_EXCEPTIONS)
3369 sjlj_output_call_site_table ();
3370 else
3371 dw2_output_call_site_table ();
3372#endif
3373
3374 /* ??? Decode and interpret the data for flag_debug_asm. */
3375 n = VARRAY_ACTIVE_SIZE (cfun->eh->action_record_data);
3376 for (i = 0; i < n; ++i)
3377 dw2_asm_output_data (1, VARRAY_UCHAR (cfun->eh->action_record_data, i),
3378 (i ? NULL : "Action record table"));
1ef1bf06 3379
52a11cbf 3380 if (have_tt_data)
7a900ebc 3381 assemble_align (tt_format_size * BITS_PER_UNIT);
1ef1bf06 3382
52a11cbf
RH
3383 i = VARRAY_ACTIVE_SIZE (cfun->eh->ttype_data);
3384 while (i-- > 0)
1ef1bf06 3385 {
52a11cbf 3386 tree type = VARRAY_TREE (cfun->eh->ttype_data, i);
225b9cb9 3387 rtx value;
52a11cbf
RH
3388
3389 if (type == NULL_TREE)
dd07abd7 3390 value = const0_rtx;
52a11cbf 3391 else
dd07abd7
RH
3392 {
3393 struct cgraph_varpool_node *node;
3394
3395 type = lookup_type_for_runtime (type);
3396 value = expand_expr (type, NULL_RTX, VOIDmode, EXPAND_INITIALIZER);
3397
3398 /* Let cgraph know that the rtti decl is used. Not all of the
3399 paths below go through assemble_integer, which would take
3400 care of this for us. */
6de9cd9a 3401 STRIP_NOPS (type);
19ae5445
RK
3402 if (TREE_CODE (type) == ADDR_EXPR)
3403 {
dba601db 3404 type = TREE_OPERAND (type, 0);
093c7153
RH
3405 if (TREE_CODE (type) == VAR_DECL)
3406 {
3407 node = cgraph_varpool_node (type);
3408 if (node)
3409 cgraph_varpool_mark_needed_node (node);
3410 }
19ae5445 3411 }
5b0264cb
NS
3412 else
3413 gcc_assert (TREE_CODE (type) == INTEGER_CST);
dd07abd7 3414 }
52a11cbf 3415
225b9cb9
RH
3416 if (tt_format == DW_EH_PE_absptr || tt_format == DW_EH_PE_aligned)
3417 assemble_integer (value, tt_format_size,
3418 tt_format_size * BITS_PER_UNIT, 1);
3419 else
0fb7aeda 3420 dw2_asm_output_encoded_addr_rtx (tt_format, value, NULL);
1ef1bf06 3421 }
52a11cbf
RH
3422
3423#ifdef HAVE_AS_LEB128
3424 if (have_tt_data)
3425 ASM_OUTPUT_LABEL (asm_out_file, ttype_label);
3426#endif
3427
3428 /* ??? Decode and interpret the data for flag_debug_asm. */
3429 n = VARRAY_ACTIVE_SIZE (cfun->eh->ehspec_data);
3430 for (i = 0; i < n; ++i)
3431 dw2_asm_output_data (1, VARRAY_UCHAR (cfun->eh->ehspec_data, i),
3432 (i ? NULL : "Exception specification table"));
3433
3434 function_section (current_function_decl);
1ef1bf06 3435}
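
/* Overall layout of the table emitted above: LSDA header (@LPStart encoding
   byte, @TType encoding byte and, when type data is present, the uleb128
   @TType base offset), then the call-site encoding byte, the call-site table
   preceded by its length (dw2 or sjlj flavor), the action record table,
   alignment padding, the @TType entries in reverse order, and finally the
   exception specification table.  */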
e2500fed
GK
3436
3437#include "gt-except.h"