/* Implements exception handling.
   Copyright (C) 1989, 1992, 1993, 1994, 1995, 1996, 1997, 1998,
   1999, 2000, 2001, 2002, 2003 Free Software Foundation, Inc.
   Contributed by Mike Stump <mrs@cygnus.com>.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 2, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING.  If not, write to the Free
Software Foundation, 59 Temple Place - Suite 330, Boston, MA
02111-1307, USA.  */


/* An exception is an event that can be signaled from within a
   function.  This event can then be "caught" or "trapped" by the
   callers of this function.  This potentially allows program flow to
   be transferred to any arbitrary code associated with a function call
   several levels up the stack.

   The intended use for this mechanism is for signaling "exceptional
   events" in an out-of-band fashion, hence its name.  The C++ language
   (and many other OO-styled or functional languages) practically
   requires such a mechanism, as otherwise it becomes very difficult
   or even impossible to signal failure conditions in complex
   situations.  The traditional C++ example is when an error occurs in
   the process of constructing an object; without such a mechanism, it
   is impossible to signal that the error occurs without adding global
   state variables and error checks around every object construction.

   The act of causing this event to occur is referred to as "throwing
   an exception".  (Alternate terms include "raising an exception" or
   "signaling an exception".)  The term "throw" is used because control
   is returned to the callers of the function that is signaling the
   exception, and thus there is the concept of "throwing" the
   exception up the call stack.

   [ Add updated documentation on how to use this.  ]  */


#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "rtl.h"
#include "tree.h"
#include "flags.h"
#include "function.h"
#include "expr.h"
#include "libfuncs.h"
#include "insn-config.h"
#include "except.h"
#include "integrate.h"
#include "hard-reg-set.h"
#include "basic-block.h"
#include "output.h"
#include "dwarf2asm.h"
#include "dwarf2out.h"
#include "dwarf2.h"
#include "toplev.h"
#include "hashtab.h"
#include "intl.h"
#include "ggc.h"
#include "tm_p.h"
#include "target.h"
#include "langhooks.h"

/* Provide defaults for stuff that may not be defined when using
   sjlj exceptions.  */
#ifndef EH_RETURN_DATA_REGNO
#define EH_RETURN_DATA_REGNO(N) INVALID_REGNUM
#endif


/* Nonzero means enable synchronous exceptions for non-call instructions.  */
int flag_non_call_exceptions;

/* Protect cleanup actions with must-not-throw regions, with a call
   to the given failure handler.  */
tree (*lang_protect_cleanup_actions) (void);

/* Return true if type A catches type B.  */
int (*lang_eh_type_covers) (tree a, tree b);

/* Map a type to a runtime object to match type.  */
tree (*lang_eh_runtime_type) (tree);

/* A hash table of label to region number.  */

struct ehl_map_entry GTY(())
{
  rtx label;
  struct eh_region *region;
};

static GTY(()) int call_site_base;
static GTY ((param_is (union tree_node)))
  htab_t type_to_runtime_map;

/* Describe the SjLj_Function_Context structure.  */
static GTY(()) tree sjlj_fc_type_node;
static int sjlj_fc_call_site_ofs;
static int sjlj_fc_data_ofs;
static int sjlj_fc_personality_ofs;
static int sjlj_fc_lsda_ofs;
static int sjlj_fc_jbuf_ofs;
\f
/* Describes one exception region.  */
struct eh_region GTY(())
{
  /* The immediately surrounding region.  */
  struct eh_region *outer;

  /* The list of immediately contained regions.  */
  struct eh_region *inner;
  struct eh_region *next_peer;

  /* An identifier for this region.  */
  int region_number;

  /* When a region is deleted, its parents inherit the REG_EH_REGION
     numbers already assigned.  */
  bitmap aka;

  /* Each region does exactly one thing.  */
  enum eh_region_type
  {
    ERT_UNKNOWN = 0,
    ERT_CLEANUP,
    ERT_TRY,
    ERT_CATCH,
    ERT_ALLOWED_EXCEPTIONS,
    ERT_MUST_NOT_THROW,
    ERT_THROW,
    ERT_FIXUP
  } type;

  /* Holds the action to perform based on the preceding type.  */
  union eh_region_u {
    /* A list of catch blocks, a surrounding try block,
       and the label for continuing after a catch.  */
    struct eh_region_u_try {
      struct eh_region *catch;
      struct eh_region *last_catch;
      struct eh_region *prev_try;
      rtx continue_label;
    } GTY ((tag ("ERT_TRY"))) try;

    /* The list through the catch handlers, the list of type objects
       matched, and the list of associated filters.  */
    struct eh_region_u_catch {
      struct eh_region *next_catch;
      struct eh_region *prev_catch;
      tree type_list;
      tree filter_list;
    } GTY ((tag ("ERT_CATCH"))) catch;

    /* A tree_list of allowed types.  */
    struct eh_region_u_allowed {
      tree type_list;
      int filter;
    } GTY ((tag ("ERT_ALLOWED_EXCEPTIONS"))) allowed;

    /* The type given by a call to "throw foo();", or discovered
       for a throw.  */
    struct eh_region_u_throw {
      tree type;
    } GTY ((tag ("ERT_THROW"))) throw;

    /* Retain the cleanup expression even after expansion so that
       we can match up fixup regions.  */
    struct eh_region_u_cleanup {
      tree exp;
      struct eh_region *prev_try;
    } GTY ((tag ("ERT_CLEANUP"))) cleanup;

    /* The real region (by expression and by pointer) that fixup code
       should live in.  */
    struct eh_region_u_fixup {
      tree cleanup_exp;
      struct eh_region *real_region;
    } GTY ((tag ("ERT_FIXUP"))) fixup;
  } GTY ((desc ("%0.type"))) u;

  /* Entry point for this region's handler before landing pads are built.  */
  rtx label;

  /* Entry point for this region's handler from the runtime eh library.  */
  rtx landing_pad;

  /* Entry point for this region's handler from an inner region.  */
  rtx post_landing_pad;

  /* The RESX insn for handing off control to the next outermost handler,
     if appropriate.  */
  rtx resume;

  /* True if something in this region may throw.  */
  unsigned may_contain_throw : 1;
};

struct call_site_record GTY(())
{
  rtx landing_pad;
  int action;
};

/* Used to save exception status for each function.  */
struct eh_status GTY(())
{
  /* The tree of all regions for this function.  */
  struct eh_region *region_tree;

  /* The same information as an indexable array.  */
  struct eh_region ** GTY ((length ("%h.last_region_number"))) region_array;

  /* The most recently open region.  */
  struct eh_region *cur_region;

  /* This is the region for which we are processing catch blocks.  */
  struct eh_region *try_region;

  rtx filter;
  rtx exc_ptr;

  int built_landing_pads;
  int last_region_number;

  varray_type ttype_data;
  varray_type ehspec_data;
  varray_type action_record_data;

  htab_t GTY ((param_is (struct ehl_map_entry))) exception_handler_label_map;

  struct call_site_record * GTY ((length ("%h.call_site_data_used")))
    call_site_data;
  int call_site_data_used;
  int call_site_data_size;

  rtx ehr_stackadj;
  rtx ehr_handler;
  rtx ehr_label;

  rtx sjlj_fc;
  rtx sjlj_exit_after;
};

\f
static int t2r_eq (const void *, const void *);
static hashval_t t2r_hash (const void *);
static void add_type_for_runtime (tree);
static tree lookup_type_for_runtime (tree);

static struct eh_region *expand_eh_region_end (void);

static rtx get_exception_filter (struct function *);

static void collect_eh_region_array (void);
static void resolve_fixup_regions (void);
static void remove_fixup_regions (void);
static void remove_unreachable_regions (rtx);
static void convert_from_eh_region_ranges_1 (rtx *, int *, int);

static struct eh_region *duplicate_eh_region_1 (struct eh_region *,
                                                struct inline_remap *);
static void duplicate_eh_region_2 (struct eh_region *, struct eh_region **);
static int ttypes_filter_eq (const void *, const void *);
static hashval_t ttypes_filter_hash (const void *);
static int ehspec_filter_eq (const void *, const void *);
static hashval_t ehspec_filter_hash (const void *);
static int add_ttypes_entry (htab_t, tree);
static int add_ehspec_entry (htab_t, htab_t, tree);
static void assign_filter_values (void);
static void build_post_landing_pads (void);
static void connect_post_landing_pads (void);
static void dw2_build_landing_pads (void);

struct sjlj_lp_info;
static bool sjlj_find_directly_reachable_regions (struct sjlj_lp_info *);
static void sjlj_assign_call_site_values (rtx, struct sjlj_lp_info *);
static void sjlj_mark_call_sites (struct sjlj_lp_info *);
static void sjlj_emit_function_enter (rtx);
static void sjlj_emit_function_exit (void);
static void sjlj_emit_dispatch_table (rtx, struct sjlj_lp_info *);
static void sjlj_build_landing_pads (void);

static hashval_t ehl_hash (const void *);
static int ehl_eq (const void *, const void *);
static void add_ehl_entry (rtx, struct eh_region *);
static void remove_exception_handler_label (rtx);
static void remove_eh_handler (struct eh_region *);
static int for_each_eh_label_1 (void **, void *);

struct reachable_info;

/* The return value of reachable_next_level.  */
enum reachable_code
{
  /* The given exception is not processed by the given region.  */
  RNL_NOT_CAUGHT,
  /* The given exception may need processing by the given region.  */
  RNL_MAYBE_CAUGHT,
  /* The given exception is completely processed by the given region.  */
  RNL_CAUGHT,
  /* The given exception is completely processed by the runtime.  */
  RNL_BLOCKED
};

static int check_handled (tree, tree);
static void add_reachable_handler (struct reachable_info *,
                                   struct eh_region *, struct eh_region *);
static enum reachable_code reachable_next_level (struct eh_region *, tree,
                                                 struct reachable_info *);

static int action_record_eq (const void *, const void *);
static hashval_t action_record_hash (const void *);
static int add_action_record (htab_t, int, int);
static int collect_one_action_chain (htab_t, struct eh_region *);
static int add_call_site (rtx, int);

static void push_uleb128 (varray_type *, unsigned int);
static void push_sleb128 (varray_type *, int);
#ifndef HAVE_AS_LEB128
static int dw2_size_of_call_site_table (void);
static int sjlj_size_of_call_site_table (void);
#endif
static void dw2_output_call_site_table (void);
static void sjlj_output_call_site_table (void);

\f
/* Routine to see if exception handling is turned on.
   DO_WARN is nonzero if we want to inform the user that exception
   handling is turned off.

   This is used to ensure that -fexceptions has been specified if the
   compiler tries to use any exception-specific functions.  */

int
doing_eh (int do_warn)
{
  if (! flag_exceptions)
    {
      static int warned = 0;
      if (! warned && do_warn)
        {
          error ("exception handling disabled, use -fexceptions to enable");
          warned = 1;
        }
      return 0;
    }
  return 1;
}

\f
void
init_eh (void)
{
  if (! flag_exceptions)
    return;

  type_to_runtime_map = htab_create_ggc (31, t2r_hash, t2r_eq, NULL);

  /* Create the SjLj_Function_Context structure.  This should match
     the definition in unwind-sjlj.c.  */
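  /* For reference, the record built below corresponds roughly to the
     following C declaration (a sketch only; the field types are
     approximations and the length of __jbuf is target-dependent, the
     authoritative definition is the one in unwind-sjlj.c):

        struct SjLj_Function_Context
        {
          struct SjLj_Function_Context *__prev;
          int __call_site;
          _Unwind_Word __data[4];
          void *__personality;
          void *__lsda;
          void *__jbuf[];
        };  */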
  if (USING_SJLJ_EXCEPTIONS)
    {
      tree f_jbuf, f_per, f_lsda, f_prev, f_cs, f_data, tmp;

      sjlj_fc_type_node = (*lang_hooks.types.make_type) (RECORD_TYPE);

      f_prev = build_decl (FIELD_DECL, get_identifier ("__prev"),
                           build_pointer_type (sjlj_fc_type_node));
      DECL_FIELD_CONTEXT (f_prev) = sjlj_fc_type_node;

      f_cs = build_decl (FIELD_DECL, get_identifier ("__call_site"),
                         integer_type_node);
      DECL_FIELD_CONTEXT (f_cs) = sjlj_fc_type_node;

      tmp = build_index_type (build_int_2 (4 - 1, 0));
      tmp = build_array_type ((*lang_hooks.types.type_for_mode) (word_mode, 1),
                              tmp);
      f_data = build_decl (FIELD_DECL, get_identifier ("__data"), tmp);
      DECL_FIELD_CONTEXT (f_data) = sjlj_fc_type_node;

      f_per = build_decl (FIELD_DECL, get_identifier ("__personality"),
                          ptr_type_node);
      DECL_FIELD_CONTEXT (f_per) = sjlj_fc_type_node;

      f_lsda = build_decl (FIELD_DECL, get_identifier ("__lsda"),
                           ptr_type_node);
      DECL_FIELD_CONTEXT (f_lsda) = sjlj_fc_type_node;

#ifdef DONT_USE_BUILTIN_SETJMP
#ifdef JMP_BUF_SIZE
      tmp = build_int_2 (JMP_BUF_SIZE - 1, 0);
#else
      /* Should be large enough for most systems, if it is not,
         JMP_BUF_SIZE should be defined with the proper value.  It will
         also tend to be larger than necessary for most systems, a more
         optimal port will define JMP_BUF_SIZE.  */
      tmp = build_int_2 (FIRST_PSEUDO_REGISTER + 2 - 1, 0);
#endif
#else
      /* This is 2 for builtin_setjmp, plus whatever the target requires
         via STACK_SAVEAREA_MODE (SAVE_NONLOCAL).  */
      tmp = build_int_2 ((GET_MODE_SIZE (STACK_SAVEAREA_MODE (SAVE_NONLOCAL))
                          / GET_MODE_SIZE (Pmode)) + 2 - 1, 0);
#endif
      tmp = build_index_type (tmp);
      tmp = build_array_type (ptr_type_node, tmp);
      f_jbuf = build_decl (FIELD_DECL, get_identifier ("__jbuf"), tmp);
#ifdef DONT_USE_BUILTIN_SETJMP
      /* We don't know what the alignment requirements of the
         runtime's jmp_buf are.  Overestimate.  */
      DECL_ALIGN (f_jbuf) = BIGGEST_ALIGNMENT;
      DECL_USER_ALIGN (f_jbuf) = 1;
#endif
      DECL_FIELD_CONTEXT (f_jbuf) = sjlj_fc_type_node;

      TYPE_FIELDS (sjlj_fc_type_node) = f_prev;
      TREE_CHAIN (f_prev) = f_cs;
      TREE_CHAIN (f_cs) = f_data;
      TREE_CHAIN (f_data) = f_per;
      TREE_CHAIN (f_per) = f_lsda;
      TREE_CHAIN (f_lsda) = f_jbuf;

      layout_type (sjlj_fc_type_node);

      /* Cache the interesting field offsets so that we have
         easy access from rtl.  */
      sjlj_fc_call_site_ofs
        = (tree_low_cst (DECL_FIELD_OFFSET (f_cs), 1)
           + tree_low_cst (DECL_FIELD_BIT_OFFSET (f_cs), 1) / BITS_PER_UNIT);
      sjlj_fc_data_ofs
        = (tree_low_cst (DECL_FIELD_OFFSET (f_data), 1)
           + tree_low_cst (DECL_FIELD_BIT_OFFSET (f_data), 1) / BITS_PER_UNIT);
      sjlj_fc_personality_ofs
        = (tree_low_cst (DECL_FIELD_OFFSET (f_per), 1)
           + tree_low_cst (DECL_FIELD_BIT_OFFSET (f_per), 1) / BITS_PER_UNIT);
      sjlj_fc_lsda_ofs
        = (tree_low_cst (DECL_FIELD_OFFSET (f_lsda), 1)
           + tree_low_cst (DECL_FIELD_BIT_OFFSET (f_lsda), 1) / BITS_PER_UNIT);
      sjlj_fc_jbuf_ofs
        = (tree_low_cst (DECL_FIELD_OFFSET (f_jbuf), 1)
           + tree_low_cst (DECL_FIELD_BIT_OFFSET (f_jbuf), 1) / BITS_PER_UNIT);
    }
}

void
init_eh_for_function (void)
{
  cfun->eh = (struct eh_status *)
    ggc_alloc_cleared (sizeof (struct eh_status));
}
\f
/* Start an exception handling region.  All instructions emitted
   after this point are considered to be part of the region until
   expand_eh_region_end is invoked.  */

void
expand_eh_region_start (void)
{
  struct eh_region *new_region;
  struct eh_region *cur_region;
  rtx note;

  if (! doing_eh (0))
    return;

  /* Insert a new blank region as a leaf in the tree.  */
  new_region = (struct eh_region *) ggc_alloc_cleared (sizeof (*new_region));
  cur_region = cfun->eh->cur_region;
  new_region->outer = cur_region;
  if (cur_region)
    {
      new_region->next_peer = cur_region->inner;
      cur_region->inner = new_region;
    }
  else
    {
      new_region->next_peer = cfun->eh->region_tree;
      cfun->eh->region_tree = new_region;
    }
  cfun->eh->cur_region = new_region;

  /* Create a note marking the start of this region.  */
  new_region->region_number = ++cfun->eh->last_region_number;
  note = emit_note (NOTE_INSN_EH_REGION_BEG);
  NOTE_EH_HANDLER (note) = new_region->region_number;
}
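
/* A sketch of how a front end typically pairs these entry points (BODY and
   CLEANUP stand for whatever tree expressions the front end wants protected
   and run on unwind; this is illustrative only, not a fixed API sequence):

     expand_eh_region_start ();
     expand_expr_stmt (BODY);
     expand_eh_region_end_cleanup (CLEANUP);

   Each call to expand_eh_region_start must be closed by exactly one of the
   expand_eh_region_end_* routines below, which sets the region's type.  */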

/* Common code to end a region.  Returns the region just ended.  */

static struct eh_region *
expand_eh_region_end (void)
{
  struct eh_region *cur_region = cfun->eh->cur_region;
  rtx note;

  /* Create a note marking the end of this region.  */
  note = emit_note (NOTE_INSN_EH_REGION_END);
  NOTE_EH_HANDLER (note) = cur_region->region_number;

  /* Pop.  */
  cfun->eh->cur_region = cur_region->outer;

  return cur_region;
}

/* End an exception handling region for a cleanup.  HANDLER is an
   expression to expand for the cleanup.  */

void
expand_eh_region_end_cleanup (tree handler)
{
  struct eh_region *region;
  tree protect_cleanup_actions;
  rtx around_label;
  rtx data_save[2];

  if (! doing_eh (0))
    return;

  region = expand_eh_region_end ();
  region->type = ERT_CLEANUP;
  region->label = gen_label_rtx ();
  region->u.cleanup.exp = handler;
  region->u.cleanup.prev_try = cfun->eh->try_region;

  around_label = gen_label_rtx ();
  emit_jump (around_label);

  emit_label (region->label);

  if (flag_non_call_exceptions || region->may_contain_throw)
    {
      /* Give the language a chance to specify an action to be taken if an
         exception is thrown that would propagate out of the HANDLER.  */
      protect_cleanup_actions
        = (lang_protect_cleanup_actions
           ? (*lang_protect_cleanup_actions) ()
           : NULL_TREE);

      if (protect_cleanup_actions)
        expand_eh_region_start ();

      /* In case this cleanup involves an inline destructor with a try block in
         it, we need to save the EH return data registers around it.  */
      data_save[0] = gen_reg_rtx (ptr_mode);
      emit_move_insn (data_save[0], get_exception_pointer (cfun));
      data_save[1] = gen_reg_rtx (word_mode);
      emit_move_insn (data_save[1], get_exception_filter (cfun));

      expand_expr (handler, const0_rtx, VOIDmode, 0);

      emit_move_insn (cfun->eh->exc_ptr, data_save[0]);
      emit_move_insn (cfun->eh->filter, data_save[1]);

      if (protect_cleanup_actions)
        expand_eh_region_end_must_not_throw (protect_cleanup_actions);

      /* We need any stack adjustment complete before the around_label.  */
      do_pending_stack_adjust ();
    }

  /* We delay the generation of the _Unwind_Resume until we generate
     landing pads.  We emit a marker here so as to get good control
     flow data in the meantime.  */
  region->resume
    = emit_jump_insn (gen_rtx_RESX (VOIDmode, region->region_number));
  emit_barrier ();

  emit_label (around_label);
}

/* End an exception handling region for a try block, and prepare
   for subsequent calls to expand_start_catch.  */

void
expand_start_all_catch (void)
{
  struct eh_region *region;

  if (! doing_eh (1))
    return;

  region = expand_eh_region_end ();
  region->type = ERT_TRY;
  region->u.try.prev_try = cfun->eh->try_region;
  region->u.try.continue_label = gen_label_rtx ();

  cfun->eh->try_region = region;

  emit_jump (region->u.try.continue_label);
}

/* Begin a catch clause.  TYPE is the type caught, a list of such types, or
   null if this is a catch-all clause.  Providing a type list makes it
   possible to associate the catch region with potentially several exception
   types, which is useful e.g. for Ada.  */

void
expand_start_catch (tree type_or_list)
{
  struct eh_region *t, *c, *l;
  tree type_list;

  if (! doing_eh (0))
    return;

  type_list = type_or_list;

  if (type_or_list)
    {
      /* Ensure we always end up with a type list to normalize further
         processing, then register each type against the runtime types
         map.  */
      tree type_node;

      if (TREE_CODE (type_or_list) != TREE_LIST)
        type_list = tree_cons (NULL_TREE, type_or_list, NULL_TREE);

      type_node = type_list;
      for (; type_node; type_node = TREE_CHAIN (type_node))
        add_type_for_runtime (TREE_VALUE (type_node));
    }

  expand_eh_region_start ();

  t = cfun->eh->try_region;
  c = cfun->eh->cur_region;
  c->type = ERT_CATCH;
  c->u.catch.type_list = type_list;
  c->label = gen_label_rtx ();

  l = t->u.try.last_catch;
  c->u.catch.prev_catch = l;
  if (l)
    l->u.catch.next_catch = c;
  else
    t->u.try.catch = c;
  t->u.try.last_catch = c;

  emit_label (c->label);
}

/* End a catch clause.  Control will resume after the try/catch block.  */

void
expand_end_catch (void)
{
  struct eh_region *try_region;

  if (! doing_eh (0))
    return;

  expand_eh_region_end ();
  try_region = cfun->eh->try_region;

  emit_jump (try_region->u.try.continue_label);
}

/* End a sequence of catch handlers for a try block.  */

void
expand_end_all_catch (void)
{
  struct eh_region *try_region;

  if (! doing_eh (0))
    return;

  try_region = cfun->eh->try_region;
  cfun->eh->try_region = try_region->u.try.prev_try;

  emit_label (try_region->u.try.continue_label);
}

/* End an exception region for an exception type filter.  ALLOWED is a
   TREE_LIST of types to be matched by the runtime.  FAILURE is an
   expression to invoke if a mismatch occurs.

   ??? We could use these semantics for calls to rethrow, too; if we can
   see the surrounding catch clause, we know that the exception we're
   rethrowing satisfies the "filter" of the catch type.  */

void
expand_eh_region_end_allowed (tree allowed, tree failure)
{
  struct eh_region *region;
  rtx around_label;

  if (! doing_eh (0))
    return;

  region = expand_eh_region_end ();
  region->type = ERT_ALLOWED_EXCEPTIONS;
  region->u.allowed.type_list = allowed;
  region->label = gen_label_rtx ();

  for (; allowed ; allowed = TREE_CHAIN (allowed))
    add_type_for_runtime (TREE_VALUE (allowed));

  /* We must emit the call to FAILURE here, so that if this function
     throws a different exception, it will be processed by the
     correct region.  */

  around_label = gen_label_rtx ();
  emit_jump (around_label);

  emit_label (region->label);
  expand_expr (failure, const0_rtx, VOIDmode, EXPAND_NORMAL);
  /* We must adjust the stack before we reach the AROUND_LABEL because
     the call to FAILURE does not occur on all paths to the
     AROUND_LABEL.  */
  do_pending_stack_adjust ();

  emit_label (around_label);
}

/* End an exception region for a must-not-throw filter.  FAILURE is an
   expression to invoke if an uncaught exception propagates this far.

   This is conceptually identical to expand_eh_region_end_allowed with
   an empty allowed list (if you passed "std::terminate" instead of
   "__cxa_call_unexpected"), but they are represented differently in
   the C++ LSDA.  */

void
expand_eh_region_end_must_not_throw (tree failure)
{
  struct eh_region *region;
  rtx around_label;

  if (! doing_eh (0))
    return;

  region = expand_eh_region_end ();
  region->type = ERT_MUST_NOT_THROW;
  region->label = gen_label_rtx ();

  /* We must emit the call to FAILURE here, so that if this function
     throws a different exception, it will be processed by the
     correct region.  */

  around_label = gen_label_rtx ();
  emit_jump (around_label);

  emit_label (region->label);
  expand_expr (failure, const0_rtx, VOIDmode, EXPAND_NORMAL);

  emit_label (around_label);
}

/* End an exception region for a throw.  No handling goes on here,
   but it's the easiest way for the front-end to indicate what type
   is being thrown.  */

void
expand_eh_region_end_throw (tree type)
{
  struct eh_region *region;

  if (! doing_eh (0))
    return;

  region = expand_eh_region_end ();
  region->type = ERT_THROW;
  region->u.throw.type = type;
}

/* End a fixup region.  Within this region the cleanups for the immediately
   enclosing region are _not_ run.  This is used for goto cleanup to avoid
   destroying an object twice.

   This would be an extraordinarily simple prospect, were it not for the
   fact that we don't actually know what the immediately enclosing region
   is.  This surprising fact is because expand_cleanups is currently
   generating a sequence that it will insert somewhere else.  We collect
   the proper notion of "enclosing" in convert_from_eh_region_ranges.  */

void
expand_eh_region_end_fixup (tree handler)
{
  struct eh_region *fixup;

  if (! doing_eh (0))
    return;

  fixup = expand_eh_region_end ();
  fixup->type = ERT_FIXUP;
  fixup->u.fixup.cleanup_exp = handler;
}

/* Note that the current EH region (if any) may contain a throw, or a
   call to a function which itself may contain a throw.  */

void
note_eh_region_may_contain_throw (void)
{
  struct eh_region *region;

  region = cfun->eh->cur_region;
  while (region && !region->may_contain_throw)
    {
      region->may_contain_throw = 1;
      region = region->outer;
    }
}

/* Return an rtl expression for a pointer to the exception object
   within a handler.  */

rtx
get_exception_pointer (struct function *fun)
{
  rtx exc_ptr = fun->eh->exc_ptr;
  if (fun == cfun && ! exc_ptr)
    {
      exc_ptr = gen_reg_rtx (ptr_mode);
      fun->eh->exc_ptr = exc_ptr;
    }
  return exc_ptr;
}

/* Return an rtl expression for the exception dispatch filter
   within a handler.  */

static rtx
get_exception_filter (struct function *fun)
{
  rtx filter = fun->eh->filter;
  if (fun == cfun && ! filter)
    {
      filter = gen_reg_rtx (word_mode);
      fun->eh->filter = filter;
    }
  return filter;
}
\f
/* This section is for the exception handling specific optimization pass.  */

/* Random access the exception region tree.  It's just as simple to
   collect the regions this way as in expand_eh_region_start, but
   without having to realloc memory.  */

static void
collect_eh_region_array (void)
{
  struct eh_region **array, *i;

  i = cfun->eh->region_tree;
  if (! i)
    return;

  array = ggc_alloc_cleared ((cfun->eh->last_region_number + 1)
                             * sizeof (*array));
  cfun->eh->region_array = array;

  while (1)
    {
      array[i->region_number] = i;

      /* If there are sub-regions, process them.  */
      if (i->inner)
        i = i->inner;
      /* If there are peers, process them.  */
      else if (i->next_peer)
        i = i->next_peer;
      /* Otherwise, step back up the tree to the next peer.  */
      else
        {
          do {
            i = i->outer;
            if (i == NULL)
              return;
          } while (i->next_peer == NULL);
          i = i->next_peer;
        }
    }
}

static void
resolve_fixup_regions (void)
{
  int i, j, n = cfun->eh->last_region_number;

  for (i = 1; i <= n; ++i)
    {
      struct eh_region *fixup = cfun->eh->region_array[i];
      struct eh_region *cleanup = 0;

      if (! fixup || fixup->type != ERT_FIXUP)
        continue;

      for (j = 1; j <= n; ++j)
        {
          cleanup = cfun->eh->region_array[j];
          if (cleanup && cleanup->type == ERT_CLEANUP
              && cleanup->u.cleanup.exp == fixup->u.fixup.cleanup_exp)
            break;
        }
      if (j > n)
        abort ();

      fixup->u.fixup.real_region = cleanup->outer;
    }
}

/* Now that we've discovered what region actually encloses a fixup,
   we can shuffle pointers and remove them from the tree.  */

static void
remove_fixup_regions (void)
{
  int i;
  rtx insn, note;
  struct eh_region *fixup;

  /* Walk the insn chain and adjust the REG_EH_REGION numbers
     for instructions referencing fixup regions.  This is only
     strictly necessary for fixup regions with no parent, but
     doesn't hurt to do it for all regions.  */
  for (insn = get_insns(); insn ; insn = NEXT_INSN (insn))
    if (INSN_P (insn)
        && (note = find_reg_note (insn, REG_EH_REGION, NULL))
        && INTVAL (XEXP (note, 0)) > 0
        && (fixup = cfun->eh->region_array[INTVAL (XEXP (note, 0))])
        && fixup->type == ERT_FIXUP)
      {
        if (fixup->u.fixup.real_region)
          XEXP (note, 0) = GEN_INT (fixup->u.fixup.real_region->region_number);
        else
          remove_note (insn, note);
      }

  /* Remove the fixup regions from the tree.  */
  for (i = cfun->eh->last_region_number; i > 0; --i)
    {
      fixup = cfun->eh->region_array[i];
      if (! fixup)
        continue;

      /* Allow GC to maybe free some memory.  */
      if (fixup->type == ERT_CLEANUP)
        fixup->u.cleanup.exp = NULL_TREE;

      if (fixup->type != ERT_FIXUP)
        continue;

      if (fixup->inner)
        {
          struct eh_region *parent, *p, **pp;

          parent = fixup->u.fixup.real_region;

          /* Fix up the children's parent pointers; find the end of
             the list.  */
          for (p = fixup->inner; ; p = p->next_peer)
            {
              p->outer = parent;
              if (! p->next_peer)
                break;
            }

          /* In the tree of cleanups, only outer-inner ordering matters.
             So link the children back in anywhere at the correct level.  */
          if (parent)
            pp = &parent->inner;
          else
            pp = &cfun->eh->region_tree;
          p->next_peer = *pp;
          *pp = fixup->inner;
          fixup->inner = NULL;
        }

      remove_eh_handler (fixup);
    }
}

/* Remove all regions whose labels are not reachable from insns.  */

static void
remove_unreachable_regions (rtx insns)
{
  int i, *uid_region_num;
  bool *reachable;
  struct eh_region *r;
  rtx insn;

  uid_region_num = xcalloc (get_max_uid (), sizeof(int));
  reachable = xcalloc (cfun->eh->last_region_number + 1, sizeof(bool));

  for (i = cfun->eh->last_region_number; i > 0; --i)
    {
      r = cfun->eh->region_array[i];
      if (!r || r->region_number != i)
        continue;

      if (r->resume)
        {
          if (uid_region_num[INSN_UID (r->resume)])
            abort ();
          uid_region_num[INSN_UID (r->resume)] = i;
        }
      if (r->label)
        {
          if (uid_region_num[INSN_UID (r->label)])
            abort ();
          uid_region_num[INSN_UID (r->label)] = i;
        }
      if (r->type == ERT_TRY && r->u.try.continue_label)
        {
          if (uid_region_num[INSN_UID (r->u.try.continue_label)])
            abort ();
          uid_region_num[INSN_UID (r->u.try.continue_label)] = i;
        }
    }

  for (insn = insns; insn; insn = NEXT_INSN (insn))
    reachable[uid_region_num[INSN_UID (insn)]] = true;

  for (i = cfun->eh->last_region_number; i > 0; --i)
    {
      r = cfun->eh->region_array[i];
      if (r && r->region_number == i && !reachable[i])
        {
          /* Don't remove ERT_THROW regions if their outer region
             is reachable.  */
          if (r->type == ERT_THROW
              && r->outer
              && reachable[r->outer->region_number])
            continue;

          remove_eh_handler (r);
        }
    }

  free (reachable);
  free (uid_region_num);
}

/* Turn NOTE_INSN_EH_REGION notes into REG_EH_REGION notes for each
   can_throw instruction in the region.  */

static void
convert_from_eh_region_ranges_1 (rtx *pinsns, int *orig_sp, int cur)
{
  int *sp = orig_sp;
  rtx insn, next;

  for (insn = *pinsns; insn ; insn = next)
    {
      next = NEXT_INSN (insn);
      if (GET_CODE (insn) == NOTE)
        {
          int kind = NOTE_LINE_NUMBER (insn);
          if (kind == NOTE_INSN_EH_REGION_BEG
              || kind == NOTE_INSN_EH_REGION_END)
            {
              if (kind == NOTE_INSN_EH_REGION_BEG)
                {
                  struct eh_region *r;

                  *sp++ = cur;
                  cur = NOTE_EH_HANDLER (insn);

                  r = cfun->eh->region_array[cur];
                  if (r->type == ERT_FIXUP)
                    {
                      r = r->u.fixup.real_region;
                      cur = r ? r->region_number : 0;
                    }
                  else if (r->type == ERT_CATCH)
                    {
                      r = r->outer;
                      cur = r ? r->region_number : 0;
                    }
                }
              else
                cur = *--sp;

              /* Removing the first insn of a CALL_PLACEHOLDER sequence
                 requires extra care to adjust sequence start.  */
              if (insn == *pinsns)
                *pinsns = next;
              remove_insn (insn);
              continue;
            }
        }
      else if (INSN_P (insn))
        {
          if (cur > 0
              && ! find_reg_note (insn, REG_EH_REGION, NULL_RTX)
              /* Calls can always potentially throw exceptions, unless
                 they have a REG_EH_REGION note with a value of 0 or less.
                 Which should be the only possible kind so far.  */
              && (GET_CODE (insn) == CALL_INSN
                  /* If we wanted exceptions for non-call insns, then
                     any may_trap_p instruction could throw.  */
                  || (flag_non_call_exceptions
                      && GET_CODE (PATTERN (insn)) != CLOBBER
                      && GET_CODE (PATTERN (insn)) != USE
                      && may_trap_p (PATTERN (insn)))))
            {
              REG_NOTES (insn) = alloc_EXPR_LIST (REG_EH_REGION, GEN_INT (cur),
                                                  REG_NOTES (insn));
            }

          if (GET_CODE (insn) == CALL_INSN
              && GET_CODE (PATTERN (insn)) == CALL_PLACEHOLDER)
            {
              convert_from_eh_region_ranges_1 (&XEXP (PATTERN (insn), 0),
                                               sp, cur);
              convert_from_eh_region_ranges_1 (&XEXP (PATTERN (insn), 1),
                                               sp, cur);
              convert_from_eh_region_ranges_1 (&XEXP (PATTERN (insn), 2),
                                               sp, cur);
            }
        }
    }

  if (sp != orig_sp)
    abort ();
}

void
convert_from_eh_region_ranges (void)
{
  int *stack;
  rtx insns;

  collect_eh_region_array ();
  resolve_fixup_regions ();

  stack = xmalloc (sizeof (int) * (cfun->eh->last_region_number + 1));
  insns = get_insns ();
  convert_from_eh_region_ranges_1 (&insns, stack, 0);
  free (stack);

  remove_fixup_regions ();
  remove_unreachable_regions (insns);
}

static void
add_ehl_entry (rtx label, struct eh_region *region)
{
  struct ehl_map_entry **slot, *entry;

  LABEL_PRESERVE_P (label) = 1;

  entry = (struct ehl_map_entry *) ggc_alloc (sizeof (*entry));
  entry->label = label;
  entry->region = region;

  slot = (struct ehl_map_entry **)
    htab_find_slot (cfun->eh->exception_handler_label_map, entry, INSERT);

  /* Before landing pad creation, each exception handler has its own
     label.  After landing pad creation, the exception handlers may
     share landing pads.  This is ok, since maybe_remove_eh_handler
     only requires the 1-1 mapping before landing pad creation.  */
  if (*slot && !cfun->eh->built_landing_pads)
    abort ();

  *slot = entry;
}

void
find_exception_handler_labels (void)
{
  int i;

  if (cfun->eh->exception_handler_label_map)
    htab_empty (cfun->eh->exception_handler_label_map);
  else
    {
      /* ??? The expansion factor here (3/2) must be greater than the htab
         occupancy factor (4/3) to avoid unnecessary resizing.  */
      cfun->eh->exception_handler_label_map
        = htab_create_ggc (cfun->eh->last_region_number * 3 / 2,
                           ehl_hash, ehl_eq, NULL);
    }

  if (cfun->eh->region_tree == NULL)
    return;

  for (i = cfun->eh->last_region_number; i > 0; --i)
    {
      struct eh_region *region = cfun->eh->region_array[i];
      rtx lab;

      if (! region || region->region_number != i)
        continue;
      if (cfun->eh->built_landing_pads)
        lab = region->landing_pad;
      else
        lab = region->label;

      if (lab)
        add_ehl_entry (lab, region);
    }

  /* For sjlj exceptions, need the return label to remain live until
     after landing pad generation.  */
  if (USING_SJLJ_EXCEPTIONS && ! cfun->eh->built_landing_pads)
    add_ehl_entry (return_label, NULL);
}

bool
current_function_has_exception_handlers (void)
{
  int i;

  for (i = cfun->eh->last_region_number; i > 0; --i)
    {
      struct eh_region *region = cfun->eh->region_array[i];

      if (! region || region->region_number != i)
        continue;
      if (region->type != ERT_THROW)
        return true;
    }

  return false;
}
\f
static struct eh_region *
duplicate_eh_region_1 (struct eh_region *o, struct inline_remap *map)
{
  struct eh_region *n
    = (struct eh_region *) ggc_alloc_cleared (sizeof (struct eh_region));

  n->region_number = o->region_number + cfun->eh->last_region_number;
  n->type = o->type;

  switch (n->type)
    {
    case ERT_CLEANUP:
    case ERT_MUST_NOT_THROW:
      break;

    case ERT_TRY:
      if (o->u.try.continue_label)
        n->u.try.continue_label
          = get_label_from_map (map,
                                CODE_LABEL_NUMBER (o->u.try.continue_label));
      break;

    case ERT_CATCH:
      n->u.catch.type_list = o->u.catch.type_list;
      break;

    case ERT_ALLOWED_EXCEPTIONS:
      n->u.allowed.type_list = o->u.allowed.type_list;
      break;

    case ERT_THROW:
      n->u.throw.type = o->u.throw.type;

    default:
      abort ();
    }

  if (o->label)
    n->label = get_label_from_map (map, CODE_LABEL_NUMBER (o->label));
  if (o->resume)
    {
      n->resume = map->insn_map[INSN_UID (o->resume)];
      if (n->resume == NULL)
        abort ();
    }

  return n;
}

static void
duplicate_eh_region_2 (struct eh_region *o, struct eh_region **n_array)
{
  struct eh_region *n = n_array[o->region_number];

  switch (n->type)
    {
    case ERT_TRY:
      n->u.try.catch = n_array[o->u.try.catch->region_number];
      n->u.try.last_catch = n_array[o->u.try.last_catch->region_number];
      break;

    case ERT_CATCH:
      if (o->u.catch.next_catch)
        n->u.catch.next_catch = n_array[o->u.catch.next_catch->region_number];
      if (o->u.catch.prev_catch)
        n->u.catch.prev_catch = n_array[o->u.catch.prev_catch->region_number];
      break;

    default:
      break;
    }

  if (o->outer)
    n->outer = n_array[o->outer->region_number];
  if (o->inner)
    n->inner = n_array[o->inner->region_number];
  if (o->next_peer)
    n->next_peer = n_array[o->next_peer->region_number];
}

int
duplicate_eh_regions (struct function *ifun, struct inline_remap *map)
{
  int ifun_last_region_number = ifun->eh->last_region_number;
  struct eh_region **n_array, *root, *cur;
  int i;

  if (ifun_last_region_number == 0)
    return 0;

  n_array = xcalloc (ifun_last_region_number + 1, sizeof (*n_array));

  for (i = 1; i <= ifun_last_region_number; ++i)
    {
      cur = ifun->eh->region_array[i];
      if (!cur || cur->region_number != i)
        continue;
      n_array[i] = duplicate_eh_region_1 (cur, map);
    }
  for (i = 1; i <= ifun_last_region_number; ++i)
    {
      cur = ifun->eh->region_array[i];
      if (!cur || cur->region_number != i)
        continue;
      duplicate_eh_region_2 (cur, n_array);
    }

  root = n_array[ifun->eh->region_tree->region_number];
  cur = cfun->eh->cur_region;
  if (cur)
    {
      struct eh_region *p = cur->inner;
      if (p)
        {
          while (p->next_peer)
            p = p->next_peer;
          p->next_peer = root;
        }
      else
        cur->inner = root;

      for (i = 1; i <= ifun_last_region_number; ++i)
        if (n_array[i] && n_array[i]->outer == NULL)
          n_array[i]->outer = cur;
    }
  else
    {
      struct eh_region *p = cfun->eh->region_tree;
      if (p)
        {
          while (p->next_peer)
            p = p->next_peer;
          p->next_peer = root;
        }
      else
        cfun->eh->region_tree = root;
    }

  free (n_array);

  i = cfun->eh->last_region_number;
  cfun->eh->last_region_number = i + ifun_last_region_number;
  return i;
}

\f
static int
t2r_eq (const void *pentry, const void *pdata)
{
  tree entry = (tree) pentry;
  tree data = (tree) pdata;

  return TREE_PURPOSE (entry) == data;
}

static hashval_t
t2r_hash (const void *pentry)
{
  tree entry = (tree) pentry;
  return TYPE_HASH (TREE_PURPOSE (entry));
}

static void
add_type_for_runtime (tree type)
{
  tree *slot;

  slot = (tree *) htab_find_slot_with_hash (type_to_runtime_map, type,
                                            TYPE_HASH (type), INSERT);
  if (*slot == NULL)
    {
      tree runtime = (*lang_eh_runtime_type) (type);
      *slot = tree_cons (type, runtime, NULL_TREE);
    }
}

static tree
lookup_type_for_runtime (tree type)
{
  tree *slot;

  slot = (tree *) htab_find_slot_with_hash (type_to_runtime_map, type,
                                            TYPE_HASH (type), NO_INSERT);

  /* We should have always inserted the data earlier.  */
  return TREE_VALUE (*slot);
}

\f
/* Represent an entry in @TTypes for either catch actions
   or exception filter actions.  */
struct ttypes_filter GTY(())
{
  tree t;
  int filter;
};

/* Compare ENTRY (a ttypes_filter entry in the hash table) with DATA
   (a tree) for a @TTypes type node we are thinking about adding.  */

static int
ttypes_filter_eq (const void *pentry, const void *pdata)
{
  const struct ttypes_filter *entry = (const struct ttypes_filter *) pentry;
  tree data = (tree) pdata;

  return entry->t == data;
}

static hashval_t
ttypes_filter_hash (const void *pentry)
{
  const struct ttypes_filter *entry = (const struct ttypes_filter *) pentry;
  return TYPE_HASH (entry->t);
}

/* Compare ENTRY with DATA (both struct ttypes_filter) for a @TTypes
   exception specification list we are thinking about adding.  */
/* ??? Currently we use the type lists in the order given.  Someone
   should put these in some canonical order.  */

static int
ehspec_filter_eq (const void *pentry, const void *pdata)
{
  const struct ttypes_filter *entry = (const struct ttypes_filter *) pentry;
  const struct ttypes_filter *data = (const struct ttypes_filter *) pdata;

  return type_list_equal (entry->t, data->t);
}

/* Hash function for exception specification lists.  */

static hashval_t
ehspec_filter_hash (const void *pentry)
{
  const struct ttypes_filter *entry = (const struct ttypes_filter *) pentry;
  hashval_t h = 0;
  tree list;

  for (list = entry->t; list ; list = TREE_CHAIN (list))
    h = (h << 5) + (h >> 27) + TYPE_HASH (TREE_VALUE (list));
  return h;
}

/* Add TYPE to cfun->eh->ttype_data, using TYPES_HASH to speed
   up the search.  Return the filter value to be used.  */

static int
add_ttypes_entry (htab_t ttypes_hash, tree type)
{
  struct ttypes_filter **slot, *n;

  slot = (struct ttypes_filter **)
    htab_find_slot_with_hash (ttypes_hash, type, TYPE_HASH (type), INSERT);

  if ((n = *slot) == NULL)
    {
      /* Filter value is a 1 based table index.  */

      n = (struct ttypes_filter *) xmalloc (sizeof (*n));
      n->t = type;
      n->filter = VARRAY_ACTIVE_SIZE (cfun->eh->ttype_data) + 1;
      *slot = n;

      VARRAY_PUSH_TREE (cfun->eh->ttype_data, type);
    }

  return n->filter;
}

/* Add LIST to cfun->eh->ehspec_data, using EHSPEC_HASH and TYPES_HASH
   to speed up the search.  Return the filter value to be used.  */

static int
add_ehspec_entry (htab_t ehspec_hash, htab_t ttypes_hash, tree list)
{
  struct ttypes_filter **slot, *n;
  struct ttypes_filter dummy;

  dummy.t = list;
  slot = (struct ttypes_filter **)
    htab_find_slot (ehspec_hash, &dummy, INSERT);

  if ((n = *slot) == NULL)
    {
      /* Filter value is a -1 based byte index into a uleb128 buffer.  */

      n = (struct ttypes_filter *) xmalloc (sizeof (*n));
      n->t = list;
      n->filter = -(VARRAY_ACTIVE_SIZE (cfun->eh->ehspec_data) + 1);
      *slot = n;

      /* Look up each type in the list and encode its filter
         value as a uleb128.  Terminate the list with 0.  */
      for (; list ; list = TREE_CHAIN (list))
        push_uleb128 (&cfun->eh->ehspec_data,
                      add_ttypes_entry (ttypes_hash, TREE_VALUE (list)));
      VARRAY_PUSH_UCHAR (cfun->eh->ehspec_data, 0);
    }

  return n->filter;
}
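
/* A worked example of the two filter spaces above (illustrative only;
   the actual values depend on insertion order): if types A and B have
   been assigned ttype filters 1 and 2 by add_ttypes_entry, then for an
   exception specification list (A, B) add_ehspec_entry appends the
   uleb128 bytes 1, 2, 0 to ehspec_data and returns a negative filter
   whose magnitude is the 1-based index of the first of those bytes.  */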

/* Generate the action filter values to be used for CATCH and
   ALLOWED_EXCEPTIONS regions.  When using dwarf2 exception regions,
   we use lots of landing pads, and so every type or list can share
   the same filter value, which saves table space.  */

static void
assign_filter_values (void)
{
  int i;
  htab_t ttypes, ehspec;

  VARRAY_TREE_INIT (cfun->eh->ttype_data, 16, "ttype_data");
  VARRAY_UCHAR_INIT (cfun->eh->ehspec_data, 64, "ehspec_data");

  ttypes = htab_create (31, ttypes_filter_hash, ttypes_filter_eq, free);
  ehspec = htab_create (31, ehspec_filter_hash, ehspec_filter_eq, free);

  for (i = cfun->eh->last_region_number; i > 0; --i)
    {
      struct eh_region *r = cfun->eh->region_array[i];

      /* Mind we don't process a region more than once.  */
      if (!r || r->region_number != i)
        continue;

      switch (r->type)
        {
        case ERT_CATCH:
          /* Whatever type_list is (NULL or true list), we build a list
             of filters for the region.  */
          r->u.catch.filter_list = NULL_TREE;

          if (r->u.catch.type_list != NULL)
            {
              /* Get a filter value for each of the types caught and store
                 them in the region's dedicated list.  */
              tree tp_node = r->u.catch.type_list;

              for (;tp_node; tp_node = TREE_CHAIN (tp_node))
                {
                  int flt = add_ttypes_entry (ttypes, TREE_VALUE (tp_node));
                  tree flt_node = build_int_2 (flt, 0);

                  r->u.catch.filter_list
                    = tree_cons (NULL_TREE, flt_node, r->u.catch.filter_list);
                }
            }
          else
            {
              /* Get a filter value for the NULL list also since it will need
                 an action record anyway.  */
              int flt = add_ttypes_entry (ttypes, NULL);
              tree flt_node = build_int_2 (flt, 0);

              r->u.catch.filter_list
                = tree_cons (NULL_TREE, flt_node, r->u.catch.filter_list);
            }

          break;

        case ERT_ALLOWED_EXCEPTIONS:
          r->u.allowed.filter
            = add_ehspec_entry (ehspec, ttypes, r->u.allowed.type_list);
          break;

        default:
          break;
        }
    }

  htab_delete (ttypes);
  htab_delete (ehspec);
}
1613
ac850948
JM
1614/* Generate the code to actually handle exceptions, which will follow the
1615 landing pads. */
1616
52a11cbf 1617static void
502b8322 1618build_post_landing_pads (void)
52a11cbf
RH
1619{
1620 int i;
bf71cd2e 1621
52a11cbf 1622 for (i = cfun->eh->last_region_number; i > 0; --i)
bf71cd2e 1623 {
52a11cbf
RH
1624 struct eh_region *region = cfun->eh->region_array[i];
1625 rtx seq;
bf71cd2e 1626
52a11cbf
RH
1627 /* Mind we don't process a region more than once. */
1628 if (!region || region->region_number != i)
1629 continue;
1630
1631 switch (region->type)
987009bf 1632 {
52a11cbf
RH
1633 case ERT_TRY:
1634 /* ??? Collect the set of all non-overlapping catch handlers
1635 all the way up the chain until blocked by a cleanup. */
1636 /* ??? Outer try regions can share landing pads with inner
1637 try regions if the types are completely non-overlapping,
a1f300c0 1638 and there are no intervening cleanups. */
bf71cd2e 1639
52a11cbf 1640 region->post_landing_pad = gen_label_rtx ();
bf71cd2e 1641
52a11cbf 1642 start_sequence ();
bf71cd2e 1643
52a11cbf 1644 emit_label (region->post_landing_pad);
bf71cd2e 1645
52a11cbf
RH
1646 /* ??? It is mighty inconvenient to call back into the
1647 switch statement generation code in expand_end_case.
1648 Rapid prototyping sez a sequence of ifs. */
1649 {
1650 struct eh_region *c;
1651 for (c = region->u.try.catch; c ; c = c->u.catch.next_catch)
1652 {
6d41a92f 1653 if (c->u.catch.type_list == NULL)
a944ceb9 1654 emit_jump (c->label);
52a11cbf 1655 else
6d41a92f
OH
1656 {
1657 /* Need for one cmp/jump per type caught. Each type
1658 list entry has a matching entry in the filter list
1659 (see assign_filter_values). */
1660 tree tp_node = c->u.catch.type_list;
1661 tree flt_node = c->u.catch.filter_list;
1662
1663 for (; tp_node; )
1664 {
1665 emit_cmp_and_jump_insns
1666 (cfun->eh->filter,
1667 GEN_INT (tree_low_cst (TREE_VALUE (flt_node), 0)),
1668 EQ, NULL_RTX, word_mode, 0, c->label);
1669
1670 tp_node = TREE_CHAIN (tp_node);
1671 flt_node = TREE_CHAIN (flt_node);
1672 }
1673 }
52a11cbf
RH
1674 }
1675 }
bf71cd2e 1676
47c84870
JM
1677 /* We delay the generation of the _Unwind_Resume until we generate
1678 landing pads. We emit a marker here so as to get good control
1679 flow data in the meantime. */
1680 region->resume
1681 = emit_jump_insn (gen_rtx_RESX (VOIDmode, region->region_number));
1682 emit_barrier ();
1683
52a11cbf
RH
1684 seq = get_insns ();
1685 end_sequence ();
e6cfb550 1686
2f937369 1687 emit_insn_before (seq, region->u.try.catch->label);
52a11cbf 1688 break;
bf71cd2e 1689
52a11cbf
RH
1690 case ERT_ALLOWED_EXCEPTIONS:
1691 region->post_landing_pad = gen_label_rtx ();
9a0d1e1b 1692
52a11cbf 1693 start_sequence ();
f54a7f6f 1694
52a11cbf 1695 emit_label (region->post_landing_pad);
f54a7f6f 1696
52a11cbf
RH
1697 emit_cmp_and_jump_insns (cfun->eh->filter,
1698 GEN_INT (region->u.allowed.filter),
a06ef755 1699 EQ, NULL_RTX, word_mode, 0, region->label);
f54a7f6f 1700
47c84870
JM
1701 /* We delay the generation of the _Unwind_Resume until we generate
1702 landing pads. We emit a marker here so as to get good control
1703 flow data in the meantime. */
1704 region->resume
1705 = emit_jump_insn (gen_rtx_RESX (VOIDmode, region->region_number));
1706 emit_barrier ();
1707
52a11cbf
RH
1708 seq = get_insns ();
1709 end_sequence ();
1710
2f937369 1711 emit_insn_before (seq, region->label);
52a11cbf 1712 break;
f54a7f6f 1713
52a11cbf 1714 case ERT_CLEANUP:
125ca8fd 1715 case ERT_MUST_NOT_THROW:
a944ceb9 1716 region->post_landing_pad = region->label;
125ca8fd
RH
1717 break;
1718
52a11cbf
RH
1719 case ERT_CATCH:
1720 case ERT_THROW:
1721 /* Nothing to do. */
1722 break;
1723
1724 default:
1725 abort ();
1726 }
1727 }
1728}
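/* Editorial sketch, not part of the original source: for a try region
   with two catches, the "sequence of ifs" built above amounts to

	post_landing_pad:
	  if (FILTER == 1) goto catch_A;
	  if (FILTER == 2) goto catch_B;
	  RESX;   -- placeholder, resolved later by connect_post_landing_pads

   where FILTER stands for the cfun->eh->filter pseudo, the constants come
   from assign_filter_values, and the catch_A/catch_B labels are
   hypothetical.  A catch-all handler instead gets an unconditional jump
   and ends the chain.  */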
1e4ceb6f 1729
47c84870
JM
1730/* Replace RESX patterns with jumps to the next handler if any, or calls to
1731 _Unwind_Resume otherwise. */
1732
1e4ceb6f 1733static void
502b8322 1734connect_post_landing_pads (void)
1e4ceb6f 1735{
52a11cbf 1736 int i;
76fc91c7 1737
52a11cbf
RH
1738 for (i = cfun->eh->last_region_number; i > 0; --i)
1739 {
1740 struct eh_region *region = cfun->eh->region_array[i];
1741 struct eh_region *outer;
47c84870 1742 rtx seq;
1e4ceb6f 1743
52a11cbf
RH
1744 /* Mind we don't process a region more than once. */
1745 if (!region || region->region_number != i)
1746 continue;
1e4ceb6f 1747
47c84870
JM
1748 /* If there is no RESX, or it has been deleted by flow, there's
1749 nothing to fix up. */
1750 if (! region->resume || INSN_DELETED_P (region->resume))
52a11cbf 1751 continue;
76fc91c7 1752
52a11cbf
RH
1753 /* Search for another landing pad in this function. */
1754 for (outer = region->outer; outer ; outer = outer->outer)
1755 if (outer->post_landing_pad)
1756 break;
1e4ceb6f 1757
52a11cbf 1758 start_sequence ();
12670d88 1759
52a11cbf
RH
1760 if (outer)
1761 emit_jump (outer->post_landing_pad);
1762 else
9555a122 1763 emit_library_call (unwind_resume_libfunc, LCT_THROW,
26b10ae0 1764 VOIDmode, 1, cfun->eh->exc_ptr, ptr_mode);
4956d07c 1765
52a11cbf
RH
1766 seq = get_insns ();
1767 end_sequence ();
2f937369 1768 emit_insn_before (seq, region->resume);
53c17031 1769 delete_insn (region->resume);
52a11cbf
RH
1770 }
1771}
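/* Editorial sketch, not part of the original source: each RESX marker is
   replaced above by one of two things, depending on whether some enclosing
   region in this function has its own post-landing pad:

	goto outer->post_landing_pad;          -- rethrow handled locally
	_Unwind_Resume (cfun->eh->exc_ptr);    -- rethrow leaves the function

   unwind_resume_libfunc names the runtime entry point, _Unwind_Resume for
   the dwarf2 scheme or _Unwind_SjLj_Resume for setjmp/longjmp.  */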
1772
1773\f
1774static void
502b8322 1775dw2_build_landing_pads (void)
4956d07c 1776{
ae0ed63a
JM
1777 int i;
1778 unsigned int j;
4956d07c 1779
52a11cbf
RH
1780 for (i = cfun->eh->last_region_number; i > 0; --i)
1781 {
1782 struct eh_region *region = cfun->eh->region_array[i];
1783 rtx seq;
5c701bb1 1784 bool clobbers_hard_regs = false;
4956d07c 1785
52a11cbf
RH
1786 /* Mind we don't process a region more than once. */
1787 if (!region || region->region_number != i)
1788 continue;
1418bb67 1789
52a11cbf
RH
1790 if (region->type != ERT_CLEANUP
1791 && region->type != ERT_TRY
1792 && region->type != ERT_ALLOWED_EXCEPTIONS)
1793 continue;
12670d88 1794
52a11cbf 1795 start_sequence ();
4956d07c 1796
52a11cbf
RH
1797 region->landing_pad = gen_label_rtx ();
1798 emit_label (region->landing_pad);
4956d07c 1799
52a11cbf
RH
1800#ifdef HAVE_exception_receiver
1801 if (HAVE_exception_receiver)
1802 emit_insn (gen_exception_receiver ());
1803 else
1804#endif
1805#ifdef HAVE_nonlocal_goto_receiver
1806 if (HAVE_nonlocal_goto_receiver)
1807 emit_insn (gen_nonlocal_goto_receiver ());
1808 else
1809#endif
1810 { /* Nothing */ }
4956d07c 1811
52a11cbf
RH
1812 /* If the eh_return data registers are call-saved, then we
1813 won't have considered them clobbered from the call that
1814 threw. Kill them now. */
1815 for (j = 0; ; ++j)
1816 {
1817 unsigned r = EH_RETURN_DATA_REGNO (j);
1818 if (r == INVALID_REGNUM)
1819 break;
1820 if (! call_used_regs[r])
5c701bb1
JS
1821 {
1822 emit_insn (gen_rtx_CLOBBER (VOIDmode, gen_rtx_REG (Pmode, r)));
1823 clobbers_hard_regs = true;
1824 }
1825 }
1826
1827 if (clobbers_hard_regs)
1828 {
1829 /* @@@ This is a kludge. Not all machine descriptions define a
1830 blockage insn, but we must not allow the code we just generated
1831 to be reordered by scheduling. So emit an ASM_INPUT to act as
2ba84f36 1832 a blockage insn. */
5c701bb1 1833 emit_insn (gen_rtx_ASM_INPUT (VOIDmode, ""));
52a11cbf 1834 }
e701eb4d 1835
52a11cbf 1836 emit_move_insn (cfun->eh->exc_ptr,
26b10ae0 1837 gen_rtx_REG (ptr_mode, EH_RETURN_DATA_REGNO (0)));
52a11cbf 1838 emit_move_insn (cfun->eh->filter,
9e800206 1839 gen_rtx_REG (word_mode, EH_RETURN_DATA_REGNO (1)));
9a0d1e1b 1840
52a11cbf
RH
1841 seq = get_insns ();
1842 end_sequence ();
5816cb14 1843
2f937369 1844 emit_insn_before (seq, region->post_landing_pad);
52a11cbf 1845 }
4956d07c
MS
1846}
1847
52a11cbf
RH
1848\f
1849struct sjlj_lp_info
1850{
1851 int directly_reachable;
1852 int action_index;
1853 int dispatch_index;
1854 int call_site_index;
1855};
4956d07c 1856
52a11cbf 1857static bool
502b8322 1858sjlj_find_directly_reachable_regions (struct sjlj_lp_info *lp_info)
4956d07c 1859{
52a11cbf
RH
1860 rtx insn;
1861 bool found_one = false;
4956d07c 1862
52a11cbf
RH
1863 for (insn = get_insns (); insn ; insn = NEXT_INSN (insn))
1864 {
1865 struct eh_region *region;
98ce21b3 1866 enum reachable_code rc;
52a11cbf
RH
1867 tree type_thrown;
1868 rtx note;
4956d07c 1869
52a11cbf
RH
1870 if (! INSN_P (insn))
1871 continue;
0d3453df 1872
52a11cbf
RH
1873 note = find_reg_note (insn, REG_EH_REGION, NULL_RTX);
1874 if (!note || INTVAL (XEXP (note, 0)) <= 0)
1875 continue;
5dfa7520 1876
52a11cbf 1877 region = cfun->eh->region_array[INTVAL (XEXP (note, 0))];
5dfa7520 1878
52a11cbf
RH
1879 type_thrown = NULL_TREE;
1880 if (region->type == ERT_THROW)
1881 {
1882 type_thrown = region->u.throw.type;
1883 region = region->outer;
1884 }
12670d88 1885
52a11cbf
RH
1886 /* Find the first containing region that might handle the exception.
1887 That's the landing pad to which we will transfer control. */
98ce21b3 1888 rc = RNL_NOT_CAUGHT;
52a11cbf 1889 for (; region; region = region->outer)
98ce21b3
RH
1890 {
1891 rc = reachable_next_level (region, type_thrown, 0);
1892 if (rc != RNL_NOT_CAUGHT)
1893 break;
1894 }
1895 if (rc == RNL_MAYBE_CAUGHT || rc == RNL_CAUGHT)
52a11cbf
RH
1896 {
1897 lp_info[region->region_number].directly_reachable = 1;
1898 found_one = true;
1899 }
1900 }
4956d07c 1901
52a11cbf
RH
1902 return found_one;
1903}
e701eb4d
JM
1904
1905static void
502b8322 1906sjlj_assign_call_site_values (rtx dispatch_label, struct sjlj_lp_info *lp_info)
e701eb4d 1907{
52a11cbf
RH
1908 htab_t ar_hash;
1909 int i, index;
1910
1911 /* First task: build the action table. */
1912
1913 VARRAY_UCHAR_INIT (cfun->eh->action_record_data, 64, "action_record_data");
1914 ar_hash = htab_create (31, action_record_hash, action_record_eq, free);
1915
1916 for (i = cfun->eh->last_region_number; i > 0; --i)
1917 if (lp_info[i].directly_reachable)
e6cfb550 1918 {
52a11cbf
RH
1919 struct eh_region *r = cfun->eh->region_array[i];
1920 r->landing_pad = dispatch_label;
1921 lp_info[i].action_index = collect_one_action_chain (ar_hash, r);
1922 if (lp_info[i].action_index != -1)
1923 cfun->uses_eh_lsda = 1;
e6cfb550 1924 }
e701eb4d 1925
52a11cbf 1926 htab_delete (ar_hash);
76fc91c7 1927
52a11cbf
RH
1928 /* Next: assign dispatch values. In dwarf2 terms, this would be the
1929 landing pad label for the region. For sjlj though, there is one
1930 common landing pad from which we dispatch to the post-landing pads.
76fc91c7 1931
52a11cbf
RH
1932 A region receives a dispatch index if it is directly reachable
1933 and requires in-function processing. Regions that share post-landing
eaec9b3d 1934 pads may share dispatch indices. */
52a11cbf
RH
1935 /* ??? Post-landing pad sharing doesn't actually happen at the moment
1936 (see build_post_landing_pads) so we don't bother checking for it. */
4956d07c 1937
52a11cbf
RH
1938 index = 0;
1939 for (i = cfun->eh->last_region_number; i > 0; --i)
98ce21b3 1940 if (lp_info[i].directly_reachable)
52a11cbf 1941 lp_info[i].dispatch_index = index++;
76fc91c7 1942
52a11cbf
RH
1943 /* Finally: assign call-site values. In dwarf2 terms, this would be
1944 the region number assigned by convert_to_eh_region_ranges, but
1945 handles no-action and must-not-throw differently. */
76fc91c7 1946
52a11cbf
RH
1947 call_site_base = 1;
1948 for (i = cfun->eh->last_region_number; i > 0; --i)
1949 if (lp_info[i].directly_reachable)
1950 {
1951 int action = lp_info[i].action_index;
1952
1953 /* Map must-not-throw to otherwise unused call-site index 0. */
1954 if (action == -2)
1955 index = 0;
1956 /* Map no-action to otherwise unused call-site index -1. */
1957 else if (action == -1)
1958 index = -1;
1959 /* Otherwise, look it up in the table. */
1960 else
1961 index = add_call_site (GEN_INT (lp_info[i].dispatch_index), action);
1962
1963 lp_info[i].call_site_index = index;
1964 }
4956d07c 1965}
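/* Editorial summary, not part of the original source: the mapping just
   applied gives the SjLj call-site encoding

	action index -2   ->  call-site index  0   (must-not-throw)
	action index -1   ->  call-site index -1   (no action, no landing pad)
	action index >= 0 ->  a real entry from add_call_site

   which mirrors the special action values documented before the
   action_record machinery further down.  */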
27a36778 1966
52a11cbf 1967static void
502b8322 1968sjlj_mark_call_sites (struct sjlj_lp_info *lp_info)
27a36778 1969{
52a11cbf
RH
1970 int last_call_site = -2;
1971 rtx insn, mem;
1972
52a11cbf 1973 for (insn = get_insns (); insn ; insn = NEXT_INSN (insn))
27a36778 1974 {
52a11cbf
RH
1975 struct eh_region *region;
1976 int this_call_site;
1977 rtx note, before, p;
27a36778 1978
52a11cbf
RH
1979 /* Reset value tracking at extended basic block boundaries. */
1980 if (GET_CODE (insn) == CODE_LABEL)
1981 last_call_site = -2;
27a36778 1982
52a11cbf
RH
1983 if (! INSN_P (insn))
1984 continue;
27a36778 1985
52a11cbf
RH
1986 note = find_reg_note (insn, REG_EH_REGION, NULL_RTX);
1987 if (!note)
1988 {
1989 /* Calls (and trapping insns) without notes are outside any
1990 exception handling region in this function. Mark them as
1991 no action. */
1992 if (GET_CODE (insn) == CALL_INSN
1993 || (flag_non_call_exceptions
1994 && may_trap_p (PATTERN (insn))))
1995 this_call_site = -1;
1996 else
1997 continue;
1998 }
1999 else
2000 {
2001 /* Calls that are known to not throw need not be marked. */
2002 if (INTVAL (XEXP (note, 0)) <= 0)
2003 continue;
27a36778 2004
52a11cbf
RH
2005 region = cfun->eh->region_array[INTVAL (XEXP (note, 0))];
2006 this_call_site = lp_info[region->region_number].call_site_index;
2007 }
27a36778 2008
52a11cbf
RH
2009 if (this_call_site == last_call_site)
2010 continue;
2011
2012 /* Don't separate a call from its argument loads. */
2013 before = insn;
2014 if (GET_CODE (insn) == CALL_INSN)
0fb7aeda 2015 before = find_first_parameter_load (insn, NULL_RTX);
4956d07c 2016
52a11cbf 2017 start_sequence ();
fd2c57a9
AH
2018 mem = adjust_address (cfun->eh->sjlj_fc, TYPE_MODE (integer_type_node),
2019 sjlj_fc_call_site_ofs);
52a11cbf
RH
2020 emit_move_insn (mem, GEN_INT (this_call_site));
2021 p = get_insns ();
2022 end_sequence ();
12670d88 2023
2f937369 2024 emit_insn_before (p, before);
52a11cbf
RH
2025 last_call_site = this_call_site;
2026 }
2027}
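/* Editorial sketch, not part of the original source: the net effect of the
   stores emitted above is that each potentially throwing insn is preceded
   by the equivalent of

	fc.call_site = N;

   where fc is the on-stack SjLj function context (cfun->eh->sjlj_fc) and N
   is the index chosen by sjlj_assign_call_site_values.  Consecutive insns
   sharing an index within one extended basic block reuse the last store.  */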
4956d07c 2028
52a11cbf
RH
2029/* Construct the SjLj_Function_Context. */
2030
2031static void
502b8322 2032sjlj_emit_function_enter (rtx dispatch_label)
4956d07c 2033{
52a11cbf 2034 rtx fn_begin, fc, mem, seq;
4956d07c 2035
52a11cbf 2036 fc = cfun->eh->sjlj_fc;
4956d07c 2037
52a11cbf 2038 start_sequence ();
8a4451aa 2039
8979edec
JL
2040 /* We're storing this libcall's address into memory instead of
2041 calling it directly. Thus, we must call assemble_external_libcall
2042 here, as we cannot depend on emit_library_call to do it for us. */
2043 assemble_external_libcall (eh_personality_libfunc);
f4ef873c 2044 mem = adjust_address (fc, Pmode, sjlj_fc_personality_ofs);
52a11cbf
RH
2045 emit_move_insn (mem, eh_personality_libfunc);
2046
f4ef873c 2047 mem = adjust_address (fc, Pmode, sjlj_fc_lsda_ofs);
52a11cbf
RH
2048 if (cfun->uses_eh_lsda)
2049 {
2050 char buf[20];
df696a75 2051 ASM_GENERATE_INTERNAL_LABEL (buf, "LLSDA", current_function_funcdef_no);
52a11cbf 2052 emit_move_insn (mem, gen_rtx_SYMBOL_REF (Pmode, ggc_strdup (buf)));
8a4451aa 2053 }
52a11cbf
RH
2054 else
2055 emit_move_insn (mem, const0_rtx);
3f2c5d1a 2056
52a11cbf
RH
2057#ifdef DONT_USE_BUILTIN_SETJMP
2058 {
2059 rtx x, note;
9defc9b7 2060 x = emit_library_call_value (setjmp_libfunc, NULL_RTX, LCT_RETURNS_TWICE,
52a11cbf
RH
2061 TYPE_MODE (integer_type_node), 1,
2062 plus_constant (XEXP (fc, 0),
2063 sjlj_fc_jbuf_ofs), Pmode);
2064
2e040219 2065 note = emit_note (NOTE_INSN_EXPECTED_VALUE);
52a11cbf
RH
2066 NOTE_EXPECTED_VALUE (note) = gen_rtx_EQ (VOIDmode, x, const0_rtx);
2067
2068 emit_cmp_and_jump_insns (x, const0_rtx, NE, 0,
a06ef755 2069 TYPE_MODE (integer_type_node), 0, dispatch_label);
52a11cbf
RH
2070 }
2071#else
2072 expand_builtin_setjmp_setup (plus_constant (XEXP (fc, 0), sjlj_fc_jbuf_ofs),
2073 dispatch_label);
4956d07c 2074#endif
4956d07c 2075
52a11cbf
RH
2076 emit_library_call (unwind_sjlj_register_libfunc, LCT_NORMAL, VOIDmode,
2077 1, XEXP (fc, 0), Pmode);
12670d88 2078
52a11cbf
RH
2079 seq = get_insns ();
2080 end_sequence ();
4956d07c 2081
52a11cbf
RH
2082 /* ??? Instead of doing this at the beginning of the function,
2083 do this in a block that is at loop level 0 and dominates all
2084 can_throw_internal instructions. */
4956d07c 2085
52a11cbf
RH
2086 for (fn_begin = get_insns (); ; fn_begin = NEXT_INSN (fn_begin))
2087 if (GET_CODE (fn_begin) == NOTE
2088 && NOTE_LINE_NUMBER (fn_begin) == NOTE_INSN_FUNCTION_BEG)
2089 break;
2f937369 2090 emit_insn_after (seq, fn_begin);
4956d07c
MS
2091}
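/* Editorial sketch, not part of the original source: the prologue emitted
   above behaves roughly like

	fc.personality = <personality routine>;   e.g. __gxx_personality_sj0
	fc.lsda = &LLSDAnnn;                      or 0 when no LSDA is needed
	if (__builtin_setjmp (fc.jbuf))           or a setjmp libcall
	  goto dispatch_label;
	_Unwind_SjLj_Register (&fc);

   The personality name is only an example of what eh_personality_libfunc
   may resolve to; the language front end supplies the real routine, and
   the field names here just stand for the sjlj_fc_*_ofs offsets.  */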
2092
52a11cbf
RH
2093/* Call back from expand_function_end to know where we should put
2094 the call to unwind_sjlj_unregister_libfunc if needed. */
12670d88 2095
52a11cbf 2096void
502b8322 2097sjlj_emit_function_exit_after (rtx after)
52a11cbf
RH
2098{
2099 cfun->eh->sjlj_exit_after = after;
2100}
4956d07c
MS
2101
2102static void
502b8322 2103sjlj_emit_function_exit (void)
52a11cbf
RH
2104{
2105 rtx seq;
4956d07c 2106
52a11cbf 2107 start_sequence ();
ce152ef8 2108
52a11cbf
RH
2109 emit_library_call (unwind_sjlj_unregister_libfunc, LCT_NORMAL, VOIDmode,
2110 1, XEXP (cfun->eh->sjlj_fc, 0), Pmode);
e6cfb550 2111
52a11cbf
RH
2112 seq = get_insns ();
2113 end_sequence ();
4956d07c 2114
52a11cbf
RH
2115 /* ??? Really this can be done in any block at loop level 0 that
2116 post-dominates all can_throw_internal instructions. This is
2117 the last possible moment. */
9a0d1e1b 2118
2f937369 2119 emit_insn_after (seq, cfun->eh->sjlj_exit_after);
9a0d1e1b
AM
2120}
2121
52a11cbf 2122static void
502b8322 2123sjlj_emit_dispatch_table (rtx dispatch_label, struct sjlj_lp_info *lp_info)
ce152ef8 2124{
52a11cbf
RH
2125 int i, first_reachable;
2126 rtx mem, dispatch, seq, fc;
2127
2128 fc = cfun->eh->sjlj_fc;
2129
2130 start_sequence ();
2131
2132 emit_label (dispatch_label);
3f2c5d1a 2133
52a11cbf
RH
2134#ifndef DONT_USE_BUILTIN_SETJMP
2135 expand_builtin_setjmp_receiver (dispatch_label);
2136#endif
2137
2138 /* Load up dispatch index, exc_ptr and filter values from the
2139 function context. */
f4ef873c
RK
2140 mem = adjust_address (fc, TYPE_MODE (integer_type_node),
2141 sjlj_fc_call_site_ofs);
52a11cbf
RH
2142 dispatch = copy_to_reg (mem);
2143
f4ef873c 2144 mem = adjust_address (fc, word_mode, sjlj_fc_data_ofs);
52a11cbf
RH
2145 if (word_mode != Pmode)
2146 {
2147#ifdef POINTERS_EXTEND_UNSIGNED
2148 mem = convert_memory_address (Pmode, mem);
2149#else
2150 mem = convert_to_mode (Pmode, mem, 0);
2151#endif
2152 }
2153 emit_move_insn (cfun->eh->exc_ptr, mem);
2154
f4ef873c 2155 mem = adjust_address (fc, word_mode, sjlj_fc_data_ofs + UNITS_PER_WORD);
52a11cbf 2156 emit_move_insn (cfun->eh->filter, mem);
4956d07c 2157
52a11cbf
RH
2158 /* Jump to one of the directly reachable regions. */
2159 /* ??? This really ought to be using a switch statement. */
2160
2161 first_reachable = 0;
2162 for (i = cfun->eh->last_region_number; i > 0; --i)
a1622f83 2163 {
98ce21b3 2164 if (! lp_info[i].directly_reachable)
52a11cbf 2165 continue;
a1622f83 2166
52a11cbf
RH
2167 if (! first_reachable)
2168 {
2169 first_reachable = i;
2170 continue;
2171 }
e6cfb550 2172
a06ef755
RK
2173 emit_cmp_and_jump_insns (dispatch, GEN_INT (lp_info[i].dispatch_index),
2174 EQ, NULL_RTX, TYPE_MODE (integer_type_node), 0,
52a11cbf 2175 cfun->eh->region_array[i]->post_landing_pad);
a1622f83 2176 }
9a0d1e1b 2177
52a11cbf
RH
2178 seq = get_insns ();
2179 end_sequence ();
4956d07c 2180
2f937369
DM
2181 emit_insn_before (seq, (cfun->eh->region_array[first_reachable]
2182 ->post_landing_pad));
ce152ef8
AM
2183}
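/* Editorial sketch, not part of the original source: when the setjmp taken
   in sjlj_emit_function_enter returns nonzero, control reaches the code
   emitted above, which in effect does

	dispatch = fc.call_site;
	exc_ptr  = fc.data[0];
	filter   = fc.data[1];
	if (dispatch == D2) goto region_2_post_landing_pad;
	if (dispatch == D3) goto region_3_post_landing_pad;
	-- fall through to the first reachable region's post-landing pad

   The Dn constants are the dispatch indices assigned earlier; the first
   directly reachable region needs no explicit compare.  */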
2184
52a11cbf 2185static void
502b8322 2186sjlj_build_landing_pads (void)
ce152ef8 2187{
52a11cbf 2188 struct sjlj_lp_info *lp_info;
ce152ef8 2189
52a11cbf
RH
2190 lp_info = (struct sjlj_lp_info *) xcalloc (cfun->eh->last_region_number + 1,
2191 sizeof (struct sjlj_lp_info));
ce152ef8 2192
52a11cbf
RH
2193 if (sjlj_find_directly_reachable_regions (lp_info))
2194 {
2195 rtx dispatch_label = gen_label_rtx ();
ce152ef8 2196
52a11cbf
RH
2197 cfun->eh->sjlj_fc
2198 = assign_stack_local (TYPE_MODE (sjlj_fc_type_node),
2199 int_size_in_bytes (sjlj_fc_type_node),
2200 TYPE_ALIGN (sjlj_fc_type_node));
4956d07c 2201
52a11cbf
RH
2202 sjlj_assign_call_site_values (dispatch_label, lp_info);
2203 sjlj_mark_call_sites (lp_info);
a1622f83 2204
52a11cbf
RH
2205 sjlj_emit_function_enter (dispatch_label);
2206 sjlj_emit_dispatch_table (dispatch_label, lp_info);
2207 sjlj_emit_function_exit ();
2208 }
a1622f83 2209
52a11cbf 2210 free (lp_info);
4956d07c 2211}
ce152ef8 2212
ce152ef8 2213void
502b8322 2214finish_eh_generation (void)
ce152ef8 2215{
52a11cbf
RH
2216 /* Nothing to do if no regions created. */
2217 if (cfun->eh->region_tree == NULL)
ce152ef8
AM
2218 return;
2219
52a11cbf
RH
2220 /* The object here is to provide find_basic_blocks with detailed
2221 information (via reachable_handlers) on how exception control
2222 flows within the function. In this first pass, we can include
2223 type information garnered from ERT_THROW and ERT_ALLOWED_EXCEPTIONS
2224 regions, and hope that it will be useful in deleting unreachable
2225 handlers. Subsequently, we will generate landing pads which will
2226 connect many of the handlers, and then type information will not
2227 be effective. Still, this is a win over previous implementations. */
2228
95479831 2229 cleanup_cfg (CLEANUP_PRE_LOOP | CLEANUP_NO_INSN_DEL);
52a11cbf
RH
2230
2231 /* These registers are used by the landing pads. Make sure they
2232 have been generated. */
86c99549
RH
2233 get_exception_pointer (cfun);
2234 get_exception_filter (cfun);
52a11cbf
RH
2235
2236 /* Construct the landing pads. */
2237
2238 assign_filter_values ();
2239 build_post_landing_pads ();
2240 connect_post_landing_pads ();
2241 if (USING_SJLJ_EXCEPTIONS)
2242 sjlj_build_landing_pads ();
2243 else
2244 dw2_build_landing_pads ();
ce152ef8 2245
52a11cbf 2246 cfun->eh->built_landing_pads = 1;
ce152ef8 2247
52a11cbf
RH
2248 /* We've totally changed the CFG. Start over. */
2249 find_exception_handler_labels ();
4793dca1 2250 rebuild_jump_labels (get_insns ());
52a11cbf 2251 find_basic_blocks (get_insns (), max_reg_num (), 0);
95479831 2252 cleanup_cfg (CLEANUP_PRE_LOOP | CLEANUP_NO_INSN_DEL);
ce152ef8 2253}
4956d07c 2254\f
6a58eee9 2255static hashval_t
502b8322 2256ehl_hash (const void *pentry)
6a58eee9
RH
2257{
2258 struct ehl_map_entry *entry = (struct ehl_map_entry *) pentry;
2259
2260 /* 2^32 * ((sqrt(5) - 1) / 2) */
2261 const hashval_t scaled_golden_ratio = 0x9e3779b9;
2262 return CODE_LABEL_NUMBER (entry->label) * scaled_golden_ratio;
2263}
2264
2265static int
502b8322 2266ehl_eq (const void *pentry, const void *pdata)
6a58eee9
RH
2267{
2268 struct ehl_map_entry *entry = (struct ehl_map_entry *) pentry;
2269 struct ehl_map_entry *data = (struct ehl_map_entry *) pdata;
2270
2271 return entry->label == data->label;
2272}
2273
52a11cbf 2274/* This section handles removing dead code for flow. */
154bba13 2275
6a58eee9 2276/* Remove LABEL from exception_handler_label_map. */
154bba13 2277
52a11cbf 2278static void
502b8322 2279remove_exception_handler_label (rtx label)
154bba13 2280{
6a58eee9 2281 struct ehl_map_entry **slot, tmp;
100d81d4 2282
6a58eee9 2283 /* If exception_handler_label_map was not built yet,
655dd289 2284 there is nothing to do. */
e2500fed 2285 if (cfun->eh->exception_handler_label_map == NULL)
655dd289
JJ
2286 return;
2287
6a58eee9
RH
2288 tmp.label = label;
2289 slot = (struct ehl_map_entry **)
e2500fed 2290 htab_find_slot (cfun->eh->exception_handler_label_map, &tmp, NO_INSERT);
6a58eee9
RH
2291 if (! slot)
2292 abort ();
154bba13 2293
e2500fed 2294 htab_clear_slot (cfun->eh->exception_handler_label_map, (void **) slot);
154bba13
TT
2295}
2296
52a11cbf 2297/* Splice REGION from the region tree etc. */
12670d88 2298
f19c9228 2299static void
502b8322 2300remove_eh_handler (struct eh_region *region)
4956d07c 2301{
ff2c46ac 2302 struct eh_region **pp, **pp_start, *p, *outer, *inner;
52a11cbf 2303 rtx lab;
4956d07c 2304
52a11cbf
RH
2305 /* For the benefit of efficiently handling REG_EH_REGION notes,
2306 replace this region in the region array with its containing
2307 region. Note that previous region deletions may result in
6a58eee9
RH
2308 multiple copies of this region in the array, so we have a
2309 list of alternate numbers by which we are known. */
2310
ff2c46ac
RH
2311 outer = region->outer;
2312 cfun->eh->region_array[region->region_number] = outer;
6a58eee9
RH
2313 if (region->aka)
2314 {
2315 int i;
2316 EXECUTE_IF_SET_IN_BITMAP (region->aka, 0, i,
ff2c46ac 2317 { cfun->eh->region_array[i] = outer; });
6a58eee9
RH
2318 }
2319
ff2c46ac 2320 if (outer)
6a58eee9 2321 {
ff2c46ac 2322 if (!outer->aka)
e2500fed 2323 outer->aka = BITMAP_GGC_ALLOC ();
6a58eee9 2324 if (region->aka)
ff2c46ac
RH
2325 bitmap_a_or_b (outer->aka, outer->aka, region->aka);
2326 bitmap_set_bit (outer->aka, region->region_number);
6a58eee9 2327 }
52a11cbf
RH
2328
2329 if (cfun->eh->built_landing_pads)
2330 lab = region->landing_pad;
2331 else
2332 lab = region->label;
2333 if (lab)
2334 remove_exception_handler_label (lab);
2335
ff2c46ac
RH
2336 if (outer)
2337 pp_start = &outer->inner;
52a11cbf 2338 else
ff2c46ac
RH
2339 pp_start = &cfun->eh->region_tree;
2340 for (pp = pp_start, p = *pp; p != region; pp = &p->next_peer, p = *pp)
52a11cbf 2341 continue;
ff2c46ac 2342 *pp = region->next_peer;
12670d88 2343
ff2c46ac
RH
2344 inner = region->inner;
2345 if (inner)
4956d07c 2346 {
ff2c46ac
RH
2347 for (p = inner; p->next_peer ; p = p->next_peer)
2348 p->outer = outer;
2349 p->outer = outer;
2350
2351 p->next_peer = *pp_start;
2352 *pp_start = inner;
4956d07c 2353 }
f19c9228 2354
52a11cbf
RH
2355 if (region->type == ERT_CATCH)
2356 {
2357 struct eh_region *try, *next, *prev;
f19c9228 2358
52a11cbf
RH
2359 for (try = region->next_peer;
2360 try->type == ERT_CATCH;
2361 try = try->next_peer)
2362 continue;
2363 if (try->type != ERT_TRY)
2364 abort ();
f19c9228 2365
52a11cbf
RH
2366 next = region->u.catch.next_catch;
2367 prev = region->u.catch.prev_catch;
f19c9228 2368
52a11cbf
RH
2369 if (next)
2370 next->u.catch.prev_catch = prev;
2371 else
2372 try->u.try.last_catch = prev;
2373 if (prev)
2374 prev->u.catch.next_catch = next;
2375 else
2376 {
2377 try->u.try.catch = next;
2378 if (! next)
2379 remove_eh_handler (try);
2380 }
2381 }
4956d07c
MS
2382}
2383
52a11cbf
RH
2384/* LABEL heads a basic block that is about to be deleted. If this
2385 label corresponds to an exception region, we may be able to
2386 delete the region. */
4956d07c
MS
2387
2388void
502b8322 2389maybe_remove_eh_handler (rtx label)
4956d07c 2390{
6a58eee9
RH
2391 struct ehl_map_entry **slot, tmp;
2392 struct eh_region *region;
4956d07c 2393
52a11cbf
RH
2394 /* ??? After generating landing pads, it's not so simple to determine
2395 if the region data is completely unused. One must examine the
2396 landing pad and the post landing pad, and whether an inner try block
2397 is referencing the catch handlers directly. */
2398 if (cfun->eh->built_landing_pads)
4956d07c
MS
2399 return;
2400
6a58eee9
RH
2401 tmp.label = label;
2402 slot = (struct ehl_map_entry **)
e2500fed 2403 htab_find_slot (cfun->eh->exception_handler_label_map, &tmp, NO_INSERT);
6a58eee9
RH
2404 if (! slot)
2405 return;
2406 region = (*slot)->region;
2407 if (! region)
2408 return;
2409
2410 /* Flow will want to remove MUST_NOT_THROW regions as unreachable
2411 because there is no path to the fallback call to terminate.
2412 But the region continues to affect call-site data until there
2413 are no more contained calls, which we don't see here. */
2414 if (region->type == ERT_MUST_NOT_THROW)
87ff9c8e 2415 {
e2500fed 2416 htab_clear_slot (cfun->eh->exception_handler_label_map, (void **) slot);
6a58eee9 2417 region->label = NULL_RTX;
87ff9c8e 2418 }
6a58eee9
RH
2419 else
2420 remove_eh_handler (region);
2421}
2422
2423/* Invokes CALLBACK for every exception handler label. Only used by old
2424 loop hackery; should not be used by new code. */
2425
2426void
502b8322 2427for_each_eh_label (void (*callback) (rtx))
6a58eee9 2428{
e2500fed 2429 htab_traverse (cfun->eh->exception_handler_label_map, for_each_eh_label_1,
6a58eee9 2430 (void *)callback);
87ff9c8e
RH
2431}
2432
6a58eee9 2433static int
502b8322 2434for_each_eh_label_1 (void **pentry, void *data)
6a58eee9
RH
2435{
2436 struct ehl_map_entry *entry = *(struct ehl_map_entry **)pentry;
502b8322 2437 void (*callback) (rtx) = (void (*) (rtx)) data;
6a58eee9
RH
2438
2439 (*callback) (entry->label);
2440 return 1;
2441}
52a11cbf
RH
2442\f
2443/* This section describes CFG exception edges for flow. */
87ff9c8e 2444
52a11cbf 2445/* For communicating between calls to reachable_next_level. */
e2500fed 2446struct reachable_info GTY(())
87ff9c8e 2447{
52a11cbf
RH
2448 tree types_caught;
2449 tree types_allowed;
2450 rtx handlers;
2451};
87ff9c8e 2452
52a11cbf
RH
2453/* A subroutine of reachable_next_level. Return true if TYPE, or a
2454 base class of TYPE, is in HANDLED. */
87ff9c8e 2455
52a11cbf 2456static int
502b8322 2457check_handled (tree handled, tree type)
87ff9c8e 2458{
52a11cbf
RH
2459 tree t;
2460
2461 /* We can check for exact matches without front-end help. */
2462 if (! lang_eh_type_covers)
f54a7f6f 2463 {
52a11cbf
RH
2464 for (t = handled; t ; t = TREE_CHAIN (t))
2465 if (TREE_VALUE (t) == type)
2466 return 1;
2467 }
2468 else
2469 {
2470 for (t = handled; t ; t = TREE_CHAIN (t))
2471 if ((*lang_eh_type_covers) (TREE_VALUE (t), type))
2472 return 1;
f54a7f6f 2473 }
52a11cbf
RH
2474
2475 return 0;
87ff9c8e
RH
2476}
2477
52a11cbf
RH
2478/* A subroutine of reachable_next_level. If we are collecting a list
2479 of handlers, add one. After landing pad generation, reference
2480 it instead of the handlers themselves. Further, the handlers are
3f2c5d1a 2481 all wired together, so by referencing one, we've got them all.
52a11cbf
RH
2482 Before landing pad generation we reference each handler individually.
2483
2484 LP_REGION contains the landing pad; REGION is the handler. */
87ff9c8e
RH
2485
2486static void
502b8322 2487add_reachable_handler (struct reachable_info *info, struct eh_region *lp_region, struct eh_region *region)
87ff9c8e 2488{
52a11cbf
RH
2489 if (! info)
2490 return;
2491
2492 if (cfun->eh->built_landing_pads)
87ff9c8e 2493 {
52a11cbf
RH
2494 if (! info->handlers)
2495 info->handlers = alloc_INSN_LIST (lp_region->landing_pad, NULL_RTX);
87ff9c8e 2496 }
52a11cbf
RH
2497 else
2498 info->handlers = alloc_INSN_LIST (region->label, info->handlers);
87ff9c8e
RH
2499}
2500
3f2c5d1a 2501/* Process one level of exception regions for reachability.
52a11cbf
RH
2502 If TYPE_THROWN is non-null, then it is the *exact* type being
2503 propagated. If INFO is non-null, then collect handler labels
2504 and caught/allowed type information between invocations. */
87ff9c8e 2505
52a11cbf 2506static enum reachable_code
502b8322
AJ
2507reachable_next_level (struct eh_region *region, tree type_thrown,
2508 struct reachable_info *info)
87ff9c8e 2509{
52a11cbf
RH
2510 switch (region->type)
2511 {
2512 case ERT_CLEANUP:
2513 /* Before landing-pad generation, we model control flow
2514 directly to the individual handlers. In this way we can
2515 see that catch handler types may shadow one another. */
2516 add_reachable_handler (info, region, region);
2517 return RNL_MAYBE_CAUGHT;
2518
2519 case ERT_TRY:
2520 {
2521 struct eh_region *c;
2522 enum reachable_code ret = RNL_NOT_CAUGHT;
fa51b01b 2523
52a11cbf
RH
2524 for (c = region->u.try.catch; c ; c = c->u.catch.next_catch)
2525 {
2526 /* A catch-all handler ends the search. */
6d41a92f 2527 if (c->u.catch.type_list == NULL)
52a11cbf
RH
2528 {
2529 add_reachable_handler (info, region, c);
2530 return RNL_CAUGHT;
2531 }
2532
2533 if (type_thrown)
2534 {
a8154559 2535 /* If we have at least one type match, end the search. */
6d41a92f 2536 tree tp_node = c->u.catch.type_list;
3f2c5d1a 2537
6d41a92f 2538 for (; tp_node; tp_node = TREE_CHAIN (tp_node))
52a11cbf 2539 {
6d41a92f
OH
2540 tree type = TREE_VALUE (tp_node);
2541
2542 if (type == type_thrown
2543 || (lang_eh_type_covers
2544 && (*lang_eh_type_covers) (type, type_thrown)))
2545 {
2546 add_reachable_handler (info, region, c);
2547 return RNL_CAUGHT;
2548 }
52a11cbf
RH
2549 }
2550
2551 /* If we have definitive information of a match failure,
2552 the catch won't trigger. */
2553 if (lang_eh_type_covers)
2554 return RNL_NOT_CAUGHT;
2555 }
2556
6d41a92f
OH
2557 /* At this point, we either don't know what type is thrown or
2558 don't have front-end assistance to help deciding if it is
2559 covered by one of the types in the list for this region.
3f2c5d1a 2560
6d41a92f
OH
2561 We'd then like to add this region to the list of reachable
2562 handlers since it is indeed potentially reachable based on the
3f2c5d1a
RS
2563 information we have.
2564
6d41a92f
OH
2565 Actually, this handler is for sure not reachable if all the
2566 types it matches have already been caught. That is, it is only
2567 potentially reachable if at least one of the types it catches
2568 has not been previously caught. */
2569
52a11cbf
RH
2570 if (! info)
2571 ret = RNL_MAYBE_CAUGHT;
6d41a92f 2572 else
52a11cbf 2573 {
6d41a92f
OH
2574 tree tp_node = c->u.catch.type_list;
2575 bool maybe_reachable = false;
52a11cbf 2576
6d41a92f
OH
2577 /* Compute the potential reachability of this handler and
2578 update the list of types caught at the same time. */
2579 for (; tp_node; tp_node = TREE_CHAIN (tp_node))
2580 {
2581 tree type = TREE_VALUE (tp_node);
2582
2583 if (! check_handled (info->types_caught, type))
2584 {
2585 info->types_caught
2586 = tree_cons (NULL, type, info->types_caught);
3f2c5d1a 2587
6d41a92f
OH
2588 maybe_reachable = true;
2589 }
2590 }
3f2c5d1a 2591
6d41a92f
OH
2592 if (maybe_reachable)
2593 {
2594 add_reachable_handler (info, region, c);
3f2c5d1a 2595
6d41a92f
OH
2596 /* ??? If the catch type is a base class of every allowed
2597 type, then we know we can stop the search. */
2598 ret = RNL_MAYBE_CAUGHT;
2599 }
52a11cbf
RH
2600 }
2601 }
87ff9c8e 2602
52a11cbf
RH
2603 return ret;
2604 }
87ff9c8e 2605
52a11cbf
RH
2606 case ERT_ALLOWED_EXCEPTIONS:
2607 /* An empty list of types definitely ends the search. */
2608 if (region->u.allowed.type_list == NULL_TREE)
2609 {
2610 add_reachable_handler (info, region, region);
2611 return RNL_CAUGHT;
2612 }
87ff9c8e 2613
52a11cbf
RH
2614 /* Collect a list of lists of allowed types for use in detecting
2615 when a catch may be transformed into a catch-all. */
2616 if (info)
2617 info->types_allowed = tree_cons (NULL_TREE,
2618 region->u.allowed.type_list,
2619 info->types_allowed);
3f2c5d1a 2620
684d9f3b 2621 /* If we have definitive information about the type hierarchy,
52a11cbf
RH
2622 then we can tell if the thrown type will pass through the
2623 filter. */
2624 if (type_thrown && lang_eh_type_covers)
2625 {
2626 if (check_handled (region->u.allowed.type_list, type_thrown))
2627 return RNL_NOT_CAUGHT;
2628 else
2629 {
2630 add_reachable_handler (info, region, region);
2631 return RNL_CAUGHT;
2632 }
2633 }
21cd906e 2634
52a11cbf
RH
2635 add_reachable_handler (info, region, region);
2636 return RNL_MAYBE_CAUGHT;
21cd906e 2637
52a11cbf 2638 case ERT_CATCH:
fbe5a4a6 2639 /* Catch regions are handled by their controlling try region. */
52a11cbf 2640 return RNL_NOT_CAUGHT;
21cd906e 2641
52a11cbf
RH
2642 case ERT_MUST_NOT_THROW:
2643 /* Here we end our search, since no exceptions may propagate.
2644 If we've previously touched down at some landing pad, then the
2645 explicit function call we generated may be used. Otherwise
2646 the call is made by the runtime. */
2647 if (info && info->handlers)
21cd906e 2648 {
52a11cbf 2649 add_reachable_handler (info, region, region);
0fb7aeda 2650 return RNL_CAUGHT;
21cd906e 2651 }
52a11cbf
RH
2652 else
2653 return RNL_BLOCKED;
21cd906e 2654
52a11cbf
RH
2655 case ERT_THROW:
2656 case ERT_FIXUP:
3f2c5d1a 2657 case ERT_UNKNOWN:
52a11cbf
RH
2658 /* Shouldn't see these here. */
2659 break;
21cd906e 2660 }
fa51b01b 2661
52a11cbf 2662 abort ();
fa51b01b 2663}
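/* Editorial note, not part of the original source: the return codes above
   form a small ladder that the callers rely on.  RNL_NOT_CAUGHT means keep
   walking outward; RNL_MAYBE_CAUGHT records the handler and keeps walking;
   RNL_CAUGHT records the handler and stops the walk; RNL_BLOCKED stops the
   walk with the guarantee that nothing propagates past this region (which
   is how can_throw_internal answers "no" for must-not-throw).  */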
4956d07c 2664
52a11cbf
RH
2665/* Retrieve a list of labels of exception handlers which can be
2666 reached by a given insn. */
4956d07c 2667
52a11cbf 2668rtx
502b8322 2669reachable_handlers (rtx insn)
4956d07c 2670{
52a11cbf
RH
2671 struct reachable_info info;
2672 struct eh_region *region;
2673 tree type_thrown;
2674 int region_number;
fb13d4d0 2675
52a11cbf
RH
2676 if (GET_CODE (insn) == JUMP_INSN
2677 && GET_CODE (PATTERN (insn)) == RESX)
2678 region_number = XINT (PATTERN (insn), 0);
2679 else
1ef1bf06
AM
2680 {
2681 rtx note = find_reg_note (insn, REG_EH_REGION, NULL_RTX);
52a11cbf
RH
2682 if (!note || INTVAL (XEXP (note, 0)) <= 0)
2683 return NULL;
2684 region_number = INTVAL (XEXP (note, 0));
27a36778 2685 }
4956d07c 2686
52a11cbf 2687 memset (&info, 0, sizeof (info));
4956d07c 2688
52a11cbf 2689 region = cfun->eh->region_array[region_number];
fb13d4d0 2690
52a11cbf 2691 type_thrown = NULL_TREE;
7f206d8f
RH
2692 if (GET_CODE (insn) == JUMP_INSN
2693 && GET_CODE (PATTERN (insn)) == RESX)
2694 {
2695 /* A RESX leaves a region instead of entering it. Thus the
2696 region itself may have been deleted out from under us. */
2697 if (region == NULL)
2698 return NULL;
2699 region = region->outer;
2700 }
2701 else if (region->type == ERT_THROW)
52a11cbf
RH
2702 {
2703 type_thrown = region->u.throw.type;
2704 region = region->outer;
2705 }
fac62ecf 2706
bafb714b
MM
2707 while (region)
2708 {
2709 if (reachable_next_level (region, type_thrown, &info) >= RNL_CAUGHT)
a944ceb9 2710 break;
bafb714b
MM
2711 /* If we have processed one cleanup, there is no point in
2712 processing any more of them. Each cleanup will have an edge
2713 to the next outer cleanup region, so the flow graph will be
2714 accurate. */
2715 if (region->type == ERT_CLEANUP)
2716 region = region->u.cleanup.prev_try;
2717 else
2718 region = region->outer;
2719 }
502b8322 2720
52a11cbf 2721 return info.handlers;
fb13d4d0
JM
2722}
2723
52a11cbf
RH
2724/* Determine if the given INSN can throw an exception that is caught
2725 within the function. */
4956d07c 2726
52a11cbf 2727bool
502b8322 2728can_throw_internal (rtx insn)
4956d07c 2729{
52a11cbf
RH
2730 struct eh_region *region;
2731 tree type_thrown;
2732 rtx note;
e6cfb550 2733
52a11cbf
RH
2734 if (! INSN_P (insn))
2735 return false;
12670d88 2736
52a11cbf
RH
2737 if (GET_CODE (insn) == INSN
2738 && GET_CODE (PATTERN (insn)) == SEQUENCE)
2739 insn = XVECEXP (PATTERN (insn), 0, 0);
4956d07c 2740
52a11cbf
RH
2741 if (GET_CODE (insn) == CALL_INSN
2742 && GET_CODE (PATTERN (insn)) == CALL_PLACEHOLDER)
4956d07c 2743 {
52a11cbf
RH
2744 int i;
2745 for (i = 0; i < 3; ++i)
4956d07c 2746 {
52a11cbf
RH
2747 rtx sub = XEXP (PATTERN (insn), i);
2748 for (; sub ; sub = NEXT_INSN (sub))
2749 if (can_throw_internal (sub))
2750 return true;
4956d07c 2751 }
52a11cbf 2752 return false;
4956d07c
MS
2753 }
2754
52a11cbf
RH
2755 /* Every insn that might throw has an EH_REGION note. */
2756 note = find_reg_note (insn, REG_EH_REGION, NULL_RTX);
2757 if (!note || INTVAL (XEXP (note, 0)) <= 0)
2758 return false;
4956d07c 2759
52a11cbf 2760 region = cfun->eh->region_array[INTVAL (XEXP (note, 0))];
4956d07c 2761
52a11cbf
RH
2762 type_thrown = NULL_TREE;
2763 if (region->type == ERT_THROW)
2764 {
2765 type_thrown = region->u.throw.type;
2766 region = region->outer;
2767 }
4956d07c 2768
52a11cbf
RH
2769 /* If this exception is ignored by each and every containing region,
2770 then control passes straight out. The runtime may handle some
2771 regions, which also do not require processing internally. */
2772 for (; region; region = region->outer)
2773 {
2774 enum reachable_code how = reachable_next_level (region, type_thrown, 0);
2775 if (how == RNL_BLOCKED)
2776 return false;
2777 if (how != RNL_NOT_CAUGHT)
0fb7aeda 2778 return true;
4956d07c 2779 }
4956d07c 2780
52a11cbf
RH
2781 return false;
2782}
4956d07c 2783
52a11cbf
RH
2784/* Determine if the given INSN can throw an exception that is
2785 visible outside the function. */
4956d07c 2786
52a11cbf 2787bool
502b8322 2788can_throw_external (rtx insn)
4956d07c 2789{
52a11cbf
RH
2790 struct eh_region *region;
2791 tree type_thrown;
2792 rtx note;
4956d07c 2793
52a11cbf
RH
2794 if (! INSN_P (insn))
2795 return false;
2796
2797 if (GET_CODE (insn) == INSN
2798 && GET_CODE (PATTERN (insn)) == SEQUENCE)
2799 insn = XVECEXP (PATTERN (insn), 0, 0);
2800
2801 if (GET_CODE (insn) == CALL_INSN
2802 && GET_CODE (PATTERN (insn)) == CALL_PLACEHOLDER)
4956d07c 2803 {
52a11cbf
RH
2804 int i;
2805 for (i = 0; i < 3; ++i)
4956d07c 2806 {
52a11cbf
RH
2807 rtx sub = XEXP (PATTERN (insn), i);
2808 for (; sub ; sub = NEXT_INSN (sub))
2809 if (can_throw_external (sub))
2810 return true;
4956d07c 2811 }
52a11cbf 2812 return false;
4956d07c 2813 }
52a11cbf
RH
2814
2815 note = find_reg_note (insn, REG_EH_REGION, NULL_RTX);
2816 if (!note)
2817 {
2818 /* Calls (and trapping insns) without notes are outside any
2819 exception handling region in this function. We have to
2820 assume it might throw. Given that the front end and middle
2821 ends mark known NOTHROW functions, this isn't so wildly
2822 inaccurate. */
2823 return (GET_CODE (insn) == CALL_INSN
2824 || (flag_non_call_exceptions
2825 && may_trap_p (PATTERN (insn))));
2826 }
2827 if (INTVAL (XEXP (note, 0)) <= 0)
2828 return false;
2829
2830 region = cfun->eh->region_array[INTVAL (XEXP (note, 0))];
2831
2832 type_thrown = NULL_TREE;
2833 if (region->type == ERT_THROW)
2834 {
2835 type_thrown = region->u.throw.type;
2836 region = region->outer;
2837 }
2838
2839 /* If the exception is caught or blocked by any containing region,
2840 then it is not seen by any calling function. */
2841 for (; region ; region = region->outer)
2842 if (reachable_next_level (region, type_thrown, NULL) >= RNL_CAUGHT)
2843 return false;
2844
2845 return true;
4956d07c 2846}
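/* Editorial note, not part of the original source: the two predicates above
   answer complementary questions.  A call inside a try block with a
   catch-all handler has can_throw_internal true and can_throw_external
   false; a call with no enclosing handler at all has can_throw_internal
   false and can_throw_external true; a call inside a must-not-throw region
   is false for both, since the region blocks propagation.  */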
1ef1bf06 2847
b6128b8c 2848/* Set current_function_nothrow and cfun->all_throwers_are_sibcalls. */
6814a8a0 2849
b6128b8c 2850void
502b8322 2851set_nothrow_function_flags (void)
1ef1bf06
AM
2852{
2853 rtx insn;
502b8322 2854
b6128b8c 2855 current_function_nothrow = 1;
1ef1bf06 2856
b6128b8c
SH
2857 /* Assume cfun->all_throwers_are_sibcalls until we encounter
2858 something that can throw an exception. We specifically exempt
2859 CALL_INSNs that are SIBLING_CALL_P, as these are really jumps,
2860 and can't throw. Most CALL_INSNs are not SIBLING_CALL_P, so this
2861 is optimistic. */
1ef1bf06 2862
b6128b8c
SH
2863 cfun->all_throwers_are_sibcalls = 1;
2864
2865 if (! flag_exceptions)
2866 return;
502b8322 2867
1ef1bf06 2868 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
52a11cbf 2869 if (can_throw_external (insn))
b6128b8c
SH
2870 {
2871 current_function_nothrow = 0;
2872
2873 if (GET_CODE (insn) != CALL_INSN || !SIBLING_CALL_P (insn))
2874 {
2875 cfun->all_throwers_are_sibcalls = 0;
2876 return;
2877 }
2878 }
2879
52a11cbf
RH
2880 for (insn = current_function_epilogue_delay_list; insn;
2881 insn = XEXP (insn, 1))
b6128b8c
SH
2882 if (can_throw_external (insn))
2883 {
2884 current_function_nothrow = 0;
4da896b2 2885
b6128b8c
SH
2886 if (GET_CODE (insn) != CALL_INSN || !SIBLING_CALL_P (insn))
2887 {
2888 cfun->all_throwers_are_sibcalls = 0;
2889 return;
2890 }
2891 }
1ef1bf06 2892}
52a11cbf 2893
ca55abae 2894\f
52a11cbf 2895/* Various hooks for unwind library. */
ca55abae
JM
2896
2897/* Do any necessary initialization to access arbitrary stack frames.
2898 On the SPARC, this means flushing the register windows. */
2899
2900void
502b8322 2901expand_builtin_unwind_init (void)
ca55abae
JM
2902{
2903 /* Set this so all the registers get saved in our frame; we need to be
30f7a378 2904 able to copy the saved values for any registers from frames we unwind. */
ca55abae
JM
2905 current_function_has_nonlocal_label = 1;
2906
2907#ifdef SETUP_FRAME_ADDRESSES
2908 SETUP_FRAME_ADDRESSES ();
2909#endif
2910}
2911
52a11cbf 2912rtx
502b8322 2913expand_builtin_eh_return_data_regno (tree arglist)
52a11cbf
RH
2914{
2915 tree which = TREE_VALUE (arglist);
2916 unsigned HOST_WIDE_INT iwhich;
2917
2918 if (TREE_CODE (which) != INTEGER_CST)
2919 {
2920 error ("argument of `__builtin_eh_return_regno' must be constant");
2921 return constm1_rtx;
2922 }
2923
2924 iwhich = tree_low_cst (which, 1);
2925 iwhich = EH_RETURN_DATA_REGNO (iwhich);
2926 if (iwhich == INVALID_REGNUM)
2927 return constm1_rtx;
2928
2929#ifdef DWARF_FRAME_REGNUM
2930 iwhich = DWARF_FRAME_REGNUM (iwhich);
2931#else
2932 iwhich = DBX_REGISTER_NUMBER (iwhich);
2933#endif
2934
3f2c5d1a 2935 return GEN_INT (iwhich);
52a11cbf
RH
2936}
2937
ca55abae
JM
2938/* Given a value extracted from the return address register or stack slot,
2939 return the actual address encoded in that value. */
2940
2941rtx
502b8322 2942expand_builtin_extract_return_addr (tree addr_tree)
ca55abae
JM
2943{
2944 rtx addr = expand_expr (addr_tree, NULL_RTX, Pmode, 0);
52a11cbf 2945
0ab38418
EC
2946 if (GET_MODE (addr) != Pmode
2947 && GET_MODE (addr) != VOIDmode)
2948 {
2949#ifdef POINTERS_EXTEND_UNSIGNED
2950 addr = convert_memory_address (Pmode, addr);
2951#else
2952 addr = convert_to_mode (Pmode, addr, 0);
2953#endif
2954 }
2955
52a11cbf
RH
2956 /* First mask out any unwanted bits. */
2957#ifdef MASK_RETURN_ADDR
22273300 2958 expand_and (Pmode, addr, MASK_RETURN_ADDR, addr);
52a11cbf
RH
2959#endif
2960
2961 /* Then adjust to find the real return address. */
2962#if defined (RETURN_ADDR_OFFSET)
2963 addr = plus_constant (addr, RETURN_ADDR_OFFSET);
2964#endif
2965
2966 return addr;
ca55abae
JM
2967}
2968
2969/* Given an actual address in addr_tree, do any necessary encoding
2970 and return the value to be stored in the return address register or
2971 stack slot so the epilogue will return to that address. */
2972
2973rtx
502b8322 2974expand_builtin_frob_return_addr (tree addr_tree)
ca55abae 2975{
4b6c1672 2976 rtx addr = expand_expr (addr_tree, NULL_RTX, ptr_mode, 0);
52a11cbf 2977
be128cd9 2978#ifdef POINTERS_EXTEND_UNSIGNED
4b6c1672
RK
2979 if (GET_MODE (addr) != Pmode)
2980 addr = convert_memory_address (Pmode, addr);
be128cd9
RK
2981#endif
2982
ca55abae 2983#ifdef RETURN_ADDR_OFFSET
52a11cbf 2984 addr = force_reg (Pmode, addr);
ca55abae
JM
2985 addr = plus_constant (addr, -RETURN_ADDR_OFFSET);
2986#endif
52a11cbf 2987
ca55abae
JM
2988 return addr;
2989}
2990
52a11cbf
RH
2991/* Set up the epilogue with the magic bits we'll need to return to the
2992 exception handler. */
ca55abae 2993
52a11cbf 2994void
502b8322
AJ
2995expand_builtin_eh_return (tree stackadj_tree ATTRIBUTE_UNUSED,
2996 tree handler_tree)
ca55abae 2997{
34dc173c 2998 rtx tmp;
ca55abae 2999
34dc173c
UW
3000#ifdef EH_RETURN_STACKADJ_RTX
3001 tmp = expand_expr (stackadj_tree, cfun->eh->ehr_stackadj, VOIDmode, 0);
be128cd9 3002#ifdef POINTERS_EXTEND_UNSIGNED
34dc173c
UW
3003 if (GET_MODE (tmp) != Pmode)
3004 tmp = convert_memory_address (Pmode, tmp);
3005#endif
3006 if (!cfun->eh->ehr_stackadj)
3007 cfun->eh->ehr_stackadj = copy_to_reg (tmp);
3008 else if (tmp != cfun->eh->ehr_stackadj)
3009 emit_move_insn (cfun->eh->ehr_stackadj, tmp);
be128cd9
RK
3010#endif
3011
34dc173c
UW
3012 tmp = expand_expr (handler_tree, cfun->eh->ehr_handler, VOIDmode, 0);
3013#ifdef POINTERS_EXTEND_UNSIGNED
3014 if (GET_MODE (tmp) != Pmode)
3015 tmp = convert_memory_address (Pmode, tmp);
3016#endif
3017 if (!cfun->eh->ehr_handler)
3018 cfun->eh->ehr_handler = copy_to_reg (tmp);
3019 else if (tmp != cfun->eh->ehr_handler)
3020 emit_move_insn (cfun->eh->ehr_handler, tmp);
ca55abae 3021
34dc173c
UW
3022 if (!cfun->eh->ehr_label)
3023 cfun->eh->ehr_label = gen_label_rtx ();
52a11cbf 3024 emit_jump (cfun->eh->ehr_label);
a1622f83
AM
3025}
3026
71038426 3027void
502b8322 3028expand_eh_return (void)
ca55abae 3029{
34dc173c 3030 rtx around_label;
ca55abae 3031
52a11cbf 3032 if (! cfun->eh->ehr_label)
71038426 3033 return;
ca55abae 3034
52a11cbf 3035 current_function_calls_eh_return = 1;
ca55abae 3036
34dc173c
UW
3037#ifdef EH_RETURN_STACKADJ_RTX
3038 emit_move_insn (EH_RETURN_STACKADJ_RTX, const0_rtx);
3039#endif
3040
52a11cbf 3041 around_label = gen_label_rtx ();
52a11cbf 3042 emit_jump (around_label);
ca55abae 3043
52a11cbf
RH
3044 emit_label (cfun->eh->ehr_label);
3045 clobber_return_register ();
ca55abae 3046
34dc173c
UW
3047#ifdef EH_RETURN_STACKADJ_RTX
3048 emit_move_insn (EH_RETURN_STACKADJ_RTX, cfun->eh->ehr_stackadj);
3049#endif
3050
52a11cbf
RH
3051#ifdef HAVE_eh_return
3052 if (HAVE_eh_return)
34dc173c 3053 emit_insn (gen_eh_return (cfun->eh->ehr_handler));
52a11cbf 3054 else
71038426 3055#endif
52a11cbf 3056 {
34dc173c
UW
3057#ifdef EH_RETURN_HANDLER_RTX
3058 emit_move_insn (EH_RETURN_HANDLER_RTX, cfun->eh->ehr_handler);
3059#else
3060 error ("__builtin_eh_return not supported on this target");
3061#endif
52a11cbf 3062 }
71038426 3063
52a11cbf 3064 emit_label (around_label);
71038426 3065}
77d33a84 3066\f
949f197f 3067/* In the following functions, we represent entries in the action table
eaec9b3d 3068 as 1-based indices. Special cases are:
949f197f
RH
3069
3070 0: null action record, non-null landing pad; implies cleanups
3071 -1: null action record, null landing pad; implies no action
3072 -2: no call-site entry; implies must_not_throw
3073 -3: we have yet to process outer regions
3074
3075 Further, no special cases apply to the "next" field of the record.
3076 For next, 0 means end of list. */
3077
52a11cbf
RH
3078struct action_record
3079{
3080 int offset;
3081 int filter;
3082 int next;
3083};
77d33a84 3084
52a11cbf 3085static int
502b8322 3086action_record_eq (const void *pentry, const void *pdata)
52a11cbf
RH
3087{
3088 const struct action_record *entry = (const struct action_record *) pentry;
3089 const struct action_record *data = (const struct action_record *) pdata;
3090 return entry->filter == data->filter && entry->next == data->next;
3091}
77d33a84 3092
52a11cbf 3093static hashval_t
502b8322 3094action_record_hash (const void *pentry)
52a11cbf
RH
3095{
3096 const struct action_record *entry = (const struct action_record *) pentry;
3097 return entry->next * 1009 + entry->filter;
3098}
77d33a84 3099
52a11cbf 3100static int
502b8322 3101add_action_record (htab_t ar_hash, int filter, int next)
77d33a84 3102{
52a11cbf
RH
3103 struct action_record **slot, *new, tmp;
3104
3105 tmp.filter = filter;
3106 tmp.next = next;
3107 slot = (struct action_record **) htab_find_slot (ar_hash, &tmp, INSERT);
77d33a84 3108
52a11cbf 3109 if ((new = *slot) == NULL)
77d33a84 3110 {
52a11cbf
RH
3111 new = (struct action_record *) xmalloc (sizeof (*new));
3112 new->offset = VARRAY_ACTIVE_SIZE (cfun->eh->action_record_data) + 1;
3113 new->filter = filter;
3114 new->next = next;
3115 *slot = new;
3116
3117 /* The filter value goes in untouched. The link to the next
3118 record is a "self-relative" byte offset, or zero to indicate
3119 that there is no next record. So convert the absolute 1-based
eaec9b3d 3120 indices we've been carrying around into a displacement. */
52a11cbf
RH
3121
3122 push_sleb128 (&cfun->eh->action_record_data, filter);
3123 if (next)
3124 next -= VARRAY_ACTIVE_SIZE (cfun->eh->action_record_data) + 1;
3125 push_sleb128 (&cfun->eh->action_record_data, next);
77d33a84 3126 }
77d33a84 3127
52a11cbf
RH
3128 return new->offset;
3129}
77d33a84 3130
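/* Editorial example, not part of the original source.  Starting from an
   empty action_record_data, adding a catch record with filter 2 and no
   next action yields offset 1 and the bytes 02 00; then adding an
   enclosing cleanup record (filter 0) whose next is that offset 1 yields
   offset 3 and the bytes 00 7d.  The 0x7d is sleb128 (-3): the next field
   sits at byte 4 and 4 + (-3) = 1 points back at the first record, which
   is exactly the self-relative displacement described above.  */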
52a11cbf 3131static int
502b8322 3132collect_one_action_chain (htab_t ar_hash, struct eh_region *region)
77d33a84 3133{
52a11cbf
RH
3134 struct eh_region *c;
3135 int next;
77d33a84 3136
52a11cbf
RH
3137 /* If we've reached the top of the region chain, then we have
3138 no actions, and require no landing pad. */
3139 if (region == NULL)
3140 return -1;
3141
3142 switch (region->type)
77d33a84 3143 {
52a11cbf
RH
3144 case ERT_CLEANUP:
3145 /* A cleanup adds a zero filter to the beginning of the chain, but
3146 there are special cases to look out for. If there are *only*
3147 cleanups along a path, then it compresses to a zero action.
3148 Further, if there are multiple cleanups along a path, we only
3149 need to represent one of them, as that is enough to trigger
3150 entry to the landing pad at runtime. */
3151 next = collect_one_action_chain (ar_hash, region->outer);
3152 if (next <= 0)
3153 return 0;
3154 for (c = region->outer; c ; c = c->outer)
3155 if (c->type == ERT_CLEANUP)
3156 return next;
3157 return add_action_record (ar_hash, 0, next);
3158
3159 case ERT_TRY:
3160 /* Process the associated catch regions in reverse order.
3161 If there's a catch-all handler, then we don't need to
3162 search outer regions. Use a magic -3 value to record
a1f300c0 3163 that we haven't done the outer search. */
52a11cbf
RH
3164 next = -3;
3165 for (c = region->u.try.last_catch; c ; c = c->u.catch.prev_catch)
3166 {
6d41a92f
OH
3167 if (c->u.catch.type_list == NULL)
3168 {
3169 /* Retrieve the filter from the head of the filter list
3170 where we have stored it (see assign_filter_values). */
a944ceb9
RH
3171 int filter
3172 = TREE_INT_CST_LOW (TREE_VALUE (c->u.catch.filter_list));
3173
3174 next = add_action_record (ar_hash, filter, 0);
6d41a92f 3175 }
52a11cbf
RH
3176 else
3177 {
6d41a92f
OH
3178 /* Once the outer search is done, trigger an action record for
3179 each filter we have. */
3180 tree flt_node;
3181
52a11cbf
RH
3182 if (next == -3)
3183 {
3184 next = collect_one_action_chain (ar_hash, region->outer);
949f197f
RH
3185
3186 /* If there is no next action, terminate the chain. */
3187 if (next == -1)
52a11cbf 3188 next = 0;
949f197f
RH
3189 /* If all outer actions are cleanups or must_not_throw,
3190 we'll have no action record for it, since we had wanted
3191 to encode these states in the call-site record directly.
3192 Add a cleanup action to the chain to catch these. */
3193 else if (next <= 0)
3194 next = add_action_record (ar_hash, 0, 0);
52a11cbf 3195 }
3f2c5d1a 3196
6d41a92f
OH
3197 flt_node = c->u.catch.filter_list;
3198 for (; flt_node; flt_node = TREE_CHAIN (flt_node))
3199 {
3200 int filter = TREE_INT_CST_LOW (TREE_VALUE (flt_node));
3201 next = add_action_record (ar_hash, filter, next);
3202 }
52a11cbf
RH
3203 }
3204 }
3205 return next;
3206
3207 case ERT_ALLOWED_EXCEPTIONS:
3208 /* An exception specification adds its filter to the
3209 beginning of the chain. */
3210 next = collect_one_action_chain (ar_hash, region->outer);
3211 return add_action_record (ar_hash, region->u.allowed.filter,
3212 next < 0 ? 0 : next);
3213
3214 case ERT_MUST_NOT_THROW:
3215 /* A must-not-throw region with no inner handlers or cleanups
3216 requires no call-site entry. Note that this differs from
3217 the no handler or cleanup case in that we do require an lsda
3218 to be generated. Return a magic -2 value to record this. */
3219 return -2;
3220
3221 case ERT_CATCH:
3222 case ERT_THROW:
3223 /* CATCH regions are handled in TRY above. THROW regions are
3224 for optimization information only and produce no output. */
3225 return collect_one_action_chain (ar_hash, region->outer);
3226
3227 default:
3228 abort ();
77d33a84
AM
3229 }
3230}
3231
52a11cbf 3232static int
502b8322 3233add_call_site (rtx landing_pad, int action)
77d33a84 3234{
52a11cbf
RH
3235 struct call_site_record *data = cfun->eh->call_site_data;
3236 int used = cfun->eh->call_site_data_used;
3237 int size = cfun->eh->call_site_data_size;
77d33a84 3238
52a11cbf
RH
3239 if (used >= size)
3240 {
3241 size = (size ? size * 2 : 64);
3242 data = (struct call_site_record *)
e2500fed 3243 ggc_realloc (data, sizeof (*data) * size);
52a11cbf
RH
3244 cfun->eh->call_site_data = data;
3245 cfun->eh->call_site_data_size = size;
3246 }
77d33a84 3247
52a11cbf
RH
3248 data[used].landing_pad = landing_pad;
3249 data[used].action = action;
77d33a84 3250
52a11cbf 3251 cfun->eh->call_site_data_used = used + 1;
77d33a84 3252
52a11cbf 3253 return used + call_site_base;
77d33a84
AM
3254}
3255
52a11cbf
RH
3256/* Turn REG_EH_REGION notes back into NOTE_INSN_EH_REGION notes.
3257 The new note numbers will not refer to region numbers, but
3258 instead to call site entries. */
77d33a84 3259
52a11cbf 3260void
502b8322 3261convert_to_eh_region_ranges (void)
77d33a84 3262{
52a11cbf
RH
3263 rtx insn, iter, note;
3264 htab_t ar_hash;
3265 int last_action = -3;
3266 rtx last_action_insn = NULL_RTX;
3267 rtx last_landing_pad = NULL_RTX;
3268 rtx first_no_action_insn = NULL_RTX;
ae0ed63a 3269 int call_site = 0;
77d33a84 3270
52a11cbf
RH
3271 if (USING_SJLJ_EXCEPTIONS || cfun->eh->region_tree == NULL)
3272 return;
77d33a84 3273
52a11cbf 3274 VARRAY_UCHAR_INIT (cfun->eh->action_record_data, 64, "action_record_data");
77d33a84 3275
52a11cbf 3276 ar_hash = htab_create (31, action_record_hash, action_record_eq, free);
77d33a84 3277
52a11cbf
RH
3278 for (iter = get_insns (); iter ; iter = NEXT_INSN (iter))
3279 if (INSN_P (iter))
3280 {
3281 struct eh_region *region;
3282 int this_action;
3283 rtx this_landing_pad;
77d33a84 3284
52a11cbf
RH
3285 insn = iter;
3286 if (GET_CODE (insn) == INSN
3287 && GET_CODE (PATTERN (insn)) == SEQUENCE)
3288 insn = XVECEXP (PATTERN (insn), 0, 0);
1ef1bf06 3289
52a11cbf
RH
3290 note = find_reg_note (insn, REG_EH_REGION, NULL_RTX);
3291 if (!note)
3292 {
3293 if (! (GET_CODE (insn) == CALL_INSN
3294 || (flag_non_call_exceptions
3295 && may_trap_p (PATTERN (insn)))))
3296 continue;
3297 this_action = -1;
3298 region = NULL;
3299 }
3300 else
3301 {
3302 if (INTVAL (XEXP (note, 0)) <= 0)
3303 continue;
3304 region = cfun->eh->region_array[INTVAL (XEXP (note, 0))];
3305 this_action = collect_one_action_chain (ar_hash, region);
3306 }
3307
3308 /* Existence of catch handlers, or must-not-throw regions
3309 implies that an lsda is needed (even if empty). */
3310 if (this_action != -1)
3311 cfun->uses_eh_lsda = 1;
3312
3313 /* Delay creation of region notes for no-action regions
3314 until we're sure that an lsda will be required. */
3315 else if (last_action == -3)
3316 {
3317 first_no_action_insn = iter;
3318 last_action = -1;
3319 }
1ef1bf06 3320
52a11cbf
RH
3321 /* Cleanups and handlers may share action chains but not
3322 landing pads. Collect the landing pad for this region. */
3323 if (this_action >= 0)
3324 {
3325 struct eh_region *o;
3326 for (o = region; ! o->landing_pad ; o = o->outer)
3327 continue;
3328 this_landing_pad = o->landing_pad;
3329 }
3330 else
3331 this_landing_pad = NULL_RTX;
1ef1bf06 3332
52a11cbf
RH
3333 /* Differing actions or landing pads imply a change in call-site
3334 info, which implies some EH_REGION note should be emitted. */
3335 if (last_action != this_action
3336 || last_landing_pad != this_landing_pad)
3337 {
3338 /* If we'd not seen a previous action (-3) or the previous
3339 action was must-not-throw (-2), then we do not need an
3340 end note. */
3341 if (last_action >= -1)
3342 {
3343 /* If we delayed the creation of the begin, do it now. */
3344 if (first_no_action_insn)
3345 {
3346 call_site = add_call_site (NULL_RTX, 0);
3347 note = emit_note_before (NOTE_INSN_EH_REGION_BEG,
3348 first_no_action_insn);
3349 NOTE_EH_HANDLER (note) = call_site;
3350 first_no_action_insn = NULL_RTX;
3351 }
3352
3353 note = emit_note_after (NOTE_INSN_EH_REGION_END,
3354 last_action_insn);
3355 NOTE_EH_HANDLER (note) = call_site;
3356 }
3357
3358 /* If the new action is must-not-throw, then no region notes
3359 are created. */
3360 if (this_action >= -1)
3361 {
3f2c5d1a 3362 call_site = add_call_site (this_landing_pad,
52a11cbf
RH
3363 this_action < 0 ? 0 : this_action);
3364 note = emit_note_before (NOTE_INSN_EH_REGION_BEG, iter);
3365 NOTE_EH_HANDLER (note) = call_site;
3366 }
3367
3368 last_action = this_action;
3369 last_landing_pad = this_landing_pad;
3370 }
3371 last_action_insn = iter;
3372 }
1ef1bf06 3373
52a11cbf 3374 if (last_action >= -1 && ! first_no_action_insn)
1ef1bf06 3375 {
52a11cbf
RH
3376 note = emit_note_after (NOTE_INSN_EH_REGION_END, last_action_insn);
3377 NOTE_EH_HANDLER (note) = call_site;
1ef1bf06
AM
3378 }
3379
52a11cbf
RH
3380 htab_delete (ar_hash);
3381}
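/* A rough summary of the action codes used above, inferred from the
   checks in this function:

       -3   initial state; no call site has been seen yet
       -2   the insn lies in a must-not-throw region; no region notes
            are emitted for it
       -1   the insn can throw but needs no action (and, by itself,
            no LSDA)
      >=0   an action chain was collected; the value is recorded as the
            call-site action via add_call_site

   For illustration: a throwing insn with no action (-1) seen while
   last_action is still -3 is only remembered in first_no_action_insn.
   If a later insn then needs a real action, the delayed
   NOTE_INSN_EH_REGION_BEG is emitted before the remembered insn with a
   null landing pad, the pending region is closed after
   last_action_insn, and a fresh pair of region notes is started for
   the new action and landing pad.  */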
1ef1bf06 3382
52a11cbf
RH
3383\f
3384static void
502b8322 3385push_uleb128 (varray_type *data_area, unsigned int value)
52a11cbf
RH
3386{
3387 do
3388 {
3389 unsigned char byte = value & 0x7f;
3390 value >>= 7;
3391 if (value)
3392 byte |= 0x80;
3393 VARRAY_PUSH_UCHAR (*data_area, byte);
3394 }
3395 while (value);
3396}
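/* Worked example (the standard DWARF LEB128 case): push_uleb128 with
   value 624485 (0x98765) pushes the bytes 0xe5 0x8e 0x26 -- seven data
   bits per byte, least significant group first, with the high bit set
   on every byte except the last.

   A minimal decoder for cross-checking, purely illustrative and not
   part of this file (like push_uleb128 it assumes the value fits in an
   unsigned int):

       static unsigned int
       read_uleb128 (const unsigned char *p, int *len)
       {
         unsigned int result = 0;
         int shift = 0, count = 0;
         unsigned char byte;

         do
           {
             byte = p[count++];
             result |= (unsigned int) (byte & 0x7f) << shift;
             shift += 7;
           }
         while (byte & 0x80);

         *len = count;
         return result;
       }  */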
1ef1bf06 3397
52a11cbf 3398static void
502b8322 3399push_sleb128 (varray_type *data_area, int value)
52a11cbf
RH
3400{
3401 unsigned char byte;
3402 int more;
1ef1bf06 3403
52a11cbf 3404 do
1ef1bf06 3405 {
52a11cbf
RH
3406 byte = value & 0x7f;
3407 value >>= 7;
3408 more = ! ((value == 0 && (byte & 0x40) == 0)
3409 || (value == -1 && (byte & 0x40) != 0));
3410 if (more)
3411 byte |= 0x80;
3412 VARRAY_PUSH_UCHAR (*data_area, byte);
1ef1bf06 3413 }
52a11cbf
RH
3414 while (more);
3415}
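/* Worked example: push_sleb128 with value -624485 pushes the bytes
   0x9b 0xf1 0x59.  The loop keeps emitting 7-bit groups until the
   remaining value is pure sign extension (0 or -1) *and* bit 0x40 of
   the last byte agrees with the sign; that is why, for instance, +64
   takes two bytes (0xc0 0x00) even though it fits in seven bits.  */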
1ef1bf06 3416
52a11cbf 3417\f
52a11cbf
RH
3418#ifndef HAVE_AS_LEB128
3419static int
502b8322 3420dw2_size_of_call_site_table (void)
1ef1bf06 3421{
52a11cbf
RH
3422 int n = cfun->eh->call_site_data_used;
3423 int size = n * (4 + 4 + 4);
3424 int i;
1ef1bf06 3425
52a11cbf
RH
3426 for (i = 0; i < n; ++i)
3427 {
3428 struct call_site_record *cs = &cfun->eh->call_site_data[i];
3429 size += size_of_uleb128 (cs->action);
3430 }
fac62ecf 3431
52a11cbf
RH
3432 return size;
3433}
3434
3435static int
502b8322 3436sjlj_size_of_call_site_table (void)
52a11cbf
RH
3437{
3438 int n = cfun->eh->call_site_data_used;
3439 int size = 0;
3440 int i;
77d33a84 3441
52a11cbf 3442 for (i = 0; i < n; ++i)
1ef1bf06 3443 {
52a11cbf
RH
3444 struct call_site_record *cs = &cfun->eh->call_site_data[i];
3445 size += size_of_uleb128 (INTVAL (cs->landing_pad));
3446 size += size_of_uleb128 (cs->action);
1ef1bf06 3447 }
52a11cbf
RH
3448
3449 return size;
3450}
3451#endif
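/* Rough size comparison, for a purely hypothetical function with three
   call sites whose actions are 0, 1 and 200: the DWARF2 table costs
   3 * (4 + 4 + 4) fixed bytes plus 1 + 1 + 2 bytes of uleb128 actions,
   40 bytes in all, whereas the sjlj table is only the uleb128
   landing-pad values plus the same 1 + 1 + 2 action bytes.  */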
3452
3453static void
502b8322 3454dw2_output_call_site_table (void)
52a11cbf 3455{
83182544 3456 const char *const function_start_lab
52a11cbf
RH
3457 = IDENTIFIER_POINTER (current_function_func_begin_label);
3458 int n = cfun->eh->call_site_data_used;
3459 int i;
3460
3461 for (i = 0; i < n; ++i)
1ef1bf06 3462 {
52a11cbf
RH
3463 struct call_site_record *cs = &cfun->eh->call_site_data[i];
3464 char reg_start_lab[32];
3465 char reg_end_lab[32];
3466 char landing_pad_lab[32];
3467
3468 ASM_GENERATE_INTERNAL_LABEL (reg_start_lab, "LEHB", call_site_base + i);
3469 ASM_GENERATE_INTERNAL_LABEL (reg_end_lab, "LEHE", call_site_base + i);
3470
3471 if (cs->landing_pad)
3472 ASM_GENERATE_INTERNAL_LABEL (landing_pad_lab, "L",
3473 CODE_LABEL_NUMBER (cs->landing_pad));
3474
3475 /* ??? Perhaps use insn length scaling if the assembler supports
3476 generic arithmetic. */
3477 /* ??? Perhaps use attr_length to choose data1 or data2 instead of
3478 data4 if the function is small enough. */
3479#ifdef HAVE_AS_LEB128
3480 dw2_asm_output_delta_uleb128 (reg_start_lab, function_start_lab,
3481 "region %d start", i);
3482 dw2_asm_output_delta_uleb128 (reg_end_lab, reg_start_lab,
3483 "length");
3484 if (cs->landing_pad)
3485 dw2_asm_output_delta_uleb128 (landing_pad_lab, function_start_lab,
3486 "landing pad");
3487 else
3488 dw2_asm_output_data_uleb128 (0, "landing pad");
3489#else
3490 dw2_asm_output_delta (4, reg_start_lab, function_start_lab,
3491 "region %d start", i);
3492 dw2_asm_output_delta (4, reg_end_lab, reg_start_lab, "length");
3493 if (cs->landing_pad)
3494 dw2_asm_output_delta (4, landing_pad_lab, function_start_lab,
3495 "landing pad");
3496 else
3497 dw2_asm_output_data (4, 0, "landing pad");
3498#endif
3499 dw2_asm_output_data_uleb128 (cs->action, "action");
1ef1bf06
AM
3500 }
3501
52a11cbf
RH
3502 call_site_base += n;
3503}
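/* Purely for illustration: on a typical ELF target whose assembler
   supports .uleb128, one record of the table above comes out roughly as

        .uleb128 .LEHB0-<function begin>      region 0 start
        .uleb128 .LEHE0-.LEHB0                length
        .uleb128 .L<n>-<function begin>       landing pad (0 if none)
        .uleb128 <action>                     action

   The exact directives, label spellings and comments come from the
   dw2_asm_output_* routines and ASM_GENERATE_INTERNAL_LABEL, so they
   vary from target to target.  */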
3504
3505static void
502b8322 3506sjlj_output_call_site_table (void)
52a11cbf
RH
3507{
3508 int n = cfun->eh->call_site_data_used;
3509 int i;
1ef1bf06 3510
52a11cbf 3511 for (i = 0; i < n; ++i)
1ef1bf06 3512 {
52a11cbf 3513 struct call_site_record *cs = &cfun->eh->call_site_data[i];
4da896b2 3514
52a11cbf
RH
3515 dw2_asm_output_data_uleb128 (INTVAL (cs->landing_pad),
3516 "region %d landing pad", i);
3517 dw2_asm_output_data_uleb128 (cs->action, "action");
3518 }
4da896b2 3519
52a11cbf 3520 call_site_base += n;
1ef1bf06
AM
3521}
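/* Note that for sjlj exceptions cs->landing_pad is a CONST_INT (hence
   the INTVAL above) rather than a code label: the table appears to
   record the dispatch values assigned elsewhere in this file, which the
   setjmp/longjmp-based runtime uses to select the handler region.  */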
3522
96d0f4dc
JJ
3523/* Tell assembler to switch to the section for the exception handling
3524 table. */
3525
3526void
502b8322 3527default_exception_section (void)
96d0f4dc
JJ
3528{
3529 if (targetm.have_named_sections)
3530 {
96d0f4dc 3531 int flags;
96d0f4dc 3532#ifdef HAVE_LD_RO_RW_SECTION_MIXING
fe3f9515
KG
3533 int tt_format = ASM_PREFERRED_EH_DATA_FORMAT (/*code=*/0, /*global=*/1);
3534
96d0f4dc
JJ
3535 flags = (! flag_pic
3536 || ((tt_format & 0x70) != DW_EH_PE_absptr
3537 && (tt_format & 0x70) != DW_EH_PE_aligned))
3538 ? 0 : SECTION_WRITE;
3539#else
3540 flags = SECTION_WRITE;
3541#endif
3542 named_section_flags (".gcc_except_table", flags);
3543 }
3544 else if (flag_pic)
3545 data_section ();
3546 else
3547 readonly_data_section ();
3548}
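/* In rough terms: with named sections and HAVE_LD_RO_RW_SECTION_MIXING,
   the table stays in a read-only ".gcc_except_table" unless the code is
   PIC *and* the @TType encoding is absolute or aligned (which would
   require runtime relocations); otherwise it goes to a writable named
   section, to the data section (PIC), or to the read-only data section,
   matching the branches above.  */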
3549
52a11cbf 3550void
502b8322 3551output_function_exception_table (void)
52a11cbf 3552{
2a1ee410 3553 int tt_format, cs_format, lp_format, i, n;
52a11cbf
RH
3554#ifdef HAVE_AS_LEB128
3555 char ttype_label[32];
3556 char cs_after_size_label[32];
3557 char cs_end_label[32];
3558#else
3559 int call_site_len;
3560#endif
3561 int have_tt_data;
ae0ed63a 3562 int tt_format_size = 0;
1ef1bf06 3563
52a11cbf
RH
3564 /* Not all functions need anything. */
3565 if (! cfun->uses_eh_lsda)
3566 return;
fac62ecf 3567
2a1ee410
RH
3568#ifdef IA64_UNWIND_INFO
3569 fputs ("\t.personality\t", asm_out_file);
3570 output_addr_const (asm_out_file, eh_personality_libfunc);
3571 fputs ("\n\t.handlerdata\n", asm_out_file);
3572 /* Note that varasm still thinks we're in the function's code section.
3573 The ".endp" directive that will immediately follow will take us back. */
3574#else
07c9d2eb 3575 (*targetm.asm_out.exception_section) ();
2a1ee410 3576#endif
52a11cbf
RH
3577
3578 have_tt_data = (VARRAY_ACTIVE_SIZE (cfun->eh->ttype_data) > 0
3579 || VARRAY_ACTIVE_SIZE (cfun->eh->ehspec_data) > 0);
3580
b627d6fe
RH
3581 /* Indicate the format of the @TType entries. */
3582 if (! have_tt_data)
3583 tt_format = DW_EH_PE_omit;
3584 else
3585 {
3586 tt_format = ASM_PREFERRED_EH_DATA_FORMAT (/*code=*/0, /*global=*/1);
3587#ifdef HAVE_AS_LEB128
df696a75
RH
3588 ASM_GENERATE_INTERNAL_LABEL (ttype_label, "LLSDATT",
3589 current_function_funcdef_no);
b627d6fe
RH
3590#endif
3591 tt_format_size = size_of_encoded_value (tt_format);
3592
7a900ebc 3593 assemble_align (tt_format_size * BITS_PER_UNIT);
b627d6fe 3594 }
52a11cbf 3595
4977bab6 3596 (*targetm.asm_out.internal_label) (asm_out_file, "LLSDA",
df696a75 3597 current_function_funcdef_no);
52a11cbf
RH
3598
3599 /* The LSDA header. */
3600
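  /* For orientation, the layout emitted below is roughly:

       .LLSDA<n>:                       (aligned when @TType data exists)
          1 byte   @LPStart encoding    (DW_EH_PE_omit here, no pointer)
          1 byte   @TType encoding
          uleb128  @TType base offset   (only when @TType data exists)
          1 byte   call-site encoding
          uleb128  call-site table length
                   call-site table
                   action record table
                   @TType (runtime type) table, aligned
                   exception specification table

     This merely mirrors the code that follows; the actual bytes depend
     on the encodings chosen above and on the target.  */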
3601 /* Indicate the format of the landing pad start pointer. An omitted
3602 field implies @LPStart == @Start. */
3603 /* Currently we always put @LPStart == @Start. This field would
3604 be most useful in moving the landing pads completely out of
3605 line to another section, but it could also be used to minimize
3606 the size of uleb128 landing pad offsets. */
2a1ee410
RH
3607 lp_format = DW_EH_PE_omit;
3608 dw2_asm_output_data (1, lp_format, "@LPStart format (%s)",
3609 eh_data_format_name (lp_format));
52a11cbf
RH
3610
3611 /* @LPStart pointer would go here. */
3612
2a1ee410
RH
3613 dw2_asm_output_data (1, tt_format, "@TType format (%s)",
3614 eh_data_format_name (tt_format));
52a11cbf
RH
3615
3616#ifndef HAVE_AS_LEB128
3617 if (USING_SJLJ_EXCEPTIONS)
3618 call_site_len = sjlj_size_of_call_site_table ();
3619 else
3620 call_site_len = dw2_size_of_call_site_table ();
3621#endif
3622
 3623	  /* A uleb128 displacement from the end of this field to the @TType data.  */
3624 if (have_tt_data)
3625 {
3626#ifdef HAVE_AS_LEB128
3627 char ttype_after_disp_label[32];
3f2c5d1a 3628 ASM_GENERATE_INTERNAL_LABEL (ttype_after_disp_label, "LLSDATTD",
df696a75 3629 current_function_funcdef_no);
52a11cbf
RH
3630 dw2_asm_output_delta_uleb128 (ttype_label, ttype_after_disp_label,
3631 "@TType base offset");
3632 ASM_OUTPUT_LABEL (asm_out_file, ttype_after_disp_label);
3633#else
 3634	      /* Ugh.  Alignment complicates things.  */
b627d6fe 3635 unsigned int before_disp, after_disp, last_disp, disp;
52a11cbf 3636
52a11cbf
RH
3637 before_disp = 1 + 1;
3638 after_disp = (1 + size_of_uleb128 (call_site_len)
3639 + call_site_len
3640 + VARRAY_ACTIVE_SIZE (cfun->eh->action_record_data)
b627d6fe
RH
3641 + (VARRAY_ACTIVE_SIZE (cfun->eh->ttype_data)
3642 * tt_format_size));
52a11cbf
RH
3643
3644 disp = after_disp;
3645 do
1ef1bf06 3646 {
52a11cbf
RH
3647 unsigned int disp_size, pad;
3648
3649 last_disp = disp;
3650 disp_size = size_of_uleb128 (disp);
3651 pad = before_disp + disp_size + after_disp;
b627d6fe
RH
3652 if (pad % tt_format_size)
3653 pad = tt_format_size - (pad % tt_format_size);
52a11cbf
RH
3654 else
3655 pad = 0;
3656 disp = after_disp + pad;
1ef1bf06 3657 }
52a11cbf
RH
3658 while (disp != last_disp);
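      /* Worked example of the fixed point above, with made-up numbers:
         if after_disp is 130 and tt_format_size is 4, the first guess
         disp = 130 needs a 2-byte uleb128, giving 2 + 2 + 130 = 134
         bytes in total, so 2 bytes of padding are required to reach a
         multiple of 4; re-checking with disp = 132 still needs a 2-byte
         uleb128, so the loop has converged and 132 is emitted below.  */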
3659
3660 dw2_asm_output_data_uleb128 (disp, "@TType base offset");
3661#endif
1ef1bf06 3662 }
1ef1bf06 3663
52a11cbf
RH
3664 /* Indicate the format of the call-site offsets. */
3665#ifdef HAVE_AS_LEB128
2a1ee410 3666 cs_format = DW_EH_PE_uleb128;
52a11cbf 3667#else
2a1ee410 3668 cs_format = DW_EH_PE_udata4;
52a11cbf 3669#endif
2a1ee410
RH
3670 dw2_asm_output_data (1, cs_format, "call-site format (%s)",
3671 eh_data_format_name (cs_format));
52a11cbf
RH
3672
3673#ifdef HAVE_AS_LEB128
3674 ASM_GENERATE_INTERNAL_LABEL (cs_after_size_label, "LLSDACSB",
df696a75 3675 current_function_funcdef_no);
52a11cbf 3676 ASM_GENERATE_INTERNAL_LABEL (cs_end_label, "LLSDACSE",
df696a75 3677 current_function_funcdef_no);
52a11cbf
RH
3678 dw2_asm_output_delta_uleb128 (cs_end_label, cs_after_size_label,
3679 "Call-site table length");
3680 ASM_OUTPUT_LABEL (asm_out_file, cs_after_size_label);
3681 if (USING_SJLJ_EXCEPTIONS)
3682 sjlj_output_call_site_table ();
3683 else
3684 dw2_output_call_site_table ();
3685 ASM_OUTPUT_LABEL (asm_out_file, cs_end_label);
3686#else
 3687	  dw2_asm_output_data_uleb128 (call_site_len, "Call-site table length");
3688 if (USING_SJLJ_EXCEPTIONS)
3689 sjlj_output_call_site_table ();
3690 else
3691 dw2_output_call_site_table ();
3692#endif
3693
3694 /* ??? Decode and interpret the data for flag_debug_asm. */
3695 n = VARRAY_ACTIVE_SIZE (cfun->eh->action_record_data);
3696 for (i = 0; i < n; ++i)
3697 dw2_asm_output_data (1, VARRAY_UCHAR (cfun->eh->action_record_data, i),
3698 (i ? NULL : "Action record table"));
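  /* Each entry in action_record_data is, in the usual LSDA scheme, a
     pair of sleb128 values (type filter, offset to the next record in
     the chain), built up elsewhere in this file; at this point they are
     simply dumped one byte at a time.  */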
1ef1bf06 3699
52a11cbf 3700 if (have_tt_data)
7a900ebc 3701 assemble_align (tt_format_size * BITS_PER_UNIT);
1ef1bf06 3702
52a11cbf
RH
3703 i = VARRAY_ACTIVE_SIZE (cfun->eh->ttype_data);
3704 while (i-- > 0)
1ef1bf06 3705 {
52a11cbf 3706 tree type = VARRAY_TREE (cfun->eh->ttype_data, i);
225b9cb9 3707 rtx value;
52a11cbf
RH
3708
3709 if (type == NULL_TREE)
3710 type = integer_zero_node;
3711 else
3712 type = lookup_type_for_runtime (type);
3713
225b9cb9
RH
3714 value = expand_expr (type, NULL_RTX, VOIDmode, EXPAND_INITIALIZER);
3715 if (tt_format == DW_EH_PE_absptr || tt_format == DW_EH_PE_aligned)
3716 assemble_integer (value, tt_format_size,
3717 tt_format_size * BITS_PER_UNIT, 1);
3718 else
0fb7aeda 3719 dw2_asm_output_encoded_addr_rtx (tt_format, value, NULL);
1ef1bf06 3720 }
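  /* The loop above runs backwards because, in this LSDA layout, the
     entry for filter value N sits N entries before the @TType base
     (the LLSDATT label emitted just below when the assembler supports
     leb128); index 0 therefore has to come out last.  */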
52a11cbf
RH
3721
3722#ifdef HAVE_AS_LEB128
3723 if (have_tt_data)
3724 ASM_OUTPUT_LABEL (asm_out_file, ttype_label);
3725#endif
3726
3727 /* ??? Decode and interpret the data for flag_debug_asm. */
3728 n = VARRAY_ACTIVE_SIZE (cfun->eh->ehspec_data);
3729 for (i = 0; i < n; ++i)
3730 dw2_asm_output_data (1, VARRAY_UCHAR (cfun->eh->ehspec_data, i),
3731 (i ? NULL : "Exception specification table"));
3732
3733 function_section (current_function_decl);
1ef1bf06 3734}
e2500fed
GK
3735
3736#include "gt-except.h"