]> git.ipfire.org Git - thirdparty/gcc.git/blame - gcc/except.c
PR libstdc++/5625
[thirdparty/gcc.git] / gcc / except.c
CommitLineData
97bb6c17 1/* Implements exception handling.
1ed5443b 2 Copyright (C) 1989, 1992, 1993, 1994, 1995, 1996, 1997, 1998,
48b3d385 3 1999, 2000, 2001, 2002, 2003, 2004 Free Software Foundation, Inc.
97ecdf3e 4 Contributed by Mike Stump <mrs@cygnus.com>.
5
f12b58b3 6This file is part of GCC.
97ecdf3e 7
f12b58b3 8GCC is free software; you can redistribute it and/or modify it under
9the terms of the GNU General Public License as published by the Free
10Software Foundation; either version 2, or (at your option) any later
11version.
97ecdf3e 12
f12b58b3 13GCC is distributed in the hope that it will be useful, but WITHOUT ANY
14WARRANTY; without even the implied warranty of MERCHANTABILITY or
15FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
16for more details.
97ecdf3e 17
18You should have received a copy of the GNU General Public License
f12b58b3 19along with GCC; see the file COPYING. If not, write to the Free
20Software Foundation, 59 Temple Place - Suite 330, Boston, MA
2102111-1307, USA. */
97ecdf3e 22
23
97bb6c17 24/* An exception is an event that can be signaled from within a
25 function. This event can then be "caught" or "trapped" by the
26 callers of this function. This potentially allows program flow to
ad87de1e 27 be transferred to any arbitrary code associated with a function call
97bb6c17 28 several levels up the stack.
29
30 The intended use for this mechanism is for signaling "exceptional
31 events" in an out-of-band fashion, hence its name. The C++ language
32 (and many other OO-styled or functional languages) practically
33 requires such a mechanism, as otherwise it becomes very difficult
34 or even impossible to signal failure conditions in complex
35 situations. The traditional C++ example is when an error occurs in
36 the process of constructing an object; without such a mechanism, it
37 is impossible to signal that the error occurs without adding global
38 state variables and error checks around every object construction.
39
40 The act of causing this event to occur is referred to as "throwing
41 an exception". (Alternate terms include "raising an exception" or
42 "signaling an exception".) The term "throw" is used because control
43 is returned to the callers of the function that is signaling the
44 exception, and thus there is the concept of "throwing" the
45 exception up the call stack.
46
df4b504c 47 [ Add updated documentation on how to use this. ] */
97ecdf3e 48
49
50#include "config.h"
405711de 51#include "system.h"
805e22b2 52#include "coretypes.h"
53#include "tm.h"
97ecdf3e 54#include "rtl.h"
55#include "tree.h"
56#include "flags.h"
97ecdf3e 57#include "function.h"
97ecdf3e 58#include "expr.h"
d8fc4d0b 59#include "libfuncs.h"
97ecdf3e 60#include "insn-config.h"
df4b504c 61#include "except.h"
62#include "integrate.h"
63#include "hard-reg-set.h"
64#include "basic-block.h"
97ecdf3e 65#include "output.h"
df4b504c 66#include "dwarf2asm.h"
67#include "dwarf2out.h"
ad5818ae 68#include "dwarf2.h"
12874aaf 69#include "toplev.h"
df4b504c 70#include "hashtab.h"
723766b6 71#include "intl.h"
a7b0c170 72#include "ggc.h"
075136a2 73#include "tm_p.h"
02c8b767 74#include "target.h"
a1f71e15 75#include "langhooks.h"
3b919cde 76#include "cgraph.h"
df4b504c 77
78/* Provide defaults for stuff that may not be defined when using
79 sjlj exceptions. */
df4b504c 80#ifndef EH_RETURN_DATA_REGNO
81#define EH_RETURN_DATA_REGNO(N) INVALID_REGNUM
98ad18c5 82#endif
83
8591d03a 84
df4b504c 85/* Nonzero means enable synchronous exceptions for non-call instructions. */
86int flag_non_call_exceptions;
8591d03a 87
df4b504c 88/* Protect cleanup actions with must-not-throw regions, with a call
89 to the given failure handler. */
35cb5232 90tree (*lang_protect_cleanup_actions) (void);
8591d03a 91
df4b504c 92/* Return true if type A catches type B. */
35cb5232 93int (*lang_eh_type_covers) (tree a, tree b);
8591d03a 94
df4b504c 95/* Map a type to a runtime object to match type. */
35cb5232 96tree (*lang_eh_runtime_type) (tree);
97ecdf3e 97
8f8dcce4 98/* A hash table of label to region number. */
99
/* One entry of the label -> region hash table
   (cfun->eh->exception_handler_label_map): maps an exception handler
   LABEL to the region whose handler it is.  */
struct ehl_map_entry GTY(())
{
  rtx label;
  struct eh_region *region;
};
105
252cc619 106static GTY(()) int call_site_base;
1f3233d1 107static GTY ((param_is (union tree_node)))
108 htab_t type_to_runtime_map;
df4b504c 109
110/* Describe the SjLj_Function_Context structure. */
1f3233d1 111static GTY(()) tree sjlj_fc_type_node;
df4b504c 112static int sjlj_fc_call_site_ofs;
113static int sjlj_fc_data_ofs;
114static int sjlj_fc_personality_ofs;
115static int sjlj_fc_lsda_ofs;
116static int sjlj_fc_jbuf_ofs;
117\f
118/* Describes one exception region. */
/* Describes one exception region.  Regions form a tree that mirrors
   the nesting of EH constructs in the source; each node also carries
   the RTL labels used to reach its handler.  */
struct eh_region GTY(())
{
  /* The immediately surrounding region.  */
  struct eh_region *outer;

  /* The list of immediately contained regions (first child), and the
     chain linking this region to its siblings.  */
  struct eh_region *inner;
  struct eh_region *next_peer;

  /* An identifier for this region; indexes cfun->eh->region_array.  */
  int region_number;

  /* When a region is deleted, its parents inherit the REG_EH_REGION
     numbers already assigned.  */
  bitmap aka;

  /* Each region does exactly one thing.  */
  enum eh_region_type
  {
    ERT_UNKNOWN = 0,
    ERT_CLEANUP,
    ERT_TRY,
    ERT_CATCH,
    ERT_ALLOWED_EXCEPTIONS,
    ERT_MUST_NOT_THROW,
    ERT_THROW,
    ERT_FIXUP
  } type;

  /* Holds the action to perform based on the preceding type.  */
  union eh_region_u {
    /* A list of catch blocks, a surrounding try block,
       and the label for continuing after a catch.  */
    struct eh_region_u_try {
      struct eh_region *catch;
      struct eh_region *last_catch;
      struct eh_region *prev_try;
      rtx continue_label;
    } GTY ((tag ("ERT_TRY"))) try;

    /* The list through the catch handlers, the list of type objects
       matched, and the list of associated filters.  */
    struct eh_region_u_catch {
      struct eh_region *next_catch;
      struct eh_region *prev_catch;
      tree type_list;
      tree filter_list;
    } GTY ((tag ("ERT_CATCH"))) catch;

    /* A tree_list of allowed types.  */
    struct eh_region_u_allowed {
      tree type_list;
      int filter;
    } GTY ((tag ("ERT_ALLOWED_EXCEPTIONS"))) allowed;

    /* The type given by a call to "throw foo();", or discovered
       for a throw.  */
    struct eh_region_u_throw {
      tree type;
    } GTY ((tag ("ERT_THROW"))) throw;

    /* Retain the cleanup expression even after expansion so that
       we can match up fixup regions.  */
    struct eh_region_u_cleanup {
      tree exp;
      struct eh_region *prev_try;
    } GTY ((tag ("ERT_CLEANUP"))) cleanup;

    /* The real region (by expression and by pointer) that fixup code
       should live in.  RESOLVED guards against re-resolving.  */
    struct eh_region_u_fixup {
      tree cleanup_exp;
      struct eh_region *real_region;
      bool resolved;
    } GTY ((tag ("ERT_FIXUP"))) fixup;
  } GTY ((desc ("%0.type"))) u;

  /* Entry point for this region's handler before landing pads are built.  */
  rtx label;

  /* Entry point for this region's handler from the runtime eh library.  */
  rtx landing_pad;

  /* Entry point for this region's handler from an inner region.  */
  rtx post_landing_pad;

  /* The RESX insn for handing off control to the next outermost handler,
     if appropriate.  */
  rtx resume;

  /* True if something in this region may throw.  */
  unsigned may_contain_throw : 1;
};
ec37ccb4 212
/* One call-site table entry: the landing pad to transfer control to
   and the index of the action record to run there.  */
struct call_site_record GTY(())
{
  rtx landing_pad;
  int action;
};
218
df4b504c 219/* Used to save exception status for each function. */
/* Used to save exception status for each function.  */
struct eh_status GTY(())
{
  /* The tree of all regions for this function.  */
  struct eh_region *region_tree;

  /* The same information as an indexable array.  */
  struct eh_region ** GTY ((length ("%h.last_region_number"))) region_array;

  /* The most recently open region.  */
  struct eh_region *cur_region;

  /* This is the region for which we are processing catch blocks.  */
  struct eh_region *try_region;

  /* Pseudos for the exception dispatch filter and exception pointer;
     created lazily by get_exception_filter / get_exception_pointer.  */
  rtx filter;
  rtx exc_ptr;

  int built_landing_pads;

  /* Highest region number assigned so far; bounds region_array.  */
  int last_region_number;

  varray_type ttype_data;
  varray_type ehspec_data;
  varray_type action_record_data;

  /* Maps handler labels to their regions (struct ehl_map_entry).  */
  htab_t GTY ((param_is (struct ehl_map_entry))) exception_handler_label_map;

  /* Growable array of call-site records; USED of SIZE slots filled.  */
  struct call_site_record * GTY ((length ("%h.call_site_data_used")))
    call_site_data;
  int call_site_data_used;
  int call_site_data_size;

  rtx ehr_stackadj;
  rtx ehr_handler;
  rtx ehr_label;

  rtx sjlj_fc;
  rtx sjlj_exit_after;
};
d63ea2f2 258
df4b504c 259\f
35cb5232 260static int t2r_eq (const void *, const void *);
261static hashval_t t2r_hash (const void *);
262static void add_type_for_runtime (tree);
263static tree lookup_type_for_runtime (tree);
264
265static struct eh_region *expand_eh_region_end (void);
266
267static rtx get_exception_filter (struct function *);
268
269static void collect_eh_region_array (void);
270static void resolve_fixup_regions (void);
271static void remove_fixup_regions (void);
272static void remove_unreachable_regions (rtx);
273static void convert_from_eh_region_ranges_1 (rtx *, int *, int);
274
275static struct eh_region *duplicate_eh_region_1 (struct eh_region *,
276 struct inline_remap *);
277static void duplicate_eh_region_2 (struct eh_region *, struct eh_region **);
278static int ttypes_filter_eq (const void *, const void *);
279static hashval_t ttypes_filter_hash (const void *);
280static int ehspec_filter_eq (const void *, const void *);
281static hashval_t ehspec_filter_hash (const void *);
282static int add_ttypes_entry (htab_t, tree);
283static int add_ehspec_entry (htab_t, htab_t, tree);
284static void assign_filter_values (void);
285static void build_post_landing_pads (void);
286static void connect_post_landing_pads (void);
287static void dw2_build_landing_pads (void);
df4b504c 288
289struct sjlj_lp_info;
35cb5232 290static bool sjlj_find_directly_reachable_regions (struct sjlj_lp_info *);
291static void sjlj_assign_call_site_values (rtx, struct sjlj_lp_info *);
292static void sjlj_mark_call_sites (struct sjlj_lp_info *);
293static void sjlj_emit_function_enter (rtx);
294static void sjlj_emit_function_exit (void);
295static void sjlj_emit_dispatch_table (rtx, struct sjlj_lp_info *);
296static void sjlj_build_landing_pads (void);
297
298static hashval_t ehl_hash (const void *);
299static int ehl_eq (const void *, const void *);
300static void add_ehl_entry (rtx, struct eh_region *);
301static void remove_exception_handler_label (rtx);
302static void remove_eh_handler (struct eh_region *);
303static int for_each_eh_label_1 (void **, void *);
df4b504c 304
305struct reachable_info;
306
307/* The return value of reachable_next_level. */
308enum reachable_code
309{
310 /* The given exception is not processed by the given region. */
311 RNL_NOT_CAUGHT,
312 /* The given exception may need processing by the given region. */
313 RNL_MAYBE_CAUGHT,
314 /* The given exception is completely processed by the given region. */
315 RNL_CAUGHT,
316 /* The given exception is completely processed by the runtime. */
317 RNL_BLOCKED
318};
d63ea2f2 319
35cb5232 320static int check_handled (tree, tree);
321static void add_reachable_handler (struct reachable_info *,
322 struct eh_region *, struct eh_region *);
323static enum reachable_code reachable_next_level (struct eh_region *, tree,
324 struct reachable_info *);
325
326static int action_record_eq (const void *, const void *);
327static hashval_t action_record_hash (const void *);
328static int add_action_record (htab_t, int, int);
329static int collect_one_action_chain (htab_t, struct eh_region *);
330static int add_call_site (rtx, int);
331
332static void push_uleb128 (varray_type *, unsigned int);
333static void push_sleb128 (varray_type *, int);
df4b504c 334#ifndef HAVE_AS_LEB128
35cb5232 335static int dw2_size_of_call_site_table (void);
336static int sjlj_size_of_call_site_table (void);
df4b504c 337#endif
35cb5232 338static void dw2_output_call_site_table (void);
339static void sjlj_output_call_site_table (void);
d63ea2f2 340
df4b504c 341\f
342/* Routine to see if exception handling is turned on.
6ef828f9 343 DO_WARN is nonzero if we want to inform the user that exception
1ed5443b 344 handling is turned off.
97ecdf3e 345
df4b504c 346 This is used to ensure that -fexceptions has been specified if the
347 compiler tries to use any exception-specific functions. */
97ecdf3e 348
df4b504c 349int
35cb5232 350doing_eh (int do_warn)
df4b504c 351{
352 if (! flag_exceptions)
353 {
354 static int warned = 0;
355 if (! warned && do_warn)
356 {
357 error ("exception handling disabled, use -fexceptions to enable");
358 warned = 1;
359 }
360 return 0;
361 }
362 return 1;
97ecdf3e 363}
364
df4b504c 365\f
/* One-time initialization of the exception handling machinery.
   Creates the type-to-runtime-object hash table and, when using
   setjmp/longjmp exceptions, lays out the SjLj_Function_Context
   record and caches its field offsets for later RTL generation.  */

void
init_eh (void)
{
  if (! flag_exceptions)
    return;

  type_to_runtime_map = htab_create_ggc (31, t2r_hash, t2r_eq, NULL);

  /* Create the SjLj_Function_Context structure.  This should match
     the definition in unwind-sjlj.c.  */
  if (USING_SJLJ_EXCEPTIONS)
    {
      tree f_jbuf, f_per, f_lsda, f_prev, f_cs, f_data, tmp;

      sjlj_fc_type_node = (*lang_hooks.types.make_type) (RECORD_TYPE);

      /* struct SjLj_Function_Context *__prev;  */
      f_prev = build_decl (FIELD_DECL, get_identifier ("__prev"),
			   build_pointer_type (sjlj_fc_type_node));
      DECL_FIELD_CONTEXT (f_prev) = sjlj_fc_type_node;

      /* int __call_site;  */
      f_cs = build_decl (FIELD_DECL, get_identifier ("__call_site"),
			 integer_type_node);
      DECL_FIELD_CONTEXT (f_cs) = sjlj_fc_type_node;

      /* word __data[4];  */
      tmp = build_index_type (build_int_2 (4 - 1, 0));
      tmp = build_array_type ((*lang_hooks.types.type_for_mode) (word_mode, 1),
			      tmp);
      f_data = build_decl (FIELD_DECL, get_identifier ("__data"), tmp);
      DECL_FIELD_CONTEXT (f_data) = sjlj_fc_type_node;

      /* void *__personality;  */
      f_per = build_decl (FIELD_DECL, get_identifier ("__personality"),
			  ptr_type_node);
      DECL_FIELD_CONTEXT (f_per) = sjlj_fc_type_node;

      /* void *__lsda;  */
      f_lsda = build_decl (FIELD_DECL, get_identifier ("__lsda"),
			   ptr_type_node);
      DECL_FIELD_CONTEXT (f_lsda) = sjlj_fc_type_node;

#ifdef DONT_USE_BUILTIN_SETJMP
#ifdef JMP_BUF_SIZE
      tmp = build_int_2 (JMP_BUF_SIZE - 1, 0);
#else
      /* Should be large enough for most systems, if it is not,
	 JMP_BUF_SIZE should be defined with the proper value.  It will
	 also tend to be larger than necessary for most systems, a more
	 optimal port will define JMP_BUF_SIZE.  */
      tmp = build_int_2 (FIRST_PSEUDO_REGISTER + 2 - 1, 0);
#endif
#else
      /* builtin_setjmp takes a pointer to 5 words.  */
      tmp = build_int_2 (5 * BITS_PER_WORD / POINTER_SIZE - 1, 0);
#endif
      tmp = build_index_type (tmp);
      tmp = build_array_type (ptr_type_node, tmp);
      f_jbuf = build_decl (FIELD_DECL, get_identifier ("__jbuf"), tmp);
#ifdef DONT_USE_BUILTIN_SETJMP
      /* We don't know what the alignment requirements of the
	 runtime's jmp_buf has.  Overestimate.  */
      DECL_ALIGN (f_jbuf) = BIGGEST_ALIGNMENT;
      DECL_USER_ALIGN (f_jbuf) = 1;
#endif
      DECL_FIELD_CONTEXT (f_jbuf) = sjlj_fc_type_node;

      /* Chain the fields in declaration order and lay out the record.  */
      TYPE_FIELDS (sjlj_fc_type_node) = f_prev;
      TREE_CHAIN (f_prev) = f_cs;
      TREE_CHAIN (f_cs) = f_data;
      TREE_CHAIN (f_data) = f_per;
      TREE_CHAIN (f_per) = f_lsda;
      TREE_CHAIN (f_lsda) = f_jbuf;

      layout_type (sjlj_fc_type_node);

      /* Cache the interesting field offsets so that we have
	 easy access from rtl.  */
      sjlj_fc_call_site_ofs
	= (tree_low_cst (DECL_FIELD_OFFSET (f_cs), 1)
	   + tree_low_cst (DECL_FIELD_BIT_OFFSET (f_cs), 1) / BITS_PER_UNIT);
      sjlj_fc_data_ofs
	= (tree_low_cst (DECL_FIELD_OFFSET (f_data), 1)
	   + tree_low_cst (DECL_FIELD_BIT_OFFSET (f_data), 1) / BITS_PER_UNIT);
      sjlj_fc_personality_ofs
	= (tree_low_cst (DECL_FIELD_OFFSET (f_per), 1)
	   + tree_low_cst (DECL_FIELD_BIT_OFFSET (f_per), 1) / BITS_PER_UNIT);
      sjlj_fc_lsda_ofs
	= (tree_low_cst (DECL_FIELD_OFFSET (f_lsda), 1)
	   + tree_low_cst (DECL_FIELD_BIT_OFFSET (f_lsda), 1) / BITS_PER_UNIT);
      sjlj_fc_jbuf_ofs
	= (tree_low_cst (DECL_FIELD_OFFSET (f_jbuf), 1)
	   + tree_low_cst (DECL_FIELD_BIT_OFFSET (f_jbuf), 1) / BITS_PER_UNIT);
    }
}
457
/* Allocate and zero the per-function EH state for the current
   function (cfun).  */
void
init_eh_for_function (void)
{
  cfun->eh = ggc_alloc_cleared (sizeof (struct eh_status));
}
df4b504c 463\f
464/* Start an exception handling region. All instructions emitted
465 after this point are considered to be part of the region until
466 expand_eh_region_end is invoked. */
011a7f23 467
df4b504c 468void
35cb5232 469expand_eh_region_start (void)
011a7f23 470{
df4b504c 471 struct eh_region *new_region;
472 struct eh_region *cur_region;
473 rtx note;
011a7f23 474
df4b504c 475 if (! doing_eh (0))
476 return;
011a7f23 477
df4b504c 478 /* Insert a new blank region as a leaf in the tree. */
f0af5a88 479 new_region = ggc_alloc_cleared (sizeof (*new_region));
df4b504c 480 cur_region = cfun->eh->cur_region;
481 new_region->outer = cur_region;
482 if (cur_region)
011a7f23 483 {
df4b504c 484 new_region->next_peer = cur_region->inner;
485 cur_region->inner = new_region;
486 }
d63ea2f2 487 else
011a7f23 488 {
df4b504c 489 new_region->next_peer = cfun->eh->region_tree;
490 cfun->eh->region_tree = new_region;
011a7f23 491 }
df4b504c 492 cfun->eh->cur_region = new_region;
493
494 /* Create a note marking the start of this region. */
495 new_region->region_number = ++cfun->eh->last_region_number;
31b97e8f 496 note = emit_note (NOTE_INSN_EH_REGION_BEG);
df4b504c 497 NOTE_EH_HANDLER (note) = new_region->region_number;
011a7f23 498}
499
df4b504c 500/* Common code to end a region. Returns the region just ended. */
444be12c 501
df4b504c 502static struct eh_region *
35cb5232 503expand_eh_region_end (void)
444be12c 504{
df4b504c 505 struct eh_region *cur_region = cfun->eh->cur_region;
506 rtx note;
507
3fb1e43b 508 /* Create a note marking the end of this region. */
31b97e8f 509 note = emit_note (NOTE_INSN_EH_REGION_END);
df4b504c 510 NOTE_EH_HANDLER (note) = cur_region->region_number;
511
512 /* Pop. */
513 cfun->eh->cur_region = cur_region->outer;
514
df4b504c 515 return cur_region;
444be12c 516}
517
df4b504c 518/* End an exception handling region for a cleanup. HANDLER is an
519 expression to expand for the cleanup. */
a85413a6 520
/* End an exception handling region for a cleanup.  HANDLER is an
   expression to expand for the cleanup.  The handler code is emitted
   out of line (jumped around on the normal path) and entered only via
   the EH machinery.  */

void
expand_eh_region_end_cleanup (tree handler)
{
  struct eh_region *region;
  tree protect_cleanup_actions;
  rtx around_label;
  rtx data_save[2];

  if (! doing_eh (0))
    return;

  region = expand_eh_region_end ();
  region->type = ERT_CLEANUP;
  region->label = gen_label_rtx ();
  region->u.cleanup.exp = handler;
  region->u.cleanup.prev_try = cfun->eh->try_region;

  /* Normal control flow skips the handler body entirely.  */
  around_label = gen_label_rtx ();
  emit_jump (around_label);

  emit_label (region->label);

  /* Only emit the handler body when it can actually be reached by an
     exception.  */
  if (flag_non_call_exceptions || region->may_contain_throw)
    {
      /* Give the language a chance to specify an action to be taken if an
	 exception is thrown that would propagate out of the HANDLER.  */
      protect_cleanup_actions
	= (lang_protect_cleanup_actions
	   ? (*lang_protect_cleanup_actions) ()
	   : NULL_TREE);

      if (protect_cleanup_actions)
	expand_eh_region_start ();

      /* In case this cleanup involves an inline destructor with a try block in
	 it, we need to save the EH return data registers around it.  */
      data_save[0] = gen_reg_rtx (ptr_mode);
      emit_move_insn (data_save[0], get_exception_pointer (cfun));
      data_save[1] = gen_reg_rtx (word_mode);
      emit_move_insn (data_save[1], get_exception_filter (cfun));

      expand_expr (handler, const0_rtx, VOIDmode, 0);

      /* Restore the saved exception data after the handler runs.  */
      emit_move_insn (cfun->eh->exc_ptr, data_save[0]);
      emit_move_insn (cfun->eh->filter, data_save[1]);

      if (protect_cleanup_actions)
	expand_eh_region_end_must_not_throw (protect_cleanup_actions);

      /* We need any stack adjustment complete before the around_label.  */
      do_pending_stack_adjust ();
    }

  /* We delay the generation of the _Unwind_Resume until we generate
     landing pads.  We emit a marker here so as to get good control
     flow data in the meantime.  */
  region->resume
    = emit_jump_insn (gen_rtx_RESX (VOIDmode, region->region_number));
  emit_barrier ();

  emit_label (around_label);
}
583
df4b504c 584/* End an exception handling region for a try block, and prepares
585 for subsequent calls to expand_start_catch. */
011a7f23 586
df4b504c 587void
35cb5232 588expand_start_all_catch (void)
011a7f23 589{
df4b504c 590 struct eh_region *region;
011a7f23 591
df4b504c 592 if (! doing_eh (1))
593 return;
011a7f23 594
df4b504c 595 region = expand_eh_region_end ();
596 region->type = ERT_TRY;
597 region->u.try.prev_try = cfun->eh->try_region;
598 region->u.try.continue_label = gen_label_rtx ();
011a7f23 599
df4b504c 600 cfun->eh->try_region = region;
601
602 emit_jump (region->u.try.continue_label);
603}
011a7f23 604
5c15c916 605/* Begin a catch clause. TYPE is the type caught, a list of such types, or
606 null if this is a catch-all clause. Providing a type list enables to
607 associate the catch region with potentially several exception types, which
00e0eb3d 608 is useful e.g. for Ada. */
011a7f23 609
df4b504c 610void
35cb5232 611expand_start_catch (tree type_or_list)
011a7f23 612{
df4b504c 613 struct eh_region *t, *c, *l;
5c15c916 614 tree type_list;
df4b504c 615
616 if (! doing_eh (0))
617 return;
618
5c15c916 619 type_list = type_or_list;
620
621 if (type_or_list)
622 {
623 /* Ensure to always end up with a type list to normalize further
624 processing, then register each type against the runtime types
625 map. */
626 tree type_node;
627
628 if (TREE_CODE (type_or_list) != TREE_LIST)
ff385626 629 type_list = tree_cons (NULL_TREE, type_or_list, NULL_TREE);
5c15c916 630
631 type_node = type_list;
632 for (; type_node; type_node = TREE_CHAIN (type_node))
ff385626 633 add_type_for_runtime (TREE_VALUE (type_node));
5c15c916 634 }
635
df4b504c 636 expand_eh_region_start ();
637
638 t = cfun->eh->try_region;
639 c = cfun->eh->cur_region;
640 c->type = ERT_CATCH;
5c15c916 641 c->u.catch.type_list = type_list;
df4b504c 642 c->label = gen_label_rtx ();
643
644 l = t->u.try.last_catch;
645 c->u.catch.prev_catch = l;
646 if (l)
647 l->u.catch.next_catch = c;
648 else
649 t->u.try.catch = c;
650 t->u.try.last_catch = c;
011a7f23 651
df4b504c 652 emit_label (c->label);
011a7f23 653}
654
df4b504c 655/* End a catch clause. Control will resume after the try/catch block. */
011a7f23 656
df4b504c 657void
35cb5232 658expand_end_catch (void)
011a7f23 659{
805e22b2 660 struct eh_region *try_region;
df4b504c 661
662 if (! doing_eh (0))
663 return;
664
805e22b2 665 expand_eh_region_end ();
df4b504c 666 try_region = cfun->eh->try_region;
667
668 emit_jump (try_region->u.try.continue_label);
011a7f23 669}
670
df4b504c 671/* End a sequence of catch handlers for a try block. */
011a7f23 672
df4b504c 673void
35cb5232 674expand_end_all_catch (void)
011a7f23 675{
df4b504c 676 struct eh_region *try_region;
677
678 if (! doing_eh (0))
679 return;
680
681 try_region = cfun->eh->try_region;
682 cfun->eh->try_region = try_region->u.try.prev_try;
683
684 emit_label (try_region->u.try.continue_label);
011a7f23 685}
686
df4b504c 687/* End an exception region for an exception type filter. ALLOWED is a
688 TREE_LIST of types to be matched by the runtime. FAILURE is an
424da949 689 expression to invoke if a mismatch occurs.
7de76e4b 690
691 ??? We could use these semantics for calls to rethrow, too; if we can
692 see the surrounding catch clause, we know that the exception we're
693 rethrowing satisfies the "filter" of the catch type. */
011a7f23 694
df4b504c 695void
35cb5232 696expand_eh_region_end_allowed (tree allowed, tree failure)
011a7f23 697{
df4b504c 698 struct eh_region *region;
699 rtx around_label;
011a7f23 700
df4b504c 701 if (! doing_eh (0))
702 return;
d63ea2f2 703
df4b504c 704 region = expand_eh_region_end ();
705 region->type = ERT_ALLOWED_EXCEPTIONS;
706 region->u.allowed.type_list = allowed;
707 region->label = gen_label_rtx ();
011a7f23 708
df4b504c 709 for (; allowed ; allowed = TREE_CHAIN (allowed))
710 add_type_for_runtime (TREE_VALUE (allowed));
011a7f23 711
df4b504c 712 /* We must emit the call to FAILURE here, so that if this function
713 throws a different exception, that it will be processed by the
714 correct region. */
011a7f23 715
df4b504c 716 around_label = gen_label_rtx ();
717 emit_jump (around_label);
718
719 emit_label (region->label);
720 expand_expr (failure, const0_rtx, VOIDmode, EXPAND_NORMAL);
97e0d249 721 /* We must adjust the stack before we reach the AROUND_LABEL because
722 the call to FAILURE does not occur on all paths to the
723 AROUND_LABEL. */
724 do_pending_stack_adjust ();
011a7f23 725
df4b504c 726 emit_label (around_label);
011a7f23 727}
728
df4b504c 729/* End an exception region for a must-not-throw filter. FAILURE is an
730 expression invoke if an uncaught exception propagates this far.
d63ea2f2 731
df4b504c 732 This is conceptually identical to expand_eh_region_end_allowed with
733 an empty allowed list (if you passed "std::terminate" instead of
734 "__cxa_call_unexpected"), but they are represented differently in
735 the C++ LSDA. */
ed74c60e 736
df4b504c 737void
35cb5232 738expand_eh_region_end_must_not_throw (tree failure)
d63ea2f2 739{
df4b504c 740 struct eh_region *region;
741 rtx around_label;
d63ea2f2 742
df4b504c 743 if (! doing_eh (0))
744 return;
ed74c60e 745
df4b504c 746 region = expand_eh_region_end ();
747 region->type = ERT_MUST_NOT_THROW;
748 region->label = gen_label_rtx ();
d63ea2f2 749
df4b504c 750 /* We must emit the call to FAILURE here, so that if this function
751 throws a different exception, that it will be processed by the
752 correct region. */
ed74c60e 753
df4b504c 754 around_label = gen_label_rtx ();
755 emit_jump (around_label);
ed74c60e 756
df4b504c 757 emit_label (region->label);
758 expand_expr (failure, const0_rtx, VOIDmode, EXPAND_NORMAL);
ed74c60e 759
df4b504c 760 emit_label (around_label);
d63ea2f2 761}
762
df4b504c 763/* End an exception region for a throw. No handling goes on here,
764 but it's the easiest way for the front-end to indicate what type
765 is being thrown. */
ed74c60e 766
df4b504c 767void
35cb5232 768expand_eh_region_end_throw (tree type)
d63ea2f2 769{
df4b504c 770 struct eh_region *region;
771
772 if (! doing_eh (0))
773 return;
774
775 region = expand_eh_region_end ();
776 region->type = ERT_THROW;
777 region->u.throw.type = type;
d63ea2f2 778}
779
df4b504c 780/* End a fixup region. Within this region the cleanups for the immediately
781 enclosing region are _not_ run. This is used for goto cleanup to avoid
782 destroying an object twice.
97bb6c17 783
df4b504c 784 This would be an extraordinarily simple prospect, were it not for the
785 fact that we don't actually know what the immediately enclosing region
786 is. This surprising fact is because expand_cleanups is currently
787 generating a sequence that it will insert somewhere else. We collect
788 the proper notion of "enclosing" in convert_from_eh_region_ranges. */
97ecdf3e 789
df4b504c 790void
35cb5232 791expand_eh_region_end_fixup (tree handler)
97ecdf3e 792{
df4b504c 793 struct eh_region *fixup;
794
795 if (! doing_eh (0))
796 return;
797
798 fixup = expand_eh_region_end ();
799 fixup->type = ERT_FIXUP;
800 fixup->u.fixup.cleanup_exp = handler;
97ecdf3e 801}
802
95cedffb 803/* Note that the current EH region (if any) may contain a throw, or a
804 call to a function which itself may contain a throw. */
805
806void
35cb5232 807note_eh_region_may_contain_throw (void)
95cedffb 808{
809 struct eh_region *region;
810
811 region = cfun->eh->cur_region;
812 while (region && !region->may_contain_throw)
813 {
814 region->may_contain_throw = 1;
815 region = region->outer;
816 }
817}
818
1639c6db 819/* Return an rtl expression for a pointer to the exception object
df4b504c 820 within a handler. */
97ecdf3e 821
822rtx
35cb5232 823get_exception_pointer (struct function *fun)
97ecdf3e 824{
572fdaa3 825 rtx exc_ptr = fun->eh->exc_ptr;
826 if (fun == cfun && ! exc_ptr)
df4b504c 827 {
f797387a 828 exc_ptr = gen_reg_rtx (ptr_mode);
572fdaa3 829 fun->eh->exc_ptr = exc_ptr;
df4b504c 830 }
831 return exc_ptr;
832}
97ecdf3e 833
1639c6db 834/* Return an rtl expression for the exception dispatch filter
835 within a handler. */
836
837static rtx
35cb5232 838get_exception_filter (struct function *fun)
1639c6db 839{
572fdaa3 840 rtx filter = fun->eh->filter;
841 if (fun == cfun && ! filter)
1639c6db 842 {
49b1637a 843 filter = gen_reg_rtx (word_mode);
572fdaa3 844 fun->eh->filter = filter;
1639c6db 845 }
846 return filter;
847}
df4b504c 848\f
849/* This section is for the exception handling specific optimization pass. */
732992fa 850
df4b504c 851/* Random access the exception region tree. It's just as simple to
852 collect the regions this way as in expand_eh_region_start, but
853 without having to realloc memory. */
732992fa 854
/* Fill cfun->eh->region_array so that regions can be looked up by
   region_number.  It's just as simple to collect the regions this way
   as in expand_eh_region_start, but without having to realloc memory.
   The walk is an iterative depth-first traversal of the region tree.  */

static void
collect_eh_region_array (void)
{
  struct eh_region **array, *i;

  i = cfun->eh->region_tree;
  if (! i)
    return;

  /* Region numbers run 1..last_region_number; slot 0 stays unused.  */
  array = ggc_alloc_cleared ((cfun->eh->last_region_number + 1)
			     * sizeof (*array));
  cfun->eh->region_array = array;

  while (1)
    {
      array[i->region_number] = i;

      /* If there are sub-regions, process them.  */
      if (i->inner)
	i = i->inner;
      /* If there are peers, process them.  */
      else if (i->next_peer)
	i = i->next_peer;
      /* Otherwise, step back up the tree to the next peer; reaching
	 the root (outer == NULL) terminates the walk.  */
      else
	{
	  do {
	    i = i->outer;
	    if (i == NULL)
	      return;
	  } while (i->next_peer == NULL);
	  i = i->next_peer;
	}
    }
}
890
618fa93e 891static void
892resolve_one_fixup_region (struct eh_region *fixup)
893{
894 struct eh_region *cleanup, *real;
895 int j, n;
896
897 n = cfun->eh->last_region_number;
898 cleanup = 0;
899
900 for (j = 1; j <= n; ++j)
901 {
902 cleanup = cfun->eh->region_array[j];
903 if (cleanup && cleanup->type == ERT_CLEANUP
904 && cleanup->u.cleanup.exp == fixup->u.fixup.cleanup_exp)
905 break;
906 }
907 if (j > n)
908 abort ();
909
910 real = cleanup->outer;
911 if (real && real->type == ERT_FIXUP)
912 {
913 if (!real->u.fixup.resolved)
914 resolve_one_fixup_region (real);
915 real = real->u.fixup.real_region;
916 }
917
918 fixup->u.fixup.real_region = real;
919 fixup->u.fixup.resolved = true;
920}
921
df4b504c 922static void
35cb5232 923resolve_fixup_regions (void)
8591d03a 924{
618fa93e 925 int i, n = cfun->eh->last_region_number;
8591d03a 926
df4b504c 927 for (i = 1; i <= n; ++i)
928 {
929 struct eh_region *fixup = cfun->eh->region_array[i];
8591d03a 930
618fa93e 931 if (!fixup || fixup->type != ERT_FIXUP || fixup->u.fixup.resolved)
df4b504c 932 continue;
8591d03a 933
618fa93e 934 resolve_one_fixup_region (fixup);
df4b504c 935 }
8591d03a 936}
8591d03a 937
df4b504c 938/* Now that we've discovered what region actually encloses a fixup,
939 we can shuffle pointers and remove them from the tree. */
8591d03a 940
static void
remove_fixup_regions (void)
{
  int i;
  rtx insn, note;
  struct eh_region *fixup;

  /* Walk the insn chain and adjust the REG_EH_REGION numbers
     for instructions referencing fixup regions.  This is only
     strictly necessary for fixup regions with no parent, but
     doesn't hurt to do it for all regions.  */
  for (insn = get_insns(); insn ; insn = NEXT_INSN (insn))
    if (INSN_P (insn)
	&& (note = find_reg_note (insn, REG_EH_REGION, NULL))
	&& INTVAL (XEXP (note, 0)) > 0
	&& (fixup = cfun->eh->region_array[INTVAL (XEXP (note, 0))])
	&& fixup->type == ERT_FIXUP)
      {
	if (fixup->u.fixup.real_region)
	  XEXP (note, 0) = GEN_INT (fixup->u.fixup.real_region->region_number);
	else
	  /* No real region: the insn cannot throw to anything here.  */
	  remove_note (insn, note);
      }

  /* Remove the fixup regions from the tree.  */
  for (i = cfun->eh->last_region_number; i > 0; --i)
    {
      fixup = cfun->eh->region_array[i];
      if (! fixup)
	continue;

      /* Allow GC to maybe free some memory.  */
      if (fixup->type == ERT_CLEANUP)
	fixup->u.cleanup.exp = NULL_TREE;

      if (fixup->type != ERT_FIXUP)
	continue;

      if (fixup->inner)
	{
	  struct eh_region *parent, *p, **pp;

	  parent = fixup->u.fixup.real_region;

	  /* Fix up the children's parent pointers; find the end of
	     the list.  */
	  for (p = fixup->inner; ; p = p->next_peer)
	    {
	      p->outer = parent;
	      if (! p->next_peer)
		break;
	    }

	  /* In the tree of cleanups, only outer-inner ordering matters.
	     So link the children back in anywhere at the correct level.
	     P is still the last child, so splice the whole child list
	     onto the front of the parent's (or root's) child list.  */
	  if (parent)
	    pp = &parent->inner;
	  else
	    pp = &cfun->eh->region_tree;
	  p->next_peer = *pp;
	  *pp = fixup->inner;
	  fixup->inner = NULL;
	}

      /* Now childless, the fixup region itself can be deleted.  */
      remove_eh_handler (fixup);
    }
}
1008
f491db07 1009/* Remove all regions whose labels are not reachable from insns. */
1010
static void
remove_unreachable_regions (rtx insns)
{
  int i, *uid_region_num;
  bool *reachable;
  struct eh_region *r;
  rtx insn;

  /* Map insn UID -> region number for each insn a region points at,
     then mark a region reachable when one of those insns is still in
     the chain INSNS.  */
  uid_region_num = xcalloc (get_max_uid (), sizeof(int));
  reachable = xcalloc (cfun->eh->last_region_number + 1, sizeof(bool));

  for (i = cfun->eh->last_region_number; i > 0; --i)
    {
      r = cfun->eh->region_array[i];
      if (!r || r->region_number != i)
	continue;

      /* Each of these insns may belong to at most one region.  */
      if (r->resume)
	{
	  if (uid_region_num[INSN_UID (r->resume)])
	    abort ();
	  uid_region_num[INSN_UID (r->resume)] = i;
	}
      if (r->label)
	{
	  if (uid_region_num[INSN_UID (r->label)])
	    abort ();
	  uid_region_num[INSN_UID (r->label)] = i;
	}
      if (r->type == ERT_TRY && r->u.try.continue_label)
	{
	  if (uid_region_num[INSN_UID (r->u.try.continue_label)])
	    abort ();
	  uid_region_num[INSN_UID (r->u.try.continue_label)] = i;
	}
    }

  for (insn = insns; insn; insn = NEXT_INSN (insn))
    reachable[uid_region_num[INSN_UID (insn)]] = true;

  /* Delete every region none of whose insns survived.  */
  for (i = cfun->eh->last_region_number; i > 0; --i)
    {
      r = cfun->eh->region_array[i];
      if (r && r->region_number == i && !reachable[i])
	{
	  /* Don't remove ERT_THROW regions if their outer region
	     is reachable.  */
	  if (r->type == ERT_THROW
	      && r->outer
	      && reachable[r->outer->region_number])
	    continue;

	  remove_eh_handler (r);
	}
    }

  free (reachable);
  free (uid_region_num);
}
1070
df4b504c 1071/* Turn NOTE_INSN_EH_REGION notes into REG_EH_REGION notes for each
1072 can_throw instruction in the region. */
8591d03a 1073
static void
convert_from_eh_region_ranges_1 (rtx *pinsns, int *orig_sp, int cur)
{
  /* ORIG_SP is a stack of enclosing region numbers; CUR is the region
     currently in effect.  PINSNS points at the head of the insn chain
     being rewritten (it may be updated if the first insn is removed).  */
  int *sp = orig_sp;
  rtx insn, next;

  for (insn = *pinsns; insn ; insn = next)
    {
      next = NEXT_INSN (insn);
      if (GET_CODE (insn) == NOTE)
	{
	  int kind = NOTE_LINE_NUMBER (insn);
	  if (kind == NOTE_INSN_EH_REGION_BEG
	      || kind == NOTE_INSN_EH_REGION_END)
	    {
	      if (kind == NOTE_INSN_EH_REGION_BEG)
		{
		  struct eh_region *r;

		  /* Push the enclosing region and enter the new one.  */
		  *sp++ = cur;
		  cur = NOTE_EH_HANDLER (insn);

		  /* Fixup and catch regions don't themselves receive
		     insns; record their effective region instead.  */
		  r = cfun->eh->region_array[cur];
		  if (r->type == ERT_FIXUP)
		    {
		      r = r->u.fixup.real_region;
		      cur = r ? r->region_number : 0;
		    }
		  else if (r->type == ERT_CATCH)
		    {
		      r = r->outer;
		      cur = r ? r->region_number : 0;
		    }
		}
	      else
		cur = *--sp;

	      /* Removing the first insn of a CALL_PLACEHOLDER sequence
		 requires extra care to adjust sequence start.  */
	      if (insn == *pinsns)
		*pinsns = next;
	      remove_insn (insn);
	      continue;
	    }
	}
      else if (INSN_P (insn))
	{
	  if (cur > 0
	      && ! find_reg_note (insn, REG_EH_REGION, NULL_RTX)
	      /* Calls can always potentially throw exceptions, unless
		 they have a REG_EH_REGION note with a value of 0 or less.
		 Which should be the only possible kind so far.  */
	      && (GET_CODE (insn) == CALL_INSN
		  /* If we wanted exceptions for non-call insns, then
		     any may_trap_p instruction could throw.  */
		  || (flag_non_call_exceptions
		      && GET_CODE (PATTERN (insn)) != CLOBBER
		      && GET_CODE (PATTERN (insn)) != USE
		      && may_trap_p (PATTERN (insn)))))
	    {
	      REG_NOTES (insn) = alloc_EXPR_LIST (REG_EH_REGION, GEN_INT (cur),
						  REG_NOTES (insn));
	    }

	  /* Recurse into the sub-sequences of a CALL_PLACEHOLDER,
	     which hold alternate expansions of the same call.  */
	  if (GET_CODE (insn) == CALL_INSN
	      && GET_CODE (PATTERN (insn)) == CALL_PLACEHOLDER)
	    {
	      convert_from_eh_region_ranges_1 (&XEXP (PATTERN (insn), 0),
					       sp, cur);
	      convert_from_eh_region_ranges_1 (&XEXP (PATTERN (insn), 1),
					       sp, cur);
	      convert_from_eh_region_ranges_1 (&XEXP (PATTERN (insn), 2),
					       sp, cur);
	    }
	}
    }

  /* BEG/END notes must have balanced; otherwise the stack is corrupt.  */
  if (sp != orig_sp)
    abort ();
}
8591d03a 1154
df4b504c 1155void
35cb5232 1156convert_from_eh_region_ranges (void)
df4b504c 1157{
1158 int *stack;
1159 rtx insns;
8591d03a 1160
df4b504c 1161 collect_eh_region_array ();
1162 resolve_fixup_regions ();
8591d03a 1163
df4b504c 1164 stack = xmalloc (sizeof (int) * (cfun->eh->last_region_number + 1));
1165 insns = get_insns ();
1166 convert_from_eh_region_ranges_1 (&insns, stack, 0);
1167 free (stack);
8591d03a 1168
df4b504c 1169 remove_fixup_regions ();
f491db07 1170 remove_unreachable_regions (insns);
8591d03a 1171}
1172
/* Record LABEL as an exception handler label belonging to REGION
   (NULL for the sjlj return label) in the per-function label map,
   and preserve the label so it is never deleted.  */

static void
add_ehl_entry (rtx label, struct eh_region *region)
{
  struct ehl_map_entry **slot, *entry;

  LABEL_PRESERVE_P (label) = 1;

  entry = ggc_alloc (sizeof (*entry));
  entry->label = label;
  entry->region = region;

  slot = (struct ehl_map_entry **)
    htab_find_slot (cfun->eh->exception_handler_label_map, entry, INSERT);

  /* Before landing pad creation, each exception handler has its own
     label.  After landing pad creation, the exception handlers may
     share landing pads.  This is ok, since maybe_remove_eh_handler
     only requires the 1-1 mapping before landing pad creation.  */
  if (*slot && !cfun->eh->built_landing_pads)
    abort ();

  *slot = entry;
}
1196
/* Rebuild the map of exception handler labels for the current
   function: one entry per region, using the landing pad label once
   landing pads have been built and the handler label before that.  */

void
find_exception_handler_labels (void)
{
  int i;

  if (cfun->eh->exception_handler_label_map)
    htab_empty (cfun->eh->exception_handler_label_map);
  else
    {
      /* ??? The expansion factor here (3/2) must be greater than the htab
	 occupancy factor (4/3) to avoid unnecessary resizing.  */
      cfun->eh->exception_handler_label_map
        = htab_create_ggc (cfun->eh->last_region_number * 3 / 2,
			   ehl_hash, ehl_eq, NULL);
    }

  if (cfun->eh->region_tree == NULL)
    return;

  for (i = cfun->eh->last_region_number; i > 0; --i)
    {
      struct eh_region *region = cfun->eh->region_array[i];
      rtx lab;

      /* Skip empty slots and duplicate entries.  */
      if (! region || region->region_number != i)
	continue;
      if (cfun->eh->built_landing_pads)
	lab = region->landing_pad;
      else
	lab = region->label;

      if (lab)
	add_ehl_entry (lab, region);
    }

  /* For sjlj exceptions, need the return label to remain live until
     after landing pad generation.  */
  if (USING_SJLJ_EXCEPTIONS && ! cfun->eh->built_landing_pads)
    add_ehl_entry (return_label, NULL);
}
1237
7cb6ef9c 1238bool
35cb5232 1239current_function_has_exception_handlers (void)
7cb6ef9c 1240{
1241 int i;
1242
1243 for (i = cfun->eh->last_region_number; i > 0; --i)
1244 {
1245 struct eh_region *region = cfun->eh->region_array[i];
1246
1247 if (! region || region->region_number != i)
1248 continue;
1249 if (region->type != ERT_THROW)
1250 return true;
1251 }
1252
1253 return false;
1254}
df4b504c 1255\f
1256static struct eh_region *
35cb5232 1257duplicate_eh_region_1 (struct eh_region *o, struct inline_remap *map)
97ecdf3e 1258{
f0af5a88 1259 struct eh_region *n = ggc_alloc_cleared (sizeof (struct eh_region));
97ecdf3e 1260
df4b504c 1261 n->region_number = o->region_number + cfun->eh->last_region_number;
1262 n->type = o->type;
97ecdf3e 1263
df4b504c 1264 switch (n->type)
1265 {
1266 case ERT_CLEANUP:
1267 case ERT_MUST_NOT_THROW:
1268 break;
8591d03a 1269
df4b504c 1270 case ERT_TRY:
1271 if (o->u.try.continue_label)
1272 n->u.try.continue_label
1273 = get_label_from_map (map,
1274 CODE_LABEL_NUMBER (o->u.try.continue_label));
1275 break;
8591d03a 1276
df4b504c 1277 case ERT_CATCH:
5c15c916 1278 n->u.catch.type_list = o->u.catch.type_list;
df4b504c 1279 break;
8591d03a 1280
df4b504c 1281 case ERT_ALLOWED_EXCEPTIONS:
1282 n->u.allowed.type_list = o->u.allowed.type_list;
1283 break;
1284
1285 case ERT_THROW:
1286 n->u.throw.type = o->u.throw.type;
1ed5443b 1287
df4b504c 1288 default:
1289 abort ();
1290 }
1291
1292 if (o->label)
1293 n->label = get_label_from_map (map, CODE_LABEL_NUMBER (o->label));
1639c6db 1294 if (o->resume)
777ddd59 1295 {
1639c6db 1296 n->resume = map->insn_map[INSN_UID (o->resume)];
1297 if (n->resume == NULL)
df4b504c 1298 abort ();
8591d03a 1299 }
97ecdf3e 1300
df4b504c 1301 return n;
97ecdf3e 1302}
1303
df4b504c 1304static void
35cb5232 1305duplicate_eh_region_2 (struct eh_region *o, struct eh_region **n_array)
3ec33c57 1306{
df4b504c 1307 struct eh_region *n = n_array[o->region_number];
3ec33c57 1308
df4b504c 1309 switch (n->type)
1310 {
1311 case ERT_TRY:
1312 n->u.try.catch = n_array[o->u.try.catch->region_number];
1313 n->u.try.last_catch = n_array[o->u.try.last_catch->region_number];
1314 break;
97bb6c17 1315
df4b504c 1316 case ERT_CATCH:
1317 if (o->u.catch.next_catch)
ff385626 1318 n->u.catch.next_catch = n_array[o->u.catch.next_catch->region_number];
df4b504c 1319 if (o->u.catch.prev_catch)
ff385626 1320 n->u.catch.prev_catch = n_array[o->u.catch.prev_catch->region_number];
df4b504c 1321 break;
97bb6c17 1322
df4b504c 1323 default:
1324 break;
1325 }
97ecdf3e 1326
df4b504c 1327 if (o->outer)
1328 n->outer = n_array[o->outer->region_number];
1329 if (o->inner)
1330 n->inner = n_array[o->inner->region_number];
1331 if (o->next_peer)
1332 n->next_peer = n_array[o->next_peer->region_number];
1ed5443b 1333}
df4b504c 1334
/* Copy all exception regions of IFUN into the current function for
   inlining, remapping labels/insns through MAP.  Returns the offset
   that was added to every duplicated region number (the current
   function's previous last region number), or 0 if IFUN had none.  */

int
duplicate_eh_regions (struct function *ifun, struct inline_remap *map)
{
  int ifun_last_region_number = ifun->eh->last_region_number;
  struct eh_region **n_array, *root, *cur;
  int i;

  if (ifun_last_region_number == 0)
    return 0;

  /* Temporary old-region-number -> new-region map.  */
  n_array = xcalloc (ifun_last_region_number + 1, sizeof (*n_array));

  /* First pass: copy each region in isolation.  */
  for (i = 1; i <= ifun_last_region_number; ++i)
    {
      cur = ifun->eh->region_array[i];
      if (!cur || cur->region_number != i)
	continue;
      n_array[i] = duplicate_eh_region_1 (cur, map);
    }
  /* Second pass: translate the inter-region pointers.  */
  for (i = 1; i <= ifun_last_region_number; ++i)
    {
      cur = ifun->eh->region_array[i];
      if (!cur || cur->region_number != i)
	continue;
      duplicate_eh_region_2 (cur, n_array);
    }

  /* Splice the copied tree into the current function, either under
     the region currently being expanded or at top level.  */
  root = n_array[ifun->eh->region_tree->region_number];
  cur = cfun->eh->cur_region;
  if (cur)
    {
      struct eh_region *p = cur->inner;
      if (p)
	{
	  while (p->next_peer)
	    p = p->next_peer;
	  p->next_peer = root;
	}
      else
	cur->inner = root;

      /* Copied top-level regions become children of CUR.  */
      for (i = 1; i <= ifun_last_region_number; ++i)
	if (n_array[i] && n_array[i]->outer == NULL)
	  n_array[i]->outer = cur;
    }
  else
    {
      struct eh_region *p = cfun->eh->region_tree;
      if (p)
	{
	  while (p->next_peer)
	    p = p->next_peer;
	  p->next_peer = root;
	}
      else
	cfun->eh->region_tree = root;
    }

  free (n_array);

  /* Reserve the duplicated numbers and return the offset used.  */
  i = cfun->eh->last_region_number;
  cfun->eh->last_region_number = i + ifun_last_region_number;
  return i;
}
1399
df4b504c 1400\f
df4b504c 1401static int
35cb5232 1402t2r_eq (const void *pentry, const void *pdata)
506b6864 1403{
df4b504c 1404 tree entry = (tree) pentry;
1405 tree data = (tree) pdata;
506b6864 1406
df4b504c 1407 return TREE_PURPOSE (entry) == data;
506b6864 1408}
1409
df4b504c 1410static hashval_t
35cb5232 1411t2r_hash (const void *pentry)
df4b504c 1412{
1413 tree entry = (tree) pentry;
1414 return TYPE_HASH (TREE_PURPOSE (entry));
1415}
506b6864 1416
df4b504c 1417static void
35cb5232 1418add_type_for_runtime (tree type)
df4b504c 1419{
1420 tree *slot;
506b6864 1421
df4b504c 1422 slot = (tree *) htab_find_slot_with_hash (type_to_runtime_map, type,
1423 TYPE_HASH (type), INSERT);
1424 if (*slot == NULL)
1425 {
1426 tree runtime = (*lang_eh_runtime_type) (type);
1427 *slot = tree_cons (type, runtime, NULL_TREE);
1428 }
1429}
1ed5443b 1430
df4b504c 1431static tree
35cb5232 1432lookup_type_for_runtime (tree type)
df4b504c 1433{
1434 tree *slot;
8cd32046 1435
df4b504c 1436 slot = (tree *) htab_find_slot_with_hash (type_to_runtime_map, type,
1437 TYPE_HASH (type), NO_INSERT);
8cd32046 1438
3fb1e43b 1439 /* We should have always inserted the data earlier. */
df4b504c 1440 return TREE_VALUE (*slot);
1441}
506b6864 1442
df4b504c 1443\f
1444/* Represent an entry in @TTypes for either catch actions
1445 or exception filter actions. */
struct ttypes_filter GTY(())
{
  /* The type caught (for @TTypes entries) or the exception
     specification type list (for ehspec entries).  */
  tree t;
  /* Assigned filter value: a 1-based @TTypes index for single types,
     or a negative (-1 based) byte index into the uleb128 ehspec
     buffer for exception specification lists.  */
  int filter;
};
8cd32046 1451
df4b504c 1452/* Compare ENTRY (a ttypes_filter entry in the hash table) with DATA
1453 (a tree) for a @TTypes type node we are thinking about adding. */
8cd32046 1454
df4b504c 1455static int
35cb5232 1456ttypes_filter_eq (const void *pentry, const void *pdata)
df4b504c 1457{
1458 const struct ttypes_filter *entry = (const struct ttypes_filter *) pentry;
1459 tree data = (tree) pdata;
8cd32046 1460
df4b504c 1461 return entry->t == data;
506b6864 1462}
1463
df4b504c 1464static hashval_t
35cb5232 1465ttypes_filter_hash (const void *pentry)
df4b504c 1466{
1467 const struct ttypes_filter *entry = (const struct ttypes_filter *) pentry;
1468 return TYPE_HASH (entry->t);
1469}
97ecdf3e 1470
df4b504c 1471/* Compare ENTRY with DATA (both struct ttypes_filter) for a @TTypes
1472 exception specification list we are thinking about adding. */
1473/* ??? Currently we use the type lists in the order given. Someone
1474 should put these in some canonical order. */
1475
1476static int
35cb5232 1477ehspec_filter_eq (const void *pentry, const void *pdata)
97ecdf3e 1478{
df4b504c 1479 const struct ttypes_filter *entry = (const struct ttypes_filter *) pentry;
1480 const struct ttypes_filter *data = (const struct ttypes_filter *) pdata;
1481
1482 return type_list_equal (entry->t, data->t);
97ecdf3e 1483}
1484
df4b504c 1485/* Hash function for exception specification lists. */
97ecdf3e 1486
df4b504c 1487static hashval_t
35cb5232 1488ehspec_filter_hash (const void *pentry)
97ecdf3e 1489{
df4b504c 1490 const struct ttypes_filter *entry = (const struct ttypes_filter *) pentry;
1491 hashval_t h = 0;
1492 tree list;
1493
1494 for (list = entry->t; list ; list = TREE_CHAIN (list))
1495 h = (h << 5) + (h >> 27) + TYPE_HASH (TREE_VALUE (list));
1496 return h;
97ecdf3e 1497}
1498
df4b504c 1499/* Add TYPE to cfun->eh->ttype_data, using TYPES_HASH to speed
1500 up the search. Return the filter value to be used. */
97ecdf3e 1501
static int
add_ttypes_entry (htab_t ttypes_hash, tree type)
{
  struct ttypes_filter **slot, *n;

  slot = (struct ttypes_filter **)
    htab_find_slot_with_hash (ttypes_hash, type, TYPE_HASH (type), INSERT);

  if ((n = *slot) == NULL)
    {
      /* Filter value is a 1 based table index.  */

      n = xmalloc (sizeof (*n));
      n->t = type;
      n->filter = VARRAY_ACTIVE_SIZE (cfun->eh->ttype_data) + 1;
      *slot = n;

      /* Record the type in the function's @TTypes table; the filter
	 value indexes this entry.  */
      VARRAY_PUSH_TREE (cfun->eh->ttype_data, type);
    }

  return n->filter;
}
1524
df4b504c 1525/* Add LIST to cfun->eh->ehspec_data, using EHSPEC_HASH and TYPES_HASH
1526 to speed up the search. Return the filter value to be used. */
1527
static int
add_ehspec_entry (htab_t ehspec_hash, htab_t ttypes_hash, tree list)
{
  struct ttypes_filter **slot, *n;
  struct ttypes_filter dummy;

  /* Probe the hash with a stack dummy so we only allocate on a miss.  */
  dummy.t = list;
  slot = (struct ttypes_filter **)
    htab_find_slot (ehspec_hash, &dummy, INSERT);

  if ((n = *slot) == NULL)
    {
      /* Filter value is a -1 based byte index into a uleb128 buffer.  */

      n = xmalloc (sizeof (*n));
      n->t = list;
      n->filter = -(VARRAY_ACTIVE_SIZE (cfun->eh->ehspec_data) + 1);
      *slot = n;

      /* Look up each type in the list and encode its filter
	 value as a uleb128.  Terminate the list with 0.  */
      for (; list ; list = TREE_CHAIN (list))
	push_uleb128 (&cfun->eh->ehspec_data,
		      add_ttypes_entry (ttypes_hash, TREE_VALUE (list)));
      VARRAY_PUSH_UCHAR (cfun->eh->ehspec_data, 0);
    }

  return n->filter;
}
1557
df4b504c 1558/* Generate the action filter values to be used for CATCH and
1559 ALLOWED_EXCEPTIONS regions. When using dwarf2 exception regions,
1560 we use lots of landing pads, and so every type or list can share
1561 the same filter value, which saves table space. */
1562
static void
assign_filter_values (void)
{
  int i;
  htab_t ttypes, ehspec;

  VARRAY_TREE_INIT (cfun->eh->ttype_data, 16, "ttype_data");
  VARRAY_UCHAR_INIT (cfun->eh->ehspec_data, 64, "ehspec_data");

  /* Temporary hashes used only to deduplicate filter assignments.  */
  ttypes = htab_create (31, ttypes_filter_hash, ttypes_filter_eq, free);
  ehspec = htab_create (31, ehspec_filter_hash, ehspec_filter_eq, free);

  for (i = cfun->eh->last_region_number; i > 0; --i)
    {
      struct eh_region *r = cfun->eh->region_array[i];

      /* Mind we don't process a region more than once.  */
      if (!r || r->region_number != i)
	continue;

      switch (r->type)
	{
	case ERT_CATCH:
	  /* Whatever type_list is (NULL or true list), we build a list
	     of filters for the region.  */
	  r->u.catch.filter_list = NULL_TREE;

	  if (r->u.catch.type_list != NULL)
	    {
	      /* Get a filter value for each of the types caught and store
		 them in the region's dedicated list.  */
	      tree tp_node = r->u.catch.type_list;

	      for (;tp_node; tp_node = TREE_CHAIN (tp_node))
		{
		  int flt = add_ttypes_entry (ttypes, TREE_VALUE (tp_node));
		  tree flt_node = build_int_2 (flt, 0);

		  r->u.catch.filter_list
		    = tree_cons (NULL_TREE, flt_node, r->u.catch.filter_list);
		}
	    }
	  else
	    {
	      /* Get a filter value for the NULL list also since it will need
		 an action record anyway.  */
	      int flt = add_ttypes_entry (ttypes, NULL);
	      tree flt_node = build_int_2 (flt, 0);

	      r->u.catch.filter_list
		= tree_cons (NULL_TREE, flt_node, r->u.catch.filter_list);
	    }

	  break;

	case ERT_ALLOWED_EXCEPTIONS:
	  r->u.allowed.filter
	    = add_ehspec_entry (ehspec, ttypes, r->u.allowed.type_list);
	  break;

	default:
	  break;
	}
    }

  htab_delete (ttypes);
  htab_delete (ehspec);
}
1631
31804070 1632/* Generate the code to actually handle exceptions, which will follow the
1633 landing pads. */
1634
static void
build_post_landing_pads (void)
{
  int i;

  for (i = cfun->eh->last_region_number; i > 0; --i)
    {
      struct eh_region *region = cfun->eh->region_array[i];
      rtx seq;

      /* Mind we don't process a region more than once.  */
      if (!region || region->region_number != i)
	continue;

      switch (region->type)
	{
	case ERT_TRY:
	  /* ??? Collect the set of all non-overlapping catch handlers
	       all the way up the chain until blocked by a cleanup.  */
	  /* ??? Outer try regions can share landing pads with inner
	     try regions if the types are completely non-overlapping,
	     and there are no intervening cleanups.  */

	  region->post_landing_pad = gen_label_rtx ();

	  start_sequence ();

	  emit_label (region->post_landing_pad);

	  /* ??? It is mighty inconvenient to call back into the
	     switch statement generation code in expand_end_case.
	     Rapid prototyping sez a sequence of ifs.  */
	  {
	    struct eh_region *c;
	    for (c = region->u.try.catch; c ; c = c->u.catch.next_catch)
	      {
		/* A catch-all (NULL type list) needs no filter test.  */
		if (c->u.catch.type_list == NULL)
		  emit_jump (c->label);
		else
		  {
		    /* Need for one cmp/jump per type caught. Each type
		       list entry has a matching entry in the filter list
		       (see assign_filter_values).  */
		    tree tp_node = c->u.catch.type_list;
		    tree flt_node = c->u.catch.filter_list;

		    for (; tp_node; )
		      {
			emit_cmp_and_jump_insns
			  (cfun->eh->filter,
			   GEN_INT (tree_low_cst (TREE_VALUE (flt_node), 0)),
			   EQ, NULL_RTX, word_mode, 0, c->label);

			tp_node = TREE_CHAIN (tp_node);
			flt_node = TREE_CHAIN (flt_node);
		      }
		  }
	      }
	  }

	  /* We delay the generation of the _Unwind_Resume until we generate
	     landing pads.  We emit a marker here so as to get good control
	     flow data in the meantime.  */
	  region->resume
	    = emit_jump_insn (gen_rtx_RESX (VOIDmode, region->region_number));
	  emit_barrier ();

	  seq = get_insns ();
	  end_sequence ();

	  /* The dispatch code runs just before the first catch body.  */
	  emit_insn_before (seq, region->u.try.catch->label);
	  break;

	case ERT_ALLOWED_EXCEPTIONS:
	  region->post_landing_pad = gen_label_rtx ();

	  start_sequence ();

	  emit_label (region->post_landing_pad);

	  /* Jump to the handler only when the filter matches the
	     allowed exception specification.  */
	  emit_cmp_and_jump_insns (cfun->eh->filter,
				   GEN_INT (region->u.allowed.filter),
				   EQ, NULL_RTX, word_mode, 0, region->label);

	  /* We delay the generation of the _Unwind_Resume until we generate
	     landing pads.  We emit a marker here so as to get good control
	     flow data in the meantime.  */
	  region->resume
	    = emit_jump_insn (gen_rtx_RESX (VOIDmode, region->region_number));
	  emit_barrier ();

	  seq = get_insns ();
	  end_sequence ();

	  emit_insn_before (seq, region->label);
	  break;

	case ERT_CLEANUP:
	case ERT_MUST_NOT_THROW:
	  /* No dispatch needed; control goes straight to the handler.  */
	  region->post_landing_pad = region->label;
	  break;

	case ERT_CATCH:
	case ERT_THROW:
	  /* Nothing to do.  */
	  break;

	default:
	  abort ();
	}
    }
}
6c74b671 1747
1639c6db 1748/* Replace RESX patterns with jumps to the next handler if any, or calls to
1749 _Unwind_Resume otherwise. */
1750
6c74b671 1751static void
35cb5232 1752connect_post_landing_pads (void)
6c74b671 1753{
df4b504c 1754 int i;
1228f0d8 1755
df4b504c 1756 for (i = cfun->eh->last_region_number; i > 0; --i)
1757 {
1758 struct eh_region *region = cfun->eh->region_array[i];
1759 struct eh_region *outer;
1639c6db 1760 rtx seq;
6c74b671 1761
df4b504c 1762 /* Mind we don't process a region more than once. */
1763 if (!region || region->region_number != i)
1764 continue;
6c74b671 1765
1639c6db 1766 /* If there is no RESX, or it has been deleted by flow, there's
1767 nothing to fix up. */
1768 if (! region->resume || INSN_DELETED_P (region->resume))
df4b504c 1769 continue;
1228f0d8 1770
df4b504c 1771 /* Search for another landing pad in this function. */
1772 for (outer = region->outer; outer ; outer = outer->outer)
1773 if (outer->post_landing_pad)
1774 break;
6c74b671 1775
df4b504c 1776 start_sequence ();
97bb6c17 1777
df4b504c 1778 if (outer)
1779 emit_jump (outer->post_landing_pad);
1780 else
ab7ccfa2 1781 emit_library_call (unwind_resume_libfunc, LCT_THROW,
f797387a 1782 VOIDmode, 1, cfun->eh->exc_ptr, ptr_mode);
97ecdf3e 1783
df4b504c 1784 seq = get_insns ();
1785 end_sequence ();
31d3e01c 1786 emit_insn_before (seq, region->resume);
e4bf866d 1787 delete_insn (region->resume);
df4b504c 1788 }
1789}
1790
1791\f
/* Emit one landing pad per cleanup/try/allowed region for the dwarf2
   unwinder: receive control, recover the exception pointer and filter
   from the EH return data registers, then fall into the region's
   post-landing-pad dispatch code.  */

static void
dw2_build_landing_pads (void)
{
  int i;
  unsigned int j;

  for (i = cfun->eh->last_region_number; i > 0; --i)
    {
      struct eh_region *region = cfun->eh->region_array[i];
      rtx seq;
      bool clobbers_hard_regs = false;

      /* Mind we don't process a region more than once.  */
      if (!region || region->region_number != i)
	continue;

      /* Only these region kinds receive control from the unwinder.  */
      if (region->type != ERT_CLEANUP
	  && region->type != ERT_TRY
	  && region->type != ERT_ALLOWED_EXCEPTIONS)
	continue;

      start_sequence ();

      region->landing_pad = gen_label_rtx ();
      emit_label (region->landing_pad);

#ifdef HAVE_exception_receiver
      if (HAVE_exception_receiver)
	emit_insn (gen_exception_receiver ());
      else
#endif
#ifdef HAVE_nonlocal_goto_receiver
      if (HAVE_nonlocal_goto_receiver)
	emit_insn (gen_nonlocal_goto_receiver ());
      else
#endif
	{ /* Nothing */ }

      /* If the eh_return data registers are call-saved, then we
	 won't have considered them clobbered from the call that
	 threw.  Kill them now.  */
      for (j = 0; ; ++j)
	{
	  unsigned r = EH_RETURN_DATA_REGNO (j);
	  if (r == INVALID_REGNUM)
	    break;
	  if (! call_used_regs[r])
	    {
	      emit_insn (gen_rtx_CLOBBER (VOIDmode, gen_rtx_REG (Pmode, r)));
	      clobbers_hard_regs = true;
	    }
	}

      if (clobbers_hard_regs)
	{
	  /* @@@ This is a kludge.  Not all machine descriptions define a
	     blockage insn, but we must not allow the code we just generated
	     to be reordered by scheduling.  So emit an ASM_INPUT to act as
	     blockage insn.  */
	  emit_insn (gen_rtx_ASM_INPUT (VOIDmode, ""));
	}

      /* Copy the unwinder's data out of the EH return registers into
	 the pseudos the rest of the function uses.  */
      emit_move_insn (cfun->eh->exc_ptr,
		      gen_rtx_REG (ptr_mode, EH_RETURN_DATA_REGNO (0)));
      emit_move_insn (cfun->eh->filter,
		      gen_rtx_REG (word_mode, EH_RETURN_DATA_REGNO (1)));

      seq = get_insns ();
      end_sequence ();

      emit_insn_before (seq, region->post_landing_pad);
    }
}
1865
df4b504c 1866\f
1867struct sjlj_lp_info
1868{
1869 int directly_reachable;
1870 int action_index;
1871 int dispatch_index;
1872 int call_site_index;
1873};
97ecdf3e 1874
/* Mark in LP_INFO every region whose landing pad some throwing insn
   can reach directly.  Returns true if at least one was found.  */

static bool
sjlj_find_directly_reachable_regions (struct sjlj_lp_info *lp_info)
{
  rtx insn;
  bool found_one = false;

  for (insn = get_insns (); insn ; insn = NEXT_INSN (insn))
    {
      struct eh_region *region;
      enum reachable_code rc;
      tree type_thrown;
      rtx note;

      if (! INSN_P (insn))
	continue;

      /* Only insns with a positive REG_EH_REGION note can throw into
	 a region of this function.  */
      note = find_reg_note (insn, REG_EH_REGION, NULL_RTX);
      if (!note || INTVAL (XEXP (note, 0)) <= 0)
	continue;

      region = cfun->eh->region_array[INTVAL (XEXP (note, 0))];

      /* A throw region tells us the exact type thrown; start the
	 search from its enclosing region.  */
      type_thrown = NULL_TREE;
      if (region->type == ERT_THROW)
	{
	  type_thrown = region->u.throw.type;
	  region = region->outer;
	}

      /* Find the first containing region that might handle the exception.
	 That's the landing pad to which we will transfer control.  */
      rc = RNL_NOT_CAUGHT;
      for (; region; region = region->outer)
	{
	  rc = reachable_next_level (region, type_thrown, 0);
	  if (rc != RNL_NOT_CAUGHT)
	    break;
	}
      if (rc == RNL_MAYBE_CAUGHT || rc == RNL_CAUGHT)
	{
	  lp_info[region->region_number].directly_reachable = 1;
	  found_one = true;
	}
    }

  return found_one;
}
23ceb7b2 1922
/* For sjlj exceptions, fill in the action, dispatch, and call-site
   indices in LP_INFO for every directly reachable region, pointing
   each at the single DISPATCH_LABEL landing pad.  */

static void
sjlj_assign_call_site_values (rtx dispatch_label, struct sjlj_lp_info *lp_info)
{
  htab_t ar_hash;
  int i, index;

  /* First task: build the action table.  */

  VARRAY_UCHAR_INIT (cfun->eh->action_record_data, 64, "action_record_data");
  ar_hash = htab_create (31, action_record_hash, action_record_eq, free);

  for (i = cfun->eh->last_region_number; i > 0; --i)
    if (lp_info[i].directly_reachable)
      {
	struct eh_region *r = cfun->eh->region_array[i];
	r->landing_pad = dispatch_label;
	lp_info[i].action_index = collect_one_action_chain (ar_hash, r);
	/* Any real action means the function needs its LSDA emitted.  */
	if (lp_info[i].action_index != -1)
	  cfun->uses_eh_lsda = 1;
      }

  htab_delete (ar_hash);

  /* Next: assign dispatch values.  In dwarf2 terms, this would be the
     landing pad label for the region.  For sjlj though, there is one
     common landing pad from which we dispatch to the post-landing pads.

     A region receives a dispatch index if it is directly reachable
     and requires in-function processing.  Regions that share post-landing
     pads may share dispatch indices.  */
  /* ??? Post-landing pad sharing doesn't actually happen at the moment
     (see build_post_landing_pads) so we don't bother checking for it.  */

  index = 0;
  for (i = cfun->eh->last_region_number; i > 0; --i)
    if (lp_info[i].directly_reachable)
      lp_info[i].dispatch_index = index++;

  /* Finally: assign call-site values.  In dwarf2 terms, this would be
     the region number assigned by convert_to_eh_region_ranges, but
     handles no-action and must-not-throw differently.  */

  call_site_base = 1;
  for (i = cfun->eh->last_region_number; i > 0; --i)
    if (lp_info[i].directly_reachable)
      {
	int action = lp_info[i].action_index;

	/* Map must-not-throw to otherwise unused call-site index 0.  */
	if (action == -2)
	  index = 0;
	/* Map no-action to otherwise unused call-site index -1.  */
	else if (action == -1)
	  index = -1;
	/* Otherwise, look it up in the table.  */
	else
	  index = add_call_site (GEN_INT (lp_info[i].dispatch_index), action);

	lp_info[i].call_site_index = index;
      }
}
8591d03a 1984
/* A subroutine of sjlj_build_landing_pads.  Before every insn that can
   throw, emit a store of its call-site value into the function context
   record, so the runtime dispatcher knows which region raised.  */

static void
sjlj_mark_call_sites (struct sjlj_lp_info *lp_info)
{
  /* Last value stored on this path; lets us elide redundant stores
     within an extended basic block.  -2 differs from every real
     call-site index (which are >= -1).  */
  int last_call_site = -2;
  rtx insn, mem;

  for (insn = get_insns (); insn ; insn = NEXT_INSN (insn))
    {
      struct eh_region *region;
      int this_call_site;
      rtx note, before, p;

      /* Reset value tracking at extended basic block boundaries.  */
      if (GET_CODE (insn) == CODE_LABEL)
	last_call_site = -2;

      if (! INSN_P (insn))
	continue;

      note = find_reg_note (insn, REG_EH_REGION, NULL_RTX);
      if (!note)
	{
	  /* Calls (and trapping insns) without notes are outside any
	     exception handling region in this function.  Mark them as
	     no action.  */
	  if (GET_CODE (insn) == CALL_INSN
	      || (flag_non_call_exceptions
		  && may_trap_p (PATTERN (insn))))
	    this_call_site = -1;
	  else
	    continue;
	}
      else
	{
	  /* Calls that are known to not throw need not be marked.  */
	  if (INTVAL (XEXP (note, 0)) <= 0)
	    continue;

	  region = cfun->eh->region_array[INTVAL (XEXP (note, 0))];
	  this_call_site = lp_info[region->region_number].call_site_index;
	}

      if (this_call_site == last_call_site)
	continue;

      /* Don't separate a call from its argument loads.  */
      before = insn;
      if (GET_CODE (insn) == CALL_INSN)
	before = find_first_parameter_load (insn, NULL_RTX);

      /* Emit the store into the call_site field of the context.  */
      start_sequence ();
      mem = adjust_address (cfun->eh->sjlj_fc, TYPE_MODE (integer_type_node),
			    sjlj_fc_call_site_ofs);
      emit_move_insn (mem, GEN_INT (this_call_site));
      p = get_insns ();
      end_sequence ();

      emit_insn_before (p, before);
      last_call_site = this_call_site;
    }
}
97ecdf3e 2046
df4b504c 2047/* Construct the SjLj_Function_Context. */
2048
static void
sjlj_emit_function_enter (rtx dispatch_label)
{
  rtx fn_begin, fc, mem, seq;

  fc = cfun->eh->sjlj_fc;

  start_sequence ();

  /* We're storing this libcall's address into memory instead of
     calling it directly.  Thus, we must call assemble_external_libcall
     here, as we can not depend on emit_library_call to do it for us.  */
  assemble_external_libcall (eh_personality_libfunc);
  mem = adjust_address (fc, Pmode, sjlj_fc_personality_ofs);
  emit_move_insn (mem, eh_personality_libfunc);

  /* Store the LSDA address, or null when this function needs no
     language-specific data.  */
  mem = adjust_address (fc, Pmode, sjlj_fc_lsda_ofs);
  if (cfun->uses_eh_lsda)
    {
      char buf[20];
      rtx sym;

      ASM_GENERATE_INTERNAL_LABEL (buf, "LLSDA", current_function_funcdef_no);
      sym = gen_rtx_SYMBOL_REF (Pmode, ggc_strdup (buf));
      SYMBOL_REF_FLAGS (sym) = SYMBOL_FLAG_LOCAL;
      emit_move_insn (mem, sym);
    }
  else
    emit_move_insn (mem, const0_rtx);

#ifdef DONT_USE_BUILTIN_SETJMP
  {
    rtx x, note;
    x = emit_library_call_value (setjmp_libfunc, NULL_RTX, LCT_RETURNS_TWICE,
				 TYPE_MODE (integer_type_node), 1,
				 plus_constant (XEXP (fc, 0),
						sjlj_fc_jbuf_ofs), Pmode);

    /* A zero return is the ordinary (non-exceptional) path; record
       that expectation for the branch predictor.  */
    note = emit_note (NOTE_INSN_EXPECTED_VALUE);
    NOTE_EXPECTED_VALUE (note) = gen_rtx_EQ (VOIDmode, x, const0_rtx);

    emit_cmp_and_jump_insns (x, const0_rtx, NE, 0,
			     TYPE_MODE (integer_type_node), 0, dispatch_label);
  }
#else
  expand_builtin_setjmp_setup (plus_constant (XEXP (fc, 0), sjlj_fc_jbuf_ofs),
			       dispatch_label);
#endif

  /* Register this context on the sjlj unwinder's chain of frames.  */
  emit_library_call (unwind_sjlj_register_libfunc, LCT_NORMAL, VOIDmode,
		     1, XEXP (fc, 0), Pmode);

  seq = get_insns ();
  end_sequence ();

  /* ??? Instead of doing this at the beginning of the function,
     do this in a block that is at loop level 0 and dominates all
     can_throw_internal instructions.  */

  for (fn_begin = get_insns (); ; fn_begin = NEXT_INSN (fn_begin))
    if (GET_CODE (fn_begin) == NOTE
	&& NOTE_LINE_NUMBER (fn_begin) == NOTE_INSN_FUNCTION_BEG)
      break;
  emit_insn_after (seq, fn_begin);
}
2114
df4b504c 2115/* Call back from expand_function_end to know where we should put
2116 the call to unwind_sjlj_unregister_libfunc if needed. */
97bb6c17 2117
void
sjlj_emit_function_exit_after (rtx after)
{
  /* Remember the insn after which sjlj_emit_function_exit should
     place the unregister call.  */
  cfun->eh->sjlj_exit_after = after;
}
97ecdf3e 2123
/* Emit the call that unlinks this function's context from the sjlj
   unwinder chain, at the point previously recorded by
   sjlj_emit_function_exit_after.  */

static void
sjlj_emit_function_exit (void)
{
  rtx seq;

  start_sequence ();

  emit_library_call (unwind_sjlj_unregister_libfunc, LCT_NORMAL, VOIDmode,
		     1, XEXP (cfun->eh->sjlj_fc, 0), Pmode);

  seq = get_insns ();
  end_sequence ();

  /* ??? Really this can be done in any block at loop level 0 that
     post-dominates all can_throw_internal instructions.  This is
     the last possible moment.  */

  emit_insn_after (seq, cfun->eh->sjlj_exit_after);
}
2143
/* Emit the common landing pad: receive control at DISPATCH_LABEL,
   reload the dispatch index, exception pointer and filter value from
   the function context, then branch to the post-landing pad of the
   region whose dispatch index (from LP_INFO) matches.  */

static void
sjlj_emit_dispatch_table (rtx dispatch_label, struct sjlj_lp_info *lp_info)
{
  int i, first_reachable;
  rtx mem, dispatch, seq, fc;

  fc = cfun->eh->sjlj_fc;

  start_sequence ();

  emit_label (dispatch_label);

#ifndef DONT_USE_BUILTIN_SETJMP
  expand_builtin_setjmp_receiver (dispatch_label);
#endif

  /* Load up dispatch index, exc_ptr and filter values from the
     function context.  */
  mem = adjust_address (fc, TYPE_MODE (integer_type_node),
			sjlj_fc_call_site_ofs);
  dispatch = copy_to_reg (mem);

  mem = adjust_address (fc, word_mode, sjlj_fc_data_ofs);
  if (word_mode != ptr_mode)
    {
      /* The context stores the exception pointer as a full word;
	 narrow it back to pointer mode.  */
#ifdef POINTERS_EXTEND_UNSIGNED
      mem = convert_memory_address (ptr_mode, mem);
#else
      mem = convert_to_mode (ptr_mode, mem, 0);
#endif
    }
  emit_move_insn (cfun->eh->exc_ptr, mem);

  mem = adjust_address (fc, word_mode, sjlj_fc_data_ofs + UNITS_PER_WORD);
  emit_move_insn (cfun->eh->filter, mem);

  /* Jump to one of the directly reachable regions.  */
  /* ??? This really ought to be using a switch statement.  */

  first_reachable = 0;
  for (i = cfun->eh->last_region_number; i > 0; --i)
    {
      if (! lp_info[i].directly_reachable)
	continue;

      /* The first reachable region needs no compare-and-branch;
	 control falls through to it when nothing else matches.  */
      if (! first_reachable)
	{
	  first_reachable = i;
	  continue;
	}

      emit_cmp_and_jump_insns (dispatch, GEN_INT (lp_info[i].dispatch_index),
			       EQ, NULL_RTX, TYPE_MODE (integer_type_node), 0,
			       cfun->eh->region_array[i]->post_landing_pad);
    }

  seq = get_insns ();
  end_sequence ();

  emit_insn_before (seq, (cfun->eh->region_array[first_reachable]
			  ->post_landing_pad));
}
2206
df4b504c 2207static void
35cb5232 2208sjlj_build_landing_pads (void)
fbba5463 2209{
df4b504c 2210 struct sjlj_lp_info *lp_info;
fbba5463 2211
f0af5a88 2212 lp_info = xcalloc (cfun->eh->last_region_number + 1,
2213 sizeof (struct sjlj_lp_info));
fbba5463 2214
df4b504c 2215 if (sjlj_find_directly_reachable_regions (lp_info))
2216 {
2217 rtx dispatch_label = gen_label_rtx ();
fbba5463 2218
df4b504c 2219 cfun->eh->sjlj_fc
2220 = assign_stack_local (TYPE_MODE (sjlj_fc_type_node),
2221 int_size_in_bytes (sjlj_fc_type_node),
2222 TYPE_ALIGN (sjlj_fc_type_node));
97ecdf3e 2223
df4b504c 2224 sjlj_assign_call_site_values (dispatch_label, lp_info);
2225 sjlj_mark_call_sites (lp_info);
173f0bec 2226
df4b504c 2227 sjlj_emit_function_enter (dispatch_label);
2228 sjlj_emit_dispatch_table (dispatch_label, lp_info);
2229 sjlj_emit_function_exit ();
2230 }
173f0bec 2231
df4b504c 2232 free (lp_info);
97ecdf3e 2233}
fbba5463 2234
/* Second phase of exception generation: with the region tree complete
   and type information exploited, emit the landing pads and rebuild
   the CFG to reflect the new exception control flow.  */

void
finish_eh_generation (void)
{
  /* Nothing to do if no regions created.  */
  if (cfun->eh->region_tree == NULL)
    return;

  /* The object here is to provide find_basic_blocks with detailed
     information (via reachable_handlers) on how exception control
     flows within the function.  In this first pass, we can include
     type information garnered from ERT_THROW and ERT_ALLOWED_EXCEPTIONS
     regions, and hope that it will be useful in deleting unreachable
     handlers.  Subsequently, we will generate landing pads which will
     connect many of the handlers, and then type information will not
     be effective.  Still, this is a win over previous implementations.  */

  cleanup_cfg (CLEANUP_PRE_LOOP | CLEANUP_NO_INSN_DEL);

  /* These registers are used by the landing pads.  Make sure they
     have been generated.  */
  get_exception_pointer (cfun);
  get_exception_filter (cfun);

  /* Construct the landing pads.  */

  assign_filter_values ();
  build_post_landing_pads ();
  connect_post_landing_pads ();
  if (USING_SJLJ_EXCEPTIONS)
    sjlj_build_landing_pads ();
  else
    dw2_build_landing_pads ();

  cfun->eh->built_landing_pads = 1;

  /* We've totally changed the CFG.  Start over.  */
  find_exception_handler_labels ();
  rebuild_jump_labels (get_insns ());
  find_basic_blocks (get_insns (), max_reg_num (), 0);
  cleanup_cfg (CLEANUP_PRE_LOOP | CLEANUP_NO_INSN_DEL);
}
97ecdf3e 2276\f
8f8dcce4 2277static hashval_t
35cb5232 2278ehl_hash (const void *pentry)
8f8dcce4 2279{
2280 struct ehl_map_entry *entry = (struct ehl_map_entry *) pentry;
2281
2282 /* 2^32 * ((sqrt(5) - 1) / 2) */
2283 const hashval_t scaled_golden_ratio = 0x9e3779b9;
2284 return CODE_LABEL_NUMBER (entry->label) * scaled_golden_ratio;
2285}
2286
2287static int
35cb5232 2288ehl_eq (const void *pentry, const void *pdata)
8f8dcce4 2289{
2290 struct ehl_map_entry *entry = (struct ehl_map_entry *) pentry;
2291 struct ehl_map_entry *data = (struct ehl_map_entry *) pdata;
2292
2293 return entry->label == data->label;
2294}
2295
df4b504c 2296/* This section handles removing dead code for flow. */
732992fa 2297
8f8dcce4 2298/* Remove LABEL from exception_handler_label_map. */
732992fa 2299
df4b504c 2300static void
35cb5232 2301remove_exception_handler_label (rtx label)
732992fa 2302{
8f8dcce4 2303 struct ehl_map_entry **slot, tmp;
30618d5e 2304
8f8dcce4 2305 /* If exception_handler_label_map was not built yet,
f491db07 2306 there is nothing to do. */
1f3233d1 2307 if (cfun->eh->exception_handler_label_map == NULL)
f491db07 2308 return;
2309
8f8dcce4 2310 tmp.label = label;
2311 slot = (struct ehl_map_entry **)
1f3233d1 2312 htab_find_slot (cfun->eh->exception_handler_label_map, &tmp, NO_INSERT);
8f8dcce4 2313 if (! slot)
2314 abort ();
732992fa 2315
1f3233d1 2316 htab_clear_slot (cfun->eh->exception_handler_label_map, (void **) slot);
732992fa 2317}
2318
df4b504c 2319/* Splice REGION from the region tree etc. */
97bb6c17 2320
static void
remove_eh_handler (struct eh_region *region)
{
  struct eh_region **pp, **pp_start, *p, *outer, *inner;
  rtx lab;

  /* For the benefit of efficiently handling REG_EH_REGION notes,
     replace this region in the region array with its containing
     region.  Note that previous region deletions may result in
     multiple copies of this region in the array, so we have a
     list of alternate numbers by which we are known.  */

  outer = region->outer;
  cfun->eh->region_array[region->region_number] = outer;
  if (region->aka)
    {
      int i;
      EXECUTE_IF_SET_IN_BITMAP (region->aka, 0, i,
				{ cfun->eh->region_array[i] = outer; });
    }

  /* The outer region inherits all region numbers this one was known
     by, so later deletions keep the array consistent.  */
  if (outer)
    {
      if (!outer->aka)
	outer->aka = BITMAP_GGC_ALLOC ();
      if (region->aka)
	bitmap_a_or_b (outer->aka, outer->aka, region->aka);
      bitmap_set_bit (outer->aka, region->region_number);
    }

  /* Drop this region's label (landing pad after pads are built) from
     the exception handler label map.  */
  if (cfun->eh->built_landing_pads)
    lab = region->landing_pad;
  else
    lab = region->label;
  if (lab)
    remove_exception_handler_label (lab);

  /* Unlink REGION from its parent's (or the tree root's) peer list.  */
  if (outer)
    pp_start = &outer->inner;
  else
    pp_start = &cfun->eh->region_tree;
  for (pp = pp_start, p = *pp; p != region; pp = &p->next_peer, p = *pp)
    continue;
  *pp = region->next_peer;

  /* Splice REGION's children into its place: re-parent each of them
     to OUTER and prepend the whole sibling chain to the peer list.  */
  inner = region->inner;
  if (inner)
    {
      for (p = inner; p->next_peer ; p = p->next_peer)
	p->outer = outer;
      p->outer = outer;

      p->next_peer = *pp_start;
      *pp_start = inner;
    }

  /* A catch region must also be unlinked from the doubly-linked catch
     list of its controlling try region.  */
  if (region->type == ERT_CATCH)
    {
      struct eh_region *try, *next, *prev;

      /* The controlling ERT_TRY follows the run of ERT_CATCH peers.  */
      for (try = region->next_peer;
	   try->type == ERT_CATCH;
	   try = try->next_peer)
	continue;
      if (try->type != ERT_TRY)
	abort ();

      next = region->u.catch.next_catch;
      prev = region->u.catch.prev_catch;

      if (next)
	next->u.catch.prev_catch = prev;
      else
	try->u.try.last_catch = prev;
      if (prev)
	prev->u.catch.next_catch = next;
      else
	{
	  try->u.try.catch = next;
	  /* Removing the last catch makes the try region dead too.  */
	  if (! next)
	    remove_eh_handler (try);
	}
    }
}
2405
df4b504c 2406/* LABEL heads a basic block that is about to be deleted. If this
2407 label corresponds to an exception region, we may be able to
2408 delete the region. */
97ecdf3e 2409
void
maybe_remove_eh_handler (rtx label)
{
  struct ehl_map_entry **slot, tmp;
  struct eh_region *region;

  /* ??? After generating landing pads, it's not so simple to determine
     if the region data is completely unused.  One must examine the
     landing pad and the post landing pad, and whether an inner try block
     is referencing the catch handlers directly.  */
  if (cfun->eh->built_landing_pads)
    return;

  /* Look up the region, if any, recorded for LABEL.  */
  tmp.label = label;
  slot = (struct ehl_map_entry **)
    htab_find_slot (cfun->eh->exception_handler_label_map, &tmp, NO_INSERT);
  if (! slot)
    return;
  region = (*slot)->region;
  if (! region)
    return;

  /* Flow will want to remove MUST_NOT_THROW regions as unreachable
     because there is no path to the fallback call to terminate.
     But the region continues to affect call-site data until there
     are no more contained calls, which we don't see here.  */
  if (region->type == ERT_MUST_NOT_THROW)
    {
      /* Keep the region alive but detach it from the dead label.  */
      htab_clear_slot (cfun->eh->exception_handler_label_map, (void **) slot);
      region->label = NULL_RTX;
    }
  else
    remove_eh_handler (region);
}
2444
2445/* Invokes CALLBACK for every exception handler label. Only used by old
2446 loop hackery; should not be used by new code. */
2447
void
for_each_eh_label (void (*callback) (rtx))
{
  /* Pass the callback by address: htab_traverse forwards only a
     single void * of user data to the per-entry function.  */
  htab_traverse (cfun->eh->exception_handler_label_map, for_each_eh_label_1,
		 (void *) &callback);
}
2454
8f8dcce4 2455static int
35cb5232 2456for_each_eh_label_1 (void **pentry, void *data)
8f8dcce4 2457{
2458 struct ehl_map_entry *entry = *(struct ehl_map_entry **)pentry;
48b3d385 2459 void (*callback) (rtx) = *(void (**) (rtx)) data;
8f8dcce4 2460
2461 (*callback) (entry->label);
2462 return 1;
2463}
df4b504c 2464\f
2465/* This section describes CFG exception edges for flow. */
a7b0c170 2466
/* For communicating between calls to reachable_next_level.  */
struct reachable_info GTY(())
{
  /* Types already known to be caught by inner handlers; used to prune
     outer catch clauses that can no longer trigger.  */
  tree types_caught;
  /* List of the type lists from ERT_ALLOWED_EXCEPTIONS filters seen.  */
  tree types_allowed;
  /* INSN_LIST of handler (or landing pad) labels collected so far.  */
  rtx handlers;
};
a7b0c170 2474
df4b504c 2475/* A subroutine of reachable_next_level. Return true if TYPE, or a
2476 base class of TYPE, is in HANDLED. */
a7b0c170 2477
df4b504c 2478static int
35cb5232 2479check_handled (tree handled, tree type)
a7b0c170 2480{
df4b504c 2481 tree t;
2482
2483 /* We can check for exact matches without front-end help. */
2484 if (! lang_eh_type_covers)
d3a0267f 2485 {
df4b504c 2486 for (t = handled; t ; t = TREE_CHAIN (t))
2487 if (TREE_VALUE (t) == type)
2488 return 1;
2489 }
2490 else
2491 {
2492 for (t = handled; t ; t = TREE_CHAIN (t))
2493 if ((*lang_eh_type_covers) (TREE_VALUE (t), type))
2494 return 1;
d3a0267f 2495 }
df4b504c 2496
2497 return 0;
a7b0c170 2498}
2499
df4b504c 2500/* A subroutine of reachable_next_level. If we are collecting a list
2501 of handlers, add one. After landing pad generation, reference
2502 it instead of the handlers themselves. Further, the handlers are
1ed5443b 2503 all wired together, so by referencing one, we've got them all.
df4b504c 2504 Before landing pad generation we reference each handler individually.
2505
2506 LP_REGION contains the landing pad; REGION is the handler. */
a7b0c170 2507
2508static void
35cb5232 2509add_reachable_handler (struct reachable_info *info, struct eh_region *lp_region, struct eh_region *region)
a7b0c170 2510{
df4b504c 2511 if (! info)
2512 return;
2513
2514 if (cfun->eh->built_landing_pads)
a7b0c170 2515 {
df4b504c 2516 if (! info->handlers)
2517 info->handlers = alloc_INSN_LIST (lp_region->landing_pad, NULL_RTX);
a7b0c170 2518 }
df4b504c 2519 else
2520 info->handlers = alloc_INSN_LIST (region->label, info->handlers);
a7b0c170 2521}
2522
1ed5443b 2523/* Process one level of exception regions for reachability.
df4b504c 2524 If TYPE_THROWN is non-null, then it is the *exact* type being
2525 propagated. If INFO is non-null, then collect handler labels
2526 and caught/allowed type information between invocations. */
a7b0c170 2527
static enum reachable_code
reachable_next_level (struct eh_region *region, tree type_thrown,
		      struct reachable_info *info)
{
  switch (region->type)
    {
    case ERT_CLEANUP:
      /* Before landing-pad generation, we model control flow
	 directly to the individual handlers.  In this way we can
	 see that catch handler types may shadow one another.  */
      add_reachable_handler (info, region, region);
      return RNL_MAYBE_CAUGHT;

    case ERT_TRY:
      {
	struct eh_region *c;
	enum reachable_code ret = RNL_NOT_CAUGHT;

	/* Consider each catch clause of this try in order.  */
	for (c = region->u.try.catch; c ; c = c->u.catch.next_catch)
	  {
	    /* A catch-all handler ends the search.  */
	    if (c->u.catch.type_list == NULL)
	      {
		add_reachable_handler (info, region, c);
		return RNL_CAUGHT;
	      }

	    if (type_thrown)
	      {
		/* If we have at least one type match, end the search.  */
		tree tp_node = c->u.catch.type_list;

		for (; tp_node; tp_node = TREE_CHAIN (tp_node))
		  {
		    tree type = TREE_VALUE (tp_node);

		    if (type == type_thrown
			|| (lang_eh_type_covers
			    && (*lang_eh_type_covers) (type, type_thrown)))
		      {
			add_reachable_handler (info, region, c);
			return RNL_CAUGHT;
		      }
		  }

		/* If we have definitive information of a match failure,
		   the catch won't trigger.  */
		if (lang_eh_type_covers)
		  return RNL_NOT_CAUGHT;
	      }

	    /* At this point, we either don't know what type is thrown or
	       don't have front-end assistance to help deciding if it is
	       covered by one of the types in the list for this region.

	       We'd then like to add this region to the list of reachable
	       handlers since it is indeed potentially reachable based on the
	       information we have.

	       Actually, this handler is for sure not reachable if all the
	       types it matches have already been caught.  That is, it is only
	       potentially reachable if at least one of the types it catches
	       has not been previously caught.  */

	    if (! info)
	      ret = RNL_MAYBE_CAUGHT;
	    else
	      {
		tree tp_node = c->u.catch.type_list;
		bool maybe_reachable = false;

		/* Compute the potential reachability of this handler and
		   update the list of types caught at the same time.  */
		for (; tp_node; tp_node = TREE_CHAIN (tp_node))
		  {
		    tree type = TREE_VALUE (tp_node);

		    if (! check_handled (info->types_caught, type))
		      {
			info->types_caught
			  = tree_cons (NULL, type, info->types_caught);

			maybe_reachable = true;
		      }
		  }

		if (maybe_reachable)
		  {
		    add_reachable_handler (info, region, c);

		    /* ??? If the catch type is a base class of every allowed
		       type, then we know we can stop the search.  */
		    ret = RNL_MAYBE_CAUGHT;
		  }
	      }
	  }

	return ret;
      }

    case ERT_ALLOWED_EXCEPTIONS:
      /* An empty list of types definitely ends the search.  */
      if (region->u.allowed.type_list == NULL_TREE)
	{
	  add_reachable_handler (info, region, region);
	  return RNL_CAUGHT;
	}

      /* Collect a list of lists of allowed types for use in detecting
	 when a catch may be transformed into a catch-all.  */
      if (info)
	info->types_allowed = tree_cons (NULL_TREE,
					 region->u.allowed.type_list,
					 info->types_allowed);

      /* If we have definitive information about the type hierarchy,
	 then we can tell if the thrown type will pass through the
	 filter.  */
      if (type_thrown && lang_eh_type_covers)
	{
	  if (check_handled (region->u.allowed.type_list, type_thrown))
	    return RNL_NOT_CAUGHT;
	  else
	    {
	      add_reachable_handler (info, region, region);
	      return RNL_CAUGHT;
	    }
	}

      add_reachable_handler (info, region, region);
      return RNL_MAYBE_CAUGHT;

    case ERT_CATCH:
      /* Catch regions are handled by their controlling try region.  */
      return RNL_NOT_CAUGHT;

    case ERT_MUST_NOT_THROW:
      /* Here we end our search, since no exceptions may propagate.
	 If we've touched down at some landing pad previous, then the
	 explicit function call we generated may be used.  Otherwise
	 the call is made by the runtime.  */
      if (info && info->handlers)
	{
	  add_reachable_handler (info, region, region);
	  return RNL_CAUGHT;
	}
      else
	return RNL_BLOCKED;

    case ERT_THROW:
    case ERT_FIXUP:
    case ERT_UNKNOWN:
      /* Shouldn't see these here.  */
      break;
    }

  abort ();
}
97ecdf3e 2686
df4b504c 2687/* Retrieve a list of labels of exception handlers which can be
2688 reached by a given insn. */
97ecdf3e 2689
rtx
reachable_handlers (rtx insn)
{
  struct reachable_info info;
  struct eh_region *region;
  tree type_thrown;
  int region_number;

  /* A RESX pattern names its region directly; any other insn carries
     a REG_EH_REGION note or cannot throw at all.  */
  if (GET_CODE (insn) == JUMP_INSN
      && GET_CODE (PATTERN (insn)) == RESX)
    region_number = XINT (PATTERN (insn), 0);
  else
    {
      rtx note = find_reg_note (insn, REG_EH_REGION, NULL_RTX);
      if (!note || INTVAL (XEXP (note, 0)) <= 0)
	return NULL;
      region_number = INTVAL (XEXP (note, 0));
    }

  memset (&info, 0, sizeof (info));

  region = cfun->eh->region_array[region_number];

  type_thrown = NULL_TREE;
  if (GET_CODE (insn) == JUMP_INSN
      && GET_CODE (PATTERN (insn)) == RESX)
    {
      /* A RESX leaves a region instead of entering it.  Thus the
	 region itself may have been deleted out from under us.  */
      if (region == NULL)
	return NULL;
      region = region->outer;
    }
  else if (region->type == ERT_THROW)
    {
      /* A THROW region records the exact type thrown; start the
	 search from its containing region with that knowledge.  */
      type_thrown = region->u.throw.type;
      region = region->outer;
    }

  while (region)
    {
      if (reachable_next_level (region, type_thrown, &info) >= RNL_CAUGHT)
	break;
      /* If we have processed one cleanup, there is no point in
	 processing any more of them.  Each cleanup will have an edge
	 to the next outer cleanup region, so the flow graph will be
	 accurate.  */
      if (region->type == ERT_CLEANUP)
	region = region->u.cleanup.prev_try;
      else
	region = region->outer;
    }

  return info.handlers;
}
2745
df4b504c 2746/* Determine if the given INSN can throw an exception that is caught
2747 within the function. */
97ecdf3e 2748
bool
can_throw_internal (rtx insn)
{
  struct eh_region *region;
  tree type_thrown;
  rtx note;

  if (! INSN_P (insn))
    return false;

  /* For a delay-slot SEQUENCE, the EH notes live on the first insn.  */
  if (GET_CODE (insn) == INSN
      && GET_CODE (PATTERN (insn)) == SEQUENCE)
    insn = XVECEXP (PATTERN (insn), 0, 0);

  /* A CALL_PLACEHOLDER can throw internally if any insn in any of its
     three alternative sequences can.  */
  if (GET_CODE (insn) == CALL_INSN
      && GET_CODE (PATTERN (insn)) == CALL_PLACEHOLDER)
    {
      int i;
      for (i = 0; i < 3; ++i)
	{
	  rtx sub = XEXP (PATTERN (insn), i);
	  for (; sub ; sub = NEXT_INSN (sub))
	    if (can_throw_internal (sub))
	      return true;
	}
      return false;
    }

  /* Every insn that might throw has an EH_REGION note.  */
  note = find_reg_note (insn, REG_EH_REGION, NULL_RTX);
  if (!note || INTVAL (XEXP (note, 0)) <= 0)
    return false;

  region = cfun->eh->region_array[INTVAL (XEXP (note, 0))];

  type_thrown = NULL_TREE;
  if (region->type == ERT_THROW)
    {
      type_thrown = region->u.throw.type;
      region = region->outer;
    }

  /* If this exception is ignored by each and every containing region,
     then control passes straight out.  The runtime may handle some
     regions, which also do not require processing internally.  */
  for (; region; region = region->outer)
    {
      enum reachable_code how = reachable_next_level (region, type_thrown, 0);
      if (how == RNL_BLOCKED)
	return false;
      if (how != RNL_NOT_CAUGHT)
	return true;
    }

  return false;
}
97ecdf3e 2805
df4b504c 2806/* Determine if the given INSN can throw an exception that is
2807 visible outside the function. */
97ecdf3e 2808
bool
can_throw_external (rtx insn)
{
  struct eh_region *region;
  tree type_thrown;
  rtx note;

  if (! INSN_P (insn))
    return false;

  /* For a delay-slot SEQUENCE, the EH notes live on the first insn.  */
  if (GET_CODE (insn) == INSN
      && GET_CODE (PATTERN (insn)) == SEQUENCE)
    insn = XVECEXP (PATTERN (insn), 0, 0);

  /* A CALL_PLACEHOLDER can throw externally if any insn in any of its
     three alternative sequences can.  */
  if (GET_CODE (insn) == CALL_INSN
      && GET_CODE (PATTERN (insn)) == CALL_PLACEHOLDER)
    {
      int i;
      for (i = 0; i < 3; ++i)
	{
	  rtx sub = XEXP (PATTERN (insn), i);
	  for (; sub ; sub = NEXT_INSN (sub))
	    if (can_throw_external (sub))
	      return true;
	}
      return false;
    }

  note = find_reg_note (insn, REG_EH_REGION, NULL_RTX);
  if (!note)
    {
      /* Calls (and trapping insns) without notes are outside any
	 exception handling region in this function.  We have to
	 assume it might throw.  Given that the front end and middle
	 ends mark known NOTHROW functions, this isn't so wildly
	 inaccurate.  */
      return (GET_CODE (insn) == CALL_INSN
	      || (flag_non_call_exceptions
		  && may_trap_p (PATTERN (insn))));
    }
  /* A non-positive region number marks an insn known not to throw.  */
  if (INTVAL (XEXP (note, 0)) <= 0)
    return false;

  region = cfun->eh->region_array[INTVAL (XEXP (note, 0))];

  type_thrown = NULL_TREE;
  if (region->type == ERT_THROW)
    {
      type_thrown = region->u.throw.type;
      region = region->outer;
    }

  /* If the exception is caught or blocked by any containing region,
     then it is not seen by any calling function.  */
  for (; region ; region = region->outer)
    if (reachable_next_level (region, type_thrown, NULL) >= RNL_CAUGHT)
      return false;

  return true;
}
da5038a3 2869
04396483 2870/* Set current_function_nothrow and cfun->all_throwers_are_sibcalls. */
ed74c60e 2871
04396483 2872void
35cb5232 2873set_nothrow_function_flags (void)
da5038a3 2874{
2875 rtx insn;
35cb5232 2876
04396483 2877 current_function_nothrow = 1;
da5038a3 2878
04396483 2879 /* Assume cfun->all_throwers_are_sibcalls until we encounter
2880 something that can throw an exception. We specifically exempt
2881 CALL_INSNs that are SIBLING_CALL_P, as these are really jumps,
2882 and can't throw. Most CALL_INSNs are not SIBLING_CALL_P, so this
2883 is optimistic. */
da5038a3 2884
04396483 2885 cfun->all_throwers_are_sibcalls = 1;
2886
2887 if (! flag_exceptions)
2888 return;
35cb5232 2889
da5038a3 2890 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
df4b504c 2891 if (can_throw_external (insn))
04396483 2892 {
2893 current_function_nothrow = 0;
2894
2895 if (GET_CODE (insn) != CALL_INSN || !SIBLING_CALL_P (insn))
2896 {
2897 cfun->all_throwers_are_sibcalls = 0;
2898 return;
2899 }
2900 }
2901
df4b504c 2902 for (insn = current_function_epilogue_delay_list; insn;
2903 insn = XEXP (insn, 1))
04396483 2904 if (can_throw_external (insn))
2905 {
2906 current_function_nothrow = 0;
b9cf3f63 2907
04396483 2908 if (GET_CODE (insn) != CALL_INSN || !SIBLING_CALL_P (insn))
2909 {
2910 cfun->all_throwers_are_sibcalls = 0;
2911 return;
2912 }
2913 }
da5038a3 2914}
df4b504c 2915
447a9eb9 2916\f
df4b504c 2917/* Various hooks for unwind library. */
447a9eb9 2918
/* Do any necessary initialization to access arbitrary stack frames.
   On the SPARC, this means flushing the register windows.  */

void
expand_builtin_unwind_init (void)
{
  /* Set this so all the registers get saved in our frame; we need to be
     able to copy the saved values for any registers from frames we unwind.  */
  current_function_has_nonlocal_label = 1;

  /* Give the target a chance to emit any frame-setup code it needs
     (e.g. flushing register windows).  */
#ifdef SETUP_FRAME_ADDRESSES
  SETUP_FRAME_ADDRESSES ();
#endif
}
2933
/* Expand __builtin_eh_return_data_regno: map the constant argument in
   ARGLIST through EH_RETURN_DATA_REGNO and the debug-register numbering,
   returning the result as a CONST_INT.  Returns constm1_rtx on any
   invalid input.  */

rtx
expand_builtin_eh_return_data_regno (tree arglist)
{
  tree which = TREE_VALUE (arglist);
  unsigned HOST_WIDE_INT iwhich;

  /* The argument must be a compile-time constant.  */
  if (TREE_CODE (which) != INTEGER_CST)
    {
      error ("argument of `__builtin_eh_return_regno' must be constant");
      return constm1_rtx;
    }

  iwhich = tree_low_cst (which, 1);
  iwhich = EH_RETURN_DATA_REGNO (iwhich);
  if (iwhich == INVALID_REGNUM)
    return constm1_rtx;

  /* Translate the hard register number into the numbering used by the
     unwinder's debug info.  */
#ifdef DWARF_FRAME_REGNUM
  iwhich = DWARF_FRAME_REGNUM (iwhich);
#else
  iwhich = DBX_REGISTER_NUMBER (iwhich);
#endif

  return GEN_INT (iwhich);
}
2959
/* Given a value extracted from the return address register or stack slot,
   return the actual address encoded in that value.  */

rtx
expand_builtin_extract_return_addr (tree addr_tree)
{
  rtx addr = expand_expr (addr_tree, NULL_RTX, Pmode, 0);

  /* Bring the value into Pmode if it was expanded in some other mode.  */
  if (GET_MODE (addr) != Pmode
      && GET_MODE (addr) != VOIDmode)
    {
#ifdef POINTERS_EXTEND_UNSIGNED
      addr = convert_memory_address (Pmode, addr);
#else
      addr = convert_to_mode (Pmode, addr, 0);
#endif
    }

  /* First mask out any unwanted bits.  */
#ifdef MASK_RETURN_ADDR
  expand_and (Pmode, addr, MASK_RETURN_ADDR, addr);
#endif

  /* Then adjust to find the real return address.  */
#if defined (RETURN_ADDR_OFFSET)
  addr = plus_constant (addr, RETURN_ADDR_OFFSET);
#endif

  return addr;
}
2990
/* Given an actual address in addr_tree, do any necessary encoding
   and return the value to be stored in the return address register or
   stack slot so the epilogue will return to that address.  This is the
   inverse of expand_builtin_extract_return_addr.  */

rtx
expand_builtin_frob_return_addr (tree addr_tree)
{
  rtx addr = expand_expr (addr_tree, NULL_RTX, ptr_mode, 0);

  addr = convert_memory_address (Pmode, addr);

  /* Undo the offset that extract_return_addr applied.  */
#ifdef RETURN_ADDR_OFFSET
  addr = force_reg (Pmode, addr);
  addr = plus_constant (addr, -RETURN_ADDR_OFFSET);
#endif

  return addr;
}
3009
/* Set up the epilogue with the magic bits we'll need to return to the
   exception handler.  Stores the stack adjustment and handler address
   in per-function pseudos (created on first use) and jumps to the
   shared ehr_label, which expand_eh_return later materializes.  */

void
expand_builtin_eh_return (tree stackadj_tree ATTRIBUTE_UNUSED,
			  tree handler_tree)
{
  rtx tmp;

#ifdef EH_RETURN_STACKADJ_RTX
  tmp = expand_expr (stackadj_tree, cfun->eh->ehr_stackadj, VOIDmode, 0);
  tmp = convert_memory_address (Pmode, tmp);
  /* First call creates the pseudo; later calls reuse it.  */
  if (!cfun->eh->ehr_stackadj)
    cfun->eh->ehr_stackadj = copy_to_reg (tmp);
  else if (tmp != cfun->eh->ehr_stackadj)
    emit_move_insn (cfun->eh->ehr_stackadj, tmp);
#endif

  tmp = expand_expr (handler_tree, cfun->eh->ehr_handler, VOIDmode, 0);
  tmp = convert_memory_address (Pmode, tmp);
  if (!cfun->eh->ehr_handler)
    cfun->eh->ehr_handler = copy_to_reg (tmp);
  else if (tmp != cfun->eh->ehr_handler)
    emit_move_insn (cfun->eh->ehr_handler, tmp);

  /* All __builtin_eh_return sites funnel through a single label.  */
  if (!cfun->eh->ehr_label)
    cfun->eh->ehr_label = gen_label_rtx ();
  emit_jump (cfun->eh->ehr_label);
}
3039
/* Emit the epilogue code that transfers control to the exception
   handler recorded by expand_builtin_eh_return.  A no-op if the
   function never called __builtin_eh_return.  */

void
expand_eh_return (void)
{
  rtx around_label;

  if (! cfun->eh->ehr_label)
    return;

  current_function_calls_eh_return = 1;

  /* Zero the stack adjustment for the normal-return path.  */
#ifdef EH_RETURN_STACKADJ_RTX
  emit_move_insn (EH_RETURN_STACKADJ_RTX, const0_rtx);
#endif

  /* The normal path jumps around the eh-return sequence below.  */
  around_label = gen_label_rtx ();
  emit_jump (around_label);

  emit_label (cfun->eh->ehr_label);
  clobber_return_register ();

#ifdef EH_RETURN_STACKADJ_RTX
  emit_move_insn (EH_RETURN_STACKADJ_RTX, cfun->eh->ehr_stackadj);
#endif

  /* Prefer a dedicated eh_return pattern; otherwise fall back to a
     target-defined handler slot, or report lack of support.  */
#ifdef HAVE_eh_return
  if (HAVE_eh_return)
    emit_insn (gen_eh_return (cfun->eh->ehr_handler));
  else
#endif
    {
#ifdef EH_RETURN_HANDLER_RTX
      emit_move_insn (EH_RETURN_HANDLER_RTX, cfun->eh->ehr_handler);
#else
      error ("__builtin_eh_return not supported on this target");
#endif
    }

  emit_label (around_label);
}
26093bf4 3079
3080/* Convert a ptr_mode address ADDR_TREE to a Pmode address controlled by
3081 POINTERS_EXTEND_UNSIGNED and return it. */
3082
3083rtx
3084expand_builtin_extend_pointer (tree addr_tree)
3085{
3086 rtx addr = expand_expr (addr_tree, NULL_RTX, ptr_mode, 0);
3087 int extend;
3088
3089#ifdef POINTERS_EXTEND_UNSIGNED
3090 extend = POINTERS_EXTEND_UNSIGNED;
3091#else
3092 /* The previous EH code did an unsigned extend by default, so we do this also
3093 for consistency. */
3094 extend = 1;
3095#endif
3096
3097 return convert_modes (word_mode, ptr_mode, addr, extend);
3098}
cac66fd5 3099\f
/* In the following functions, we represent entries in the action table
   as 1-based indices.  Special cases are:

	 0:	null action record, non-null landing pad; implies cleanups
	-1:	null action record, null landing pad; implies no action
	-2:	no call-site entry; implies must_not_throw
	-3:	we have yet to process outer regions

   Further, no special cases apply to the "next" field of the record.
   For next, 0 means end of list.  */

struct action_record
{
  int offset;	/* 1-based offset of this record in action_record_data.  */
  int filter;	/* Type filter value; 0 denotes a cleanup.  */
  int next;	/* 1-based index of the next record; 0 ends the list.  */
};
cac66fd5 3117
df4b504c 3118static int
35cb5232 3119action_record_eq (const void *pentry, const void *pdata)
df4b504c 3120{
3121 const struct action_record *entry = (const struct action_record *) pentry;
3122 const struct action_record *data = (const struct action_record *) pdata;
3123 return entry->filter == data->filter && entry->next == data->next;
3124}
cac66fd5 3125
df4b504c 3126static hashval_t
35cb5232 3127action_record_hash (const void *pentry)
df4b504c 3128{
3129 const struct action_record *entry = (const struct action_record *) pentry;
3130 return entry->next * 1009 + entry->filter;
3131}
cac66fd5 3132
/* Return the 1-based offset of the action record (FILTER, NEXT),
   creating it in AR_HASH and appending its encoding to
   cfun->eh->action_record_data if it does not already exist.  */

static int
add_action_record (htab_t ar_hash, int filter, int next)
{
  struct action_record **slot, *new, tmp;

  tmp.filter = filter;
  tmp.next = next;
  slot = (struct action_record **) htab_find_slot (ar_hash, &tmp, INSERT);

  /* Only emit a new record if this (filter, next) pair is unseen.  */
  if ((new = *slot) == NULL)
    {
      new = xmalloc (sizeof (*new));
      new->offset = VARRAY_ACTIVE_SIZE (cfun->eh->action_record_data) + 1;
      new->filter = filter;
      new->next = next;
      *slot = new;

      /* The filter value goes in untouched.  The link to the next
	 record is a "self-relative" byte offset, or zero to indicate
	 that there is no next record.  So convert the absolute 1 based
	 indices we've been carrying around into a displacement.  */

      push_sleb128 (&cfun->eh->action_record_data, filter);
      if (next)
	next -= VARRAY_ACTIVE_SIZE (cfun->eh->action_record_data) + 1;
      push_sleb128 (&cfun->eh->action_record_data, next);
    }

  return new->offset;
}
cac66fd5 3163
/* Compute the action-table entry for REGION and all of its containing
   regions, adding records to AR_HASH as needed.  Returns a 1-based
   action index, or one of the special values documented above
   (0 cleanups-only, -1 no action, -2 must_not_throw).  */

static int
collect_one_action_chain (htab_t ar_hash, struct eh_region *region)
{
  struct eh_region *c;
  int next;

  /* If we've reached the top of the region chain, then we have
     no actions, and require no landing pad.  */
  if (region == NULL)
    return -1;

  switch (region->type)
    {
    case ERT_CLEANUP:
      /* A cleanup adds a zero filter to the beginning of the chain, but
	 there are special cases to look out for.  If there are *only*
	 cleanups along a path, then it compresses to a zero action.
	 Further, if there are multiple cleanups along a path, we only
	 need to represent one of them, as that is enough to trigger
	 entry to the landing pad at runtime.  */
      next = collect_one_action_chain (ar_hash, region->outer);
      if (next <= 0)
	return 0;
      for (c = region->outer; c ; c = c->outer)
	if (c->type == ERT_CLEANUP)
	  return next;
      return add_action_record (ar_hash, 0, next);

    case ERT_TRY:
      /* Process the associated catch regions in reverse order.
	 If there's a catch-all handler, then we don't need to
	 search outer regions.  Use a magic -3 value to record
	 that we haven't done the outer search.  */
      next = -3;
      for (c = region->u.try.last_catch; c ; c = c->u.catch.prev_catch)
	{
	  if (c->u.catch.type_list == NULL)
	    {
	      /* Retrieve the filter from the head of the filter list
		 where we have stored it (see assign_filter_values).  */
	      int filter
		= TREE_INT_CST_LOW (TREE_VALUE (c->u.catch.filter_list));

	      next = add_action_record (ar_hash, filter, 0);
	    }
	  else
	    {
	      /* Once the outer search is done, trigger an action record for
		 each filter we have.  */
	      tree flt_node;

	      if (next == -3)
		{
		  next = collect_one_action_chain (ar_hash, region->outer);

		  /* If there is no next action, terminate the chain.  */
		  if (next == -1)
		    next = 0;
		  /* If all outer actions are cleanups or must_not_throw,
		     we'll have no action record for it, since we had wanted
		     to encode these states in the call-site record directly.
		     Add a cleanup action to the chain to catch these.  */
		  else if (next <= 0)
		    next = add_action_record (ar_hash, 0, 0);
		}

	      flt_node = c->u.catch.filter_list;
	      for (; flt_node; flt_node = TREE_CHAIN (flt_node))
		{
		  int filter = TREE_INT_CST_LOW (TREE_VALUE (flt_node));
		  next = add_action_record (ar_hash, filter, next);
		}
	    }
	}
      return next;

    case ERT_ALLOWED_EXCEPTIONS:
      /* An exception specification adds its filter to the
	 beginning of the chain.  */
      next = collect_one_action_chain (ar_hash, region->outer);
      return add_action_record (ar_hash, region->u.allowed.filter,
				next < 0 ? 0 : next);

    case ERT_MUST_NOT_THROW:
      /* A must-not-throw region with no inner handlers or cleanups
	 requires no call-site entry.  Note that this differs from
	 the no handler or cleanup case in that we do require an lsda
	 to be generated.  Return a magic -2 value to record this.  */
      return -2;

    case ERT_CATCH:
    case ERT_THROW:
      /* CATCH regions are handled in TRY above.  THROW regions are
	 for optimization information only and produce no output.  */
      return collect_one_action_chain (ar_hash, region->outer);

    default:
      abort ();
    }
}
3264
df4b504c 3265static int
35cb5232 3266add_call_site (rtx landing_pad, int action)
cac66fd5 3267{
df4b504c 3268 struct call_site_record *data = cfun->eh->call_site_data;
3269 int used = cfun->eh->call_site_data_used;
3270 int size = cfun->eh->call_site_data_size;
cac66fd5 3271
df4b504c 3272 if (used >= size)
3273 {
3274 size = (size ? size * 2 : 64);
f0af5a88 3275 data = ggc_realloc (data, sizeof (*data) * size);
df4b504c 3276 cfun->eh->call_site_data = data;
3277 cfun->eh->call_site_data_size = size;
3278 }
cac66fd5 3279
df4b504c 3280 data[used].landing_pad = landing_pad;
3281 data[used].action = action;
cac66fd5 3282
df4b504c 3283 cfun->eh->call_site_data_used = used + 1;
cac66fd5 3284
df4b504c 3285 return used + call_site_base;
cac66fd5 3286}
3287
/* Turn REG_EH_REGION notes back into NOTE_INSN_EH_REGION notes.
   The new note numbers will not refer to region numbers, but
   instead to call site entries.  Scans all insns in order, tracking
   the current action chain and landing pad, and emits begin/end notes
   only where the call-site info changes.  */

void
convert_to_eh_region_ranges (void)
{
  rtx insn, iter, note;
  htab_t ar_hash;
  int last_action = -3;		/* -3: no action seen yet.  */
  rtx last_action_insn = NULL_RTX;
  rtx last_landing_pad = NULL_RTX;
  rtx first_no_action_insn = NULL_RTX;
  int call_site = 0;

  if (USING_SJLJ_EXCEPTIONS || cfun->eh->region_tree == NULL)
    return;

  VARRAY_UCHAR_INIT (cfun->eh->action_record_data, 64, "action_record_data");

  ar_hash = htab_create (31, action_record_hash, action_record_eq, free);

  for (iter = get_insns (); iter ; iter = NEXT_INSN (iter))
    if (INSN_P (iter))
      {
	struct eh_region *region;
	int this_action;
	rtx this_landing_pad;

	insn = iter;
	/* For a delay-slot SEQUENCE, the EH note lives on the first insn.  */
	if (GET_CODE (insn) == INSN
	    && GET_CODE (PATTERN (insn)) == SEQUENCE)
	  insn = XVECEXP (PATTERN (insn), 0, 0);

	note = find_reg_note (insn, REG_EH_REGION, NULL_RTX);
	if (!note)
	  {
	    /* Without a note, only calls (or trapping insns under
	       -fnon-call-exceptions) can throw.  */
	    if (! (GET_CODE (insn) == CALL_INSN
		   || (flag_non_call_exceptions
		       && may_trap_p (PATTERN (insn)))))
	      continue;
	    this_action = -1;
	    region = NULL;
	  }
	else
	  {
	    if (INTVAL (XEXP (note, 0)) <= 0)
	      continue;
	    region = cfun->eh->region_array[INTVAL (XEXP (note, 0))];
	    this_action = collect_one_action_chain (ar_hash, region);
	  }

	/* Existence of catch handlers, or must-not-throw regions
	   implies that an lsda is needed (even if empty).  */
	if (this_action != -1)
	  cfun->uses_eh_lsda = 1;

	/* Delay creation of region notes for no-action regions
	   until we're sure that an lsda will be required.  */
	else if (last_action == -3)
	  {
	    first_no_action_insn = iter;
	    last_action = -1;
	  }

	/* Cleanups and handlers may share action chains but not
	   landing pads.  Collect the landing pad for this region.  */
	if (this_action >= 0)
	  {
	    struct eh_region *o;
	    for (o = region; ! o->landing_pad ; o = o->outer)
	      continue;
	    this_landing_pad = o->landing_pad;
	  }
	else
	  this_landing_pad = NULL_RTX;

	/* Differing actions or landing pads implies a change in call-site
	   info, which implies some EH_REGION note should be emitted.  */
	if (last_action != this_action
	    || last_landing_pad != this_landing_pad)
	  {
	    /* If we'd not seen a previous action (-3) or the previous
	       action was must-not-throw (-2), then we do not need an
	       end note.  */
	    if (last_action >= -1)
	      {
		/* If we delayed the creation of the begin, do it now.  */
		if (first_no_action_insn)
		  {
		    call_site = add_call_site (NULL_RTX, 0);
		    note = emit_note_before (NOTE_INSN_EH_REGION_BEG,
					     first_no_action_insn);
		    NOTE_EH_HANDLER (note) = call_site;
		    first_no_action_insn = NULL_RTX;
		  }

		note = emit_note_after (NOTE_INSN_EH_REGION_END,
					last_action_insn);
		NOTE_EH_HANDLER (note) = call_site;
	      }

	    /* If the new action is must-not-throw, then no region notes
	       are created.  */
	    if (this_action >= -1)
	      {
		call_site = add_call_site (this_landing_pad,
					   this_action < 0 ? 0 : this_action);
		note = emit_note_before (NOTE_INSN_EH_REGION_BEG, iter);
		NOTE_EH_HANDLER (note) = call_site;
	      }

	    last_action = this_action;
	    last_landing_pad = this_landing_pad;
	  }
	last_action_insn = iter;
      }

  /* Close the final open region, if any.  */
  if (last_action >= -1 && ! first_no_action_insn)
    {
      note = emit_note_after (NOTE_INSN_EH_REGION_END, last_action_insn);
      NOTE_EH_HANDLER (note) = call_site;
    }

  htab_delete (ar_hash);
}
da5038a3 3414
df4b504c 3415\f
3416static void
35cb5232 3417push_uleb128 (varray_type *data_area, unsigned int value)
df4b504c 3418{
3419 do
3420 {
3421 unsigned char byte = value & 0x7f;
3422 value >>= 7;
3423 if (value)
3424 byte |= 0x80;
3425 VARRAY_PUSH_UCHAR (*data_area, byte);
3426 }
3427 while (value);
3428}
da5038a3 3429
df4b504c 3430static void
35cb5232 3431push_sleb128 (varray_type *data_area, int value)
df4b504c 3432{
3433 unsigned char byte;
3434 int more;
da5038a3 3435
df4b504c 3436 do
da5038a3 3437 {
df4b504c 3438 byte = value & 0x7f;
3439 value >>= 7;
3440 more = ! ((value == 0 && (byte & 0x40) == 0)
3441 || (value == -1 && (byte & 0x40) != 0));
3442 if (more)
3443 byte |= 0x80;
3444 VARRAY_PUSH_UCHAR (*data_area, byte);
da5038a3 3445 }
df4b504c 3446 while (more);
3447}
da5038a3 3448
df4b504c 3449\f
df4b504c 3450#ifndef HAVE_AS_LEB128
3451static int
35cb5232 3452dw2_size_of_call_site_table (void)
da5038a3 3453{
df4b504c 3454 int n = cfun->eh->call_site_data_used;
3455 int size = n * (4 + 4 + 4);
3456 int i;
da5038a3 3457
df4b504c 3458 for (i = 0; i < n; ++i)
3459 {
3460 struct call_site_record *cs = &cfun->eh->call_site_data[i];
3461 size += size_of_uleb128 (cs->action);
3462 }
8b4f3d64 3463
df4b504c 3464 return size;
3465}
3466
3467static int
35cb5232 3468sjlj_size_of_call_site_table (void)
df4b504c 3469{
3470 int n = cfun->eh->call_site_data_used;
3471 int size = 0;
3472 int i;
cac66fd5 3473
df4b504c 3474 for (i = 0; i < n; ++i)
da5038a3 3475 {
df4b504c 3476 struct call_site_record *cs = &cfun->eh->call_site_data[i];
3477 size += size_of_uleb128 (INTVAL (cs->landing_pad));
3478 size += size_of_uleb128 (cs->action);
da5038a3 3479 }
df4b504c 3480
3481 return size;
3482}
3483#endif
3484
/* Emit the dwarf2 call-site table to the assembler output: for each
   record, the region start (relative to the function start), its
   length, the landing pad offset (0 if none), and the action index.  */

static void
dw2_output_call_site_table (void)
{
  const char *const function_start_lab
    = IDENTIFIER_POINTER (current_function_func_begin_label);
  int n = cfun->eh->call_site_data_used;
  int i;

  for (i = 0; i < n; ++i)
    {
      struct call_site_record *cs = &cfun->eh->call_site_data[i];
      char reg_start_lab[32];
      char reg_end_lab[32];
      char landing_pad_lab[32];

      ASM_GENERATE_INTERNAL_LABEL (reg_start_lab, "LEHB", call_site_base + i);
      ASM_GENERATE_INTERNAL_LABEL (reg_end_lab, "LEHE", call_site_base + i);

      if (cs->landing_pad)
	ASM_GENERATE_INTERNAL_LABEL (landing_pad_lab, "L",
				     CODE_LABEL_NUMBER (cs->landing_pad));

      /* ??? Perhaps use insn length scaling if the assembler supports
	 generic arithmetic.  */
      /* ??? Perhaps use attr_length to choose data1 or data2 instead of
	 data4 if the function is small enough.  */
#ifdef HAVE_AS_LEB128
      dw2_asm_output_delta_uleb128 (reg_start_lab, function_start_lab,
				    "region %d start", i);
      dw2_asm_output_delta_uleb128 (reg_end_lab, reg_start_lab,
				    "length");
      if (cs->landing_pad)
	dw2_asm_output_delta_uleb128 (landing_pad_lab, function_start_lab,
				      "landing pad");
      else
	dw2_asm_output_data_uleb128 (0, "landing pad");
#else
      dw2_asm_output_delta (4, reg_start_lab, function_start_lab,
			    "region %d start", i);
      dw2_asm_output_delta (4, reg_end_lab, reg_start_lab, "length");
      if (cs->landing_pad)
	dw2_asm_output_delta (4, landing_pad_lab, function_start_lab,
			      "landing pad");
      else
	dw2_asm_output_data (4, 0, "landing pad");
#endif
      dw2_asm_output_data_uleb128 (cs->action, "action");
    }

  /* Subsequent functions continue numbering from here.  */
  call_site_base += n;
}
3536
3537static void
35cb5232 3538sjlj_output_call_site_table (void)
df4b504c 3539{
3540 int n = cfun->eh->call_site_data_used;
3541 int i;
da5038a3 3542
df4b504c 3543 for (i = 0; i < n; ++i)
da5038a3 3544 {
df4b504c 3545 struct call_site_record *cs = &cfun->eh->call_site_data[i];
b9cf3f63 3546
df4b504c 3547 dw2_asm_output_data_uleb128 (INTVAL (cs->landing_pad),
3548 "region %d landing pad", i);
3549 dw2_asm_output_data_uleb128 (cs->action, "action");
3550 }
b9cf3f63 3551
df4b504c 3552 call_site_base += n;
da5038a3 3553}
3554
/* Tell assembler to switch to the section for the exception handling
   table.  */

void
default_exception_section (void)
{
  if (targetm.have_named_sections)
    {
      int flags;
#ifdef HAVE_LD_RO_RW_SECTION_MIXING
      int tt_format = ASM_PREFERRED_EH_DATA_FORMAT (/*code=*/0, /*global=*/1);

      /* The table may be read-only unless PIC uses absolute or aligned
	 pointers, which the dynamic linker has to relocate (writable).  */
      flags = (! flag_pic
	       || ((tt_format & 0x70) != DW_EH_PE_absptr
		   && (tt_format & 0x70) != DW_EH_PE_aligned))
	      ? 0 : SECTION_WRITE;
#else
      flags = SECTION_WRITE;
#endif
      named_section_flags (".gcc_except_table", flags);
    }
  else if (flag_pic)
    data_section ();
  else
    readonly_data_section ();
}
3581
/* Emit the function's language-specific data area (LSDA): the header,
   the call-site table, the action record table, the @TType (rtti)
   entries, and the exception specification table.  The exact byte
   layout here must match what the unwinder's personality routine
   expects, so the output order below is significant.  */

void
output_function_exception_table (void)
{
  int tt_format, cs_format, lp_format, i, n;
#ifdef HAVE_AS_LEB128
  char ttype_label[32];
  char cs_after_size_label[32];
  char cs_end_label[32];
#else
  int call_site_len;
#endif
  int have_tt_data;
  int tt_format_size = 0;

  /* Not all functions need anything.  */
  if (! cfun->uses_eh_lsda)
    return;

#ifdef IA64_UNWIND_INFO
  fputs ("\t.personality\t", asm_out_file);
  output_addr_const (asm_out_file, eh_personality_libfunc);
  fputs ("\n\t.handlerdata\n", asm_out_file);
  /* Note that varasm still thinks we're in the function's code section.
     The ".endp" directive that will immediately follow will take us back.  */
#else
  (*targetm.asm_out.exception_section) ();
#endif

  have_tt_data = (VARRAY_ACTIVE_SIZE (cfun->eh->ttype_data) > 0
		  || VARRAY_ACTIVE_SIZE (cfun->eh->ehspec_data) > 0);

  /* Indicate the format of the @TType entries.  */
  if (! have_tt_data)
    tt_format = DW_EH_PE_omit;
  else
    {
      tt_format = ASM_PREFERRED_EH_DATA_FORMAT (/*code=*/0, /*global=*/1);
#ifdef HAVE_AS_LEB128
      ASM_GENERATE_INTERNAL_LABEL (ttype_label, "LLSDATT",
				   current_function_funcdef_no);
#endif
      tt_format_size = size_of_encoded_value (tt_format);

      assemble_align (tt_format_size * BITS_PER_UNIT);
    }

  (*targetm.asm_out.internal_label) (asm_out_file, "LLSDA",
				     current_function_funcdef_no);

  /* The LSDA header.  */

  /* Indicate the format of the landing pad start pointer.  An omitted
     field implies @LPStart == @Start.  */
  /* Currently we always put @LPStart == @Start.  This field would
     be most useful in moving the landing pads completely out of
     line to another section, but it could also be used to minimize
     the size of uleb128 landing pad offsets.  */
  lp_format = DW_EH_PE_omit;
  dw2_asm_output_data (1, lp_format, "@LPStart format (%s)",
		       eh_data_format_name (lp_format));

  /* @LPStart pointer would go here.  */

  dw2_asm_output_data (1, tt_format, "@TType format (%s)",
		       eh_data_format_name (tt_format));

  /* Without .uleb128 support we must pre-compute the call-site table
     size to emit the length fields numerically.  */
#ifndef HAVE_AS_LEB128
  if (USING_SJLJ_EXCEPTIONS)
    call_site_len = sjlj_size_of_call_site_table ();
  else
    call_site_len = dw2_size_of_call_site_table ();
#endif

  /* A pc-relative 4-byte displacement to the @TType data.  */
  if (have_tt_data)
    {
#ifdef HAVE_AS_LEB128
      char ttype_after_disp_label[32];
      ASM_GENERATE_INTERNAL_LABEL (ttype_after_disp_label, "LLSDATTD",
				   current_function_funcdef_no);
      dw2_asm_output_delta_uleb128 (ttype_label, ttype_after_disp_label,
				    "@TType base offset");
      ASM_OUTPUT_LABEL (asm_out_file, ttype_after_disp_label);
#else
      /* Ug.  Alignment queers things.  */
      unsigned int before_disp, after_disp, last_disp, disp;

      /* The displacement's own uleb128 size affects the padding needed
	 to align the @TType data, so iterate until it stabilizes.  */
      before_disp = 1 + 1;
      after_disp = (1 + size_of_uleb128 (call_site_len)
		    + call_site_len
		    + VARRAY_ACTIVE_SIZE (cfun->eh->action_record_data)
		    + (VARRAY_ACTIVE_SIZE (cfun->eh->ttype_data)
		       * tt_format_size));

      disp = after_disp;
      do
	{
	  unsigned int disp_size, pad;

	  last_disp = disp;
	  disp_size = size_of_uleb128 (disp);
	  pad = before_disp + disp_size + after_disp;
	  if (pad % tt_format_size)
	    pad = tt_format_size - (pad % tt_format_size);
	  else
	    pad = 0;
	  disp = after_disp + pad;
	}
      while (disp != last_disp);

      dw2_asm_output_data_uleb128 (disp, "@TType base offset");
#endif
    }

  /* Indicate the format of the call-site offsets.  */
#ifdef HAVE_AS_LEB128
  cs_format = DW_EH_PE_uleb128;
#else
  cs_format = DW_EH_PE_udata4;
#endif
  dw2_asm_output_data (1, cs_format, "call-site format (%s)",
		       eh_data_format_name (cs_format));

#ifdef HAVE_AS_LEB128
  ASM_GENERATE_INTERNAL_LABEL (cs_after_size_label, "LLSDACSB",
			       current_function_funcdef_no);
  ASM_GENERATE_INTERNAL_LABEL (cs_end_label, "LLSDACSE",
			       current_function_funcdef_no);
  dw2_asm_output_delta_uleb128 (cs_end_label, cs_after_size_label,
				"Call-site table length");
  ASM_OUTPUT_LABEL (asm_out_file, cs_after_size_label);
  if (USING_SJLJ_EXCEPTIONS)
    sjlj_output_call_site_table ();
  else
    dw2_output_call_site_table ();
  ASM_OUTPUT_LABEL (asm_out_file, cs_end_label);
#else
  dw2_asm_output_data_uleb128 (call_site_len,"Call-site table length");
  if (USING_SJLJ_EXCEPTIONS)
    sjlj_output_call_site_table ();
  else
    dw2_output_call_site_table ();
#endif

  /* ??? Decode and interpret the data for flag_debug_asm.  */
  n = VARRAY_ACTIVE_SIZE (cfun->eh->action_record_data);
  for (i = 0; i < n; ++i)
    dw2_asm_output_data (1, VARRAY_UCHAR (cfun->eh->action_record_data, i),
			 (i ? NULL : "Action record table"));

  if (have_tt_data)
    assemble_align (tt_format_size * BITS_PER_UNIT);

  /* The @TType table is emitted in reverse order.  */
  i = VARRAY_ACTIVE_SIZE (cfun->eh->ttype_data);
  while (i-- > 0)
    {
      tree type = VARRAY_TREE (cfun->eh->ttype_data, i);
      rtx value;

      if (type == NULL_TREE)
	value = const0_rtx;
      else
	{
	  struct cgraph_varpool_node *node;

	  type = lookup_type_for_runtime (type);
	  value = expand_expr (type, NULL_RTX, VOIDmode, EXPAND_INITIALIZER);

	  /* Let cgraph know that the rtti decl is used.  Not all of the
	     paths below go through assemble_integer, which would take
	     care of this for us.  */
	  if (TREE_CODE (type) == ADDR_EXPR)
	    {
	      type = TREE_OPERAND (type, 0);
	      node = cgraph_varpool_node (type);
	      if (node)
		cgraph_varpool_mark_needed_node (node);
	    }
	  else if (TREE_CODE (type) != INTEGER_CST)
	    abort ();
	}

      if (tt_format == DW_EH_PE_absptr || tt_format == DW_EH_PE_aligned)
	assemble_integer (value, tt_format_size,
			  tt_format_size * BITS_PER_UNIT, 1);
      else
	dw2_asm_output_encoded_addr_rtx (tt_format, value, NULL);
    }

#ifdef HAVE_AS_LEB128
  if (have_tt_data)
    ASM_OUTPUT_LABEL (asm_out_file, ttype_label);
#endif

  /* ??? Decode and interpret the data for flag_debug_asm.  */
  n = VARRAY_ACTIVE_SIZE (cfun->eh->ehspec_data);
  for (i = 0; i < n; ++i)
    dw2_asm_output_data (1, VARRAY_UCHAR (cfun->eh->ehspec_data, i),
			 (i ? NULL : "Exception specification table"));

  /* Return to the function's code section.  */
  function_section (current_function_decl);
}
1f3233d1 3784
3785#include "gt-except.h"