/* Implements exception handling.
   Copyright (C) 1989, 1992, 1993, 1994, 1995, 1996, 1997, 1998,
   1999, 2000, 2001, 2002 Free Software Foundation, Inc.
   Contributed by Mike Stump <mrs@cygnus.com>.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 2, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING.  If not, write to the Free
Software Foundation, 59 Temple Place - Suite 330, Boston, MA
02111-1307, USA.  */


/* An exception is an event that can be signaled from within a
   function.  This event can then be "caught" or "trapped" by the
   callers of this function.  This potentially allows program flow to
   be transferred to any arbitrary code associated with a function call
   several levels up the stack.

   The intended use for this mechanism is for signaling "exceptional
   events" in an out-of-band fashion, hence its name.  The C++ language
   (and many other OO-styled or functional languages) practically
   requires such a mechanism, as otherwise it becomes very difficult
   or even impossible to signal failure conditions in complex
   situations.  The traditional C++ example is when an error occurs in
   the process of constructing an object; without such a mechanism, it
   is impossible to signal that the error occurs without adding global
   state variables and error checks around every object construction.

   The act of causing this event to occur is referred to as "throwing
   an exception".  (Alternate terms include "raising an exception" or
   "signaling an exception".)  The term "throw" is used because control
   is returned to the callers of the function that is signaling the
   exception, and thus there is the concept of "throwing" the
   exception up the call stack.

   [ Add updated documentation on how to use this.  ]  */


#include "config.h"
#include "system.h"
#include "rtl.h"
#include "tree.h"
#include "flags.h"
#include "function.h"
#include "expr.h"
#include "libfuncs.h"
#include "insn-config.h"
#include "except.h"
#include "integrate.h"
#include "hard-reg-set.h"
#include "basic-block.h"
#include "output.h"
#include "dwarf2asm.h"
#include "dwarf2out.h"
#include "dwarf2.h"
#include "toplev.h"
#include "hashtab.h"
#include "intl.h"
#include "ggc.h"
#include "tm_p.h"
#include "target.h"
#include "langhooks.h"

/* Provide defaults for stuff that may not be defined when using
   sjlj exceptions.  */
#ifndef EH_RETURN_STACKADJ_RTX
#define EH_RETURN_STACKADJ_RTX 0
#endif
#ifndef EH_RETURN_HANDLER_RTX
#define EH_RETURN_HANDLER_RTX 0
#endif
#ifndef EH_RETURN_DATA_REGNO
#define EH_RETURN_DATA_REGNO(N) INVALID_REGNUM
#endif


/* Nonzero means enable synchronous exceptions for non-call instructions.  */
int flag_non_call_exceptions;

/* Protect cleanup actions with must-not-throw regions, with a call
   to the given failure handler.  */
tree (*lang_protect_cleanup_actions) PARAMS ((void));

/* Return true if type A catches type B.  */
int (*lang_eh_type_covers) PARAMS ((tree a, tree b));

/* Map a type to a runtime object to match type.  */
tree (*lang_eh_runtime_type) PARAMS ((tree));

/* A list of labels used for exception handlers.  */
rtx exception_handler_labels;

static int call_site_base;
static unsigned int sjlj_funcdef_number;
static htab_t type_to_runtime_map;

/* Describe the SjLj_Function_Context structure.  */
static tree sjlj_fc_type_node;
static int sjlj_fc_call_site_ofs;
static int sjlj_fc_data_ofs;
static int sjlj_fc_personality_ofs;
static int sjlj_fc_lsda_ofs;
static int sjlj_fc_jbuf_ofs;
\f
/* Describes one exception region.  */
struct eh_region
{
  /* The immediately surrounding region.  */
  struct eh_region *outer;

  /* The list of immediately contained regions.  */
  struct eh_region *inner;
  struct eh_region *next_peer;

  /* An identifier for this region.  */
  int region_number;

  /* Each region does exactly one thing.  */
  enum eh_region_type
  {
    ERT_UNKNOWN = 0,
    ERT_CLEANUP,
    ERT_TRY,
    ERT_CATCH,
    ERT_ALLOWED_EXCEPTIONS,
    ERT_MUST_NOT_THROW,
    ERT_THROW,
    ERT_FIXUP
  } type;

  /* Holds the action to perform based on the preceding type.  */
  union {
    /* A list of catch blocks, a surrounding try block,
       and the label for continuing after a catch.  */
    struct {
      struct eh_region *catch;
      struct eh_region *last_catch;
      struct eh_region *prev_try;
      rtx continue_label;
    } try;

    /* The list through the catch handlers, the list of type objects
       matched, and the list of associated filters.  */
    struct {
      struct eh_region *next_catch;
      struct eh_region *prev_catch;
      tree type_list;
      tree filter_list;
    } catch;

    /* A tree_list of allowed types.  */
    struct {
      tree type_list;
      int filter;
    } allowed;

    /* The type given by a call to "throw foo();", or discovered
       for a throw.  */
    struct {
      tree type;
    } throw;

    /* Retain the cleanup expression even after expansion so that
       we can match up fixup regions.  */
    struct {
      tree exp;
    } cleanup;

    /* The real region (by expression and by pointer) that fixup code
       should live in.  */
    struct {
      tree cleanup_exp;
      struct eh_region *real_region;
    } fixup;
  } u;

  /* Entry point for this region's handler before landing pads are built.  */
  rtx label;

  /* Entry point for this region's handler from the runtime eh library.  */
  rtx landing_pad;

  /* Entry point for this region's handler from an inner region.  */
  rtx post_landing_pad;

  /* The RESX insn for handing off control to the next outermost handler,
     if appropriate.  */
  rtx resume;
};

/* Used to save exception status for each function.  */
struct eh_status
{
  /* The tree of all regions for this function.  */
  struct eh_region *region_tree;

  /* The same information as an indexable array.  */
  struct eh_region **region_array;

  /* The most recently open region.  */
  struct eh_region *cur_region;

  /* This is the region for which we are processing catch blocks.  */
  struct eh_region *try_region;

  /* A stack (TREE_LIST) of lists of handlers.  The TREE_VALUE of each
     node is itself a TREE_CHAINed list of handlers for regions that
     are not yet closed.  The TREE_VALUE of each entry contains the
     handler for the corresponding entry on the ehstack.  */
  tree protect_list;

  rtx filter;
  rtx exc_ptr;

  int built_landing_pads;
  int last_region_number;

  varray_type ttype_data;
  varray_type ehspec_data;
  varray_type action_record_data;

  struct call_site_record
  {
    rtx landing_pad;
    int action;
  } *call_site_data;
  int call_site_data_used;
  int call_site_data_size;

  rtx ehr_stackadj;
  rtx ehr_handler;
  rtx ehr_label;

  rtx sjlj_fc;
  rtx sjlj_exit_after;
};
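
/* Illustrative sketch: the passes below generally walk regions through
   the flat REGION_ARRAY rather than the REGION_TREE, skipping duplicate
   or stale entries by checking the stored region number.  The recurring
   idiom looks roughly like this:

	for (i = cfun->eh->last_region_number; i > 0; --i)
	  {
	    struct eh_region *r = cfun->eh->region_array[i];
	    if (! r || r->region_number != i)
	      continue;
	    ... process region R exactly once ...
	  }  */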

\f
static void mark_eh_region PARAMS ((struct eh_region *));

static int t2r_eq PARAMS ((const PTR, const PTR));
static hashval_t t2r_hash PARAMS ((const PTR));
static int t2r_mark_1 PARAMS ((PTR *, PTR));
static void t2r_mark PARAMS ((PTR));
static void add_type_for_runtime PARAMS ((tree));
static tree lookup_type_for_runtime PARAMS ((tree));

static struct eh_region *expand_eh_region_end PARAMS ((void));

static rtx get_exception_filter PARAMS ((struct function *));

static void collect_eh_region_array PARAMS ((void));
static void resolve_fixup_regions PARAMS ((void));
static void remove_fixup_regions PARAMS ((void));
static void remove_unreachable_regions PARAMS ((rtx));
static void convert_from_eh_region_ranges_1 PARAMS ((rtx *, int *, int));

static struct eh_region *duplicate_eh_region_1 PARAMS ((struct eh_region *,
							struct inline_remap *));
static void duplicate_eh_region_2 PARAMS ((struct eh_region *,
					   struct eh_region **));
static int ttypes_filter_eq PARAMS ((const PTR, const PTR));
static hashval_t ttypes_filter_hash PARAMS ((const PTR));
static int ehspec_filter_eq PARAMS ((const PTR, const PTR));
static hashval_t ehspec_filter_hash PARAMS ((const PTR));
static int add_ttypes_entry PARAMS ((htab_t, tree));
static int add_ehspec_entry PARAMS ((htab_t, htab_t, tree));
static void assign_filter_values PARAMS ((void));
static void build_post_landing_pads PARAMS ((void));
static void connect_post_landing_pads PARAMS ((void));
static void dw2_build_landing_pads PARAMS ((void));

struct sjlj_lp_info;
static bool sjlj_find_directly_reachable_regions
     PARAMS ((struct sjlj_lp_info *));
static void sjlj_assign_call_site_values
     PARAMS ((rtx, struct sjlj_lp_info *));
static void sjlj_mark_call_sites
     PARAMS ((struct sjlj_lp_info *));
static void sjlj_emit_function_enter PARAMS ((rtx));
static void sjlj_emit_function_exit PARAMS ((void));
static void sjlj_emit_dispatch_table
     PARAMS ((rtx, struct sjlj_lp_info *));
static void sjlj_build_landing_pads PARAMS ((void));

static void remove_exception_handler_label PARAMS ((rtx));
static void remove_eh_handler PARAMS ((struct eh_region *));

struct reachable_info;

/* The return value of reachable_next_level.  */
enum reachable_code
{
  /* The given exception is not processed by the given region.  */
  RNL_NOT_CAUGHT,
  /* The given exception may need processing by the given region.  */
  RNL_MAYBE_CAUGHT,
  /* The given exception is completely processed by the given region.  */
  RNL_CAUGHT,
  /* The given exception is completely processed by the runtime.  */
  RNL_BLOCKED
};

static int check_handled PARAMS ((tree, tree));
static void add_reachable_handler
     PARAMS ((struct reachable_info *, struct eh_region *,
	      struct eh_region *));
static enum reachable_code reachable_next_level
     PARAMS ((struct eh_region *, tree, struct reachable_info *));

static int action_record_eq PARAMS ((const PTR, const PTR));
static hashval_t action_record_hash PARAMS ((const PTR));
static int add_action_record PARAMS ((htab_t, int, int));
static int collect_one_action_chain PARAMS ((htab_t, struct eh_region *));
static int add_call_site PARAMS ((rtx, int));

static void push_uleb128 PARAMS ((varray_type *, unsigned int));
static void push_sleb128 PARAMS ((varray_type *, int));
#ifndef HAVE_AS_LEB128
static int dw2_size_of_call_site_table PARAMS ((void));
static int sjlj_size_of_call_site_table PARAMS ((void));
#endif
static void dw2_output_call_site_table PARAMS ((void));
static void sjlj_output_call_site_table PARAMS ((void));

\f
/* Routine to see if exception handling is turned on.
   DO_WARN is non-zero if we want to inform the user that exception
   handling is turned off.

   This is used to ensure that -fexceptions has been specified if the
   compiler tries to use any exception-specific functions.  */

int
doing_eh (do_warn)
     int do_warn;
{
  if (! flag_exceptions)
    {
      static int warned = 0;
      if (! warned && do_warn)
	{
	  error ("exception handling disabled, use -fexceptions to enable");
	  warned = 1;
	}
      return 0;
    }
  return 1;
}

\f
void
init_eh ()
{
  ggc_add_rtx_root (&exception_handler_labels, 1);

  if (! flag_exceptions)
    return;

  type_to_runtime_map = htab_create (31, t2r_hash, t2r_eq, NULL);
  ggc_add_root (&type_to_runtime_map, 1, sizeof (htab_t), t2r_mark);

  /* Create the SjLj_Function_Context structure.  This should match
     the definition in unwind-sjlj.c.  */
  if (USING_SJLJ_EXCEPTIONS)
    {
      tree f_jbuf, f_per, f_lsda, f_prev, f_cs, f_data, tmp;

      sjlj_fc_type_node = (*lang_hooks.types.make_type) (RECORD_TYPE);
      ggc_add_tree_root (&sjlj_fc_type_node, 1);

      f_prev = build_decl (FIELD_DECL, get_identifier ("__prev"),
			   build_pointer_type (sjlj_fc_type_node));
      DECL_FIELD_CONTEXT (f_prev) = sjlj_fc_type_node;

      f_cs = build_decl (FIELD_DECL, get_identifier ("__call_site"),
			 integer_type_node);
      DECL_FIELD_CONTEXT (f_cs) = sjlj_fc_type_node;

      tmp = build_index_type (build_int_2 (4 - 1, 0));
      tmp = build_array_type (type_for_mode (word_mode, 1), tmp);
      f_data = build_decl (FIELD_DECL, get_identifier ("__data"), tmp);
      DECL_FIELD_CONTEXT (f_data) = sjlj_fc_type_node;

      f_per = build_decl (FIELD_DECL, get_identifier ("__personality"),
			  ptr_type_node);
      DECL_FIELD_CONTEXT (f_per) = sjlj_fc_type_node;

      f_lsda = build_decl (FIELD_DECL, get_identifier ("__lsda"),
			   ptr_type_node);
      DECL_FIELD_CONTEXT (f_lsda) = sjlj_fc_type_node;

#ifdef DONT_USE_BUILTIN_SETJMP
#ifdef JMP_BUF_SIZE
      tmp = build_int_2 (JMP_BUF_SIZE - 1, 0);
#else
      /* This should be large enough for most systems; if it is not,
	 JMP_BUF_SIZE should be defined with the proper value.  It will
	 also tend to be larger than necessary for most systems; a more
	 optimal port will define JMP_BUF_SIZE.  */
      tmp = build_int_2 (FIRST_PSEUDO_REGISTER + 2 - 1, 0);
#endif
#else
      /* This is 2 for builtin_setjmp, plus whatever the target requires
	 via STACK_SAVEAREA_MODE (SAVE_NONLOCAL).  */
      tmp = build_int_2 ((GET_MODE_SIZE (STACK_SAVEAREA_MODE (SAVE_NONLOCAL))
			  / GET_MODE_SIZE (Pmode)) + 2 - 1, 0);
#endif
      tmp = build_index_type (tmp);
      tmp = build_array_type (ptr_type_node, tmp);
      f_jbuf = build_decl (FIELD_DECL, get_identifier ("__jbuf"), tmp);
#ifdef DONT_USE_BUILTIN_SETJMP
      /* We don't know the alignment requirements of the runtime's
	 jmp_buf, so overestimate.  */
      DECL_ALIGN (f_jbuf) = BIGGEST_ALIGNMENT;
      DECL_USER_ALIGN (f_jbuf) = 1;
#endif
      DECL_FIELD_CONTEXT (f_jbuf) = sjlj_fc_type_node;

      TYPE_FIELDS (sjlj_fc_type_node) = f_prev;
      TREE_CHAIN (f_prev) = f_cs;
      TREE_CHAIN (f_cs) = f_data;
      TREE_CHAIN (f_data) = f_per;
      TREE_CHAIN (f_per) = f_lsda;
      TREE_CHAIN (f_lsda) = f_jbuf;

      layout_type (sjlj_fc_type_node);

      /* Cache the interesting field offsets so that we have
	 easy access from rtl.  */
      sjlj_fc_call_site_ofs
	= (tree_low_cst (DECL_FIELD_OFFSET (f_cs), 1)
	   + tree_low_cst (DECL_FIELD_BIT_OFFSET (f_cs), 1) / BITS_PER_UNIT);
      sjlj_fc_data_ofs
	= (tree_low_cst (DECL_FIELD_OFFSET (f_data), 1)
	   + tree_low_cst (DECL_FIELD_BIT_OFFSET (f_data), 1) / BITS_PER_UNIT);
      sjlj_fc_personality_ofs
	= (tree_low_cst (DECL_FIELD_OFFSET (f_per), 1)
	   + tree_low_cst (DECL_FIELD_BIT_OFFSET (f_per), 1) / BITS_PER_UNIT);
      sjlj_fc_lsda_ofs
	= (tree_low_cst (DECL_FIELD_OFFSET (f_lsda), 1)
	   + tree_low_cst (DECL_FIELD_BIT_OFFSET (f_lsda), 1) / BITS_PER_UNIT);
      sjlj_fc_jbuf_ofs
	= (tree_low_cst (DECL_FIELD_OFFSET (f_jbuf), 1)
	   + tree_low_cst (DECL_FIELD_BIT_OFFSET (f_jbuf), 1) / BITS_PER_UNIT);
    }
}
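
/* For orientation only: the RECORD_TYPE built above corresponds roughly
   to a C structure of the shape below.  Field types are paraphrased from
   the decls above, the jbuf length is target-dependent as computed above,
   and the authoritative definition is the one in unwind-sjlj.c.

	struct SjLj_Function_Context
	{
	  struct SjLj_Function_Context *__prev;
	  int __call_site;
	  word __data[4];
	  void *__personality;
	  void *__lsda;
	  void *__jbuf[JBUF_LEN];
	};  */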

void
init_eh_for_function ()
{
  cfun->eh = (struct eh_status *) xcalloc (1, sizeof (struct eh_status));
}

/* Mark EH for GC.  */

static void
mark_eh_region (region)
     struct eh_region *region;
{
  if (! region)
    return;

  switch (region->type)
    {
    case ERT_UNKNOWN:
      /* This can happen if a nested function is inside the body of a region
	 and we do a GC as part of processing it.  */
      break;
    case ERT_CLEANUP:
      ggc_mark_tree (region->u.cleanup.exp);
      break;
    case ERT_TRY:
      ggc_mark_rtx (region->u.try.continue_label);
      break;
    case ERT_CATCH:
      ggc_mark_tree (region->u.catch.type_list);
      ggc_mark_tree (region->u.catch.filter_list);
      break;
    case ERT_ALLOWED_EXCEPTIONS:
      ggc_mark_tree (region->u.allowed.type_list);
      break;
    case ERT_MUST_NOT_THROW:
      break;
    case ERT_THROW:
      ggc_mark_tree (region->u.throw.type);
      break;
    case ERT_FIXUP:
      ggc_mark_tree (region->u.fixup.cleanup_exp);
      break;
    default:
      abort ();
    }

  ggc_mark_rtx (region->label);
  ggc_mark_rtx (region->resume);
  ggc_mark_rtx (region->landing_pad);
  ggc_mark_rtx (region->post_landing_pad);
}

void
mark_eh_status (eh)
     struct eh_status *eh;
{
  int i;

  if (eh == 0)
    return;

  /* If we've called collect_eh_region_array, use it.  Otherwise walk
     the tree non-recursively.  */
  if (eh->region_array)
    {
      for (i = eh->last_region_number; i > 0; --i)
	{
	  struct eh_region *r = eh->region_array[i];
	  if (r && r->region_number == i)
	    mark_eh_region (r);
	}
    }
  else if (eh->region_tree)
    {
      struct eh_region *r = eh->region_tree;
      while (1)
	{
	  mark_eh_region (r);
	  if (r->inner)
	    r = r->inner;
	  else if (r->next_peer)
	    r = r->next_peer;
	  else
	    {
	      do {
		r = r->outer;
		if (r == NULL)
		  goto tree_done;
	      } while (r->next_peer == NULL);
	      r = r->next_peer;
	    }
	}
    tree_done:;
    }

  ggc_mark_tree (eh->protect_list);
  ggc_mark_rtx (eh->filter);
  ggc_mark_rtx (eh->exc_ptr);
  ggc_mark_tree_varray (eh->ttype_data);

  if (eh->call_site_data)
    {
      for (i = eh->call_site_data_used - 1; i >= 0; --i)
	ggc_mark_rtx (eh->call_site_data[i].landing_pad);
    }

  ggc_mark_rtx (eh->ehr_stackadj);
  ggc_mark_rtx (eh->ehr_handler);
  ggc_mark_rtx (eh->ehr_label);

  ggc_mark_rtx (eh->sjlj_fc);
  ggc_mark_rtx (eh->sjlj_exit_after);
}

void
free_eh_status (f)
     struct function *f;
{
  struct eh_status *eh = f->eh;

  if (eh->region_array)
    {
      int i;
      for (i = eh->last_region_number; i > 0; --i)
	{
	  struct eh_region *r = eh->region_array[i];
	  /* Mind we don't free a region struct more than once.  */
	  if (r && r->region_number == i)
	    free (r);
	}
      free (eh->region_array);
    }
  else if (eh->region_tree)
    {
      struct eh_region *next, *r = eh->region_tree;
      while (1)
	{
	  if (r->inner)
	    r = r->inner;
	  else if (r->next_peer)
	    {
	      next = r->next_peer;
	      free (r);
	      r = next;
	    }
	  else
	    {
	      do {
		next = r->outer;
		free (r);
		r = next;
		if (r == NULL)
		  goto tree_done;
	      } while (r->next_peer == NULL);
	      next = r->next_peer;
	      free (r);
	      r = next;
	    }
	}
    tree_done:;
    }

  VARRAY_FREE (eh->ttype_data);
  VARRAY_FREE (eh->ehspec_data);
  VARRAY_FREE (eh->action_record_data);
  if (eh->call_site_data)
    free (eh->call_site_data);

  free (eh);
  f->eh = NULL;
  exception_handler_labels = NULL;
}

\f
/* Start an exception handling region.  All instructions emitted
   after this point are considered to be part of the region until
   expand_eh_region_end is invoked.  */

void
expand_eh_region_start ()
{
  struct eh_region *new_region;
  struct eh_region *cur_region;
  rtx note;

  if (! doing_eh (0))
    return;

  /* Insert a new blank region as a leaf in the tree.  */
  new_region = (struct eh_region *) xcalloc (1, sizeof (*new_region));
  cur_region = cfun->eh->cur_region;
  new_region->outer = cur_region;
  if (cur_region)
    {
      new_region->next_peer = cur_region->inner;
      cur_region->inner = new_region;
    }
  else
    {
      new_region->next_peer = cfun->eh->region_tree;
      cfun->eh->region_tree = new_region;
    }
  cfun->eh->cur_region = new_region;

  /* Create a note marking the start of this region.  */
  new_region->region_number = ++cfun->eh->last_region_number;
  note = emit_note (NULL, NOTE_INSN_EH_REGION_BEG);
  NOTE_EH_HANDLER (note) = new_region->region_number;
}

/* Common code to end a region.  Returns the region just ended.  */

static struct eh_region *
expand_eh_region_end ()
{
  struct eh_region *cur_region = cfun->eh->cur_region;
  rtx note;

  /* Create a note marking the end of this region.  */
  note = emit_note (NULL, NOTE_INSN_EH_REGION_END);
  NOTE_EH_HANDLER (note) = cur_region->region_number;

  /* Pop.  */
  cfun->eh->cur_region = cur_region->outer;

  return cur_region;
}

/* End an exception handling region for a cleanup.  HANDLER is an
   expression to expand for the cleanup.  */

void
expand_eh_region_end_cleanup (handler)
     tree handler;
{
  struct eh_region *region;
  tree protect_cleanup_actions;
  rtx around_label;
  rtx data_save[2];

  if (! doing_eh (0))
    return;

  region = expand_eh_region_end ();
  region->type = ERT_CLEANUP;
  region->label = gen_label_rtx ();
  region->u.cleanup.exp = handler;

  around_label = gen_label_rtx ();
  emit_jump (around_label);

  emit_label (region->label);

  /* Give the language a chance to specify an action to be taken if an
     exception is thrown that would propagate out of the HANDLER.  */
  protect_cleanup_actions
    = (lang_protect_cleanup_actions
       ? (*lang_protect_cleanup_actions) ()
       : NULL_TREE);

  if (protect_cleanup_actions)
    expand_eh_region_start ();

  /* In case this cleanup involves an inline destructor with a try block in
     it, we need to save the EH return data registers around it.  */
  data_save[0] = gen_reg_rtx (Pmode);
  emit_move_insn (data_save[0], get_exception_pointer (cfun));
  data_save[1] = gen_reg_rtx (word_mode);
  emit_move_insn (data_save[1], get_exception_filter (cfun));

  expand_expr (handler, const0_rtx, VOIDmode, 0);

  emit_move_insn (cfun->eh->exc_ptr, data_save[0]);
  emit_move_insn (cfun->eh->filter, data_save[1]);

  if (protect_cleanup_actions)
    expand_eh_region_end_must_not_throw (protect_cleanup_actions);

  /* We need any stack adjustment complete before the around_label.  */
  do_pending_stack_adjust ();

  /* We delay the generation of the _Unwind_Resume until we generate
     landing pads.  We emit a marker here so as to get good control
     flow data in the meantime.  */
  region->resume
    = emit_jump_insn (gen_rtx_RESX (VOIDmode, region->region_number));
  emit_barrier ();

  emit_label (around_label);
}

/* End an exception handling region for a try block, and prepare
   for subsequent calls to expand_start_catch.  */

void
expand_start_all_catch ()
{
  struct eh_region *region;

  if (! doing_eh (1))
    return;

  region = expand_eh_region_end ();
  region->type = ERT_TRY;
  region->u.try.prev_try = cfun->eh->try_region;
  region->u.try.continue_label = gen_label_rtx ();

  cfun->eh->try_region = region;

  emit_jump (region->u.try.continue_label);
}

/* Begin a catch clause.  TYPE is the type caught, a list of such types, or
   null if this is a catch-all clause.  Providing a type list makes it
   possible to associate the catch region with several exception types,
   which is useful e.g. for Ada.  */

void
expand_start_catch (type_or_list)
     tree type_or_list;
{
  struct eh_region *t, *c, *l;
  tree type_list;

  if (! doing_eh (0))
    return;

  type_list = type_or_list;

  if (type_or_list)
    {
      /* Ensure we always end up with a type list to normalize further
	 processing, then register each type against the runtime types
	 map.  */
      tree type_node;

      if (TREE_CODE (type_or_list) != TREE_LIST)
	type_list = tree_cons (NULL_TREE, type_or_list, NULL_TREE);

      type_node = type_list;
      for (; type_node; type_node = TREE_CHAIN (type_node))
	add_type_for_runtime (TREE_VALUE (type_node));
    }

  expand_eh_region_start ();

  t = cfun->eh->try_region;
  c = cfun->eh->cur_region;
  c->type = ERT_CATCH;
  c->u.catch.type_list = type_list;
  c->label = gen_label_rtx ();

  l = t->u.try.last_catch;
  c->u.catch.prev_catch = l;
  if (l)
    l->u.catch.next_catch = c;
  else
    t->u.try.catch = c;
  t->u.try.last_catch = c;

  emit_label (c->label);
}

/* End a catch clause.  Control will resume after the try/catch block.  */

void
expand_end_catch ()
{
  struct eh_region *try_region, *catch_region;

  if (! doing_eh (0))
    return;

  catch_region = expand_eh_region_end ();
  try_region = cfun->eh->try_region;

  emit_jump (try_region->u.try.continue_label);
}

/* End a sequence of catch handlers for a try block.  */

void
expand_end_all_catch ()
{
  struct eh_region *try_region;

  if (! doing_eh (0))
    return;

  try_region = cfun->eh->try_region;
  cfun->eh->try_region = try_region->u.try.prev_try;

  emit_label (try_region->u.try.continue_label);
}
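
/* Taken together, a front end lowers a construct such as

	try { BODY } catch (T) { HANDLER }

   through roughly the following call sequence (illustrative only; the
   exact hooks a given front end uses may differ):

	expand_eh_region_start ();
	... expand BODY ...
	expand_start_all_catch ();
	expand_start_catch (T);
	... expand HANDLER ...
	expand_end_catch ();
	expand_end_all_catch ();  */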

/* End an exception region for an exception type filter.  ALLOWED is a
   TREE_LIST of types to be matched by the runtime.  FAILURE is an
   expression to invoke if a mismatch occurs.

   ??? We could use these semantics for calls to rethrow, too; if we can
   see the surrounding catch clause, we know that the exception we're
   rethrowing satisfies the "filter" of the catch type.  */

void
expand_eh_region_end_allowed (allowed, failure)
     tree allowed, failure;
{
  struct eh_region *region;
  rtx around_label;

  if (! doing_eh (0))
    return;

  region = expand_eh_region_end ();
  region->type = ERT_ALLOWED_EXCEPTIONS;
  region->u.allowed.type_list = allowed;
  region->label = gen_label_rtx ();

  for (; allowed ; allowed = TREE_CHAIN (allowed))
    add_type_for_runtime (TREE_VALUE (allowed));

  /* We must emit the call to FAILURE here, so that if this function
     throws a different exception, it will be processed by the
     correct region.  */

  around_label = gen_label_rtx ();
  emit_jump (around_label);

  emit_label (region->label);
  expand_expr (failure, const0_rtx, VOIDmode, EXPAND_NORMAL);
  /* We must adjust the stack before we reach the AROUND_LABEL because
     the call to FAILURE does not occur on all paths to the
     AROUND_LABEL.  */
  do_pending_stack_adjust ();

  emit_label (around_label);
}

/* End an exception region for a must-not-throw filter.  FAILURE is an
   expression to invoke if an uncaught exception propagates this far.

   This is conceptually identical to expand_eh_region_end_allowed with
   an empty allowed list (if you passed "std::terminate" instead of
   "__cxa_call_unexpected"), but they are represented differently in
   the C++ LSDA.  */

void
expand_eh_region_end_must_not_throw (failure)
     tree failure;
{
  struct eh_region *region;
  rtx around_label;

  if (! doing_eh (0))
    return;

  region = expand_eh_region_end ();
  region->type = ERT_MUST_NOT_THROW;
  region->label = gen_label_rtx ();

  /* We must emit the call to FAILURE here, so that if this function
     throws a different exception, it will be processed by the
     correct region.  */

  around_label = gen_label_rtx ();
  emit_jump (around_label);

  emit_label (region->label);
  expand_expr (failure, const0_rtx, VOIDmode, EXPAND_NORMAL);

  emit_label (around_label);
}

/* End an exception region for a throw.  No handling goes on here,
   but it's the easiest way for the front-end to indicate what type
   is being thrown.  */

void
expand_eh_region_end_throw (type)
     tree type;
{
  struct eh_region *region;

  if (! doing_eh (0))
    return;

  region = expand_eh_region_end ();
  region->type = ERT_THROW;
  region->u.throw.type = type;
}

/* End a fixup region.  Within this region the cleanups for the immediately
   enclosing region are _not_ run.  This is used for goto cleanup to avoid
   destroying an object twice.

   This would be an extraordinarily simple prospect, were it not for the
   fact that we don't actually know what the immediately enclosing region
   is.  This surprising fact is because expand_cleanups is currently
   generating a sequence that it will insert somewhere else.  We collect
   the proper notion of "enclosing" in convert_from_eh_region_ranges.  */

void
expand_eh_region_end_fixup (handler)
     tree handler;
{
  struct eh_region *fixup;

  if (! doing_eh (0))
    return;

  fixup = expand_eh_region_end ();
  fixup->type = ERT_FIXUP;
  fixup->u.fixup.cleanup_exp = handler;
}

/* Return an rtl expression for a pointer to the exception object
   within a handler.  */

rtx
get_exception_pointer (fun)
     struct function *fun;
{
  rtx exc_ptr = fun->eh->exc_ptr;
  if (fun == cfun && ! exc_ptr)
    {
      exc_ptr = gen_reg_rtx (Pmode);
      fun->eh->exc_ptr = exc_ptr;
    }
  return exc_ptr;
}

/* Return an rtl expression for the exception dispatch filter
   within a handler.  */

static rtx
get_exception_filter (fun)
     struct function *fun;
{
  rtx filter = fun->eh->filter;
  if (fun == cfun && ! filter)
    {
      filter = gen_reg_rtx (word_mode);
      fun->eh->filter = filter;
    }
  return filter;
}
\f
/* Begin a region that will contain entries created with
   add_partial_entry.  */

void
begin_protect_partials ()
{
  /* Push room for a new list.  */
  cfun->eh->protect_list
    = tree_cons (NULL_TREE, NULL_TREE, cfun->eh->protect_list);
}

/* Start a new exception region for a region of code that has a
   cleanup action and push the HANDLER for the region onto
   protect_list.  All of the regions created with add_partial_entry
   will be ended when end_protect_partials is invoked.

   ??? The only difference between this purpose and that of
   expand_decl_cleanup is that in this case, we only want the cleanup to
   run if an exception is thrown.  This should also be handled using
   binding levels.  */

void
add_partial_entry (handler)
     tree handler;
{
  expand_eh_region_start ();

  /* Add this entry to the front of the list.  */
  TREE_VALUE (cfun->eh->protect_list)
    = tree_cons (NULL_TREE, handler, TREE_VALUE (cfun->eh->protect_list));
}

/* End all the pending exception regions on protect_list.  */

void
end_protect_partials ()
{
  tree t;

  /* Pop the topmost entry.  */
  t = TREE_VALUE (cfun->eh->protect_list);
  cfun->eh->protect_list = TREE_CHAIN (cfun->eh->protect_list);

  /* End all the exception regions.  */
  for (; t; t = TREE_CHAIN (t))
    expand_eh_region_end_cleanup (TREE_VALUE (t));
}
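
/* Again purely illustrative: for a block constructing several objects
   that need cleanups, a front end brackets the block roughly as

	begin_protect_partials ();
	... construct object 1 ...
	add_partial_entry (CLEANUP_FOR_OBJECT_1);
	... construct object 2 ...
	add_partial_entry (CLEANUP_FOR_OBJECT_2);
	...
	end_protect_partials ();

   where the CLEANUP_FOR_OBJECT_N expressions are placeholders for
   whatever cleanup trees the front end builds.  Each cleanup region is
   then ended in the proper nesting order, and runs only if an exception
   is thrown after the corresponding object has been constructed.  */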

\f
/* This section is for the exception handling specific optimization pass.  */

/* Random access the exception region tree.  It's just as simple to
   collect the regions this way as in expand_eh_region_start, but
   without having to realloc memory.  */

static void
collect_eh_region_array ()
{
  struct eh_region **array, *i;

  i = cfun->eh->region_tree;
  if (! i)
    return;

  array = xcalloc (cfun->eh->last_region_number + 1, sizeof (*array));
  cfun->eh->region_array = array;

  while (1)
    {
      array[i->region_number] = i;

      /* If there are sub-regions, process them.  */
      if (i->inner)
	i = i->inner;
      /* If there are peers, process them.  */
      else if (i->next_peer)
	i = i->next_peer;
      /* Otherwise, step back up the tree to the next peer.  */
      else
	{
	  do {
	    i = i->outer;
	    if (i == NULL)
	      return;
	  } while (i->next_peer == NULL);
	  i = i->next_peer;
	}
    }
}

static void
resolve_fixup_regions ()
{
  int i, j, n = cfun->eh->last_region_number;

  for (i = 1; i <= n; ++i)
    {
      struct eh_region *fixup = cfun->eh->region_array[i];
      struct eh_region *cleanup = 0;

      if (! fixup || fixup->type != ERT_FIXUP)
	continue;

      for (j = 1; j <= n; ++j)
	{
	  cleanup = cfun->eh->region_array[j];
	  if (cleanup->type == ERT_CLEANUP
	      && cleanup->u.cleanup.exp == fixup->u.fixup.cleanup_exp)
	    break;
	}
      if (j > n)
	abort ();

      fixup->u.fixup.real_region = cleanup->outer;
    }
}

/* Now that we've discovered what region actually encloses a fixup,
   we can shuffle pointers and remove them from the tree.  */

static void
remove_fixup_regions ()
{
  int i;
  rtx insn, note;
  struct eh_region *fixup;

  /* Walk the insn chain and adjust the REG_EH_REGION numbers
     for instructions referencing fixup regions.  This is only
     strictly necessary for fixup regions with no parent, but
     doesn't hurt to do it for all regions.  */
  for (insn = get_insns (); insn ; insn = NEXT_INSN (insn))
    if (INSN_P (insn)
	&& (note = find_reg_note (insn, REG_EH_REGION, NULL))
	&& INTVAL (XEXP (note, 0)) > 0
	&& (fixup = cfun->eh->region_array[INTVAL (XEXP (note, 0))])
	&& fixup->type == ERT_FIXUP)
      {
	if (fixup->u.fixup.real_region)
	  XEXP (note, 0) = GEN_INT (fixup->u.fixup.real_region->region_number);
	else
	  remove_note (insn, note);
      }

  /* Remove the fixup regions from the tree.  */
  for (i = cfun->eh->last_region_number; i > 0; --i)
    {
      fixup = cfun->eh->region_array[i];
      if (! fixup)
	continue;

      /* Allow GC to maybe free some memory.  */
      if (fixup->type == ERT_CLEANUP)
	fixup->u.cleanup.exp = NULL_TREE;

      if (fixup->type != ERT_FIXUP)
	continue;

      if (fixup->inner)
	{
	  struct eh_region *parent, *p, **pp;

	  parent = fixup->u.fixup.real_region;

	  /* Fix up the children's parent pointers; find the end of
	     the list.  */
	  for (p = fixup->inner; ; p = p->next_peer)
	    {
	      p->outer = parent;
	      if (! p->next_peer)
		break;
	    }

	  /* In the tree of cleanups, only outer-inner ordering matters.
	     So link the children back in anywhere at the correct level.  */
	  if (parent)
	    pp = &parent->inner;
	  else
	    pp = &cfun->eh->region_tree;
	  p->next_peer = *pp;
	  *pp = fixup->inner;
	  fixup->inner = NULL;
	}

      remove_eh_handler (fixup);
    }
}

/* Remove all regions whose labels are not reachable from insns.  */

static void
remove_unreachable_regions (insns)
     rtx insns;
{
  int i, *uid_region_num;
  bool *reachable;
  struct eh_region *r;
  rtx insn;

  uid_region_num = xcalloc (get_max_uid (), sizeof (int));
  reachable = xcalloc (cfun->eh->last_region_number + 1, sizeof (bool));

  for (i = cfun->eh->last_region_number; i > 0; --i)
    {
      r = cfun->eh->region_array[i];
      if (!r || r->region_number != i)
	continue;

      if (r->resume)
	{
	  if (uid_region_num[INSN_UID (r->resume)])
	    abort ();
	  uid_region_num[INSN_UID (r->resume)] = i;
	}
      if (r->label)
	{
	  if (uid_region_num[INSN_UID (r->label)])
	    abort ();
	  uid_region_num[INSN_UID (r->label)] = i;
	}
      if (r->type == ERT_TRY && r->u.try.continue_label)
	{
	  if (uid_region_num[INSN_UID (r->u.try.continue_label)])
	    abort ();
	  uid_region_num[INSN_UID (r->u.try.continue_label)] = i;
	}
    }

  for (insn = insns; insn; insn = NEXT_INSN (insn))
    reachable[uid_region_num[INSN_UID (insn)]] = true;

  for (i = cfun->eh->last_region_number; i > 0; --i)
    {
      r = cfun->eh->region_array[i];
      if (r && r->region_number == i && !reachable[i])
	{
	  /* Don't remove ERT_THROW regions if their outer region
	     is reachable.  */
	  if (r->type == ERT_THROW
	      && r->outer
	      && reachable[r->outer->region_number])
	    continue;

	  remove_eh_handler (r);
	}
    }

  free (reachable);
  free (uid_region_num);
}

/* Turn NOTE_INSN_EH_REGION notes into REG_EH_REGION notes for each
   can_throw instruction in the region.  */

static void
convert_from_eh_region_ranges_1 (pinsns, orig_sp, cur)
     rtx *pinsns;
     int *orig_sp;
     int cur;
{
  int *sp = orig_sp;
  rtx insn, next;

  for (insn = *pinsns; insn ; insn = next)
    {
      next = NEXT_INSN (insn);
      if (GET_CODE (insn) == NOTE)
	{
	  int kind = NOTE_LINE_NUMBER (insn);
	  if (kind == NOTE_INSN_EH_REGION_BEG
	      || kind == NOTE_INSN_EH_REGION_END)
	    {
	      if (kind == NOTE_INSN_EH_REGION_BEG)
		{
		  struct eh_region *r;

		  *sp++ = cur;
		  cur = NOTE_EH_HANDLER (insn);

		  r = cfun->eh->region_array[cur];
		  if (r->type == ERT_FIXUP)
		    {
		      r = r->u.fixup.real_region;
		      cur = r ? r->region_number : 0;
		    }
		  else if (r->type == ERT_CATCH)
		    {
		      r = r->outer;
		      cur = r ? r->region_number : 0;
		    }
		}
	      else
		cur = *--sp;

	      /* Removing the first insn of a CALL_PLACEHOLDER sequence
		 requires extra care to adjust sequence start.  */
	      if (insn == *pinsns)
		*pinsns = next;
	      remove_insn (insn);
	      continue;
	    }
	}
      else if (INSN_P (insn))
	{
	  if (cur > 0
	      && ! find_reg_note (insn, REG_EH_REGION, NULL_RTX)
	      /* Calls can always potentially throw exceptions, unless
		 they have a REG_EH_REGION note with a value of 0 or less.
		 Which should be the only possible kind so far.  */
	      && (GET_CODE (insn) == CALL_INSN
		  /* If we wanted exceptions for non-call insns, then
		     any may_trap_p instruction could throw.  */
		  || (flag_non_call_exceptions
		      && GET_CODE (PATTERN (insn)) != CLOBBER
		      && GET_CODE (PATTERN (insn)) != USE
		      && may_trap_p (PATTERN (insn)))))
	    {
	      REG_NOTES (insn) = alloc_EXPR_LIST (REG_EH_REGION, GEN_INT (cur),
						  REG_NOTES (insn));
	    }

	  if (GET_CODE (insn) == CALL_INSN
	      && GET_CODE (PATTERN (insn)) == CALL_PLACEHOLDER)
	    {
	      convert_from_eh_region_ranges_1 (&XEXP (PATTERN (insn), 0),
					       sp, cur);
	      convert_from_eh_region_ranges_1 (&XEXP (PATTERN (insn), 1),
					       sp, cur);
	      convert_from_eh_region_ranges_1 (&XEXP (PATTERN (insn), 2),
					       sp, cur);
	    }
	}
    }

  if (sp != orig_sp)
    abort ();
}

void
convert_from_eh_region_ranges ()
{
  int *stack;
  rtx insns;

  collect_eh_region_array ();
  resolve_fixup_regions ();

  stack = xmalloc (sizeof (int) * (cfun->eh->last_region_number + 1));
  insns = get_insns ();
  convert_from_eh_region_ranges_1 (&insns, stack, 0);
  free (stack);

  remove_fixup_regions ();
  remove_unreachable_regions (insns);
}

void
find_exception_handler_labels ()
{
  rtx list = NULL_RTX;
  int i;

  free_EXPR_LIST_list (&exception_handler_labels);

  if (cfun->eh->region_tree == NULL)
    return;

  for (i = cfun->eh->last_region_number; i > 0; --i)
    {
      struct eh_region *region = cfun->eh->region_array[i];
      rtx lab;

      if (! region || region->region_number != i)
	continue;
      if (cfun->eh->built_landing_pads)
	lab = region->landing_pad;
      else
	lab = region->label;

      if (lab)
	list = alloc_EXPR_LIST (0, lab, list);
    }

  /* For sjlj exceptions, need the return label to remain live until
     after landing pad generation.  */
  if (USING_SJLJ_EXCEPTIONS && ! cfun->eh->built_landing_pads)
    list = alloc_EXPR_LIST (0, return_label, list);

  exception_handler_labels = list;
}

bool
current_function_has_exception_handlers ()
{
  int i;

  for (i = cfun->eh->last_region_number; i > 0; --i)
    {
      struct eh_region *region = cfun->eh->region_array[i];

      if (! region || region->region_number != i)
	continue;
      if (region->type != ERT_THROW)
	return true;
    }

  return false;
}
\f
static struct eh_region *
duplicate_eh_region_1 (o, map)
     struct eh_region *o;
     struct inline_remap *map;
{
  struct eh_region *n
    = (struct eh_region *) xcalloc (1, sizeof (struct eh_region));

  n->region_number = o->region_number + cfun->eh->last_region_number;
  n->type = o->type;

  switch (n->type)
    {
    case ERT_CLEANUP:
    case ERT_MUST_NOT_THROW:
      break;

    case ERT_TRY:
      if (o->u.try.continue_label)
	n->u.try.continue_label
	  = get_label_from_map (map,
				CODE_LABEL_NUMBER (o->u.try.continue_label));
      break;

    case ERT_CATCH:
      n->u.catch.type_list = o->u.catch.type_list;
      break;

    case ERT_ALLOWED_EXCEPTIONS:
      n->u.allowed.type_list = o->u.allowed.type_list;
      break;

    case ERT_THROW:
      n->u.throw.type = o->u.throw.type;

    default:
      abort ();
    }

  if (o->label)
    n->label = get_label_from_map (map, CODE_LABEL_NUMBER (o->label));
  if (o->resume)
    {
      n->resume = map->insn_map[INSN_UID (o->resume)];
      if (n->resume == NULL)
	abort ();
    }

  return n;
}

static void
duplicate_eh_region_2 (o, n_array)
     struct eh_region *o;
     struct eh_region **n_array;
{
  struct eh_region *n = n_array[o->region_number];

  switch (n->type)
    {
    case ERT_TRY:
      n->u.try.catch = n_array[o->u.try.catch->region_number];
      n->u.try.last_catch = n_array[o->u.try.last_catch->region_number];
      break;

    case ERT_CATCH:
      if (o->u.catch.next_catch)
	n->u.catch.next_catch = n_array[o->u.catch.next_catch->region_number];
      if (o->u.catch.prev_catch)
	n->u.catch.prev_catch = n_array[o->u.catch.prev_catch->region_number];
      break;

    default:
      break;
    }

  if (o->outer)
    n->outer = n_array[o->outer->region_number];
  if (o->inner)
    n->inner = n_array[o->inner->region_number];
  if (o->next_peer)
    n->next_peer = n_array[o->next_peer->region_number];
}

int
duplicate_eh_regions (ifun, map)
     struct function *ifun;
     struct inline_remap *map;
{
  int ifun_last_region_number = ifun->eh->last_region_number;
  struct eh_region **n_array, *root, *cur;
  int i;

  if (ifun_last_region_number == 0)
    return 0;

  n_array = xcalloc (ifun_last_region_number + 1, sizeof (*n_array));

  for (i = 1; i <= ifun_last_region_number; ++i)
    {
      cur = ifun->eh->region_array[i];
      if (!cur || cur->region_number != i)
	continue;
      n_array[i] = duplicate_eh_region_1 (cur, map);
    }
  for (i = 1; i <= ifun_last_region_number; ++i)
    {
      cur = ifun->eh->region_array[i];
      if (!cur || cur->region_number != i)
	continue;
      duplicate_eh_region_2 (cur, n_array);
    }

  root = n_array[ifun->eh->region_tree->region_number];
  cur = cfun->eh->cur_region;
  if (cur)
    {
      struct eh_region *p = cur->inner;
      if (p)
	{
	  while (p->next_peer)
	    p = p->next_peer;
	  p->next_peer = root;
	}
      else
	cur->inner = root;

      for (i = 1; i <= ifun_last_region_number; ++i)
	if (n_array[i] && n_array[i]->outer == NULL)
	  n_array[i]->outer = cur;
    }
  else
    {
      struct eh_region *p = cfun->eh->region_tree;
      if (p)
	{
	  while (p->next_peer)
	    p = p->next_peer;
	  p->next_peer = root;
	}
      else
	cfun->eh->region_tree = root;
    }

  free (n_array);

  i = cfun->eh->last_region_number;
  cfun->eh->last_region_number = i + ifun_last_region_number;
  return i;
}
1573
52a11cbf 1574\f
52a11cbf
RH
1575static int
1576t2r_eq (pentry, pdata)
1577 const PTR pentry;
1578 const PTR pdata;
9762d48d 1579{
52a11cbf
RH
1580 tree entry = (tree) pentry;
1581 tree data = (tree) pdata;
9762d48d 1582
52a11cbf 1583 return TREE_PURPOSE (entry) == data;
9762d48d
JM
1584}
1585
52a11cbf
RH
1586static hashval_t
1587t2r_hash (pentry)
1588 const PTR pentry;
1589{
1590 tree entry = (tree) pentry;
1591 return TYPE_HASH (TREE_PURPOSE (entry));
1592}
9762d48d 1593
52a11cbf
RH
1594static int
1595t2r_mark_1 (slot, data)
1596 PTR *slot;
1597 PTR data ATTRIBUTE_UNUSED;
9762d48d 1598{
52a11cbf
RH
1599 tree contents = (tree) *slot;
1600 ggc_mark_tree (contents);
1601 return 1;
1602}
9762d48d 1603
52a11cbf
RH
1604static void
1605t2r_mark (addr)
1606 PTR addr;
1607{
1608 htab_traverse (*(htab_t *)addr, t2r_mark_1, NULL);
1609}
9762d48d 1610
52a11cbf
RH
1611static void
1612add_type_for_runtime (type)
1613 tree type;
1614{
1615 tree *slot;
9762d48d 1616
52a11cbf
RH
1617 slot = (tree *) htab_find_slot_with_hash (type_to_runtime_map, type,
1618 TYPE_HASH (type), INSERT);
1619 if (*slot == NULL)
1620 {
1621 tree runtime = (*lang_eh_runtime_type) (type);
1622 *slot = tree_cons (type, runtime, NULL_TREE);
1623 }
1624}
3f2c5d1a 1625
52a11cbf
RH
1626static tree
1627lookup_type_for_runtime (type)
1628 tree type;
1629{
1630 tree *slot;
b37f006b 1631
52a11cbf
RH
1632 slot = (tree *) htab_find_slot_with_hash (type_to_runtime_map, type,
1633 TYPE_HASH (type), NO_INSERT);
b37f006b 1634
a1f300c0 1635 /* We should have always inserted the data earlier. */
52a11cbf
RH
1636 return TREE_VALUE (*slot);
1637}
9762d48d 1638
52a11cbf
RH
1639\f
1640/* Represent an entry in @TTypes for either catch actions
1641 or exception filter actions. */
1642struct ttypes_filter
1643{
1644 tree t;
1645 int filter;
1646};
b37f006b 1647
52a11cbf
RH
1648/* Compare ENTRY (a ttypes_filter entry in the hash table) with DATA
1649 (a tree) for a @TTypes type node we are thinking about adding. */
b37f006b 1650
52a11cbf
RH
1651static int
1652ttypes_filter_eq (pentry, pdata)
1653 const PTR pentry;
1654 const PTR pdata;
1655{
1656 const struct ttypes_filter *entry = (const struct ttypes_filter *) pentry;
1657 tree data = (tree) pdata;
b37f006b 1658
52a11cbf 1659 return entry->t == data;
9762d48d
JM
1660}
1661
52a11cbf
RH
1662static hashval_t
1663ttypes_filter_hash (pentry)
1664 const PTR pentry;
1665{
1666 const struct ttypes_filter *entry = (const struct ttypes_filter *) pentry;
1667 return TYPE_HASH (entry->t);
1668}
4956d07c 1669
52a11cbf
RH
1670/* Compare ENTRY with DATA (both struct ttypes_filter) for a @TTypes
1671 exception specification list we are thinking about adding. */
1672/* ??? Currently we use the type lists in the order given. Someone
1673 should put these in some canonical order. */
1674
1675static int
1676ehspec_filter_eq (pentry, pdata)
1677 const PTR pentry;
1678 const PTR pdata;
4956d07c 1679{
52a11cbf
RH
1680 const struct ttypes_filter *entry = (const struct ttypes_filter *) pentry;
1681 const struct ttypes_filter *data = (const struct ttypes_filter *) pdata;
1682
1683 return type_list_equal (entry->t, data->t);
4956d07c
MS
1684}
1685
52a11cbf 1686/* Hash function for exception specification lists. */
4956d07c 1687
52a11cbf
RH
1688static hashval_t
1689ehspec_filter_hash (pentry)
1690 const PTR pentry;
4956d07c 1691{
52a11cbf
RH
1692 const struct ttypes_filter *entry = (const struct ttypes_filter *) pentry;
1693 hashval_t h = 0;
1694 tree list;
1695
1696 for (list = entry->t; list ; list = TREE_CHAIN (list))
1697 h = (h << 5) + (h >> 27) + TYPE_HASH (TREE_VALUE (list));
1698 return h;
4956d07c
MS
1699}
1700
52a11cbf
RH
1701/* Add TYPE to cfun->eh->ttype_data, using TYPES_HASH to speed
1702 up the search. Return the filter value to be used. */
4956d07c 1703
52a11cbf
RH
1704static int
1705add_ttypes_entry (ttypes_hash, type)
1706 htab_t ttypes_hash;
1707 tree type;
4956d07c 1708{
52a11cbf 1709 struct ttypes_filter **slot, *n;
4956d07c 1710
52a11cbf
RH
1711 slot = (struct ttypes_filter **)
1712 htab_find_slot_with_hash (ttypes_hash, type, TYPE_HASH (type), INSERT);
1713
1714 if ((n = *slot) == NULL)
4956d07c 1715 {
52a11cbf 1716 /* Filter value is a 1 based table index. */
12670d88 1717
52a11cbf
RH
1718 n = (struct ttypes_filter *) xmalloc (sizeof (*n));
1719 n->t = type;
1720 n->filter = VARRAY_ACTIVE_SIZE (cfun->eh->ttype_data) + 1;
1721 *slot = n;
1722
1723 VARRAY_PUSH_TREE (cfun->eh->ttype_data, type);
4956d07c 1724 }
52a11cbf
RH
1725
1726 return n->filter;
4956d07c
MS
1727}
1728
52a11cbf
RH
1729/* Add LIST to cfun->eh->ehspec_data, using EHSPEC_HASH and TYPES_HASH
1730 to speed up the search. Return the filter value to be used. */
1731
1732static int
1733add_ehspec_entry (ehspec_hash, ttypes_hash, list)
1734 htab_t ehspec_hash;
1735 htab_t ttypes_hash;
1736 tree list;
12670d88 1737{
52a11cbf
RH
1738 struct ttypes_filter **slot, *n;
1739 struct ttypes_filter dummy;
12670d88 1740
52a11cbf
RH
1741 dummy.t = list;
1742 slot = (struct ttypes_filter **)
1743 htab_find_slot (ehspec_hash, &dummy, INSERT);
1744
1745 if ((n = *slot) == NULL)
1746 {
1747 /* Filter value is a -1 based byte index into a uleb128 buffer. */
1748
1749 n = (struct ttypes_filter *) xmalloc (sizeof (*n));
1750 n->t = list;
1751 n->filter = -(VARRAY_ACTIVE_SIZE (cfun->eh->ehspec_data) + 1);
1752 *slot = n;
1753
1754 /* Look up each type in the list and encode its filter
1755 value as a uleb128. Terminate the list with 0. */
1756 for (; list ; list = TREE_CHAIN (list))
3f2c5d1a 1757 push_uleb128 (&cfun->eh->ehspec_data,
52a11cbf
RH
1758 add_ttypes_entry (ttypes_hash, TREE_VALUE (list)));
1759 VARRAY_PUSH_UCHAR (cfun->eh->ehspec_data, 0);
1760 }
1761
1762 return n->filter;
12670d88
RK
1763}
1764
52a11cbf
RH
1765/* Generate the action filter values to be used for CATCH and
1766 ALLOWED_EXCEPTIONS regions. When using dwarf2 exception regions,
1767 we use lots of landing pads, and so every type or list can share
1768 the same filter value, which saves table space. */
1769
1770static void
1771assign_filter_values ()
9a0d1e1b 1772{
52a11cbf
RH
1773 int i;
1774 htab_t ttypes, ehspec;
9a9deafc 1775
52a11cbf
RH
1776 VARRAY_TREE_INIT (cfun->eh->ttype_data, 16, "ttype_data");
1777 VARRAY_UCHAR_INIT (cfun->eh->ehspec_data, 64, "ehspec_data");
9a9deafc 1778
52a11cbf
RH
1779 ttypes = htab_create (31, ttypes_filter_hash, ttypes_filter_eq, free);
1780 ehspec = htab_create (31, ehspec_filter_hash, ehspec_filter_eq, free);
9a0d1e1b 1781
52a11cbf
RH
1782 for (i = cfun->eh->last_region_number; i > 0; --i)
1783 {
1784 struct eh_region *r = cfun->eh->region_array[i];
9a0d1e1b 1785
52a11cbf
RH
1786 /* Mind we don't process a region more than once. */
1787 if (!r || r->region_number != i)
1788 continue;
9a0d1e1b 1789
52a11cbf
RH
1790 switch (r->type)
1791 {
1792 case ERT_CATCH:
6d41a92f
OH
1793 /* Whatever type_list is (NULL or true list), we build a list
1794 of filters for the region. */
1795 r->u.catch.filter_list = NULL_TREE;
1796
1797 if (r->u.catch.type_list != NULL)
1798 {
1799 /* Get a filter value for each of the types caught and store
1800 them in the region's dedicated list. */
1801 tree tp_node = r->u.catch.type_list;
1802
1803 for (;tp_node; tp_node = TREE_CHAIN (tp_node))
1804 {
1805 int flt = add_ttypes_entry (ttypes, TREE_VALUE (tp_node));
1806 tree flt_node = build_int_2 (flt, 0);
3f2c5d1a
RS
1807
1808 r->u.catch.filter_list
6d41a92f
OH
1809 = tree_cons (NULL_TREE, flt_node, r->u.catch.filter_list);
1810 }
1811 }
1812 else
1813 {
1814 /* Get a filter value for the NULL list also since it will need
1815 an action record anyway. */
1816 int flt = add_ttypes_entry (ttypes, NULL);
1817 tree flt_node = build_int_2 (flt, 0);
3f2c5d1a
RS
1818
1819 r->u.catch.filter_list
6d41a92f
OH
1820 = tree_cons (NULL_TREE, flt_node, r->u.catch.filter_list);
1821 }
3f2c5d1a 1822
52a11cbf 1823 break;
bf71cd2e 1824
52a11cbf
RH
1825 case ERT_ALLOWED_EXCEPTIONS:
1826 r->u.allowed.filter
1827 = add_ehspec_entry (ehspec, ttypes, r->u.allowed.type_list);
1828 break;
bf71cd2e 1829
52a11cbf
RH
1830 default:
1831 break;
1832 }
1833 }
1834
1835 htab_delete (ttypes);
1836 htab_delete (ehspec);
1837}
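
/* For illustration: after this pass, a catch (A, B) region carries a
   filter_list with one INTEGER_CST filter per caught type, a catch-all
   region (NULL type_list) carries the single filter returned for NULL,
   and an allowed-exceptions region carries the (negative) filter of its
   specification in u.allowed.filter.  */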
1838
1839static void
1840build_post_landing_pads ()
1841{
1842 int i;
bf71cd2e 1843
52a11cbf 1844 for (i = cfun->eh->last_region_number; i > 0; --i)
bf71cd2e 1845 {
52a11cbf
RH
1846 struct eh_region *region = cfun->eh->region_array[i];
1847 rtx seq;
bf71cd2e 1848
52a11cbf
RH
1849 /* Mind we don't process a region more than once. */
1850 if (!region || region->region_number != i)
1851 continue;
1852
1853 switch (region->type)
987009bf 1854 {
52a11cbf
RH
1855 case ERT_TRY:
1856 /* ??? Collect the set of all non-overlapping catch handlers
1857 all the way up the chain until blocked by a cleanup. */
1858 /* ??? Outer try regions can share landing pads with inner
1859 try regions if the types are completely non-overlapping,
a1f300c0 1860 and there are no intervening cleanups. */
bf71cd2e 1861
52a11cbf 1862 region->post_landing_pad = gen_label_rtx ();
bf71cd2e 1863
52a11cbf 1864 start_sequence ();
bf71cd2e 1865
52a11cbf 1866 emit_label (region->post_landing_pad);
bf71cd2e 1867
52a11cbf
RH
1868 /* ??? It is mighty inconvenient to call back into the
1869 switch statement generation code in expand_end_case.
1870 Rapid prototyping sez a sequence of ifs. */
1871 {
1872 struct eh_region *c;
1873 for (c = region->u.try.catch; c ; c = c->u.catch.next_catch)
1874 {
1875 /* ??? _Unwind_ForcedUnwind wants no match here. */
6d41a92f 1876 if (c->u.catch.type_list == NULL)
52a11cbf
RH
1877 emit_jump (c->label);
1878 else
6d41a92f
OH
1879 {
                    /* We need one cmp/jump per type caught.  Each type
                       list entry has a matching entry in the filter list
                       (see assign_filter_values).  */
1883 tree tp_node = c->u.catch.type_list;
1884 tree flt_node = c->u.catch.filter_list;
1885
1886 for (; tp_node; )
1887 {
1888 emit_cmp_and_jump_insns
1889 (cfun->eh->filter,
1890 GEN_INT (tree_low_cst (TREE_VALUE (flt_node), 0)),
1891 EQ, NULL_RTX, word_mode, 0, c->label);
1892
1893 tp_node = TREE_CHAIN (tp_node);
1894 flt_node = TREE_CHAIN (flt_node);
1895 }
1896 }
52a11cbf
RH
1897 }
1898 }
bf71cd2e 1899
47c84870
JM
1900 /* We delay the generation of the _Unwind_Resume until we generate
1901 landing pads. We emit a marker here so as to get good control
1902 flow data in the meantime. */
1903 region->resume
1904 = emit_jump_insn (gen_rtx_RESX (VOIDmode, region->region_number));
1905 emit_barrier ();
1906
52a11cbf
RH
1907 seq = get_insns ();
1908 end_sequence ();
e6cfb550 1909
47c84870 1910 emit_insns_before (seq, region->u.try.catch->label);
52a11cbf 1911 break;
bf71cd2e 1912
52a11cbf
RH
1913 case ERT_ALLOWED_EXCEPTIONS:
1914 region->post_landing_pad = gen_label_rtx ();
9a0d1e1b 1915
52a11cbf 1916 start_sequence ();
f54a7f6f 1917
52a11cbf 1918 emit_label (region->post_landing_pad);
f54a7f6f 1919
52a11cbf
RH
1920 emit_cmp_and_jump_insns (cfun->eh->filter,
1921 GEN_INT (region->u.allowed.filter),
a06ef755 1922 EQ, NULL_RTX, word_mode, 0, region->label);
f54a7f6f 1923
47c84870
JM
1924 /* We delay the generation of the _Unwind_Resume until we generate
1925 landing pads. We emit a marker here so as to get good control
1926 flow data in the meantime. */
1927 region->resume
1928 = emit_jump_insn (gen_rtx_RESX (VOIDmode, region->region_number));
1929 emit_barrier ();
1930
52a11cbf
RH
1931 seq = get_insns ();
1932 end_sequence ();
1933
47c84870 1934 emit_insns_before (seq, region->label);
52a11cbf 1935 break;
f54a7f6f 1936
52a11cbf
RH
1937 case ERT_CLEANUP:
1938 case ERT_MUST_NOT_THROW:
1939 region->post_landing_pad = region->label;
1940 break;
1941
1942 case ERT_CATCH:
1943 case ERT_THROW:
1944 /* Nothing to do. */
1945 break;
1946
1947 default:
1948 abort ();
1949 }
1950 }
1951}
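
/* Schematically, the code emitted above for a try region with two
   catches looks like this (an illustration, not emitted literally):

        Lpost_landing_pad:
          if (eh_filter == flt_A) goto Lcatch_A;
          if (eh_filter == flt_B) goto Lcatch_B;
          resx N        ;; rewritten by connect_post_landing_pads into a
                        ;; jump to an outer pad or a call to _Unwind_Resume

   while a catch-all handler produces an unconditional jump instead.  */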

/* Replace RESX patterns with jumps to the next handler if any, or calls to
   _Unwind_Resume otherwise.  */

static void
connect_post_landing_pads ()
{
  int i;

  for (i = cfun->eh->last_region_number; i > 0; --i)
    {
      struct eh_region *region = cfun->eh->region_array[i];
      struct eh_region *outer;
      rtx seq;

      /* Mind we don't process a region more than once.  */
      if (!region || region->region_number != i)
        continue;

      /* If there is no RESX, or it has been deleted by flow, there's
         nothing to fix up.  */
      if (! region->resume || INSN_DELETED_P (region->resume))
        continue;

      /* Search for another landing pad in this function.  */
      for (outer = region->outer; outer ; outer = outer->outer)
        if (outer->post_landing_pad)
          break;

      start_sequence ();

      if (outer)
        emit_jump (outer->post_landing_pad);
      else
        emit_library_call (unwind_resume_libfunc, LCT_THROW,
                           VOIDmode, 1, cfun->eh->exc_ptr, Pmode);

      seq = get_insns ();
      end_sequence ();
      emit_insns_before (seq, region->resume);
      delete_insn (region->resume);
    }
}
1995
1996\f
1997static void
1998dw2_build_landing_pads ()
4956d07c 1999{
ae0ed63a
JM
2000 int i;
2001 unsigned int j;
4956d07c 2002
52a11cbf
RH
2003 for (i = cfun->eh->last_region_number; i > 0; --i)
2004 {
2005 struct eh_region *region = cfun->eh->region_array[i];
2006 rtx seq;
5c701bb1 2007 bool clobbers_hard_regs = false;
4956d07c 2008
52a11cbf
RH
2009 /* Mind we don't process a region more than once. */
2010 if (!region || region->region_number != i)
2011 continue;
1418bb67 2012
52a11cbf
RH
2013 if (region->type != ERT_CLEANUP
2014 && region->type != ERT_TRY
2015 && region->type != ERT_ALLOWED_EXCEPTIONS)
2016 continue;
12670d88 2017
52a11cbf 2018 start_sequence ();
4956d07c 2019
52a11cbf
RH
2020 region->landing_pad = gen_label_rtx ();
2021 emit_label (region->landing_pad);
4956d07c 2022
52a11cbf
RH
2023#ifdef HAVE_exception_receiver
2024 if (HAVE_exception_receiver)
2025 emit_insn (gen_exception_receiver ());
2026 else
2027#endif
2028#ifdef HAVE_nonlocal_goto_receiver
2029 if (HAVE_nonlocal_goto_receiver)
2030 emit_insn (gen_nonlocal_goto_receiver ());
2031 else
2032#endif
2033 { /* Nothing */ }
4956d07c 2034
52a11cbf
RH
2035 /* If the eh_return data registers are call-saved, then we
2036 won't have considered them clobbered from the call that
2037 threw. Kill them now. */
2038 for (j = 0; ; ++j)
2039 {
2040 unsigned r = EH_RETURN_DATA_REGNO (j);
2041 if (r == INVALID_REGNUM)
2042 break;
2043 if (! call_used_regs[r])
5c701bb1
JS
2044 {
2045 emit_insn (gen_rtx_CLOBBER (VOIDmode, gen_rtx_REG (Pmode, r)));
2046 clobbers_hard_regs = true;
2047 }
2048 }
2049
2050 if (clobbers_hard_regs)
2051 {
          /* @@@ This is a kludge.  Not all machine descriptions define a
             blockage insn, but we must not allow the code we just generated
             to be reordered by scheduling.  So emit an ASM_INPUT to act as
             a blockage insn.  */
5c701bb1 2056 emit_insn (gen_rtx_ASM_INPUT (VOIDmode, ""));
52a11cbf 2057 }
e701eb4d 2058
52a11cbf
RH
2059 emit_move_insn (cfun->eh->exc_ptr,
2060 gen_rtx_REG (Pmode, EH_RETURN_DATA_REGNO (0)));
2061 emit_move_insn (cfun->eh->filter,
9e800206 2062 gen_rtx_REG (word_mode, EH_RETURN_DATA_REGNO (1)));
9a0d1e1b 2063
52a11cbf
RH
2064 seq = get_insns ();
2065 end_sequence ();
5816cb14 2066
52a11cbf
RH
2067 emit_insns_before (seq, region->post_landing_pad);
2068 }
4956d07c
MS
2069}
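
/* For illustration, a dwarf2 landing pad emitted above amounts to:

        Llanding_pad:
          (exception_receiver or nonlocal_goto_receiver, if the target has one)
          (clobbers of call-saved EH data registers, plus a blockage)
          exc_ptr = reg EH_RETURN_DATA_REGNO (0);
          filter  = reg EH_RETURN_DATA_REGNO (1);
          ;; falls through into the region's post landing pad

   so the post landing pads built earlier never care how the values
   arrived.  */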
2070
52a11cbf
RH
2071\f
2072struct sjlj_lp_info
2073{
2074 int directly_reachable;
2075 int action_index;
2076 int dispatch_index;
2077 int call_site_index;
2078};
4956d07c 2079
52a11cbf
RH
2080static bool
2081sjlj_find_directly_reachable_regions (lp_info)
2082 struct sjlj_lp_info *lp_info;
4956d07c 2083{
52a11cbf
RH
2084 rtx insn;
2085 bool found_one = false;
4956d07c 2086
52a11cbf
RH
2087 for (insn = get_insns (); insn ; insn = NEXT_INSN (insn))
2088 {
2089 struct eh_region *region;
98ce21b3 2090 enum reachable_code rc;
52a11cbf
RH
2091 tree type_thrown;
2092 rtx note;
4956d07c 2093
52a11cbf
RH
2094 if (! INSN_P (insn))
2095 continue;
0d3453df 2096
52a11cbf
RH
2097 note = find_reg_note (insn, REG_EH_REGION, NULL_RTX);
2098 if (!note || INTVAL (XEXP (note, 0)) <= 0)
2099 continue;
5dfa7520 2100
52a11cbf 2101 region = cfun->eh->region_array[INTVAL (XEXP (note, 0))];
5dfa7520 2102
52a11cbf
RH
2103 type_thrown = NULL_TREE;
2104 if (region->type == ERT_THROW)
2105 {
2106 type_thrown = region->u.throw.type;
2107 region = region->outer;
2108 }
12670d88 2109
52a11cbf
RH
2110 /* Find the first containing region that might handle the exception.
2111 That's the landing pad to which we will transfer control. */
98ce21b3 2112 rc = RNL_NOT_CAUGHT;
52a11cbf 2113 for (; region; region = region->outer)
98ce21b3
RH
2114 {
2115 rc = reachable_next_level (region, type_thrown, 0);
2116 if (rc != RNL_NOT_CAUGHT)
2117 break;
2118 }
2119 if (rc == RNL_MAYBE_CAUGHT || rc == RNL_CAUGHT)
52a11cbf
RH
2120 {
2121 lp_info[region->region_number].directly_reachable = 1;
2122 found_one = true;
2123 }
2124 }
4956d07c 2125
52a11cbf
RH
2126 return found_one;
2127}
e701eb4d
JM
2128
2129static void
52a11cbf
RH
2130sjlj_assign_call_site_values (dispatch_label, lp_info)
2131 rtx dispatch_label;
2132 struct sjlj_lp_info *lp_info;
e701eb4d 2133{
52a11cbf
RH
2134 htab_t ar_hash;
2135 int i, index;
2136
2137 /* First task: build the action table. */
2138
2139 VARRAY_UCHAR_INIT (cfun->eh->action_record_data, 64, "action_record_data");
2140 ar_hash = htab_create (31, action_record_hash, action_record_eq, free);
2141
2142 for (i = cfun->eh->last_region_number; i > 0; --i)
2143 if (lp_info[i].directly_reachable)
e6cfb550 2144 {
52a11cbf
RH
2145 struct eh_region *r = cfun->eh->region_array[i];
2146 r->landing_pad = dispatch_label;
2147 lp_info[i].action_index = collect_one_action_chain (ar_hash, r);
2148 if (lp_info[i].action_index != -1)
2149 cfun->uses_eh_lsda = 1;
e6cfb550 2150 }
e701eb4d 2151
52a11cbf 2152 htab_delete (ar_hash);
76fc91c7 2153
52a11cbf
RH
2154 /* Next: assign dispatch values. In dwarf2 terms, this would be the
2155 landing pad label for the region. For sjlj though, there is one
2156 common landing pad from which we dispatch to the post-landing pads.
76fc91c7 2157
52a11cbf
RH
2158 A region receives a dispatch index if it is directly reachable
2159 and requires in-function processing. Regions that share post-landing
eaec9b3d 2160 pads may share dispatch indices. */
52a11cbf
RH
2161 /* ??? Post-landing pad sharing doesn't actually happen at the moment
2162 (see build_post_landing_pads) so we don't bother checking for it. */
4956d07c 2163
52a11cbf
RH
2164 index = 0;
2165 for (i = cfun->eh->last_region_number; i > 0; --i)
98ce21b3 2166 if (lp_info[i].directly_reachable)
52a11cbf 2167 lp_info[i].dispatch_index = index++;
76fc91c7 2168
52a11cbf
RH
  /* Finally: assign call-site values.  In dwarf2 terms, this would be
     the region number assigned by convert_to_eh_region_ranges, but
     handles no-action and must-not-throw differently.  */
76fc91c7 2172
52a11cbf
RH
2173 call_site_base = 1;
2174 for (i = cfun->eh->last_region_number; i > 0; --i)
2175 if (lp_info[i].directly_reachable)
2176 {
2177 int action = lp_info[i].action_index;
2178
2179 /* Map must-not-throw to otherwise unused call-site index 0. */
2180 if (action == -2)
2181 index = 0;
2182 /* Map no-action to otherwise unused call-site index -1. */
2183 else if (action == -1)
2184 index = -1;
2185 /* Otherwise, look it up in the table. */
2186 else
2187 index = add_call_site (GEN_INT (lp_info[i].dispatch_index), action);
2188
2189 lp_info[i].call_site_index = index;
2190 }
4956d07c 2191}
27a36778 2192
52a11cbf
RH
2193static void
2194sjlj_mark_call_sites (lp_info)
2195 struct sjlj_lp_info *lp_info;
27a36778 2196{
52a11cbf
RH
2197 int last_call_site = -2;
2198 rtx insn, mem;
2199
52a11cbf 2200 for (insn = get_insns (); insn ; insn = NEXT_INSN (insn))
27a36778 2201 {
52a11cbf
RH
2202 struct eh_region *region;
2203 int this_call_site;
2204 rtx note, before, p;
27a36778 2205
52a11cbf
RH
2206 /* Reset value tracking at extended basic block boundaries. */
2207 if (GET_CODE (insn) == CODE_LABEL)
2208 last_call_site = -2;
27a36778 2209
52a11cbf
RH
2210 if (! INSN_P (insn))
2211 continue;
27a36778 2212
52a11cbf
RH
2213 note = find_reg_note (insn, REG_EH_REGION, NULL_RTX);
2214 if (!note)
2215 {
2216 /* Calls (and trapping insns) without notes are outside any
2217 exception handling region in this function. Mark them as
2218 no action. */
2219 if (GET_CODE (insn) == CALL_INSN
2220 || (flag_non_call_exceptions
2221 && may_trap_p (PATTERN (insn))))
2222 this_call_site = -1;
2223 else
2224 continue;
2225 }
2226 else
2227 {
2228 /* Calls that are known to not throw need not be marked. */
2229 if (INTVAL (XEXP (note, 0)) <= 0)
2230 continue;
27a36778 2231
52a11cbf
RH
2232 region = cfun->eh->region_array[INTVAL (XEXP (note, 0))];
2233 this_call_site = lp_info[region->region_number].call_site_index;
2234 }
27a36778 2235
52a11cbf
RH
2236 if (this_call_site == last_call_site)
2237 continue;
2238
      /* Don't separate a call from its argument loads.  */
2240 before = insn;
2241 if (GET_CODE (insn) == CALL_INSN)
833366d6 2242 before = find_first_parameter_load (insn, NULL_RTX);
4956d07c 2243
52a11cbf 2244 start_sequence ();
fd2c57a9
AH
2245 mem = adjust_address (cfun->eh->sjlj_fc, TYPE_MODE (integer_type_node),
2246 sjlj_fc_call_site_ofs);
52a11cbf
RH
2247 emit_move_insn (mem, GEN_INT (this_call_site));
2248 p = get_insns ();
2249 end_sequence ();
12670d88 2250
52a11cbf
RH
2251 emit_insns_before (p, before);
2252 last_call_site = this_call_site;
2253 }
2254}
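
/* For illustration: given a call in a region whose call-site index is 5,
   the loop above emits, just before the call's argument loads,

        fc.call_site = 5;

   (a store at sjlj_fc_call_site_ofs within the function context), and
   skips the store entirely when the previous call in the same extended
   basic block already set that value.  */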
4956d07c 2255
52a11cbf
RH
2256/* Construct the SjLj_Function_Context. */
2257
2258static void
2259sjlj_emit_function_enter (dispatch_label)
2260 rtx dispatch_label;
4956d07c 2261{
52a11cbf 2262 rtx fn_begin, fc, mem, seq;
4956d07c 2263
52a11cbf 2264 fc = cfun->eh->sjlj_fc;
4956d07c 2265
52a11cbf 2266 start_sequence ();
8a4451aa 2267
8979edec
JL
  /* We're storing this libcall's address into memory instead of
     calling it directly.  Thus, we must call assemble_external_libcall
     here, as we cannot depend on emit_library_call to do it for us.  */
2271 assemble_external_libcall (eh_personality_libfunc);
f4ef873c 2272 mem = adjust_address (fc, Pmode, sjlj_fc_personality_ofs);
52a11cbf
RH
2273 emit_move_insn (mem, eh_personality_libfunc);
2274
f4ef873c 2275 mem = adjust_address (fc, Pmode, sjlj_fc_lsda_ofs);
52a11cbf
RH
2276 if (cfun->uses_eh_lsda)
2277 {
2278 char buf[20];
2279 ASM_GENERATE_INTERNAL_LABEL (buf, "LLSDA", sjlj_funcdef_number);
2280 emit_move_insn (mem, gen_rtx_SYMBOL_REF (Pmode, ggc_strdup (buf)));
8a4451aa 2281 }
52a11cbf
RH
2282 else
2283 emit_move_insn (mem, const0_rtx);
3f2c5d1a 2284
52a11cbf
RH
2285#ifdef DONT_USE_BUILTIN_SETJMP
2286 {
2287 rtx x, note;
9defc9b7 2288 x = emit_library_call_value (setjmp_libfunc, NULL_RTX, LCT_RETURNS_TWICE,
52a11cbf
RH
2289 TYPE_MODE (integer_type_node), 1,
2290 plus_constant (XEXP (fc, 0),
2291 sjlj_fc_jbuf_ofs), Pmode);
2292
2293 note = emit_note (NULL, NOTE_INSN_EXPECTED_VALUE);
2294 NOTE_EXPECTED_VALUE (note) = gen_rtx_EQ (VOIDmode, x, const0_rtx);
2295
2296 emit_cmp_and_jump_insns (x, const0_rtx, NE, 0,
a06ef755 2297 TYPE_MODE (integer_type_node), 0, dispatch_label);
52a11cbf
RH
2298 }
2299#else
2300 expand_builtin_setjmp_setup (plus_constant (XEXP (fc, 0), sjlj_fc_jbuf_ofs),
2301 dispatch_label);
4956d07c 2302#endif
4956d07c 2303
52a11cbf
RH
2304 emit_library_call (unwind_sjlj_register_libfunc, LCT_NORMAL, VOIDmode,
2305 1, XEXP (fc, 0), Pmode);
12670d88 2306
52a11cbf
RH
2307 seq = get_insns ();
2308 end_sequence ();
4956d07c 2309
52a11cbf
RH
2310 /* ??? Instead of doing this at the beginning of the function,
2311 do this in a block that is at loop level 0 and dominates all
2312 can_throw_internal instructions. */
4956d07c 2313
52a11cbf
RH
2314 for (fn_begin = get_insns (); ; fn_begin = NEXT_INSN (fn_begin))
2315 if (GET_CODE (fn_begin) == NOTE
2316 && NOTE_LINE_NUMBER (fn_begin) == NOTE_INSN_FUNCTION_BEG)
2317 break;
2318 emit_insns_after (seq, fn_begin);
4956d07c
MS
2319}
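
/* In outline, the prologue sequence built above amounts to
   (illustrative pseudo-C):

        fc.personality = eh_personality_libfunc;
        fc.lsda = &LLSDAn;                      // or 0 when no LSDA is needed
        if (setjmp-style capture of fc.jbuf)    // builtin or libcall form
          goto dispatch_label;
        _Unwind_SjLj_Register (&fc);

   and is inserted just after the NOTE_INSN_FUNCTION_BEG note.  */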
2320
52a11cbf
RH
2321/* Call back from expand_function_end to know where we should put
2322 the call to unwind_sjlj_unregister_libfunc if needed. */
12670d88 2323
52a11cbf
RH
2324void
2325sjlj_emit_function_exit_after (after)
2326 rtx after;
2327{
2328 cfun->eh->sjlj_exit_after = after;
2329}
4956d07c
MS
2330
2331static void
52a11cbf
RH
2332sjlj_emit_function_exit ()
2333{
2334 rtx seq;
4956d07c 2335
52a11cbf 2336 start_sequence ();
ce152ef8 2337
52a11cbf
RH
2338 emit_library_call (unwind_sjlj_unregister_libfunc, LCT_NORMAL, VOIDmode,
2339 1, XEXP (cfun->eh->sjlj_fc, 0), Pmode);
e6cfb550 2340
52a11cbf
RH
2341 seq = get_insns ();
2342 end_sequence ();
4956d07c 2343
52a11cbf
RH
2344 /* ??? Really this can be done in any block at loop level 0 that
2345 post-dominates all can_throw_internal instructions. This is
2346 the last possible moment. */
9a0d1e1b 2347
52a11cbf 2348 emit_insns_after (seq, cfun->eh->sjlj_exit_after);
9a0d1e1b
AM
2349}
2350
52a11cbf
RH
2351static void
2352sjlj_emit_dispatch_table (dispatch_label, lp_info)
2353 rtx dispatch_label;
2354 struct sjlj_lp_info *lp_info;
ce152ef8 2355{
52a11cbf
RH
2356 int i, first_reachable;
2357 rtx mem, dispatch, seq, fc;
2358
2359 fc = cfun->eh->sjlj_fc;
2360
2361 start_sequence ();
2362
2363 emit_label (dispatch_label);
3f2c5d1a 2364
52a11cbf
RH
2365#ifndef DONT_USE_BUILTIN_SETJMP
2366 expand_builtin_setjmp_receiver (dispatch_label);
2367#endif
2368
2369 /* Load up dispatch index, exc_ptr and filter values from the
2370 function context. */
f4ef873c
RK
2371 mem = adjust_address (fc, TYPE_MODE (integer_type_node),
2372 sjlj_fc_call_site_ofs);
52a11cbf
RH
2373 dispatch = copy_to_reg (mem);
2374
f4ef873c 2375 mem = adjust_address (fc, word_mode, sjlj_fc_data_ofs);
52a11cbf
RH
2376 if (word_mode != Pmode)
2377 {
2378#ifdef POINTERS_EXTEND_UNSIGNED
2379 mem = convert_memory_address (Pmode, mem);
2380#else
2381 mem = convert_to_mode (Pmode, mem, 0);
2382#endif
2383 }
2384 emit_move_insn (cfun->eh->exc_ptr, mem);
2385
f4ef873c 2386 mem = adjust_address (fc, word_mode, sjlj_fc_data_ofs + UNITS_PER_WORD);
52a11cbf 2387 emit_move_insn (cfun->eh->filter, mem);
4956d07c 2388
52a11cbf
RH
2389 /* Jump to one of the directly reachable regions. */
2390 /* ??? This really ought to be using a switch statement. */
2391
2392 first_reachable = 0;
2393 for (i = cfun->eh->last_region_number; i > 0; --i)
a1622f83 2394 {
98ce21b3 2395 if (! lp_info[i].directly_reachable)
52a11cbf 2396 continue;
a1622f83 2397
52a11cbf
RH
2398 if (! first_reachable)
2399 {
2400 first_reachable = i;
2401 continue;
2402 }
e6cfb550 2403
a06ef755
RK
2404 emit_cmp_and_jump_insns (dispatch, GEN_INT (lp_info[i].dispatch_index),
2405 EQ, NULL_RTX, TYPE_MODE (integer_type_node), 0,
52a11cbf 2406 cfun->eh->region_array[i]->post_landing_pad);
a1622f83 2407 }
9a0d1e1b 2408
52a11cbf
RH
2409 seq = get_insns ();
2410 end_sequence ();
4956d07c 2411
52a11cbf
RH
2412 emit_insns_before (seq, (cfun->eh->region_array[first_reachable]
2413 ->post_landing_pad));
ce152ef8
AM
2414}
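
/* Schematically, the dispatch code built above is:

        dispatch_label:
          (builtin setjmp receiver, unless DONT_USE_BUILTIN_SETJMP)
          dispatch = fc.call_site;
          exc_ptr  = fc.data[0];
          filter   = fc.data[1];
          if (dispatch == dispatch_index_of_region_B) goto B's post pad;
          ...
          ;; fall through to the first reachable region's post landing pad

   with the first reachable region serving as the default case, as noted
   in the ??? comment about wanting a real switch statement.  */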
2415
52a11cbf
RH
2416static void
2417sjlj_build_landing_pads ()
ce152ef8 2418{
52a11cbf 2419 struct sjlj_lp_info *lp_info;
ce152ef8 2420
52a11cbf
RH
2421 lp_info = (struct sjlj_lp_info *) xcalloc (cfun->eh->last_region_number + 1,
2422 sizeof (struct sjlj_lp_info));
ce152ef8 2423
52a11cbf
RH
2424 if (sjlj_find_directly_reachable_regions (lp_info))
2425 {
2426 rtx dispatch_label = gen_label_rtx ();
ce152ef8 2427
52a11cbf
RH
2428 cfun->eh->sjlj_fc
2429 = assign_stack_local (TYPE_MODE (sjlj_fc_type_node),
2430 int_size_in_bytes (sjlj_fc_type_node),
2431 TYPE_ALIGN (sjlj_fc_type_node));
4956d07c 2432
52a11cbf
RH
2433 sjlj_assign_call_site_values (dispatch_label, lp_info);
2434 sjlj_mark_call_sites (lp_info);
a1622f83 2435
52a11cbf
RH
2436 sjlj_emit_function_enter (dispatch_label);
2437 sjlj_emit_dispatch_table (dispatch_label, lp_info);
2438 sjlj_emit_function_exit ();
2439 }
a1622f83 2440
52a11cbf 2441 free (lp_info);
4956d07c 2442}
ce152ef8 2443
ce152ef8 2444void
52a11cbf 2445finish_eh_generation ()
ce152ef8 2446{
52a11cbf
RH
2447 /* Nothing to do if no regions created. */
2448 if (cfun->eh->region_tree == NULL)
ce152ef8
AM
2449 return;
2450
52a11cbf
RH
2451 /* The object here is to provide find_basic_blocks with detailed
2452 information (via reachable_handlers) on how exception control
2453 flows within the function. In this first pass, we can include
2454 type information garnered from ERT_THROW and ERT_ALLOWED_EXCEPTIONS
2455 regions, and hope that it will be useful in deleting unreachable
2456 handlers. Subsequently, we will generate landing pads which will
2457 connect many of the handlers, and then type information will not
2458 be effective. Still, this is a win over previous implementations. */
2459
4793dca1 2460 rebuild_jump_labels (get_insns ());
52a11cbf 2461 find_basic_blocks (get_insns (), max_reg_num (), 0);
0068fd96 2462 cleanup_cfg (CLEANUP_PRE_LOOP);
52a11cbf
RH
2463
2464 /* These registers are used by the landing pads. Make sure they
2465 have been generated. */
86c99549
RH
2466 get_exception_pointer (cfun);
2467 get_exception_filter (cfun);
52a11cbf
RH
2468
2469 /* Construct the landing pads. */
2470
2471 assign_filter_values ();
2472 build_post_landing_pads ();
2473 connect_post_landing_pads ();
2474 if (USING_SJLJ_EXCEPTIONS)
2475 sjlj_build_landing_pads ();
2476 else
2477 dw2_build_landing_pads ();
ce152ef8 2478
52a11cbf 2479 cfun->eh->built_landing_pads = 1;
ce152ef8 2480
52a11cbf
RH
2481 /* We've totally changed the CFG. Start over. */
2482 find_exception_handler_labels ();
4793dca1 2483 rebuild_jump_labels (get_insns ());
52a11cbf 2484 find_basic_blocks (get_insns (), max_reg_num (), 0);
0068fd96 2485 cleanup_cfg (CLEANUP_PRE_LOOP);
ce152ef8 2486}
4956d07c 2487\f
52a11cbf 2488/* This section handles removing dead code for flow. */
154bba13 2489
52a11cbf 2490/* Remove LABEL from the exception_handler_labels list. */
154bba13 2491
52a11cbf
RH
2492static void
2493remove_exception_handler_label (label)
2494 rtx label;
154bba13 2495{
52a11cbf 2496 rtx *pl, l;
100d81d4 2497
655dd289
JJ
2498 /* If exception_handler_labels was not built yet,
2499 there is nothing to do. */
2500 if (exception_handler_labels == NULL)
2501 return;
2502
52a11cbf
RH
2503 for (pl = &exception_handler_labels, l = *pl;
2504 XEXP (l, 0) != label;
2505 pl = &XEXP (l, 1), l = *pl)
2506 continue;
154bba13 2507
52a11cbf
RH
2508 *pl = XEXP (l, 1);
2509 free_EXPR_LIST_node (l);
154bba13
TT
2510}
2511
52a11cbf 2512/* Splice REGION from the region tree etc. */
12670d88 2513
f19c9228 2514static void
52a11cbf
RH
2515remove_eh_handler (region)
2516 struct eh_region *region;
4956d07c 2517{
52a11cbf
RH
2518 struct eh_region **pp, *p;
2519 rtx lab;
2520 int i;
4956d07c 2521
52a11cbf
RH
2522 /* For the benefit of efficiently handling REG_EH_REGION notes,
2523 replace this region in the region array with its containing
2524 region. Note that previous region deletions may result in
2525 multiple copies of this region in the array, so we have to
2526 search the whole thing. */
2527 for (i = cfun->eh->last_region_number; i > 0; --i)
2528 if (cfun->eh->region_array[i] == region)
2529 cfun->eh->region_array[i] = region->outer;
2530
2531 if (cfun->eh->built_landing_pads)
2532 lab = region->landing_pad;
2533 else
2534 lab = region->label;
2535 if (lab)
2536 remove_exception_handler_label (lab);
2537
2538 if (region->outer)
2539 pp = &region->outer->inner;
2540 else
2541 pp = &cfun->eh->region_tree;
2542 for (p = *pp; p != region; pp = &p->next_peer, p = *pp)
2543 continue;
12670d88 2544
52a11cbf 2545 if (region->inner)
4956d07c 2546 {
52a11cbf
RH
2547 for (p = region->inner; p->next_peer ; p = p->next_peer)
2548 p->outer = region->outer;
2549 p->next_peer = region->next_peer;
2550 p->outer = region->outer;
2551 *pp = region->inner;
4956d07c 2552 }
52a11cbf
RH
2553 else
2554 *pp = region->next_peer;
f19c9228 2555
52a11cbf
RH
2556 if (region->type == ERT_CATCH)
2557 {
2558 struct eh_region *try, *next, *prev;
f19c9228 2559
52a11cbf
RH
2560 for (try = region->next_peer;
2561 try->type == ERT_CATCH;
2562 try = try->next_peer)
2563 continue;
2564 if (try->type != ERT_TRY)
2565 abort ();
f19c9228 2566
52a11cbf
RH
2567 next = region->u.catch.next_catch;
2568 prev = region->u.catch.prev_catch;
f19c9228 2569
52a11cbf
RH
2570 if (next)
2571 next->u.catch.prev_catch = prev;
2572 else
2573 try->u.try.last_catch = prev;
2574 if (prev)
2575 prev->u.catch.next_catch = next;
2576 else
2577 {
2578 try->u.try.catch = next;
2579 if (! next)
2580 remove_eh_handler (try);
2581 }
2582 }
988cea7d 2583
52a11cbf 2584 free (region);
4956d07c
MS
2585}
2586
52a11cbf
RH
2587/* LABEL heads a basic block that is about to be deleted. If this
2588 label corresponds to an exception region, we may be able to
2589 delete the region. */
4956d07c
MS
2590
2591void
52a11cbf
RH
2592maybe_remove_eh_handler (label)
2593 rtx label;
4956d07c 2594{
52a11cbf 2595 int i;
4956d07c 2596
52a11cbf
RH
2597 /* ??? After generating landing pads, it's not so simple to determine
2598 if the region data is completely unused. One must examine the
2599 landing pad and the post landing pad, and whether an inner try block
2600 is referencing the catch handlers directly. */
2601 if (cfun->eh->built_landing_pads)
4956d07c
MS
2602 return;
2603
52a11cbf 2604 for (i = cfun->eh->last_region_number; i > 0; --i)
87ff9c8e 2605 {
52a11cbf
RH
2606 struct eh_region *region = cfun->eh->region_array[i];
2607 if (region && region->label == label)
87ff9c8e 2608 {
52a11cbf
RH
2609 /* Flow will want to remove MUST_NOT_THROW regions as unreachable
2610 because there is no path to the fallback call to terminate.
2611 But the region continues to affect call-site data until there
2612 are no more contained calls, which we don't see here. */
2613 if (region->type == ERT_MUST_NOT_THROW)
2614 {
2615 remove_exception_handler_label (region->label);
2616 region->label = NULL_RTX;
2617 }
2618 else
2619 remove_eh_handler (region);
2620 break;
87ff9c8e 2621 }
87ff9c8e
RH
2622 }
2623}
2624
52a11cbf
RH
2625\f
2626/* This section describes CFG exception edges for flow. */
87ff9c8e 2627
52a11cbf
RH
2628/* For communicating between calls to reachable_next_level. */
2629struct reachable_info
87ff9c8e 2630{
52a11cbf
RH
2631 tree types_caught;
2632 tree types_allowed;
2633 rtx handlers;
2634};
87ff9c8e 2635
52a11cbf
RH
2636/* A subroutine of reachable_next_level. Return true if TYPE, or a
2637 base class of TYPE, is in HANDLED. */
87ff9c8e 2638
52a11cbf
RH
2639static int
2640check_handled (handled, type)
2641 tree handled, type;
87ff9c8e 2642{
52a11cbf
RH
2643 tree t;
2644
2645 /* We can check for exact matches without front-end help. */
2646 if (! lang_eh_type_covers)
f54a7f6f 2647 {
52a11cbf
RH
2648 for (t = handled; t ; t = TREE_CHAIN (t))
2649 if (TREE_VALUE (t) == type)
2650 return 1;
2651 }
2652 else
2653 {
2654 for (t = handled; t ; t = TREE_CHAIN (t))
2655 if ((*lang_eh_type_covers) (TREE_VALUE (t), type))
2656 return 1;
f54a7f6f 2657 }
52a11cbf
RH
2658
2659 return 0;
87ff9c8e
RH
2660}
2661
52a11cbf
RH
2662/* A subroutine of reachable_next_level. If we are collecting a list
2663 of handlers, add one. After landing pad generation, reference
2664 it instead of the handlers themselves. Further, the handlers are
3f2c5d1a 2665 all wired together, so by referencing one, we've got them all.
52a11cbf
RH
2666 Before landing pad generation we reference each handler individually.
2667
2668 LP_REGION contains the landing pad; REGION is the handler. */
87ff9c8e
RH
2669
2670static void
52a11cbf
RH
2671add_reachable_handler (info, lp_region, region)
2672 struct reachable_info *info;
2673 struct eh_region *lp_region;
2674 struct eh_region *region;
87ff9c8e 2675{
52a11cbf
RH
2676 if (! info)
2677 return;
2678
2679 if (cfun->eh->built_landing_pads)
87ff9c8e 2680 {
52a11cbf
RH
2681 if (! info->handlers)
2682 info->handlers = alloc_INSN_LIST (lp_region->landing_pad, NULL_RTX);
87ff9c8e 2683 }
52a11cbf
RH
2684 else
2685 info->handlers = alloc_INSN_LIST (region->label, info->handlers);
87ff9c8e
RH
2686}
2687
3f2c5d1a 2688/* Process one level of exception regions for reachability.
52a11cbf
RH
2689 If TYPE_THROWN is non-null, then it is the *exact* type being
2690 propagated. If INFO is non-null, then collect handler labels
2691 and caught/allowed type information between invocations. */
87ff9c8e 2692
52a11cbf
RH
2693static enum reachable_code
2694reachable_next_level (region, type_thrown, info)
2695 struct eh_region *region;
2696 tree type_thrown;
2697 struct reachable_info *info;
87ff9c8e 2698{
52a11cbf
RH
2699 switch (region->type)
2700 {
2701 case ERT_CLEANUP:
2702 /* Before landing-pad generation, we model control flow
2703 directly to the individual handlers. In this way we can
2704 see that catch handler types may shadow one another. */
2705 add_reachable_handler (info, region, region);
2706 return RNL_MAYBE_CAUGHT;
2707
2708 case ERT_TRY:
2709 {
2710 struct eh_region *c;
2711 enum reachable_code ret = RNL_NOT_CAUGHT;
fa51b01b 2712
52a11cbf
RH
2713 for (c = region->u.try.catch; c ; c = c->u.catch.next_catch)
2714 {
2715 /* A catch-all handler ends the search. */
2716 /* ??? _Unwind_ForcedUnwind will want outer cleanups
2717 to be run as well. */
6d41a92f 2718 if (c->u.catch.type_list == NULL)
52a11cbf
RH
2719 {
2720 add_reachable_handler (info, region, c);
2721 return RNL_CAUGHT;
2722 }
2723
2724 if (type_thrown)
2725 {
a8154559 2726 /* If we have at least one type match, end the search. */
6d41a92f 2727 tree tp_node = c->u.catch.type_list;
3f2c5d1a 2728
6d41a92f 2729 for (; tp_node; tp_node = TREE_CHAIN (tp_node))
52a11cbf 2730 {
6d41a92f
OH
2731 tree type = TREE_VALUE (tp_node);
2732
2733 if (type == type_thrown
2734 || (lang_eh_type_covers
2735 && (*lang_eh_type_covers) (type, type_thrown)))
2736 {
2737 add_reachable_handler (info, region, c);
2738 return RNL_CAUGHT;
2739 }
52a11cbf
RH
2740 }
2741
2742 /* If we have definitive information of a match failure,
2743 the catch won't trigger. */
2744 if (lang_eh_type_covers)
2745 return RNL_NOT_CAUGHT;
2746 }
2747
6d41a92f
OH
2748 /* At this point, we either don't know what type is thrown or
2749 don't have front-end assistance to help deciding if it is
2750 covered by one of the types in the list for this region.
3f2c5d1a 2751
6d41a92f
OH
2752 We'd then like to add this region to the list of reachable
2753 handlers since it is indeed potentially reachable based on the
3f2c5d1a
RS
2754 information we have.
2755
6d41a92f
OH
2756 Actually, this handler is for sure not reachable if all the
2757 types it matches have already been caught. That is, it is only
2758 potentially reachable if at least one of the types it catches
2759 has not been previously caught. */
2760
52a11cbf
RH
2761 if (! info)
2762 ret = RNL_MAYBE_CAUGHT;
6d41a92f 2763 else
52a11cbf 2764 {
6d41a92f
OH
2765 tree tp_node = c->u.catch.type_list;
2766 bool maybe_reachable = false;
52a11cbf 2767
6d41a92f
OH
2768 /* Compute the potential reachability of this handler and
2769 update the list of types caught at the same time. */
2770 for (; tp_node; tp_node = TREE_CHAIN (tp_node))
2771 {
2772 tree type = TREE_VALUE (tp_node);
2773
2774 if (! check_handled (info->types_caught, type))
2775 {
2776 info->types_caught
2777 = tree_cons (NULL, type, info->types_caught);
3f2c5d1a 2778
6d41a92f
OH
2779 maybe_reachable = true;
2780 }
2781 }
3f2c5d1a 2782
6d41a92f
OH
2783 if (maybe_reachable)
2784 {
2785 add_reachable_handler (info, region, c);
3f2c5d1a 2786
6d41a92f
OH
2787 /* ??? If the catch type is a base class of every allowed
2788 type, then we know we can stop the search. */
2789 ret = RNL_MAYBE_CAUGHT;
2790 }
52a11cbf
RH
2791 }
2792 }
87ff9c8e 2793
52a11cbf
RH
2794 return ret;
2795 }
87ff9c8e 2796
52a11cbf
RH
2797 case ERT_ALLOWED_EXCEPTIONS:
2798 /* An empty list of types definitely ends the search. */
2799 if (region->u.allowed.type_list == NULL_TREE)
2800 {
2801 add_reachable_handler (info, region, region);
2802 return RNL_CAUGHT;
2803 }
87ff9c8e 2804
52a11cbf
RH
2805 /* Collect a list of lists of allowed types for use in detecting
2806 when a catch may be transformed into a catch-all. */
2807 if (info)
2808 info->types_allowed = tree_cons (NULL_TREE,
2809 region->u.allowed.type_list,
2810 info->types_allowed);
3f2c5d1a 2811
684d9f3b 2812 /* If we have definitive information about the type hierarchy,
52a11cbf
RH
2813 then we can tell if the thrown type will pass through the
2814 filter. */
2815 if (type_thrown && lang_eh_type_covers)
2816 {
2817 if (check_handled (region->u.allowed.type_list, type_thrown))
2818 return RNL_NOT_CAUGHT;
2819 else
2820 {
2821 add_reachable_handler (info, region, region);
2822 return RNL_CAUGHT;
2823 }
2824 }
21cd906e 2825
52a11cbf
RH
2826 add_reachable_handler (info, region, region);
2827 return RNL_MAYBE_CAUGHT;
21cd906e 2828
52a11cbf
RH
2829 case ERT_CATCH:
      /* Catch regions are handled by their controlling try region.  */
2831 return RNL_NOT_CAUGHT;
21cd906e 2832
52a11cbf
RH
2833 case ERT_MUST_NOT_THROW:
2834 /* Here we end our search, since no exceptions may propagate.
2835 If we've touched down at some landing pad previous, then the
2836 explicit function call we generated may be used. Otherwise
2837 the call is made by the runtime. */
2838 if (info && info->handlers)
21cd906e 2839 {
52a11cbf
RH
2840 add_reachable_handler (info, region, region);
2841 return RNL_CAUGHT;
21cd906e 2842 }
52a11cbf
RH
2843 else
2844 return RNL_BLOCKED;
21cd906e 2845
52a11cbf
RH
2846 case ERT_THROW:
2847 case ERT_FIXUP:
3f2c5d1a 2848 case ERT_UNKNOWN:
52a11cbf
RH
2849 /* Shouldn't see these here. */
2850 break;
21cd906e 2851 }
fa51b01b 2852
52a11cbf 2853 abort ();
fa51b01b 2854}
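
/* For illustration: for a call inside `try { ... } catch (A) { ... }'
   with no front-end type oracle (lang_eh_type_covers is null) and no
   known thrown type, the ERT_TRY case above records the catch handler
   and answers RNL_MAYBE_CAUGHT, so callers keep walking outer regions;
   an enclosing ERT_MUST_NOT_THROW then ends the walk with RNL_CAUGHT
   (if handlers were already collected) or RNL_BLOCKED.  */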
4956d07c 2855
52a11cbf
RH
2856/* Retrieve a list of labels of exception handlers which can be
2857 reached by a given insn. */
4956d07c 2858
52a11cbf
RH
2859rtx
2860reachable_handlers (insn)
4956d07c
MS
2861 rtx insn;
2862{
52a11cbf
RH
2863 struct reachable_info info;
2864 struct eh_region *region;
2865 tree type_thrown;
2866 int region_number;
fb13d4d0 2867
52a11cbf
RH
2868 if (GET_CODE (insn) == JUMP_INSN
2869 && GET_CODE (PATTERN (insn)) == RESX)
2870 region_number = XINT (PATTERN (insn), 0);
2871 else
1ef1bf06
AM
2872 {
2873 rtx note = find_reg_note (insn, REG_EH_REGION, NULL_RTX);
52a11cbf
RH
2874 if (!note || INTVAL (XEXP (note, 0)) <= 0)
2875 return NULL;
2876 region_number = INTVAL (XEXP (note, 0));
27a36778 2877 }
4956d07c 2878
52a11cbf 2879 memset (&info, 0, sizeof (info));
4956d07c 2880
52a11cbf 2881 region = cfun->eh->region_array[region_number];
fb13d4d0 2882
52a11cbf 2883 type_thrown = NULL_TREE;
7f206d8f
RH
2884 if (GET_CODE (insn) == JUMP_INSN
2885 && GET_CODE (PATTERN (insn)) == RESX)
2886 {
2887 /* A RESX leaves a region instead of entering it. Thus the
2888 region itself may have been deleted out from under us. */
2889 if (region == NULL)
2890 return NULL;
2891 region = region->outer;
2892 }
2893 else if (region->type == ERT_THROW)
52a11cbf
RH
2894 {
2895 type_thrown = region->u.throw.type;
2896 region = region->outer;
2897 }
fac62ecf 2898
52a11cbf
RH
2899 for (; region; region = region->outer)
2900 if (reachable_next_level (region, type_thrown, &info) >= RNL_CAUGHT)
2901 break;
fb13d4d0 2902
52a11cbf 2903 return info.handlers;
fb13d4d0
JM
2904}
2905
52a11cbf
RH
2906/* Determine if the given INSN can throw an exception that is caught
2907 within the function. */
4956d07c 2908
52a11cbf
RH
2909bool
2910can_throw_internal (insn)
4956d07c 2911 rtx insn;
4956d07c 2912{
52a11cbf
RH
2913 struct eh_region *region;
2914 tree type_thrown;
2915 rtx note;
e6cfb550 2916
52a11cbf
RH
2917 if (! INSN_P (insn))
2918 return false;
12670d88 2919
52a11cbf
RH
2920 if (GET_CODE (insn) == INSN
2921 && GET_CODE (PATTERN (insn)) == SEQUENCE)
2922 insn = XVECEXP (PATTERN (insn), 0, 0);
4956d07c 2923
52a11cbf
RH
2924 if (GET_CODE (insn) == CALL_INSN
2925 && GET_CODE (PATTERN (insn)) == CALL_PLACEHOLDER)
4956d07c 2926 {
52a11cbf
RH
2927 int i;
2928 for (i = 0; i < 3; ++i)
4956d07c 2929 {
52a11cbf
RH
2930 rtx sub = XEXP (PATTERN (insn), i);
2931 for (; sub ; sub = NEXT_INSN (sub))
2932 if (can_throw_internal (sub))
2933 return true;
4956d07c 2934 }
52a11cbf 2935 return false;
4956d07c
MS
2936 }
2937
52a11cbf
RH
2938 /* Every insn that might throw has an EH_REGION note. */
2939 note = find_reg_note (insn, REG_EH_REGION, NULL_RTX);
2940 if (!note || INTVAL (XEXP (note, 0)) <= 0)
2941 return false;
4956d07c 2942
52a11cbf 2943 region = cfun->eh->region_array[INTVAL (XEXP (note, 0))];
4956d07c 2944
52a11cbf
RH
2945 type_thrown = NULL_TREE;
2946 if (region->type == ERT_THROW)
2947 {
2948 type_thrown = region->u.throw.type;
2949 region = region->outer;
2950 }
4956d07c 2951
52a11cbf
RH
2952 /* If this exception is ignored by each and every containing region,
2953 then control passes straight out. The runtime may handle some
2954 regions, which also do not require processing internally. */
2955 for (; region; region = region->outer)
2956 {
2957 enum reachable_code how = reachable_next_level (region, type_thrown, 0);
2958 if (how == RNL_BLOCKED)
2959 return false;
2960 if (how != RNL_NOT_CAUGHT)
2961 return true;
4956d07c 2962 }
4956d07c 2963
52a11cbf
RH
2964 return false;
2965}
4956d07c 2966
52a11cbf
RH
2967/* Determine if the given INSN can throw an exception that is
2968 visible outside the function. */
4956d07c 2969
52a11cbf
RH
2970bool
2971can_throw_external (insn)
2972 rtx insn;
4956d07c 2973{
52a11cbf
RH
2974 struct eh_region *region;
2975 tree type_thrown;
2976 rtx note;
4956d07c 2977
52a11cbf
RH
2978 if (! INSN_P (insn))
2979 return false;
2980
2981 if (GET_CODE (insn) == INSN
2982 && GET_CODE (PATTERN (insn)) == SEQUENCE)
2983 insn = XVECEXP (PATTERN (insn), 0, 0);
2984
2985 if (GET_CODE (insn) == CALL_INSN
2986 && GET_CODE (PATTERN (insn)) == CALL_PLACEHOLDER)
4956d07c 2987 {
52a11cbf
RH
2988 int i;
2989 for (i = 0; i < 3; ++i)
4956d07c 2990 {
52a11cbf
RH
2991 rtx sub = XEXP (PATTERN (insn), i);
2992 for (; sub ; sub = NEXT_INSN (sub))
2993 if (can_throw_external (sub))
2994 return true;
4956d07c 2995 }
52a11cbf 2996 return false;
4956d07c 2997 }
52a11cbf
RH
2998
2999 note = find_reg_note (insn, REG_EH_REGION, NULL_RTX);
3000 if (!note)
3001 {
3002 /* Calls (and trapping insns) without notes are outside any
3003 exception handling region in this function. We have to
3004 assume it might throw. Given that the front end and middle
3005 ends mark known NOTHROW functions, this isn't so wildly
3006 inaccurate. */
3007 return (GET_CODE (insn) == CALL_INSN
3008 || (flag_non_call_exceptions
3009 && may_trap_p (PATTERN (insn))));
3010 }
3011 if (INTVAL (XEXP (note, 0)) <= 0)
3012 return false;
3013
3014 region = cfun->eh->region_array[INTVAL (XEXP (note, 0))];
3015
3016 type_thrown = NULL_TREE;
3017 if (region->type == ERT_THROW)
3018 {
3019 type_thrown = region->u.throw.type;
3020 region = region->outer;
3021 }
3022
3023 /* If the exception is caught or blocked by any containing region,
3024 then it is not seen by any calling function. */
3025 for (; region ; region = region->outer)
3026 if (reachable_next_level (region, type_thrown, NULL) >= RNL_CAUGHT)
3027 return false;
3028
3029 return true;
4956d07c 3030}
1ef1bf06 3031
52a11cbf 3032/* True if nothing in this function can throw outside this function. */
6814a8a0 3033
52a11cbf
RH
3034bool
3035nothrow_function_p ()
1ef1bf06
AM
3036{
3037 rtx insn;
1ef1bf06 3038
52a11cbf
RH
3039 if (! flag_exceptions)
3040 return true;
1ef1bf06 3041
1ef1bf06 3042 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
52a11cbf
RH
3043 if (can_throw_external (insn))
3044 return false;
3045 for (insn = current_function_epilogue_delay_list; insn;
3046 insn = XEXP (insn, 1))
3047 if (can_throw_external (insn))
3048 return false;
4da896b2 3049
52a11cbf 3050 return true;
1ef1bf06 3051}
52a11cbf 3052
ca55abae 3053\f
52a11cbf 3054/* Various hooks for unwind library. */
ca55abae
JM
3055
3056/* Do any necessary initialization to access arbitrary stack frames.
3057 On the SPARC, this means flushing the register windows. */
3058
3059void
3060expand_builtin_unwind_init ()
3061{
3062 /* Set this so all the registers get saved in our frame; we need to be
30f7a378 3063 able to copy the saved values for any registers from frames we unwind. */
ca55abae
JM
3064 current_function_has_nonlocal_label = 1;
3065
3066#ifdef SETUP_FRAME_ADDRESSES
3067 SETUP_FRAME_ADDRESSES ();
3068#endif
3069}
3070
52a11cbf
RH
3071rtx
3072expand_builtin_eh_return_data_regno (arglist)
3073 tree arglist;
3074{
3075 tree which = TREE_VALUE (arglist);
3076 unsigned HOST_WIDE_INT iwhich;
3077
3078 if (TREE_CODE (which) != INTEGER_CST)
3079 {
3080 error ("argument of `__builtin_eh_return_regno' must be constant");
3081 return constm1_rtx;
3082 }
3083
3084 iwhich = tree_low_cst (which, 1);
3085 iwhich = EH_RETURN_DATA_REGNO (iwhich);
3086 if (iwhich == INVALID_REGNUM)
3087 return constm1_rtx;
3088
3089#ifdef DWARF_FRAME_REGNUM
3090 iwhich = DWARF_FRAME_REGNUM (iwhich);
3091#else
3092 iwhich = DBX_REGISTER_NUMBER (iwhich);
3093#endif
3094
3f2c5d1a 3095 return GEN_INT (iwhich);
52a11cbf
RH
3096}
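
/* For example, the unwinder runtime is expected to use this hook as
   __builtin_eh_return_data_regno (0) and (1) to learn which hard
   registers carry the exception pointer and filter value into a
   landing pad; a result of -1 means the target has no such register.  */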
3097
ca55abae
JM
3098/* Given a value extracted from the return address register or stack slot,
3099 return the actual address encoded in that value. */
3100
3101rtx
3102expand_builtin_extract_return_addr (addr_tree)
3103 tree addr_tree;
3104{
3105 rtx addr = expand_expr (addr_tree, NULL_RTX, Pmode, 0);
52a11cbf
RH
3106
3107 /* First mask out any unwanted bits. */
3108#ifdef MASK_RETURN_ADDR
22273300 3109 expand_and (Pmode, addr, MASK_RETURN_ADDR, addr);
52a11cbf
RH
3110#endif
3111
3112 /* Then adjust to find the real return address. */
3113#if defined (RETURN_ADDR_OFFSET)
3114 addr = plus_constant (addr, RETURN_ADDR_OFFSET);
3115#endif
3116
3117 return addr;
ca55abae
JM
3118}
3119
3120/* Given an actual address in addr_tree, do any necessary encoding
3121 and return the value to be stored in the return address register or
3122 stack slot so the epilogue will return to that address. */
3123
3124rtx
3125expand_builtin_frob_return_addr (addr_tree)
3126 tree addr_tree;
3127{
4b6c1672 3128 rtx addr = expand_expr (addr_tree, NULL_RTX, ptr_mode, 0);
52a11cbf 3129
be128cd9 3130#ifdef POINTERS_EXTEND_UNSIGNED
4b6c1672
RK
3131 if (GET_MODE (addr) != Pmode)
3132 addr = convert_memory_address (Pmode, addr);
be128cd9
RK
3133#endif
3134
ca55abae 3135#ifdef RETURN_ADDR_OFFSET
52a11cbf 3136 addr = force_reg (Pmode, addr);
ca55abae
JM
3137 addr = plus_constant (addr, -RETURN_ADDR_OFFSET);
3138#endif
52a11cbf 3139
ca55abae
JM
3140 return addr;
3141}
3142
52a11cbf
RH
3143/* Set up the epilogue with the magic bits we'll need to return to the
3144 exception handler. */
ca55abae 3145
52a11cbf
RH
3146void
3147expand_builtin_eh_return (stackadj_tree, handler_tree)
3148 tree stackadj_tree, handler_tree;
ca55abae 3149{
52a11cbf 3150 rtx stackadj, handler;
ca55abae 3151
52a11cbf
RH
3152 stackadj = expand_expr (stackadj_tree, cfun->eh->ehr_stackadj, VOIDmode, 0);
3153 handler = expand_expr (handler_tree, cfun->eh->ehr_handler, VOIDmode, 0);
ca55abae 3154
be128cd9 3155#ifdef POINTERS_EXTEND_UNSIGNED
4b6c1672
RK
3156 if (GET_MODE (stackadj) != Pmode)
3157 stackadj = convert_memory_address (Pmode, stackadj);
3158
3159 if (GET_MODE (handler) != Pmode)
3160 handler = convert_memory_address (Pmode, handler);
be128cd9
RK
3161#endif
3162
52a11cbf
RH
3163 if (! cfun->eh->ehr_label)
3164 {
3165 cfun->eh->ehr_stackadj = copy_to_reg (stackadj);
3166 cfun->eh->ehr_handler = copy_to_reg (handler);
3167 cfun->eh->ehr_label = gen_label_rtx ();
3168 }
ca55abae 3169 else
ca55abae 3170 {
52a11cbf
RH
3171 if (stackadj != cfun->eh->ehr_stackadj)
3172 emit_move_insn (cfun->eh->ehr_stackadj, stackadj);
3173 if (handler != cfun->eh->ehr_handler)
3174 emit_move_insn (cfun->eh->ehr_handler, handler);
ca55abae
JM
3175 }
3176
52a11cbf 3177 emit_jump (cfun->eh->ehr_label);
a1622f83
AM
3178}
3179
71038426
RH
3180void
3181expand_eh_return ()
ca55abae 3182{
52a11cbf 3183 rtx sa, ra, around_label;
ca55abae 3184
52a11cbf 3185 if (! cfun->eh->ehr_label)
71038426 3186 return;
ca55abae 3187
52a11cbf
RH
3188 sa = EH_RETURN_STACKADJ_RTX;
3189 if (! sa)
71038426 3190 {
52a11cbf 3191 error ("__builtin_eh_return not supported on this target");
71038426
RH
3192 return;
3193 }
ca55abae 3194
52a11cbf 3195 current_function_calls_eh_return = 1;
ca55abae 3196
52a11cbf
RH
3197 around_label = gen_label_rtx ();
3198 emit_move_insn (sa, const0_rtx);
3199 emit_jump (around_label);
ca55abae 3200
52a11cbf
RH
3201 emit_label (cfun->eh->ehr_label);
3202 clobber_return_register ();
ca55abae 3203
52a11cbf
RH
3204#ifdef HAVE_eh_return
3205 if (HAVE_eh_return)
3206 emit_insn (gen_eh_return (cfun->eh->ehr_stackadj, cfun->eh->ehr_handler));
3207 else
71038426 3208#endif
52a11cbf
RH
3209 {
3210 ra = EH_RETURN_HANDLER_RTX;
3211 if (! ra)
3212 {
3213 error ("__builtin_eh_return not supported on this target");
3214 ra = gen_reg_rtx (Pmode);
3215 }
71038426 3216
52a11cbf 3217 emit_move_insn (sa, cfun->eh->ehr_stackadj);
be128cd9 3218 emit_move_insn (ra, cfun->eh->ehr_handler);
52a11cbf 3219 }
71038426 3220
52a11cbf 3221 emit_label (around_label);
71038426 3222}
77d33a84 3223\f
949f197f 3224/* In the following functions, we represent entries in the action table
eaec9b3d 3225 as 1-based indices. Special cases are:
949f197f
RH
3226
3227 0: null action record, non-null landing pad; implies cleanups
3228 -1: null action record, null landing pad; implies no action
3229 -2: no call-site entry; implies must_not_throw
3230 -3: we have yet to process outer regions
3231
3232 Further, no special cases apply to the "next" field of the record.
3233 For next, 0 means end of list. */
3234
52a11cbf
RH
3235struct action_record
3236{
3237 int offset;
3238 int filter;
3239 int next;
3240};
77d33a84 3241
52a11cbf
RH
3242static int
3243action_record_eq (pentry, pdata)
3244 const PTR pentry;
3245 const PTR pdata;
3246{
3247 const struct action_record *entry = (const struct action_record *) pentry;
3248 const struct action_record *data = (const struct action_record *) pdata;
3249 return entry->filter == data->filter && entry->next == data->next;
3250}
77d33a84 3251
52a11cbf
RH
3252static hashval_t
3253action_record_hash (pentry)
3254 const PTR pentry;
3255{
3256 const struct action_record *entry = (const struct action_record *) pentry;
3257 return entry->next * 1009 + entry->filter;
3258}
77d33a84 3259
52a11cbf
RH
3260static int
3261add_action_record (ar_hash, filter, next)
3262 htab_t ar_hash;
3263 int filter, next;
77d33a84 3264{
52a11cbf
RH
3265 struct action_record **slot, *new, tmp;
3266
3267 tmp.filter = filter;
3268 tmp.next = next;
3269 slot = (struct action_record **) htab_find_slot (ar_hash, &tmp, INSERT);
77d33a84 3270
52a11cbf 3271 if ((new = *slot) == NULL)
77d33a84 3272 {
52a11cbf
RH
3273 new = (struct action_record *) xmalloc (sizeof (*new));
3274 new->offset = VARRAY_ACTIVE_SIZE (cfun->eh->action_record_data) + 1;
3275 new->filter = filter;
3276 new->next = next;
3277 *slot = new;
3278
3279 /* The filter value goes in untouched. The link to the next
3280 record is a "self-relative" byte offset, or zero to indicate
3281 that there is no next record. So convert the absolute 1 based
eaec9b3d 3282 indices we've been carrying around into a displacement. */
52a11cbf
RH
3283
3284 push_sleb128 (&cfun->eh->action_record_data, filter);
3285 if (next)
3286 next -= VARRAY_ACTIVE_SIZE (cfun->eh->action_record_data) + 1;
3287 push_sleb128 (&cfun->eh->action_record_data, next);
77d33a84 3288 }
77d33a84 3289
52a11cbf
RH
3290 return new->offset;
3291}
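
/* A worked example, for illustration: with action_record_data empty, a
   record for filter 3 with no next record is stored as the sleb128
   bytes {3, 0} and gets offset 1; a second record for filter 1 chained
   to it is stored as {1, -3} (the -3 being the self-relative link back
   to the first record) and gets offset 3.  */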
77d33a84 3292
52a11cbf
RH
3293static int
3294collect_one_action_chain (ar_hash, region)
3295 htab_t ar_hash;
3296 struct eh_region *region;
77d33a84 3297{
52a11cbf
RH
3298 struct eh_region *c;
3299 int next;
77d33a84 3300
52a11cbf
RH
3301 /* If we've reached the top of the region chain, then we have
3302 no actions, and require no landing pad. */
3303 if (region == NULL)
3304 return -1;
3305
3306 switch (region->type)
77d33a84 3307 {
52a11cbf
RH
3308 case ERT_CLEANUP:
3309 /* A cleanup adds a zero filter to the beginning of the chain, but
3310 there are special cases to look out for. If there are *only*
3311 cleanups along a path, then it compresses to a zero action.
3312 Further, if there are multiple cleanups along a path, we only
3313 need to represent one of them, as that is enough to trigger
3314 entry to the landing pad at runtime. */
3315 next = collect_one_action_chain (ar_hash, region->outer);
3316 if (next <= 0)
3317 return 0;
3318 for (c = region->outer; c ; c = c->outer)
3319 if (c->type == ERT_CLEANUP)
3320 return next;
3321 return add_action_record (ar_hash, 0, next);
3322
3323 case ERT_TRY:
3324 /* Process the associated catch regions in reverse order.
3325 If there's a catch-all handler, then we don't need to
3326 search outer regions. Use a magic -3 value to record
a1f300c0 3327 that we haven't done the outer search. */
52a11cbf
RH
3328 next = -3;
3329 for (c = region->u.try.last_catch; c ; c = c->u.catch.prev_catch)
3330 {
6d41a92f
OH
3331 if (c->u.catch.type_list == NULL)
3332 {
3333 /* Retrieve the filter from the head of the filter list
3334 where we have stored it (see assign_filter_values). */
3f2c5d1a 3335 int filter
6d41a92f
OH
3336 = TREE_INT_CST_LOW (TREE_VALUE (c->u.catch.filter_list));
3337
3338 next = add_action_record (ar_hash, filter, 0);
3339 }
52a11cbf
RH
3340 else
3341 {
6d41a92f
OH
3342 /* Once the outer search is done, trigger an action record for
3343 each filter we have. */
3344 tree flt_node;
3345
52a11cbf
RH
3346 if (next == -3)
3347 {
3348 next = collect_one_action_chain (ar_hash, region->outer);
949f197f
RH
3349
3350 /* If there is no next action, terminate the chain. */
3351 if (next == -1)
52a11cbf 3352 next = 0;
949f197f
RH
3353 /* If all outer actions are cleanups or must_not_throw,
3354 we'll have no action record for it, since we had wanted
3355 to encode these states in the call-site record directly.
3356 Add a cleanup action to the chain to catch these. */
3357 else if (next <= 0)
3358 next = add_action_record (ar_hash, 0, 0);
52a11cbf 3359 }
3f2c5d1a 3360
6d41a92f
OH
3361 flt_node = c->u.catch.filter_list;
3362 for (; flt_node; flt_node = TREE_CHAIN (flt_node))
3363 {
3364 int filter = TREE_INT_CST_LOW (TREE_VALUE (flt_node));
3365 next = add_action_record (ar_hash, filter, next);
3366 }
52a11cbf
RH
3367 }
3368 }
3369 return next;
3370
3371 case ERT_ALLOWED_EXCEPTIONS:
3372 /* An exception specification adds its filter to the
3373 beginning of the chain. */
3374 next = collect_one_action_chain (ar_hash, region->outer);
3375 return add_action_record (ar_hash, region->u.allowed.filter,
3376 next < 0 ? 0 : next);
3377
3378 case ERT_MUST_NOT_THROW:
3379 /* A must-not-throw region with no inner handlers or cleanups
3380 requires no call-site entry. Note that this differs from
3381 the no handler or cleanup case in that we do require an lsda
3382 to be generated. Return a magic -2 value to record this. */
3383 return -2;
3384
3385 case ERT_CATCH:
3386 case ERT_THROW:
3387 /* CATCH regions are handled in TRY above. THROW regions are
3388 for optimization information only and produce no output. */
3389 return collect_one_action_chain (ar_hash, region->outer);
3390
3391 default:
3392 abort ();
77d33a84
AM
3393 }
3394}
3395
52a11cbf
RH
3396static int
3397add_call_site (landing_pad, action)
3398 rtx landing_pad;
3399 int action;
77d33a84 3400{
52a11cbf
RH
3401 struct call_site_record *data = cfun->eh->call_site_data;
3402 int used = cfun->eh->call_site_data_used;
3403 int size = cfun->eh->call_site_data_size;
77d33a84 3404
52a11cbf
RH
3405 if (used >= size)
3406 {
3407 size = (size ? size * 2 : 64);
3408 data = (struct call_site_record *)
3409 xrealloc (data, sizeof (*data) * size);
3410 cfun->eh->call_site_data = data;
3411 cfun->eh->call_site_data_size = size;
3412 }
77d33a84 3413
52a11cbf
RH
3414 data[used].landing_pad = landing_pad;
3415 data[used].action = action;
77d33a84 3416
52a11cbf 3417 cfun->eh->call_site_data_used = used + 1;
77d33a84 3418
52a11cbf 3419 return used + call_site_base;
77d33a84
AM
3420}
3421
52a11cbf
RH
3422/* Turn REG_EH_REGION notes back into NOTE_INSN_EH_REGION notes.
3423 The new note numbers will not refer to region numbers, but
3424 instead to call site entries. */
77d33a84 3425
void
convert_to_eh_region_ranges ()
{
  rtx insn, iter, note;
  htab_t ar_hash;
  int last_action = -3;
  rtx last_action_insn = NULL_RTX;
  rtx last_landing_pad = NULL_RTX;
  rtx first_no_action_insn = NULL_RTX;
  int call_site = 0;

  if (USING_SJLJ_EXCEPTIONS || cfun->eh->region_tree == NULL)
    return;

  VARRAY_UCHAR_INIT (cfun->eh->action_record_data, 64, "action_record_data");

  ar_hash = htab_create (31, action_record_hash, action_record_eq, free);

  for (iter = get_insns (); iter ; iter = NEXT_INSN (iter))
    if (INSN_P (iter))
      {
        struct eh_region *region;
        int this_action;
        rtx this_landing_pad;

        insn = iter;
        if (GET_CODE (insn) == INSN
            && GET_CODE (PATTERN (insn)) == SEQUENCE)
          insn = XVECEXP (PATTERN (insn), 0, 0);

        note = find_reg_note (insn, REG_EH_REGION, NULL_RTX);
        if (!note)
          {
            if (! (GET_CODE (insn) == CALL_INSN
                   || (flag_non_call_exceptions
                       && may_trap_p (PATTERN (insn)))))
              continue;
            this_action = -1;
            region = NULL;
          }
        else
          {
            if (INTVAL (XEXP (note, 0)) <= 0)
              continue;
            region = cfun->eh->region_array[INTVAL (XEXP (note, 0))];
            this_action = collect_one_action_chain (ar_hash, region);
          }

        /* Existence of catch handlers or must-not-throw regions
           implies that an lsda is needed (even if empty).  */
        if (this_action != -1)
          cfun->uses_eh_lsda = 1;

        /* Delay creation of region notes for no-action regions
           until we're sure that an lsda will be required.  */
        else if (last_action == -3)
          {
            first_no_action_insn = iter;
            last_action = -1;
          }

        /* Cleanups and handlers may share action chains but not
           landing pads.  Collect the landing pad for this region.  */
        if (this_action >= 0)
          {
            struct eh_region *o;
            for (o = region; ! o->landing_pad ; o = o->outer)
              continue;
            this_landing_pad = o->landing_pad;
          }
        else
          this_landing_pad = NULL_RTX;

        /* Differing actions or landing pads implies a change in call-site
           info, which implies some EH_REGION note should be emitted.  */
        if (last_action != this_action
            || last_landing_pad != this_landing_pad)
          {
            /* If we'd not seen a previous action (-3) or the previous
               action was must-not-throw (-2), then we do not need an
               end note.  */
            if (last_action >= -1)
              {
                /* If we delayed the creation of the begin, do it now.  */
                if (first_no_action_insn)
                  {
                    call_site = add_call_site (NULL_RTX, 0);
                    note = emit_note_before (NOTE_INSN_EH_REGION_BEG,
                                             first_no_action_insn);
                    NOTE_EH_HANDLER (note) = call_site;
                    first_no_action_insn = NULL_RTX;
                  }

                note = emit_note_after (NOTE_INSN_EH_REGION_END,
                                        last_action_insn);
                NOTE_EH_HANDLER (note) = call_site;
              }

            /* If the new action is must-not-throw, then no region notes
               are created.  */
            if (this_action >= -1)
              {
                call_site = add_call_site (this_landing_pad,
                                           this_action < 0 ? 0 : this_action);
                note = emit_note_before (NOTE_INSN_EH_REGION_BEG, iter);
                NOTE_EH_HANDLER (note) = call_site;
              }

            last_action = this_action;
            last_landing_pad = this_landing_pad;
          }
        last_action_insn = iter;
      }

  if (last_action >= -1 && ! first_no_action_insn)
    {
      note = emit_note_after (NOTE_INSN_EH_REGION_END, last_action_insn);
      NOTE_EH_HANDLER (note) = call_site;
    }

  htab_delete (ar_hash);
}

\f
static void
push_uleb128 (data_area, value)
     varray_type *data_area;
     unsigned int value;
{
  do
    {
      unsigned char byte = value & 0x7f;
      value >>= 7;
      if (value)
        byte |= 0x80;
      VARRAY_PUSH_UCHAR (*data_area, byte);
    }
  while (value);
}

static void
push_sleb128 (data_area, value)
     varray_type *data_area;
     int value;
{
  unsigned char byte;
  int more;

  do
    {
      byte = value & 0x7f;
      value >>= 7;
      more = ! ((value == 0 && (byte & 0x40) == 0)
                || (value == -1 && (byte & 0x40) != 0));
      if (more)
        byte |= 0x80;
      VARRAY_PUSH_UCHAR (*data_area, byte);
    }
  while (more);
}
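
/* For illustration: tracing the routines above on sample values gives
   the usual LEB128 encodings, e.g. pushing 624485 through push_uleb128
   appends the bytes 0xe5 0x8e 0x26, and pushing -129 through
   push_sleb128 appends 0xff 0x7e.  Note that push_sleb128 relies on
   >> acting as an arithmetic shift for negative values.  */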

\f
#ifndef HAVE_AS_LEB128
static int
dw2_size_of_call_site_table ()
{
  int n = cfun->eh->call_site_data_used;
  int size = n * (4 + 4 + 4);
  int i;

  for (i = 0; i < n; ++i)
    {
      struct call_site_record *cs = &cfun->eh->call_site_data[i];
      size += size_of_uleb128 (cs->action);
    }

  return size;
}

static int
sjlj_size_of_call_site_table ()
{
  int n = cfun->eh->call_site_data_used;
  int size = 0;
  int i;

  for (i = 0; i < n; ++i)
    {
      struct call_site_record *cs = &cfun->eh->call_site_data[i];
      size += size_of_uleb128 (INTVAL (cs->landing_pad));
      size += size_of_uleb128 (cs->action);
    }

  return size;
}
#endif
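
/* As the sizing routines above and the output routines below have it:
   without assembler .uleb128 support, a dwarf2 call-site record is a
   4-byte region start delta, a 4-byte region length, a 4-byte landing
   pad delta and a uleb128 action (hence the n * (4 + 4 + 4) term),
   while an sjlj record is just a uleb128 landing-pad value (a constant
   rather than a label) followed by a uleb128 action.  */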

static void
dw2_output_call_site_table ()
{
  const char *const function_start_lab
    = IDENTIFIER_POINTER (current_function_func_begin_label);
  int n = cfun->eh->call_site_data_used;
  int i;

  for (i = 0; i < n; ++i)
    {
      struct call_site_record *cs = &cfun->eh->call_site_data[i];
      char reg_start_lab[32];
      char reg_end_lab[32];
      char landing_pad_lab[32];

      ASM_GENERATE_INTERNAL_LABEL (reg_start_lab, "LEHB", call_site_base + i);
      ASM_GENERATE_INTERNAL_LABEL (reg_end_lab, "LEHE", call_site_base + i);

      if (cs->landing_pad)
        ASM_GENERATE_INTERNAL_LABEL (landing_pad_lab, "L",
                                     CODE_LABEL_NUMBER (cs->landing_pad));

      /* ??? Perhaps use insn length scaling if the assembler supports
         generic arithmetic.  */
      /* ??? Perhaps use attr_length to choose data1 or data2 instead of
         data4 if the function is small enough.  */
#ifdef HAVE_AS_LEB128
      dw2_asm_output_delta_uleb128 (reg_start_lab, function_start_lab,
                                    "region %d start", i);
      dw2_asm_output_delta_uleb128 (reg_end_lab, reg_start_lab,
                                    "length");
      if (cs->landing_pad)
        dw2_asm_output_delta_uleb128 (landing_pad_lab, function_start_lab,
                                      "landing pad");
      else
        dw2_asm_output_data_uleb128 (0, "landing pad");
#else
      dw2_asm_output_delta (4, reg_start_lab, function_start_lab,
                            "region %d start", i);
      dw2_asm_output_delta (4, reg_end_lab, reg_start_lab, "length");
      if (cs->landing_pad)
        dw2_asm_output_delta (4, landing_pad_lab, function_start_lab,
                              "landing pad");
      else
        dw2_asm_output_data (4, 0, "landing pad");
#endif
      dw2_asm_output_data_uleb128 (cs->action, "action");
    }

  call_site_base += n;
}

static void
sjlj_output_call_site_table ()
{
  int n = cfun->eh->call_site_data_used;
  int i;

  for (i = 0; i < n; ++i)
    {
      struct call_site_record *cs = &cfun->eh->call_site_data[i];

      dw2_asm_output_data_uleb128 (INTVAL (cs->landing_pad),
                                   "region %d landing pad", i);
      dw2_asm_output_data_uleb128 (cs->action, "action");
    }

  call_site_base += n;
}

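/* Roughly, the LSDA emitted below is laid out in the order the pieces
   are written:
     @LPStart format byte   (always DW_EH_PE_omit here, so @LPStart == @Start)
     @TType format byte
     uleb128 @TType base offset   (only if there is any type data)
     call-site format byte
     call-site table length
     call-site table
     action record table
     alignment padding
     @TType (type filter) table
     exception specification table  */
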
void
output_function_exception_table ()
{
  int tt_format, cs_format, lp_format, i, n;
#ifdef HAVE_AS_LEB128
  char ttype_label[32];
  char cs_after_size_label[32];
  char cs_end_label[32];
#else
  int call_site_len;
#endif
  int have_tt_data;
  int funcdef_number;
  int tt_format_size = 0;

  /* Not all functions need anything.  */
  if (! cfun->uses_eh_lsda)
    return;

  funcdef_number = (USING_SJLJ_EXCEPTIONS
                    ? sjlj_funcdef_number
                    : current_funcdef_number);

#ifdef IA64_UNWIND_INFO
  fputs ("\t.personality\t", asm_out_file);
  output_addr_const (asm_out_file, eh_personality_libfunc);
  fputs ("\n\t.handlerdata\n", asm_out_file);
  /* Note that varasm still thinks we're in the function's code section.
     The ".endp" directive that will immediately follow will take us back.  */
#else
  (*targetm.asm_out.exception_section) ();
#endif

  have_tt_data = (VARRAY_ACTIVE_SIZE (cfun->eh->ttype_data) > 0
                  || VARRAY_ACTIVE_SIZE (cfun->eh->ehspec_data) > 0);

  /* Indicate the format of the @TType entries.  */
  if (! have_tt_data)
    tt_format = DW_EH_PE_omit;
  else
    {
      tt_format = ASM_PREFERRED_EH_DATA_FORMAT (/*code=*/0, /*global=*/1);
#ifdef HAVE_AS_LEB128
      ASM_GENERATE_INTERNAL_LABEL (ttype_label, "LLSDATT", funcdef_number);
#endif
      tt_format_size = size_of_encoded_value (tt_format);

      assemble_align (tt_format_size * BITS_PER_UNIT);
    }

  ASM_OUTPUT_INTERNAL_LABEL (asm_out_file, "LLSDA", funcdef_number);

  /* The LSDA header.  */

  /* Indicate the format of the landing pad start pointer.  An omitted
     field implies @LPStart == @Start.  */
  /* Currently we always put @LPStart == @Start.  This field would
     be most useful in moving the landing pads completely out of
     line to another section, but it could also be used to minimize
     the size of uleb128 landing pad offsets.  */
  lp_format = DW_EH_PE_omit;
  dw2_asm_output_data (1, lp_format, "@LPStart format (%s)",
                       eh_data_format_name (lp_format));

  /* @LPStart pointer would go here.  */

  dw2_asm_output_data (1, tt_format, "@TType format (%s)",
                       eh_data_format_name (tt_format));

#ifndef HAVE_AS_LEB128
  if (USING_SJLJ_EXCEPTIONS)
    call_site_len = sjlj_size_of_call_site_table ();
  else
    call_site_len = dw2_size_of_call_site_table ();
#endif

  /* A pc-relative 4-byte displacement to the @TType data.  */
  if (have_tt_data)
    {
#ifdef HAVE_AS_LEB128
      char ttype_after_disp_label[32];
      ASM_GENERATE_INTERNAL_LABEL (ttype_after_disp_label, "LLSDATTD",
                                   funcdef_number);
      dw2_asm_output_delta_uleb128 (ttype_label, ttype_after_disp_label,
                                    "@TType base offset");
      ASM_OUTPUT_LABEL (asm_out_file, ttype_after_disp_label);
#else
      /* Ugh.  Alignment complicates things.  */
      unsigned int before_disp, after_disp, last_disp, disp;

      before_disp = 1 + 1;
      after_disp = (1 + size_of_uleb128 (call_site_len)
                    + call_site_len
                    + VARRAY_ACTIVE_SIZE (cfun->eh->action_record_data)
                    + (VARRAY_ACTIVE_SIZE (cfun->eh->ttype_data)
                       * tt_format_size));

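      /* The size of the uleb128 encoding of the displacement depends on
         the displacement itself, which in turn includes the alignment
         padding that the encoding's size determines, so iterate until
         the value stops changing.  */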
      disp = after_disp;
      do
        {
          unsigned int disp_size, pad;

          last_disp = disp;
          disp_size = size_of_uleb128 (disp);
          pad = before_disp + disp_size + after_disp;
          if (pad % tt_format_size)
            pad = tt_format_size - (pad % tt_format_size);
          else
            pad = 0;
          disp = after_disp + pad;
        }
      while (disp != last_disp);

      dw2_asm_output_data_uleb128 (disp, "@TType base offset");
#endif
    }

  /* Indicate the format of the call-site offsets.  */
#ifdef HAVE_AS_LEB128
  cs_format = DW_EH_PE_uleb128;
#else
  cs_format = DW_EH_PE_udata4;
#endif
  dw2_asm_output_data (1, cs_format, "call-site format (%s)",
                       eh_data_format_name (cs_format));

#ifdef HAVE_AS_LEB128
  ASM_GENERATE_INTERNAL_LABEL (cs_after_size_label, "LLSDACSB",
                               funcdef_number);
  ASM_GENERATE_INTERNAL_LABEL (cs_end_label, "LLSDACSE",
                               funcdef_number);
  dw2_asm_output_delta_uleb128 (cs_end_label, cs_after_size_label,
                                "Call-site table length");
  ASM_OUTPUT_LABEL (asm_out_file, cs_after_size_label);
  if (USING_SJLJ_EXCEPTIONS)
    sjlj_output_call_site_table ();
  else
    dw2_output_call_site_table ();
  ASM_OUTPUT_LABEL (asm_out_file, cs_end_label);
#else
  dw2_asm_output_data_uleb128 (call_site_len, "Call-site table length");
  if (USING_SJLJ_EXCEPTIONS)
    sjlj_output_call_site_table ();
  else
    dw2_output_call_site_table ();
#endif

  /* ??? Decode and interpret the data for flag_debug_asm.  */
  n = VARRAY_ACTIVE_SIZE (cfun->eh->action_record_data);
  for (i = 0; i < n; ++i)
    dw2_asm_output_data (1, VARRAY_UCHAR (cfun->eh->action_record_data, i),
                         (i ? NULL : "Action record table"));

  if (have_tt_data)
    assemble_align (tt_format_size * BITS_PER_UNIT);

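  /* The type filter table is emitted in reverse order below; presumably
     this is because action records index the table backwards from the
     @TType base label that follows it.  */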
  i = VARRAY_ACTIVE_SIZE (cfun->eh->ttype_data);
  while (i-- > 0)
    {
      tree type = VARRAY_TREE (cfun->eh->ttype_data, i);
      rtx value;

      if (type == NULL_TREE)
        type = integer_zero_node;
      else
        type = lookup_type_for_runtime (type);

      value = expand_expr (type, NULL_RTX, VOIDmode, EXPAND_INITIALIZER);
      if (tt_format == DW_EH_PE_absptr || tt_format == DW_EH_PE_aligned)
        assemble_integer (value, tt_format_size,
                          tt_format_size * BITS_PER_UNIT, 1);
      else
        dw2_asm_output_encoded_addr_rtx (tt_format, value, NULL);
    }

#ifdef HAVE_AS_LEB128
  if (have_tt_data)
    ASM_OUTPUT_LABEL (asm_out_file, ttype_label);
#endif

  /* ??? Decode and interpret the data for flag_debug_asm.  */
  n = VARRAY_ACTIVE_SIZE (cfun->eh->ehspec_data);
  for (i = 0; i < n; ++i)
    dw2_asm_output_data (1, VARRAY_UCHAR (cfun->eh->ehspec_data, i),
                         (i ? NULL : "Exception specification table"));

  function_section (current_function_decl);

  if (USING_SJLJ_EXCEPTIONS)
    sjlj_funcdef_number += 1;
}