]> git.ipfire.org Git - thirdparty/gcc.git/blame - gcc/except.c
c-common.h (STMT_EXPR_NO_SCOPE): New macro.
[thirdparty/gcc.git] / gcc / except.c
CommitLineData
12670d88 1/* Implements exception handling.
3f2c5d1a 2 Copyright (C) 1989, 1992, 1993, 1994, 1995, 1996, 1997, 1998,
a8154559 3 1999, 2000, 2001, 2002 Free Software Foundation, Inc.
4956d07c
MS
4 Contributed by Mike Stump <mrs@cygnus.com>.
5
1322177d 6This file is part of GCC.
4956d07c 7
1322177d
LB
8GCC is free software; you can redistribute it and/or modify it under
9the terms of the GNU General Public License as published by the Free
10Software Foundation; either version 2, or (at your option) any later
11version.
4956d07c 12
1322177d
LB
13GCC is distributed in the hope that it will be useful, but WITHOUT ANY
14WARRANTY; without even the implied warranty of MERCHANTABILITY or
15FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
16for more details.
4956d07c
MS
17
18You should have received a copy of the GNU General Public License
1322177d
LB
19along with GCC; see the file COPYING. If not, write to the Free
20Software Foundation, 59 Temple Place - Suite 330, Boston, MA
2102111-1307, USA. */
4956d07c
MS
22
23
12670d88
RK
24/* An exception is an event that can be signaled from within a
25 function. This event can then be "caught" or "trapped" by the
26 callers of this function. This potentially allows program flow to
956d6950 27 be transferred to any arbitrary code associated with a function call
12670d88
RK
28 several levels up the stack.
29
30 The intended use for this mechanism is for signaling "exceptional
31 events" in an out-of-band fashion, hence its name. The C++ language
32 (and many other OO-styled or functional languages) practically
33 requires such a mechanism, as otherwise it becomes very difficult
34 or even impossible to signal failure conditions in complex
35 situations. The traditional C++ example is when an error occurs in
36 the process of constructing an object; without such a mechanism, it
37 is impossible to signal that the error occurs without adding global
38 state variables and error checks around every object construction.
39
40 The act of causing this event to occur is referred to as "throwing
41 an exception". (Alternate terms include "raising an exception" or
42 "signaling an exception".) The term "throw" is used because control
43 is returned to the callers of the function that is signaling the
44 exception, and thus there is the concept of "throwing" the
45 exception up the call stack.
46
52a11cbf 47 [ Add updated documentation on how to use this. ] */
4956d07c
MS
48
49
50#include "config.h"
670ee920 51#include "system.h"
4956d07c
MS
52#include "rtl.h"
53#include "tree.h"
54#include "flags.h"
4956d07c 55#include "function.h"
4956d07c 56#include "expr.h"
e78d8e51 57#include "libfuncs.h"
4956d07c 58#include "insn-config.h"
52a11cbf
RH
59#include "except.h"
60#include "integrate.h"
61#include "hard-reg-set.h"
62#include "basic-block.h"
4956d07c 63#include "output.h"
52a11cbf
RH
64#include "dwarf2asm.h"
65#include "dwarf2out.h"
2a1ee410 66#include "dwarf2.h"
10f0ad3d 67#include "toplev.h"
52a11cbf 68#include "hashtab.h"
2b12ffe0 69#include "intl.h"
87ff9c8e 70#include "ggc.h"
b1474bb7 71#include "tm_p.h"
07c9d2eb 72#include "target.h"
f1e639b1 73#include "langhooks.h"
52a11cbf
RH
74
75/* Provide defaults for stuff that may not be defined when using
76 sjlj exceptions. */
77#ifndef EH_RETURN_STACKADJ_RTX
78#define EH_RETURN_STACKADJ_RTX 0
79#endif
80#ifndef EH_RETURN_HANDLER_RTX
81#define EH_RETURN_HANDLER_RTX 0
82#endif
83#ifndef EH_RETURN_DATA_REGNO
84#define EH_RETURN_DATA_REGNO(N) INVALID_REGNUM
461fc4de
RH
85#endif
86
27a36778 87
52a11cbf
RH
88/* Nonzero means enable synchronous exceptions for non-call instructions. */
89int flag_non_call_exceptions;
27a36778 90
52a11cbf
RH
91/* Protect cleanup actions with must-not-throw regions, with a call
92 to the given failure handler. */
e6855a2d 93tree (*lang_protect_cleanup_actions) PARAMS ((void));
27a36778 94
52a11cbf
RH
95/* Return true if type A catches type B. */
96int (*lang_eh_type_covers) PARAMS ((tree a, tree b));
27a36778 97
52a11cbf
RH
98/* Map a type to a runtime object to match type. */
99tree (*lang_eh_runtime_type) PARAMS ((tree));
4956d07c 100
6a58eee9
RH
101/* A hash table of label to region number. */
102
103struct ehl_map_entry
104{
105 rtx label;
106 struct eh_region *region;
107};
108
109static htab_t exception_handler_label_map;
4956d07c 110
52a11cbf 111static int call_site_base;
ae0ed63a 112static unsigned int sjlj_funcdef_number;
52a11cbf
RH
113static htab_t type_to_runtime_map;
114
115/* Describe the SjLj_Function_Context structure. */
116static tree sjlj_fc_type_node;
117static int sjlj_fc_call_site_ofs;
118static int sjlj_fc_data_ofs;
119static int sjlj_fc_personality_ofs;
120static int sjlj_fc_lsda_ofs;
121static int sjlj_fc_jbuf_ofs;
122\f
123/* Describes one exception region. */
124struct eh_region
125{
126 /* The immediately surrounding region. */
127 struct eh_region *outer;
956d6950 128
52a11cbf
RH
129 /* The list of immediately contained regions. */
130 struct eh_region *inner;
131 struct eh_region *next_peer;
956d6950 132
52a11cbf
RH
133 /* An identifier for this region. */
134 int region_number;
71038426 135
6a58eee9
RH
136 /* When a region is deleted, its parents inherit the REG_EH_REGION
137 numbers already assigned. */
138 bitmap aka;
139
52a11cbf
RH
140 /* Each region does exactly one thing. */
141 enum eh_region_type
142 {
572202a7
RK
143 ERT_UNKNOWN = 0,
144 ERT_CLEANUP,
52a11cbf
RH
145 ERT_TRY,
146 ERT_CATCH,
147 ERT_ALLOWED_EXCEPTIONS,
148 ERT_MUST_NOT_THROW,
149 ERT_THROW,
150 ERT_FIXUP
151 } type;
152
eaec9b3d 153 /* Holds the action to perform based on the preceding type. */
52a11cbf
RH
154 union {
155 /* A list of catch blocks, a surrounding try block,
156 and the label for continuing after a catch. */
157 struct {
158 struct eh_region *catch;
159 struct eh_region *last_catch;
160 struct eh_region *prev_try;
161 rtx continue_label;
162 } try;
163
6d41a92f
OH
164 /* The list through the catch handlers, the list of type objects
165 matched, and the list of associated filters. */
52a11cbf
RH
166 struct {
167 struct eh_region *next_catch;
168 struct eh_region *prev_catch;
6d41a92f
OH
169 tree type_list;
170 tree filter_list;
52a11cbf
RH
171 } catch;
172
173 /* A tree_list of allowed types. */
174 struct {
175 tree type_list;
176 int filter;
177 } allowed;
178
3f2c5d1a 179 /* The type given by a call to "throw foo();", or discovered
52a11cbf
RH
180 for a throw. */
181 struct {
182 tree type;
183 } throw;
184
185 /* Retain the cleanup expression even after expansion so that
186 we can match up fixup regions. */
187 struct {
188 tree exp;
189 } cleanup;
190
191 /* The real region (by expression and by pointer) that fixup code
192 should live in. */
193 struct {
194 tree cleanup_exp;
195 struct eh_region *real_region;
196 } fixup;
197 } u;
198
47c84870
JM
199 /* Entry point for this region's handler before landing pads are built. */
200 rtx label;
52a11cbf 201
47c84870 202 /* Entry point for this region's handler from the runtime eh library. */
52a11cbf
RH
203 rtx landing_pad;
204
47c84870 205 /* Entry point for this region's handler from an inner region. */
52a11cbf 206 rtx post_landing_pad;
47c84870
JM
207
208 /* The RESX insn for handing off control to the next outermost handler,
209 if appropriate. */
210 rtx resume;
52a11cbf 211};
71038426 212
52a11cbf
RH
213/* Used to save exception status for each function. */
214struct eh_status
215{
216 /* The tree of all regions for this function. */
217 struct eh_region *region_tree;
e6cfb550 218
52a11cbf
RH
219 /* The same information as an indexable array. */
220 struct eh_region **region_array;
e6cfb550 221
52a11cbf
RH
222 /* The most recently open region. */
223 struct eh_region *cur_region;
e6cfb550 224
52a11cbf
RH
225 /* This is the region for which we are processing catch blocks. */
226 struct eh_region *try_region;
71038426 227
52a11cbf
RH
228 rtx filter;
229 rtx exc_ptr;
4956d07c 230
52a11cbf
RH
231 int built_landing_pads;
232 int last_region_number;
e6cfb550 233
52a11cbf
RH
234 varray_type ttype_data;
235 varray_type ehspec_data;
236 varray_type action_record_data;
6814a8a0 237
52a11cbf
RH
238 struct call_site_record
239 {
240 rtx landing_pad;
241 int action;
242 } *call_site_data;
243 int call_site_data_used;
244 int call_site_data_size;
245
246 rtx ehr_stackadj;
247 rtx ehr_handler;
248 rtx ehr_label;
249
250 rtx sjlj_fc;
251 rtx sjlj_exit_after;
252};
e6cfb550 253
52a11cbf
RH
254\f
255static void mark_eh_region PARAMS ((struct eh_region *));
6a58eee9
RH
256static int mark_ehl_map_entry PARAMS ((PTR *, PTR));
257static void mark_ehl_map PARAMS ((void *));
258
259static void free_region PARAMS ((struct eh_region *));
52a11cbf
RH
260
261static int t2r_eq PARAMS ((const PTR,
262 const PTR));
263static hashval_t t2r_hash PARAMS ((const PTR));
264static int t2r_mark_1 PARAMS ((PTR *, PTR));
265static void t2r_mark PARAMS ((PTR));
266static void add_type_for_runtime PARAMS ((tree));
267static tree lookup_type_for_runtime PARAMS ((tree));
268
269static struct eh_region *expand_eh_region_end PARAMS ((void));
270
86c99549 271static rtx get_exception_filter PARAMS ((struct function *));
47c84870 272
52a11cbf
RH
273static void collect_eh_region_array PARAMS ((void));
274static void resolve_fixup_regions PARAMS ((void));
275static void remove_fixup_regions PARAMS ((void));
655dd289 276static void remove_unreachable_regions PARAMS ((rtx));
52a11cbf
RH
277static void convert_from_eh_region_ranges_1 PARAMS ((rtx *, int *, int));
278
279static struct eh_region *duplicate_eh_region_1 PARAMS ((struct eh_region *,
280 struct inline_remap *));
281static void duplicate_eh_region_2 PARAMS ((struct eh_region *,
282 struct eh_region **));
283static int ttypes_filter_eq PARAMS ((const PTR,
284 const PTR));
285static hashval_t ttypes_filter_hash PARAMS ((const PTR));
286static int ehspec_filter_eq PARAMS ((const PTR,
287 const PTR));
288static hashval_t ehspec_filter_hash PARAMS ((const PTR));
289static int add_ttypes_entry PARAMS ((htab_t, tree));
290static int add_ehspec_entry PARAMS ((htab_t, htab_t,
291 tree));
292static void assign_filter_values PARAMS ((void));
293static void build_post_landing_pads PARAMS ((void));
294static void connect_post_landing_pads PARAMS ((void));
295static void dw2_build_landing_pads PARAMS ((void));
296
297struct sjlj_lp_info;
298static bool sjlj_find_directly_reachable_regions
299 PARAMS ((struct sjlj_lp_info *));
300static void sjlj_assign_call_site_values
301 PARAMS ((rtx, struct sjlj_lp_info *));
302static void sjlj_mark_call_sites
303 PARAMS ((struct sjlj_lp_info *));
304static void sjlj_emit_function_enter PARAMS ((rtx));
305static void sjlj_emit_function_exit PARAMS ((void));
306static void sjlj_emit_dispatch_table
307 PARAMS ((rtx, struct sjlj_lp_info *));
308static void sjlj_build_landing_pads PARAMS ((void));
309
6a58eee9
RH
310static hashval_t ehl_hash PARAMS ((const PTR));
311static int ehl_eq PARAMS ((const PTR,
312 const PTR));
313static void ehl_free PARAMS ((PTR));
314static void add_ehl_entry PARAMS ((rtx,
315 struct eh_region *));
52a11cbf
RH
316static void remove_exception_handler_label PARAMS ((rtx));
317static void remove_eh_handler PARAMS ((struct eh_region *));
6a58eee9 318static int for_each_eh_label_1 PARAMS ((PTR *, PTR));
52a11cbf
RH
319
320struct reachable_info;
321
322/* The return value of reachable_next_level. */
323enum reachable_code
324{
325 /* The given exception is not processed by the given region. */
326 RNL_NOT_CAUGHT,
327 /* The given exception may need processing by the given region. */
328 RNL_MAYBE_CAUGHT,
329 /* The given exception is completely processed by the given region. */
330 RNL_CAUGHT,
331 /* The given exception is completely processed by the runtime. */
332 RNL_BLOCKED
333};
e6cfb550 334
52a11cbf
RH
335static int check_handled PARAMS ((tree, tree));
336static void add_reachable_handler
337 PARAMS ((struct reachable_info *, struct eh_region *,
338 struct eh_region *));
339static enum reachable_code reachable_next_level
340 PARAMS ((struct eh_region *, tree, struct reachable_info *));
341
342static int action_record_eq PARAMS ((const PTR,
343 const PTR));
344static hashval_t action_record_hash PARAMS ((const PTR));
345static int add_action_record PARAMS ((htab_t, int, int));
346static int collect_one_action_chain PARAMS ((htab_t,
347 struct eh_region *));
348static int add_call_site PARAMS ((rtx, int));
349
350static void push_uleb128 PARAMS ((varray_type *,
351 unsigned int));
352static void push_sleb128 PARAMS ((varray_type *, int));
52a11cbf
RH
353#ifndef HAVE_AS_LEB128
354static int dw2_size_of_call_site_table PARAMS ((void));
355static int sjlj_size_of_call_site_table PARAMS ((void));
356#endif
357static void dw2_output_call_site_table PARAMS ((void));
358static void sjlj_output_call_site_table PARAMS ((void));
e6cfb550 359
52a11cbf
RH
360\f
361/* Routine to see if exception handling is turned on.
362 DO_WARN is non-zero if we want to inform the user that exception
3f2c5d1a 363 handling is turned off.
4956d07c 364
52a11cbf
RH
365 This is used to ensure that -fexceptions has been specified if the
366 compiler tries to use any exception-specific functions. */
4956d07c 367
52a11cbf
RH
368int
369doing_eh (do_warn)
370 int do_warn;
371{
372 if (! flag_exceptions)
373 {
374 static int warned = 0;
375 if (! warned && do_warn)
376 {
377 error ("exception handling disabled, use -fexceptions to enable");
378 warned = 1;
379 }
380 return 0;
381 }
382 return 1;
4956d07c
MS
383}
384
52a11cbf
RH
385\f
386void
387init_eh ()
4956d07c 388{
6a58eee9 389 ggc_add_root (&exception_handler_label_map, 1, 1, mark_ehl_map);
4956d07c 390
52a11cbf
RH
391 if (! flag_exceptions)
392 return;
4956d07c 393
52a11cbf
RH
394 type_to_runtime_map = htab_create (31, t2r_hash, t2r_eq, NULL);
395 ggc_add_root (&type_to_runtime_map, 1, sizeof (htab_t), t2r_mark);
4956d07c 396
52a11cbf
RH
397 /* Create the SjLj_Function_Context structure. This should match
398 the definition in unwind-sjlj.c. */
399 if (USING_SJLJ_EXCEPTIONS)
400 {
401 tree f_jbuf, f_per, f_lsda, f_prev, f_cs, f_data, tmp;
4956d07c 402
f1e639b1 403 sjlj_fc_type_node = (*lang_hooks.types.make_type) (RECORD_TYPE);
52a11cbf 404 ggc_add_tree_root (&sjlj_fc_type_node, 1);
9a0d1e1b 405
52a11cbf
RH
406 f_prev = build_decl (FIELD_DECL, get_identifier ("__prev"),
407 build_pointer_type (sjlj_fc_type_node));
408 DECL_FIELD_CONTEXT (f_prev) = sjlj_fc_type_node;
9a0d1e1b 409
52a11cbf
RH
410 f_cs = build_decl (FIELD_DECL, get_identifier ("__call_site"),
411 integer_type_node);
412 DECL_FIELD_CONTEXT (f_cs) = sjlj_fc_type_node;
4956d07c 413
52a11cbf 414 tmp = build_index_type (build_int_2 (4 - 1, 0));
b0c48229
NB
415 tmp = build_array_type ((*lang_hooks.types.type_for_mode) (word_mode, 1),
416 tmp);
52a11cbf
RH
417 f_data = build_decl (FIELD_DECL, get_identifier ("__data"), tmp);
418 DECL_FIELD_CONTEXT (f_data) = sjlj_fc_type_node;
9a0d1e1b 419
52a11cbf
RH
420 f_per = build_decl (FIELD_DECL, get_identifier ("__personality"),
421 ptr_type_node);
422 DECL_FIELD_CONTEXT (f_per) = sjlj_fc_type_node;
4956d07c 423
52a11cbf
RH
424 f_lsda = build_decl (FIELD_DECL, get_identifier ("__lsda"),
425 ptr_type_node);
426 DECL_FIELD_CONTEXT (f_lsda) = sjlj_fc_type_node;
6814a8a0 427
52a11cbf
RH
428#ifdef DONT_USE_BUILTIN_SETJMP
429#ifdef JMP_BUF_SIZE
430 tmp = build_int_2 (JMP_BUF_SIZE - 1, 0);
431#else
432 /* Should be large enough for most systems, if it is not,
433 JMP_BUF_SIZE should be defined with the proper value. It will
434 also tend to be larger than necessary for most systems, a more
435 optimal port will define JMP_BUF_SIZE. */
436 tmp = build_int_2 (FIRST_PSEUDO_REGISTER + 2 - 1, 0);
437#endif
438#else
439 /* This is 2 for builtin_setjmp, plus whatever the target requires
440 via STACK_SAVEAREA_MODE (SAVE_NONLOCAL). */
441 tmp = build_int_2 ((GET_MODE_SIZE (STACK_SAVEAREA_MODE (SAVE_NONLOCAL))
442 / GET_MODE_SIZE (Pmode)) + 2 - 1, 0);
443#endif
444 tmp = build_index_type (tmp);
445 tmp = build_array_type (ptr_type_node, tmp);
446 f_jbuf = build_decl (FIELD_DECL, get_identifier ("__jbuf"), tmp);
447#ifdef DONT_USE_BUILTIN_SETJMP
448 /* We don't know what the alignment requirements of the
449 runtime's jmp_buf has. Overestimate. */
450 DECL_ALIGN (f_jbuf) = BIGGEST_ALIGNMENT;
451 DECL_USER_ALIGN (f_jbuf) = 1;
452#endif
453 DECL_FIELD_CONTEXT (f_jbuf) = sjlj_fc_type_node;
454
455 TYPE_FIELDS (sjlj_fc_type_node) = f_prev;
456 TREE_CHAIN (f_prev) = f_cs;
457 TREE_CHAIN (f_cs) = f_data;
458 TREE_CHAIN (f_data) = f_per;
459 TREE_CHAIN (f_per) = f_lsda;
460 TREE_CHAIN (f_lsda) = f_jbuf;
461
462 layout_type (sjlj_fc_type_node);
463
464 /* Cache the interesting field offsets so that we have
465 easy access from rtl. */
466 sjlj_fc_call_site_ofs
467 = (tree_low_cst (DECL_FIELD_OFFSET (f_cs), 1)
468 + tree_low_cst (DECL_FIELD_BIT_OFFSET (f_cs), 1) / BITS_PER_UNIT);
469 sjlj_fc_data_ofs
470 = (tree_low_cst (DECL_FIELD_OFFSET (f_data), 1)
471 + tree_low_cst (DECL_FIELD_BIT_OFFSET (f_data), 1) / BITS_PER_UNIT);
472 sjlj_fc_personality_ofs
473 = (tree_low_cst (DECL_FIELD_OFFSET (f_per), 1)
474 + tree_low_cst (DECL_FIELD_BIT_OFFSET (f_per), 1) / BITS_PER_UNIT);
475 sjlj_fc_lsda_ofs
476 = (tree_low_cst (DECL_FIELD_OFFSET (f_lsda), 1)
477 + tree_low_cst (DECL_FIELD_BIT_OFFSET (f_lsda), 1) / BITS_PER_UNIT);
478 sjlj_fc_jbuf_ofs
479 = (tree_low_cst (DECL_FIELD_OFFSET (f_jbuf), 1)
480 + tree_low_cst (DECL_FIELD_BIT_OFFSET (f_jbuf), 1) / BITS_PER_UNIT);
481 }
4956d07c
MS
482}
483
52a11cbf
RH
484void
485init_eh_for_function ()
4956d07c 486{
52a11cbf 487 cfun->eh = (struct eh_status *) xcalloc (1, sizeof (struct eh_status));
4956d07c
MS
488}
489
52a11cbf 490/* Mark EH for GC. */
4956d07c
MS
491
492static void
52a11cbf
RH
493mark_eh_region (region)
494 struct eh_region *region;
4956d07c 495{
52a11cbf
RH
496 if (! region)
497 return;
4956d07c 498
52a11cbf
RH
499 switch (region->type)
500 {
572202a7
RK
501 case ERT_UNKNOWN:
502 /* This can happen if a nested function is inside the body of a region
503 and we do a GC as part of processing it. */
504 break;
52a11cbf
RH
505 case ERT_CLEANUP:
506 ggc_mark_tree (region->u.cleanup.exp);
507 break;
508 case ERT_TRY:
509 ggc_mark_rtx (region->u.try.continue_label);
510 break;
511 case ERT_CATCH:
6d41a92f
OH
512 ggc_mark_tree (region->u.catch.type_list);
513 ggc_mark_tree (region->u.catch.filter_list);
52a11cbf
RH
514 break;
515 case ERT_ALLOWED_EXCEPTIONS:
516 ggc_mark_tree (region->u.allowed.type_list);
517 break;
518 case ERT_MUST_NOT_THROW:
519 break;
520 case ERT_THROW:
521 ggc_mark_tree (region->u.throw.type);
522 break;
523 case ERT_FIXUP:
524 ggc_mark_tree (region->u.fixup.cleanup_exp);
525 break;
526 default:
527 abort ();
528 }
4956d07c 529
52a11cbf 530 ggc_mark_rtx (region->label);
47c84870 531 ggc_mark_rtx (region->resume);
52a11cbf
RH
532 ggc_mark_rtx (region->landing_pad);
533 ggc_mark_rtx (region->post_landing_pad);
4956d07c
MS
534}
535
6a58eee9
RH
536static int
537mark_ehl_map_entry (pentry, data)
538 PTR *pentry;
539 PTR data ATTRIBUTE_UNUSED;
540{
541 struct ehl_map_entry *entry = *(struct ehl_map_entry **) pentry;
542 ggc_mark_rtx (entry->label);
543 return 1;
544}
545
546static void
547mark_ehl_map (pp)
548 void *pp;
549{
550 htab_t map = *(htab_t *) pp;
551 if (map)
552 htab_traverse (map, mark_ehl_map_entry, NULL);
553}
554
52a11cbf
RH
555void
556mark_eh_status (eh)
557 struct eh_status *eh;
4956d07c 558{
52a11cbf
RH
559 int i;
560
561 if (eh == 0)
562 return;
563
564 /* If we've called collect_eh_region_array, use it. Otherwise walk
565 the tree non-recursively. */
566 if (eh->region_array)
567 {
568 for (i = eh->last_region_number; i > 0; --i)
569 {
570 struct eh_region *r = eh->region_array[i];
571 if (r && r->region_number == i)
572 mark_eh_region (r);
573 }
574 }
575 else if (eh->region_tree)
576 {
577 struct eh_region *r = eh->region_tree;
578 while (1)
579 {
580 mark_eh_region (r);
581 if (r->inner)
582 r = r->inner;
583 else if (r->next_peer)
584 r = r->next_peer;
585 else
586 {
587 do {
588 r = r->outer;
589 if (r == NULL)
590 goto tree_done;
591 } while (r->next_peer == NULL);
592 r = r->next_peer;
593 }
594 }
595 tree_done:;
596 }
4956d07c 597
52a11cbf
RH
598 ggc_mark_rtx (eh->filter);
599 ggc_mark_rtx (eh->exc_ptr);
600 ggc_mark_tree_varray (eh->ttype_data);
4956d07c 601
52a11cbf
RH
602 if (eh->call_site_data)
603 {
604 for (i = eh->call_site_data_used - 1; i >= 0; --i)
605 ggc_mark_rtx (eh->call_site_data[i].landing_pad);
606 }
4956d07c 607
52a11cbf
RH
608 ggc_mark_rtx (eh->ehr_stackadj);
609 ggc_mark_rtx (eh->ehr_handler);
610 ggc_mark_rtx (eh->ehr_label);
4956d07c 611
52a11cbf
RH
612 ggc_mark_rtx (eh->sjlj_fc);
613 ggc_mark_rtx (eh->sjlj_exit_after);
4956d07c 614}
9a0d1e1b 615
6a58eee9
RH
/* Release the storage for a single region record R.  */

static inline void
free_region (r)
     struct eh_region *r;
{
  /* Note that the aka bitmap is freed by regset_release_memory.  But if
     we ever replace with a non-obstack implementation, this would be
     the place to do it.  */
  free (r);
}
625
52a11cbf
RH
626void
627free_eh_status (f)
628 struct function *f;
9a0d1e1b 629{
52a11cbf 630 struct eh_status *eh = f->eh;
250d07b6 631
52a11cbf 632 if (eh->region_array)
250d07b6 633 {
52a11cbf
RH
634 int i;
635 for (i = eh->last_region_number; i > 0; --i)
636 {
637 struct eh_region *r = eh->region_array[i];
638 /* Mind we don't free a region struct more than once. */
639 if (r && r->region_number == i)
6a58eee9 640 free_region (r);
52a11cbf
RH
641 }
642 free (eh->region_array);
250d07b6 643 }
52a11cbf 644 else if (eh->region_tree)
250d07b6 645 {
52a11cbf
RH
646 struct eh_region *next, *r = eh->region_tree;
647 while (1)
648 {
649 if (r->inner)
650 r = r->inner;
651 else if (r->next_peer)
652 {
653 next = r->next_peer;
6a58eee9 654 free_region (r);
52a11cbf
RH
655 r = next;
656 }
657 else
658 {
659 do {
660 next = r->outer;
6a58eee9 661 free_region (r);
52a11cbf
RH
662 r = next;
663 if (r == NULL)
664 goto tree_done;
665 } while (r->next_peer == NULL);
666 next = r->next_peer;
6a58eee9 667 free_region (r);
52a11cbf
RH
668 r = next;
669 }
670 }
671 tree_done:;
250d07b6
RH
672 }
673
52a11cbf
RH
674 VARRAY_FREE (eh->ttype_data);
675 VARRAY_FREE (eh->ehspec_data);
676 VARRAY_FREE (eh->action_record_data);
677 if (eh->call_site_data)
678 free (eh->call_site_data);
679
680 free (eh);
681 f->eh = NULL;
6a58eee9
RH
682
683 if (exception_handler_label_map)
684 {
685 htab_delete (exception_handler_label_map);
686 exception_handler_label_map = NULL;
687 }
9a0d1e1b
AM
688}
689
52a11cbf
RH
690\f
691/* Start an exception handling region. All instructions emitted
692 after this point are considered to be part of the region until
693 expand_eh_region_end is invoked. */
9a0d1e1b 694
52a11cbf
RH
695void
696expand_eh_region_start ()
9a0d1e1b 697{
52a11cbf
RH
698 struct eh_region *new_region;
699 struct eh_region *cur_region;
700 rtx note;
9a0d1e1b 701
52a11cbf
RH
702 if (! doing_eh (0))
703 return;
9a0d1e1b 704
52a11cbf
RH
705 /* Insert a new blank region as a leaf in the tree. */
706 new_region = (struct eh_region *) xcalloc (1, sizeof (*new_region));
707 cur_region = cfun->eh->cur_region;
708 new_region->outer = cur_region;
709 if (cur_region)
9a0d1e1b 710 {
52a11cbf
RH
711 new_region->next_peer = cur_region->inner;
712 cur_region->inner = new_region;
713 }
e6cfb550 714 else
9a0d1e1b 715 {
52a11cbf
RH
716 new_region->next_peer = cfun->eh->region_tree;
717 cfun->eh->region_tree = new_region;
9a0d1e1b 718 }
52a11cbf
RH
719 cfun->eh->cur_region = new_region;
720
721 /* Create a note marking the start of this region. */
722 new_region->region_number = ++cfun->eh->last_region_number;
6496a589 723 note = emit_note (NULL, NOTE_INSN_EH_REGION_BEG);
52a11cbf 724 NOTE_EH_HANDLER (note) = new_region->region_number;
9a0d1e1b
AM
725}
726
52a11cbf 727/* Common code to end a region. Returns the region just ended. */
9f8e6243 728
52a11cbf
RH
729static struct eh_region *
730expand_eh_region_end ()
9f8e6243 731{
52a11cbf
RH
732 struct eh_region *cur_region = cfun->eh->cur_region;
733 rtx note;
734
a1f300c0 735 /* Create a note marking the end of this region. */
6496a589 736 note = emit_note (NULL, NOTE_INSN_EH_REGION_END);
52a11cbf
RH
737 NOTE_EH_HANDLER (note) = cur_region->region_number;
738
739 /* Pop. */
740 cfun->eh->cur_region = cur_region->outer;
741
52a11cbf 742 return cur_region;
9f8e6243
AM
743}
744
52a11cbf
RH
745/* End an exception handling region for a cleanup. HANDLER is an
746 expression to expand for the cleanup. */
9c606f69 747
52a11cbf
RH
748void
749expand_eh_region_end_cleanup (handler)
750 tree handler;
9c606f69 751{
52a11cbf 752 struct eh_region *region;
e6855a2d 753 tree protect_cleanup_actions;
52a11cbf 754 rtx around_label;
47c84870 755 rtx data_save[2];
52a11cbf
RH
756
757 if (! doing_eh (0))
758 return;
9c606f69 759
52a11cbf
RH
760 region = expand_eh_region_end ();
761 region->type = ERT_CLEANUP;
762 region->label = gen_label_rtx ();
763 region->u.cleanup.exp = handler;
9c606f69 764
52a11cbf
RH
765 around_label = gen_label_rtx ();
766 emit_jump (around_label);
9c606f69 767
52a11cbf 768 emit_label (region->label);
9c606f69 769
e6855a2d 770 /* Give the language a chance to specify an action to be taken if an
a1f300c0 771 exception is thrown that would propagate out of the HANDLER. */
3f2c5d1a
RS
772 protect_cleanup_actions
773 = (lang_protect_cleanup_actions
774 ? (*lang_protect_cleanup_actions) ()
e6855a2d
MM
775 : NULL_TREE);
776
52a11cbf
RH
777 if (protect_cleanup_actions)
778 expand_eh_region_start ();
9c606f69 779
47c84870
JM
780 /* In case this cleanup involves an inline destructor with a try block in
781 it, we need to save the EH return data registers around it. */
782 data_save[0] = gen_reg_rtx (Pmode);
86c99549 783 emit_move_insn (data_save[0], get_exception_pointer (cfun));
16842c15 784 data_save[1] = gen_reg_rtx (word_mode);
86c99549 785 emit_move_insn (data_save[1], get_exception_filter (cfun));
47c84870 786
52a11cbf 787 expand_expr (handler, const0_rtx, VOIDmode, 0);
9c606f69 788
47c84870
JM
789 emit_move_insn (cfun->eh->exc_ptr, data_save[0]);
790 emit_move_insn (cfun->eh->filter, data_save[1]);
791
52a11cbf
RH
792 if (protect_cleanup_actions)
793 expand_eh_region_end_must_not_throw (protect_cleanup_actions);
a9f0664a 794
c10f3adf
AH
795 /* We need any stack adjustment complete before the around_label. */
796 do_pending_stack_adjust ();
797
52a11cbf
RH
798 /* We delay the generation of the _Unwind_Resume until we generate
799 landing pads. We emit a marker here so as to get good control
800 flow data in the meantime. */
47c84870
JM
801 region->resume
802 = emit_jump_insn (gen_rtx_RESX (VOIDmode, region->region_number));
52a11cbf
RH
803 emit_barrier ();
804
52a11cbf 805 emit_label (around_label);
9c606f69
AM
806}
807
52a11cbf
RH
808/* End an exception handling region for a try block, and prepares
809 for subsequent calls to expand_start_catch. */
9a0d1e1b 810
52a11cbf
RH
811void
812expand_start_all_catch ()
9a0d1e1b 813{
52a11cbf 814 struct eh_region *region;
9a0d1e1b 815
52a11cbf
RH
816 if (! doing_eh (1))
817 return;
9a0d1e1b 818
52a11cbf
RH
819 region = expand_eh_region_end ();
820 region->type = ERT_TRY;
821 region->u.try.prev_try = cfun->eh->try_region;
822 region->u.try.continue_label = gen_label_rtx ();
9a0d1e1b 823
52a11cbf
RH
824 cfun->eh->try_region = region;
825
826 emit_jump (region->u.try.continue_label);
827}
9a0d1e1b 828
6d41a92f
OH
829/* Begin a catch clause. TYPE is the type caught, a list of such types, or
830 null if this is a catch-all clause. Providing a type list enables to
831 associate the catch region with potentially several exception types, which
23d1aac4 832 is useful e.g. for Ada. */
9a0d1e1b 833
52a11cbf 834void
6d41a92f
OH
835expand_start_catch (type_or_list)
836 tree type_or_list;
9a0d1e1b 837{
52a11cbf 838 struct eh_region *t, *c, *l;
6d41a92f 839 tree type_list;
52a11cbf
RH
840
841 if (! doing_eh (0))
842 return;
843
6d41a92f
OH
844 type_list = type_or_list;
845
846 if (type_or_list)
847 {
848 /* Ensure to always end up with a type list to normalize further
849 processing, then register each type against the runtime types
850 map. */
851 tree type_node;
852
853 if (TREE_CODE (type_or_list) != TREE_LIST)
854 type_list = tree_cons (NULL_TREE, type_or_list, NULL_TREE);
855
856 type_node = type_list;
857 for (; type_node; type_node = TREE_CHAIN (type_node))
858 add_type_for_runtime (TREE_VALUE (type_node));
859 }
860
52a11cbf
RH
861 expand_eh_region_start ();
862
863 t = cfun->eh->try_region;
864 c = cfun->eh->cur_region;
865 c->type = ERT_CATCH;
6d41a92f 866 c->u.catch.type_list = type_list;
52a11cbf
RH
867 c->label = gen_label_rtx ();
868
869 l = t->u.try.last_catch;
870 c->u.catch.prev_catch = l;
871 if (l)
872 l->u.catch.next_catch = c;
873 else
874 t->u.try.catch = c;
875 t->u.try.last_catch = c;
9a0d1e1b 876
52a11cbf 877 emit_label (c->label);
9a0d1e1b
AM
878}
879
52a11cbf 880/* End a catch clause. Control will resume after the try/catch block. */
9a0d1e1b 881
52a11cbf
RH
882void
883expand_end_catch ()
9a0d1e1b 884{
52a11cbf
RH
885 struct eh_region *try_region, *catch_region;
886
887 if (! doing_eh (0))
888 return;
889
890 catch_region = expand_eh_region_end ();
891 try_region = cfun->eh->try_region;
892
893 emit_jump (try_region->u.try.continue_label);
9a0d1e1b
AM
894}
895
52a11cbf 896/* End a sequence of catch handlers for a try block. */
9a0d1e1b 897
52a11cbf
RH
898void
899expand_end_all_catch ()
9a0d1e1b 900{
52a11cbf
RH
901 struct eh_region *try_region;
902
903 if (! doing_eh (0))
904 return;
905
906 try_region = cfun->eh->try_region;
907 cfun->eh->try_region = try_region->u.try.prev_try;
908
909 emit_label (try_region->u.try.continue_label);
9a0d1e1b
AM
910}
911
52a11cbf
RH
912/* End an exception region for an exception type filter. ALLOWED is a
913 TREE_LIST of types to be matched by the runtime. FAILURE is an
ff7cc307 914 expression to invoke if a mismatch occurs.
b4e49397
JM
915
916 ??? We could use these semantics for calls to rethrow, too; if we can
917 see the surrounding catch clause, we know that the exception we're
918 rethrowing satisfies the "filter" of the catch type. */
9a0d1e1b 919
52a11cbf
RH
920void
921expand_eh_region_end_allowed (allowed, failure)
922 tree allowed, failure;
9a0d1e1b 923{
52a11cbf
RH
924 struct eh_region *region;
925 rtx around_label;
9a0d1e1b 926
52a11cbf
RH
927 if (! doing_eh (0))
928 return;
e6cfb550 929
52a11cbf
RH
930 region = expand_eh_region_end ();
931 region->type = ERT_ALLOWED_EXCEPTIONS;
932 region->u.allowed.type_list = allowed;
933 region->label = gen_label_rtx ();
9a0d1e1b 934
52a11cbf
RH
935 for (; allowed ; allowed = TREE_CHAIN (allowed))
936 add_type_for_runtime (TREE_VALUE (allowed));
9a0d1e1b 937
52a11cbf
RH
938 /* We must emit the call to FAILURE here, so that if this function
939 throws a different exception, that it will be processed by the
940 correct region. */
9a0d1e1b 941
52a11cbf
RH
942 around_label = gen_label_rtx ();
943 emit_jump (around_label);
944
945 emit_label (region->label);
946 expand_expr (failure, const0_rtx, VOIDmode, EXPAND_NORMAL);
b912bca0
MM
947 /* We must adjust the stack before we reach the AROUND_LABEL because
948 the call to FAILURE does not occur on all paths to the
949 AROUND_LABEL. */
950 do_pending_stack_adjust ();
9a0d1e1b 951
52a11cbf 952 emit_label (around_label);
9a0d1e1b
AM
953}
954
52a11cbf
RH
955/* End an exception region for a must-not-throw filter. FAILURE is an
956 expression invoke if an uncaught exception propagates this far.
e6cfb550 957
52a11cbf
RH
958 This is conceptually identical to expand_eh_region_end_allowed with
959 an empty allowed list (if you passed "std::terminate" instead of
960 "__cxa_call_unexpected"), but they are represented differently in
961 the C++ LSDA. */
6814a8a0 962
52a11cbf
RH
963void
964expand_eh_region_end_must_not_throw (failure)
965 tree failure;
e6cfb550 966{
52a11cbf
RH
967 struct eh_region *region;
968 rtx around_label;
e6cfb550 969
52a11cbf
RH
970 if (! doing_eh (0))
971 return;
6814a8a0 972
52a11cbf
RH
973 region = expand_eh_region_end ();
974 region->type = ERT_MUST_NOT_THROW;
975 region->label = gen_label_rtx ();
e6cfb550 976
52a11cbf
RH
977 /* We must emit the call to FAILURE here, so that if this function
978 throws a different exception, that it will be processed by the
979 correct region. */
6814a8a0 980
52a11cbf
RH
981 around_label = gen_label_rtx ();
982 emit_jump (around_label);
6814a8a0 983
52a11cbf
RH
984 emit_label (region->label);
985 expand_expr (failure, const0_rtx, VOIDmode, EXPAND_NORMAL);
6814a8a0 986
52a11cbf 987 emit_label (around_label);
e6cfb550
AM
988}
989
52a11cbf
RH
990/* End an exception region for a throw. No handling goes on here,
991 but it's the easiest way for the front-end to indicate what type
992 is being thrown. */
6814a8a0 993
52a11cbf
RH
994void
995expand_eh_region_end_throw (type)
996 tree type;
e6cfb550 997{
52a11cbf
RH
998 struct eh_region *region;
999
1000 if (! doing_eh (0))
1001 return;
1002
1003 region = expand_eh_region_end ();
1004 region->type = ERT_THROW;
1005 region->u.throw.type = type;
e6cfb550
AM
1006}
1007
52a11cbf
RH
1008/* End a fixup region. Within this region the cleanups for the immediately
1009 enclosing region are _not_ run. This is used for goto cleanup to avoid
1010 destroying an object twice.
12670d88 1011
52a11cbf
RH
1012 This would be an extraordinarily simple prospect, were it not for the
1013 fact that we don't actually know what the immediately enclosing region
1014 is. This surprising fact is because expand_cleanups is currently
1015 generating a sequence that it will insert somewhere else. We collect
1016 the proper notion of "enclosing" in convert_from_eh_region_ranges. */
4956d07c 1017
52a11cbf
RH
1018void
1019expand_eh_region_end_fixup (handler)
1020 tree handler;
4956d07c 1021{
52a11cbf
RH
1022 struct eh_region *fixup;
1023
1024 if (! doing_eh (0))
1025 return;
1026
1027 fixup = expand_eh_region_end ();
1028 fixup->type = ERT_FIXUP;
1029 fixup->u.fixup.cleanup_exp = handler;
4956d07c
MS
1030}
1031
47c84870 1032/* Return an rtl expression for a pointer to the exception object
52a11cbf 1033 within a handler. */
4956d07c
MS
1034
1035rtx
86c99549
RH
1036get_exception_pointer (fun)
1037 struct function *fun;
4956d07c 1038{
86c99549
RH
1039 rtx exc_ptr = fun->eh->exc_ptr;
1040 if (fun == cfun && ! exc_ptr)
52a11cbf
RH
1041 {
1042 exc_ptr = gen_reg_rtx (Pmode);
86c99549 1043 fun->eh->exc_ptr = exc_ptr;
52a11cbf
RH
1044 }
1045 return exc_ptr;
1046}
4956d07c 1047
47c84870
JM
1048/* Return an rtl expression for the exception dispatch filter
1049 within a handler. */
1050
1051static rtx
86c99549
RH
1052get_exception_filter (fun)
1053 struct function *fun;
47c84870 1054{
86c99549
RH
1055 rtx filter = fun->eh->filter;
1056 if (fun == cfun && ! filter)
47c84870 1057 {
041c9d5a 1058 filter = gen_reg_rtx (word_mode);
86c99549 1059 fun->eh->filter = filter;
47c84870
JM
1060 }
1061 return filter;
1062}
52a11cbf
RH
1063\f
1064/* This section is for the exception handling specific optimization pass. */
154bba13 1065
52a11cbf
RH
1066/* Random access the exception region tree. It's just as simple to
1067 collect the regions this way as in expand_eh_region_start, but
1068 without having to realloc memory. */
154bba13 1069
52a11cbf
RH
1070static void
1071collect_eh_region_array ()
154bba13 1072{
52a11cbf 1073 struct eh_region **array, *i;
154bba13 1074
52a11cbf
RH
1075 i = cfun->eh->region_tree;
1076 if (! i)
1077 return;
154bba13 1078
52a11cbf
RH
1079 array = xcalloc (cfun->eh->last_region_number + 1, sizeof (*array));
1080 cfun->eh->region_array = array;
154bba13 1081
52a11cbf
RH
1082 while (1)
1083 {
1084 array[i->region_number] = i;
1085
1086 /* If there are sub-regions, process them. */
1087 if (i->inner)
1088 i = i->inner;
1089 /* If there are peers, process them. */
1090 else if (i->next_peer)
1091 i = i->next_peer;
1092 /* Otherwise, step back up the tree to the next peer. */
1093 else
1094 {
1095 do {
1096 i = i->outer;
1097 if (i == NULL)
1098 return;
1099 } while (i->next_peer == NULL);
1100 i = i->next_peer;
1101 }
1102 }
27a36778
MS
1103}
1104
52a11cbf
RH
1105static void
1106resolve_fixup_regions ()
27a36778 1107{
52a11cbf 1108 int i, j, n = cfun->eh->last_region_number;
27a36778 1109
52a11cbf
RH
1110 for (i = 1; i <= n; ++i)
1111 {
1112 struct eh_region *fixup = cfun->eh->region_array[i];
ea446801 1113 struct eh_region *cleanup = 0;
27a36778 1114
52a11cbf
RH
1115 if (! fixup || fixup->type != ERT_FIXUP)
1116 continue;
27a36778 1117
52a11cbf
RH
1118 for (j = 1; j <= n; ++j)
1119 {
1120 cleanup = cfun->eh->region_array[j];
1121 if (cleanup->type == ERT_CLEANUP
1122 && cleanup->u.cleanup.exp == fixup->u.fixup.cleanup_exp)
1123 break;
1124 }
1125 if (j > n)
1126 abort ();
27a36778 1127
52a11cbf
RH
1128 fixup->u.fixup.real_region = cleanup->outer;
1129 }
27a36778 1130}
27a36778 1131
52a11cbf
RH
1132/* Now that we've discovered what region actually encloses a fixup,
1133 we can shuffle pointers and remove them from the tree. */
27a36778
MS
1134
1135static void
52a11cbf 1136remove_fixup_regions ()
27a36778 1137{
52a11cbf 1138 int i;
45053eaf
RH
1139 rtx insn, note;
1140 struct eh_region *fixup;
27a36778 1141
45053eaf
RH
1142 /* Walk the insn chain and adjust the REG_EH_REGION numbers
1143 for instructions referencing fixup regions. This is only
1144 strictly necessary for fixup regions with no parent, but
1145 doesn't hurt to do it for all regions. */
1146 for (insn = get_insns(); insn ; insn = NEXT_INSN (insn))
1147 if (INSN_P (insn)
1148 && (note = find_reg_note (insn, REG_EH_REGION, NULL))
1149 && INTVAL (XEXP (note, 0)) > 0
1150 && (fixup = cfun->eh->region_array[INTVAL (XEXP (note, 0))])
1151 && fixup->type == ERT_FIXUP)
1152 {
1153 if (fixup->u.fixup.real_region)
2b1e2382 1154 XEXP (note, 0) = GEN_INT (fixup->u.fixup.real_region->region_number);
45053eaf
RH
1155 else
1156 remove_note (insn, note);
1157 }
1158
1159 /* Remove the fixup regions from the tree. */
52a11cbf
RH
1160 for (i = cfun->eh->last_region_number; i > 0; --i)
1161 {
45053eaf 1162 fixup = cfun->eh->region_array[i];
52a11cbf
RH
1163 if (! fixup)
1164 continue;
27a36778 1165
52a11cbf
RH
1166 /* Allow GC to maybe free some memory. */
1167 if (fixup->type == ERT_CLEANUP)
1168 fixup->u.cleanup.exp = NULL_TREE;
27a36778 1169
52a11cbf
RH
1170 if (fixup->type != ERT_FIXUP)
1171 continue;
27a36778 1172
52a11cbf
RH
1173 if (fixup->inner)
1174 {
1175 struct eh_region *parent, *p, **pp;
27a36778 1176
52a11cbf 1177 parent = fixup->u.fixup.real_region;
27a36778 1178
52a11cbf
RH
1179 /* Fix up the children's parent pointers; find the end of
1180 the list. */
1181 for (p = fixup->inner; ; p = p->next_peer)
1182 {
1183 p->outer = parent;
1184 if (! p->next_peer)
1185 break;
1186 }
27a36778 1187
52a11cbf
RH
1188 /* In the tree of cleanups, only outer-inner ordering matters.
1189 So link the children back in anywhere at the correct level. */
1190 if (parent)
1191 pp = &parent->inner;
1192 else
1193 pp = &cfun->eh->region_tree;
1194 p->next_peer = *pp;
1195 *pp = fixup->inner;
1196 fixup->inner = NULL;
1197 }
27a36778 1198
52a11cbf
RH
1199 remove_eh_handler (fixup);
1200 }
27a36778
MS
1201}
1202
655dd289
JJ
1203/* Remove all regions whose labels are not reachable from insns. */
1204
1205static void
1206remove_unreachable_regions (insns)
1207 rtx insns;
1208{
1209 int i, *uid_region_num;
1210 bool *reachable;
1211 struct eh_region *r;
1212 rtx insn;
1213
1214 uid_region_num = xcalloc (get_max_uid (), sizeof(int));
1215 reachable = xcalloc (cfun->eh->last_region_number + 1, sizeof(bool));
1216
1217 for (i = cfun->eh->last_region_number; i > 0; --i)
1218 {
1219 r = cfun->eh->region_array[i];
1220 if (!r || r->region_number != i)
1221 continue;
1222
1223 if (r->resume)
1224 {
1225 if (uid_region_num[INSN_UID (r->resume)])
1226 abort ();
1227 uid_region_num[INSN_UID (r->resume)] = i;
1228 }
1229 if (r->label)
1230 {
1231 if (uid_region_num[INSN_UID (r->label)])
1232 abort ();
1233 uid_region_num[INSN_UID (r->label)] = i;
1234 }
1235 if (r->type == ERT_TRY && r->u.try.continue_label)
1236 {
1237 if (uid_region_num[INSN_UID (r->u.try.continue_label)])
1238 abort ();
1239 uid_region_num[INSN_UID (r->u.try.continue_label)] = i;
1240 }
1241 }
1242
1243 for (insn = insns; insn; insn = NEXT_INSN (insn))
1244 reachable[uid_region_num[INSN_UID (insn)]] = true;
1245
1246 for (i = cfun->eh->last_region_number; i > 0; --i)
1247 {
1248 r = cfun->eh->region_array[i];
1249 if (r && r->region_number == i && !reachable[i])
1250 {
1251 /* Don't remove ERT_THROW regions if their outer region
1252 is reachable. */
1253 if (r->type == ERT_THROW
1254 && r->outer
1255 && reachable[r->outer->region_number])
1256 continue;
1257
1258 remove_eh_handler (r);
1259 }
1260 }
1261
1262 free (reachable);
1263 free (uid_region_num);
1264}
1265
52a11cbf
RH
1266/* Turn NOTE_INSN_EH_REGION notes into REG_EH_REGION notes for each
1267 can_throw instruction in the region. */
27a36778
MS
1268
1269static void
52a11cbf
RH
1270convert_from_eh_region_ranges_1 (pinsns, orig_sp, cur)
1271 rtx *pinsns;
1272 int *orig_sp;
1273 int cur;
27a36778 1274{
52a11cbf
RH
1275 int *sp = orig_sp;
1276 rtx insn, next;
27a36778 1277
52a11cbf
RH
1278 for (insn = *pinsns; insn ; insn = next)
1279 {
1280 next = NEXT_INSN (insn);
1281 if (GET_CODE (insn) == NOTE)
1282 {
1283 int kind = NOTE_LINE_NUMBER (insn);
1284 if (kind == NOTE_INSN_EH_REGION_BEG
1285 || kind == NOTE_INSN_EH_REGION_END)
1286 {
1287 if (kind == NOTE_INSN_EH_REGION_BEG)
1288 {
1289 struct eh_region *r;
27a36778 1290
52a11cbf
RH
1291 *sp++ = cur;
1292 cur = NOTE_EH_HANDLER (insn);
27a36778 1293
52a11cbf
RH
1294 r = cfun->eh->region_array[cur];
1295 if (r->type == ERT_FIXUP)
1296 {
1297 r = r->u.fixup.real_region;
1298 cur = r ? r->region_number : 0;
1299 }
1300 else if (r->type == ERT_CATCH)
1301 {
1302 r = r->outer;
1303 cur = r ? r->region_number : 0;
1304 }
1305 }
1306 else
1307 cur = *--sp;
1308
1309 /* Removing the first insn of a CALL_PLACEHOLDER sequence
1310 requires extra care to adjust sequence start. */
1311 if (insn == *pinsns)
1312 *pinsns = next;
1313 remove_insn (insn);
1314 continue;
1315 }
1316 }
1317 else if (INSN_P (insn))
1318 {
1319 if (cur > 0
1320 && ! find_reg_note (insn, REG_EH_REGION, NULL_RTX)
1321 /* Calls can always potentially throw exceptions, unless
1322 they have a REG_EH_REGION note with a value of 0 or less.
1323 Which should be the only possible kind so far. */
1324 && (GET_CODE (insn) == CALL_INSN
1325 /* If we wanted exceptions for non-call insns, then
1326 any may_trap_p instruction could throw. */
1327 || (flag_non_call_exceptions
d7730f7a
RH
1328 && GET_CODE (PATTERN (insn)) != CLOBBER
1329 && GET_CODE (PATTERN (insn)) != USE
52a11cbf
RH
1330 && may_trap_p (PATTERN (insn)))))
1331 {
1332 REG_NOTES (insn) = alloc_EXPR_LIST (REG_EH_REGION, GEN_INT (cur),
1333 REG_NOTES (insn));
1334 }
27a36778 1335
52a11cbf
RH
1336 if (GET_CODE (insn) == CALL_INSN
1337 && GET_CODE (PATTERN (insn)) == CALL_PLACEHOLDER)
1338 {
1339 convert_from_eh_region_ranges_1 (&XEXP (PATTERN (insn), 0),
1340 sp, cur);
1341 convert_from_eh_region_ranges_1 (&XEXP (PATTERN (insn), 1),
1342 sp, cur);
1343 convert_from_eh_region_ranges_1 (&XEXP (PATTERN (insn), 2),
1344 sp, cur);
1345 }
1346 }
1347 }
27a36778 1348
52a11cbf
RH
1349 if (sp != orig_sp)
1350 abort ();
1351}
27a36778 1352
52a11cbf
RH
1353void
1354convert_from_eh_region_ranges ()
1355{
1356 int *stack;
1357 rtx insns;
27a36778 1358
52a11cbf
RH
1359 collect_eh_region_array ();
1360 resolve_fixup_regions ();
27a36778 1361
52a11cbf
RH
1362 stack = xmalloc (sizeof (int) * (cfun->eh->last_region_number + 1));
1363 insns = get_insns ();
1364 convert_from_eh_region_ranges_1 (&insns, stack, 0);
1365 free (stack);
27a36778 1366
52a11cbf 1367 remove_fixup_regions ();
655dd289 1368 remove_unreachable_regions (insns);
27a36778
MS
1369}
1370
6a58eee9
RH
1371static void
1372add_ehl_entry (label, region)
1373 rtx label;
1374 struct eh_region *region;
1375{
1376 struct ehl_map_entry **slot, *entry;
1377
1378 LABEL_PRESERVE_P (label) = 1;
1379
1380 entry = (struct ehl_map_entry *) xmalloc (sizeof (*entry));
1381 entry->label = label;
1382 entry->region = region;
1383
1384 slot = (struct ehl_map_entry **)
1385 htab_find_slot (exception_handler_label_map, entry, INSERT);
6f3d0447
RH
1386
1387 /* Before landing pad creation, each exception handler has its own
1388 label. After landing pad creation, the exception handlers may
1389 share landing pads. This is ok, since maybe_remove_eh_handler
1390 only requires the 1-1 mapping before landing pad creation. */
1391 if (*slot && !cfun->eh->built_landing_pads)
6a58eee9 1392 abort ();
6f3d0447 1393
6a58eee9
RH
1394 *slot = entry;
1395}
1396
1397static void
1398ehl_free (pentry)
1399 PTR pentry;
1400{
1401 struct ehl_map_entry *entry = (struct ehl_map_entry *)pentry;
1402 LABEL_PRESERVE_P (entry->label) = 0;
1403 free (entry);
1404}
1405
52a11cbf
RH
1406void
1407find_exception_handler_labels ()
27a36778 1408{
52a11cbf 1409 int i;
27a36778 1410
6a58eee9
RH
1411 if (exception_handler_label_map)
1412 htab_empty (exception_handler_label_map);
1413 else
1414 {
1415 /* ??? The expansion factor here (3/2) must be greater than the htab
1416 occupancy factor (4/3) to avoid unnecessary resizing. */
1417 exception_handler_label_map
1418 = htab_create (cfun->eh->last_region_number * 3 / 2,
1419 ehl_hash, ehl_eq, ehl_free);
1420 }
27a36778 1421
52a11cbf
RH
1422 if (cfun->eh->region_tree == NULL)
1423 return;
27a36778 1424
52a11cbf
RH
1425 for (i = cfun->eh->last_region_number; i > 0; --i)
1426 {
1427 struct eh_region *region = cfun->eh->region_array[i];
1428 rtx lab;
27a36778 1429
655dd289 1430 if (! region || region->region_number != i)
52a11cbf
RH
1431 continue;
1432 if (cfun->eh->built_landing_pads)
1433 lab = region->landing_pad;
1434 else
1435 lab = region->label;
27a36778 1436
52a11cbf 1437 if (lab)
6a58eee9 1438 add_ehl_entry (lab, region);
27a36778
MS
1439 }
1440
52a11cbf
RH
1441 /* For sjlj exceptions, need the return label to remain live until
1442 after landing pad generation. */
1443 if (USING_SJLJ_EXCEPTIONS && ! cfun->eh->built_landing_pads)
6a58eee9 1444 add_ehl_entry (return_label, NULL);
27a36778
MS
1445}
1446
93f82d60
RH
1447bool
1448current_function_has_exception_handlers ()
1449{
1450 int i;
1451
1452 for (i = cfun->eh->last_region_number; i > 0; --i)
1453 {
1454 struct eh_region *region = cfun->eh->region_array[i];
1455
1456 if (! region || region->region_number != i)
1457 continue;
1458 if (region->type != ERT_THROW)
1459 return true;
1460 }
1461
1462 return false;
1463}
52a11cbf
RH
1464\f
1465static struct eh_region *
1466duplicate_eh_region_1 (o, map)
1467 struct eh_region *o;
1468 struct inline_remap *map;
4956d07c 1469{
52a11cbf
RH
1470 struct eh_region *n
1471 = (struct eh_region *) xcalloc (1, sizeof (struct eh_region));
4956d07c 1472
52a11cbf
RH
1473 n->region_number = o->region_number + cfun->eh->last_region_number;
1474 n->type = o->type;
4956d07c 1475
52a11cbf
RH
1476 switch (n->type)
1477 {
1478 case ERT_CLEANUP:
1479 case ERT_MUST_NOT_THROW:
1480 break;
27a36778 1481
52a11cbf
RH
1482 case ERT_TRY:
1483 if (o->u.try.continue_label)
1484 n->u.try.continue_label
1485 = get_label_from_map (map,
1486 CODE_LABEL_NUMBER (o->u.try.continue_label));
1487 break;
27a36778 1488
52a11cbf 1489 case ERT_CATCH:
6d41a92f 1490 n->u.catch.type_list = o->u.catch.type_list;
52a11cbf 1491 break;
27a36778 1492
52a11cbf
RH
1493 case ERT_ALLOWED_EXCEPTIONS:
1494 n->u.allowed.type_list = o->u.allowed.type_list;
1495 break;
1496
1497 case ERT_THROW:
1498 n->u.throw.type = o->u.throw.type;
3f2c5d1a 1499
52a11cbf
RH
1500 default:
1501 abort ();
1502 }
1503
1504 if (o->label)
1505 n->label = get_label_from_map (map, CODE_LABEL_NUMBER (o->label));
47c84870 1506 if (o->resume)
e7b9b18e 1507 {
47c84870
JM
1508 n->resume = map->insn_map[INSN_UID (o->resume)];
1509 if (n->resume == NULL)
52a11cbf 1510 abort ();
27a36778 1511 }
4956d07c 1512
52a11cbf 1513 return n;
4956d07c
MS
1514}
1515
52a11cbf
RH
1516static void
1517duplicate_eh_region_2 (o, n_array)
1518 struct eh_region *o;
1519 struct eh_region **n_array;
4c581243 1520{
52a11cbf 1521 struct eh_region *n = n_array[o->region_number];
4c581243 1522
52a11cbf
RH
1523 switch (n->type)
1524 {
1525 case ERT_TRY:
1526 n->u.try.catch = n_array[o->u.try.catch->region_number];
1527 n->u.try.last_catch = n_array[o->u.try.last_catch->region_number];
1528 break;
12670d88 1529
52a11cbf
RH
1530 case ERT_CATCH:
1531 if (o->u.catch.next_catch)
1532 n->u.catch.next_catch = n_array[o->u.catch.next_catch->region_number];
1533 if (o->u.catch.prev_catch)
1534 n->u.catch.prev_catch = n_array[o->u.catch.prev_catch->region_number];
1535 break;
12670d88 1536
52a11cbf
RH
1537 default:
1538 break;
1539 }
4956d07c 1540
52a11cbf
RH
1541 if (o->outer)
1542 n->outer = n_array[o->outer->region_number];
1543 if (o->inner)
1544 n->inner = n_array[o->inner->region_number];
1545 if (o->next_peer)
1546 n->next_peer = n_array[o->next_peer->region_number];
3f2c5d1a 1547}
52a11cbf
RH
1548
1549int
1550duplicate_eh_regions (ifun, map)
1551 struct function *ifun;
1552 struct inline_remap *map;
4956d07c 1553{
52a11cbf
RH
1554 int ifun_last_region_number = ifun->eh->last_region_number;
1555 struct eh_region **n_array, *root, *cur;
1556 int i;
4956d07c 1557
52a11cbf
RH
1558 if (ifun_last_region_number == 0)
1559 return 0;
4956d07c 1560
52a11cbf 1561 n_array = xcalloc (ifun_last_region_number + 1, sizeof (*n_array));
4956d07c 1562
52a11cbf 1563 for (i = 1; i <= ifun_last_region_number; ++i)
27a36778 1564 {
52a11cbf
RH
1565 cur = ifun->eh->region_array[i];
1566 if (!cur || cur->region_number != i)
1567 continue;
1568 n_array[i] = duplicate_eh_region_1 (cur, map);
27a36778 1569 }
52a11cbf 1570 for (i = 1; i <= ifun_last_region_number; ++i)
27a36778 1571 {
52a11cbf
RH
1572 cur = ifun->eh->region_array[i];
1573 if (!cur || cur->region_number != i)
1574 continue;
1575 duplicate_eh_region_2 (cur, n_array);
1576 }
27a36778 1577
52a11cbf
RH
1578 root = n_array[ifun->eh->region_tree->region_number];
1579 cur = cfun->eh->cur_region;
1580 if (cur)
1581 {
1582 struct eh_region *p = cur->inner;
1583 if (p)
1584 {
1585 while (p->next_peer)
1586 p = p->next_peer;
1587 p->next_peer = root;
1588 }
1589 else
1590 cur->inner = root;
27a36778 1591
52a11cbf 1592 for (i = 1; i <= ifun_last_region_number; ++i)
b24a9e88 1593 if (n_array[i] && n_array[i]->outer == NULL)
52a11cbf
RH
1594 n_array[i]->outer = cur;
1595 }
1596 else
1597 {
1598 struct eh_region *p = cfun->eh->region_tree;
1599 if (p)
1600 {
1601 while (p->next_peer)
1602 p = p->next_peer;
1603 p->next_peer = root;
1604 }
1605 else
1606 cfun->eh->region_tree = root;
27a36778 1607 }
1e4ceb6f 1608
52a11cbf 1609 free (n_array);
1e4ceb6f 1610
52a11cbf
RH
1611 i = cfun->eh->last_region_number;
1612 cfun->eh->last_region_number = i + ifun_last_region_number;
1613 return i;
4956d07c
MS
1614}
1615
52a11cbf 1616\f
52a11cbf
RH
1617static int
1618t2r_eq (pentry, pdata)
1619 const PTR pentry;
1620 const PTR pdata;
9762d48d 1621{
52a11cbf
RH
1622 tree entry = (tree) pentry;
1623 tree data = (tree) pdata;
9762d48d 1624
52a11cbf 1625 return TREE_PURPOSE (entry) == data;
9762d48d
JM
1626}
1627
52a11cbf
RH
1628static hashval_t
1629t2r_hash (pentry)
1630 const PTR pentry;
1631{
1632 tree entry = (tree) pentry;
1633 return TYPE_HASH (TREE_PURPOSE (entry));
1634}
9762d48d 1635
52a11cbf
RH
1636static int
1637t2r_mark_1 (slot, data)
1638 PTR *slot;
1639 PTR data ATTRIBUTE_UNUSED;
9762d48d 1640{
52a11cbf
RH
1641 tree contents = (tree) *slot;
1642 ggc_mark_tree (contents);
1643 return 1;
1644}
9762d48d 1645
52a11cbf
RH
1646static void
1647t2r_mark (addr)
1648 PTR addr;
1649{
1650 htab_traverse (*(htab_t *)addr, t2r_mark_1, NULL);
1651}
9762d48d 1652
52a11cbf
RH
1653static void
1654add_type_for_runtime (type)
1655 tree type;
1656{
1657 tree *slot;
9762d48d 1658
52a11cbf
RH
1659 slot = (tree *) htab_find_slot_with_hash (type_to_runtime_map, type,
1660 TYPE_HASH (type), INSERT);
1661 if (*slot == NULL)
1662 {
1663 tree runtime = (*lang_eh_runtime_type) (type);
1664 *slot = tree_cons (type, runtime, NULL_TREE);
1665 }
1666}
3f2c5d1a 1667
52a11cbf
RH
1668static tree
1669lookup_type_for_runtime (type)
1670 tree type;
1671{
1672 tree *slot;
b37f006b 1673
52a11cbf
RH
1674 slot = (tree *) htab_find_slot_with_hash (type_to_runtime_map, type,
1675 TYPE_HASH (type), NO_INSERT);
b37f006b 1676
a1f300c0 1677 /* We should have always inserted the data earlier. */
52a11cbf
RH
1678 return TREE_VALUE (*slot);
1679}
9762d48d 1680
52a11cbf
RH
1681\f
1682/* Represent an entry in @TTypes for either catch actions
1683 or exception filter actions. */
1684struct ttypes_filter
1685{
1686 tree t;
1687 int filter;
1688};
b37f006b 1689
52a11cbf
RH
1690/* Compare ENTRY (a ttypes_filter entry in the hash table) with DATA
1691 (a tree) for a @TTypes type node we are thinking about adding. */
b37f006b 1692
52a11cbf
RH
1693static int
1694ttypes_filter_eq (pentry, pdata)
1695 const PTR pentry;
1696 const PTR pdata;
1697{
1698 const struct ttypes_filter *entry = (const struct ttypes_filter *) pentry;
1699 tree data = (tree) pdata;
b37f006b 1700
52a11cbf 1701 return entry->t == data;
9762d48d
JM
1702}
1703
52a11cbf
RH
1704static hashval_t
1705ttypes_filter_hash (pentry)
1706 const PTR pentry;
1707{
1708 const struct ttypes_filter *entry = (const struct ttypes_filter *) pentry;
1709 return TYPE_HASH (entry->t);
1710}
4956d07c 1711
52a11cbf
RH
1712/* Compare ENTRY with DATA (both struct ttypes_filter) for a @TTypes
1713 exception specification list we are thinking about adding. */
1714/* ??? Currently we use the type lists in the order given. Someone
1715 should put these in some canonical order. */
1716
1717static int
1718ehspec_filter_eq (pentry, pdata)
1719 const PTR pentry;
1720 const PTR pdata;
4956d07c 1721{
52a11cbf
RH
1722 const struct ttypes_filter *entry = (const struct ttypes_filter *) pentry;
1723 const struct ttypes_filter *data = (const struct ttypes_filter *) pdata;
1724
1725 return type_list_equal (entry->t, data->t);
4956d07c
MS
1726}
1727
52a11cbf 1728/* Hash function for exception specification lists. */
4956d07c 1729
52a11cbf
RH
1730static hashval_t
1731ehspec_filter_hash (pentry)
1732 const PTR pentry;
4956d07c 1733{
52a11cbf
RH
1734 const struct ttypes_filter *entry = (const struct ttypes_filter *) pentry;
1735 hashval_t h = 0;
1736 tree list;
1737
1738 for (list = entry->t; list ; list = TREE_CHAIN (list))
1739 h = (h << 5) + (h >> 27) + TYPE_HASH (TREE_VALUE (list));
1740 return h;
4956d07c
MS
1741}
1742
52a11cbf
RH
1743/* Add TYPE to cfun->eh->ttype_data, using TYPES_HASH to speed
1744 up the search. Return the filter value to be used. */
4956d07c 1745
52a11cbf
RH
1746static int
1747add_ttypes_entry (ttypes_hash, type)
1748 htab_t ttypes_hash;
1749 tree type;
4956d07c 1750{
52a11cbf 1751 struct ttypes_filter **slot, *n;
4956d07c 1752
52a11cbf
RH
1753 slot = (struct ttypes_filter **)
1754 htab_find_slot_with_hash (ttypes_hash, type, TYPE_HASH (type), INSERT);
1755
1756 if ((n = *slot) == NULL)
4956d07c 1757 {
52a11cbf 1758 /* Filter value is a 1 based table index. */
12670d88 1759
52a11cbf
RH
1760 n = (struct ttypes_filter *) xmalloc (sizeof (*n));
1761 n->t = type;
1762 n->filter = VARRAY_ACTIVE_SIZE (cfun->eh->ttype_data) + 1;
1763 *slot = n;
1764
1765 VARRAY_PUSH_TREE (cfun->eh->ttype_data, type);
4956d07c 1766 }
52a11cbf
RH
1767
1768 return n->filter;
4956d07c
MS
1769}
1770
52a11cbf
RH
1771/* Add LIST to cfun->eh->ehspec_data, using EHSPEC_HASH and TYPES_HASH
1772 to speed up the search. Return the filter value to be used. */
1773
1774static int
1775add_ehspec_entry (ehspec_hash, ttypes_hash, list)
1776 htab_t ehspec_hash;
1777 htab_t ttypes_hash;
1778 tree list;
12670d88 1779{
52a11cbf
RH
1780 struct ttypes_filter **slot, *n;
1781 struct ttypes_filter dummy;
12670d88 1782
52a11cbf
RH
1783 dummy.t = list;
1784 slot = (struct ttypes_filter **)
1785 htab_find_slot (ehspec_hash, &dummy, INSERT);
1786
1787 if ((n = *slot) == NULL)
1788 {
1789 /* Filter value is a -1 based byte index into a uleb128 buffer. */
1790
1791 n = (struct ttypes_filter *) xmalloc (sizeof (*n));
1792 n->t = list;
1793 n->filter = -(VARRAY_ACTIVE_SIZE (cfun->eh->ehspec_data) + 1);
1794 *slot = n;
1795
1796 /* Look up each type in the list and encode its filter
1797 value as a uleb128. Terminate the list with 0. */
1798 for (; list ; list = TREE_CHAIN (list))
3f2c5d1a 1799 push_uleb128 (&cfun->eh->ehspec_data,
52a11cbf
RH
1800 add_ttypes_entry (ttypes_hash, TREE_VALUE (list)));
1801 VARRAY_PUSH_UCHAR (cfun->eh->ehspec_data, 0);
1802 }
1803
1804 return n->filter;
12670d88
RK
1805}
1806
52a11cbf
RH
1807/* Generate the action filter values to be used for CATCH and
1808 ALLOWED_EXCEPTIONS regions. When using dwarf2 exception regions,
1809 we use lots of landing pads, and so every type or list can share
1810 the same filter value, which saves table space. */
1811
1812static void
1813assign_filter_values ()
9a0d1e1b 1814{
52a11cbf
RH
1815 int i;
1816 htab_t ttypes, ehspec;
9a9deafc 1817
52a11cbf
RH
1818 VARRAY_TREE_INIT (cfun->eh->ttype_data, 16, "ttype_data");
1819 VARRAY_UCHAR_INIT (cfun->eh->ehspec_data, 64, "ehspec_data");
9a9deafc 1820
52a11cbf
RH
1821 ttypes = htab_create (31, ttypes_filter_hash, ttypes_filter_eq, free);
1822 ehspec = htab_create (31, ehspec_filter_hash, ehspec_filter_eq, free);
9a0d1e1b 1823
52a11cbf
RH
1824 for (i = cfun->eh->last_region_number; i > 0; --i)
1825 {
1826 struct eh_region *r = cfun->eh->region_array[i];
9a0d1e1b 1827
52a11cbf
RH
1828 /* Mind we don't process a region more than once. */
1829 if (!r || r->region_number != i)
1830 continue;
9a0d1e1b 1831
52a11cbf
RH
1832 switch (r->type)
1833 {
1834 case ERT_CATCH:
6d41a92f
OH
1835 /* Whatever type_list is (NULL or true list), we build a list
1836 of filters for the region. */
1837 r->u.catch.filter_list = NULL_TREE;
1838
1839 if (r->u.catch.type_list != NULL)
1840 {
1841 /* Get a filter value for each of the types caught and store
1842 them in the region's dedicated list. */
1843 tree tp_node = r->u.catch.type_list;
1844
1845 for (;tp_node; tp_node = TREE_CHAIN (tp_node))
1846 {
1847 int flt = add_ttypes_entry (ttypes, TREE_VALUE (tp_node));
1848 tree flt_node = build_int_2 (flt, 0);
3f2c5d1a
RS
1849
1850 r->u.catch.filter_list
6d41a92f
OH
1851 = tree_cons (NULL_TREE, flt_node, r->u.catch.filter_list);
1852 }
1853 }
1854 else
1855 {
1856 /* Get a filter value for the NULL list also since it will need
1857 an action record anyway. */
1858 int flt = add_ttypes_entry (ttypes, NULL);
1859 tree flt_node = build_int_2 (flt, 0);
3f2c5d1a
RS
1860
1861 r->u.catch.filter_list
6d41a92f
OH
1862 = tree_cons (NULL_TREE, flt_node, r->u.catch.filter_list);
1863 }
3f2c5d1a 1864
52a11cbf 1865 break;
bf71cd2e 1866
52a11cbf
RH
1867 case ERT_ALLOWED_EXCEPTIONS:
1868 r->u.allowed.filter
1869 = add_ehspec_entry (ehspec, ttypes, r->u.allowed.type_list);
1870 break;
bf71cd2e 1871
52a11cbf
RH
1872 default:
1873 break;
1874 }
1875 }
1876
1877 htab_delete (ttypes);
1878 htab_delete (ehspec);
1879}
1880
1881static void
1882build_post_landing_pads ()
1883{
1884 int i;
bf71cd2e 1885
52a11cbf 1886 for (i = cfun->eh->last_region_number; i > 0; --i)
bf71cd2e 1887 {
52a11cbf
RH
1888 struct eh_region *region = cfun->eh->region_array[i];
1889 rtx seq;
bf71cd2e 1890
52a11cbf
RH
1891 /* Mind we don't process a region more than once. */
1892 if (!region || region->region_number != i)
1893 continue;
1894
1895 switch (region->type)
987009bf 1896 {
52a11cbf
RH
1897 case ERT_TRY:
1898 /* ??? Collect the set of all non-overlapping catch handlers
1899 all the way up the chain until blocked by a cleanup. */
1900 /* ??? Outer try regions can share landing pads with inner
1901 try regions if the types are completely non-overlapping,
a1f300c0 1902 and there are no intervening cleanups. */
bf71cd2e 1903
52a11cbf 1904 region->post_landing_pad = gen_label_rtx ();
bf71cd2e 1905
52a11cbf 1906 start_sequence ();
bf71cd2e 1907
52a11cbf 1908 emit_label (region->post_landing_pad);
bf71cd2e 1909
52a11cbf
RH
1910 /* ??? It is mighty inconvenient to call back into the
1911 switch statement generation code in expand_end_case.
1912 Rapid prototyping sez a sequence of ifs. */
1913 {
1914 struct eh_region *c;
1915 for (c = region->u.try.catch; c ; c = c->u.catch.next_catch)
1916 {
1917 /* ??? _Unwind_ForcedUnwind wants no match here. */
6d41a92f 1918 if (c->u.catch.type_list == NULL)
52a11cbf
RH
1919 emit_jump (c->label);
1920 else
6d41a92f
OH
1921 {
1922 /* Need for one cmp/jump per type caught. Each type
1923 list entry has a matching entry in the filter list
1924 (see assign_filter_values). */
1925 tree tp_node = c->u.catch.type_list;
1926 tree flt_node = c->u.catch.filter_list;
1927
1928 for (; tp_node; )
1929 {
1930 emit_cmp_and_jump_insns
1931 (cfun->eh->filter,
1932 GEN_INT (tree_low_cst (TREE_VALUE (flt_node), 0)),
1933 EQ, NULL_RTX, word_mode, 0, c->label);
1934
1935 tp_node = TREE_CHAIN (tp_node);
1936 flt_node = TREE_CHAIN (flt_node);
1937 }
1938 }
52a11cbf
RH
1939 }
1940 }
bf71cd2e 1941
47c84870
JM
1942 /* We delay the generation of the _Unwind_Resume until we generate
1943 landing pads. We emit a marker here so as to get good control
1944 flow data in the meantime. */
1945 region->resume
1946 = emit_jump_insn (gen_rtx_RESX (VOIDmode, region->region_number));
1947 emit_barrier ();
1948
52a11cbf
RH
1949 seq = get_insns ();
1950 end_sequence ();
e6cfb550 1951
47c84870 1952 emit_insns_before (seq, region->u.try.catch->label);
52a11cbf 1953 break;
bf71cd2e 1954
52a11cbf
RH
1955 case ERT_ALLOWED_EXCEPTIONS:
1956 region->post_landing_pad = gen_label_rtx ();
9a0d1e1b 1957
52a11cbf 1958 start_sequence ();
f54a7f6f 1959
52a11cbf 1960 emit_label (region->post_landing_pad);
f54a7f6f 1961
52a11cbf
RH
1962 emit_cmp_and_jump_insns (cfun->eh->filter,
1963 GEN_INT (region->u.allowed.filter),
a06ef755 1964 EQ, NULL_RTX, word_mode, 0, region->label);
f54a7f6f 1965
47c84870
JM
1966 /* We delay the generation of the _Unwind_Resume until we generate
1967 landing pads. We emit a marker here so as to get good control
1968 flow data in the meantime. */
1969 region->resume
1970 = emit_jump_insn (gen_rtx_RESX (VOIDmode, region->region_number));
1971 emit_barrier ();
1972
52a11cbf
RH
1973 seq = get_insns ();
1974 end_sequence ();
1975
47c84870 1976 emit_insns_before (seq, region->label);
52a11cbf 1977 break;
f54a7f6f 1978
52a11cbf
RH
1979 case ERT_CLEANUP:
1980 case ERT_MUST_NOT_THROW:
1981 region->post_landing_pad = region->label;
1982 break;
1983
1984 case ERT_CATCH:
1985 case ERT_THROW:
1986 /* Nothing to do. */
1987 break;
1988
1989 default:
1990 abort ();
1991 }
1992 }
1993}
1e4ceb6f 1994
47c84870
JM
1995/* Replace RESX patterns with jumps to the next handler if any, or calls to
1996 _Unwind_Resume otherwise. */
1997
1e4ceb6f 1998static void
52a11cbf 1999connect_post_landing_pads ()
1e4ceb6f 2000{
52a11cbf 2001 int i;
76fc91c7 2002
52a11cbf
RH
2003 for (i = cfun->eh->last_region_number; i > 0; --i)
2004 {
2005 struct eh_region *region = cfun->eh->region_array[i];
2006 struct eh_region *outer;
47c84870 2007 rtx seq;
1e4ceb6f 2008
52a11cbf
RH
2009 /* Mind we don't process a region more than once. */
2010 if (!region || region->region_number != i)
2011 continue;
1e4ceb6f 2012
47c84870
JM
2013 /* If there is no RESX, or it has been deleted by flow, there's
2014 nothing to fix up. */
2015 if (! region->resume || INSN_DELETED_P (region->resume))
52a11cbf 2016 continue;
76fc91c7 2017
52a11cbf
RH
2018 /* Search for another landing pad in this function. */
2019 for (outer = region->outer; outer ; outer = outer->outer)
2020 if (outer->post_landing_pad)
2021 break;
1e4ceb6f 2022
52a11cbf 2023 start_sequence ();
12670d88 2024
52a11cbf
RH
2025 if (outer)
2026 emit_jump (outer->post_landing_pad);
2027 else
9555a122 2028 emit_library_call (unwind_resume_libfunc, LCT_THROW,
52a11cbf 2029 VOIDmode, 1, cfun->eh->exc_ptr, Pmode);
4956d07c 2030
52a11cbf
RH
2031 seq = get_insns ();
2032 end_sequence ();
47c84870 2033 emit_insns_before (seq, region->resume);
53c17031 2034 delete_insn (region->resume);
52a11cbf
RH
2035 }
2036}
2037
2038\f
2039static void
2040dw2_build_landing_pads ()
4956d07c 2041{
ae0ed63a
JM
2042 int i;
2043 unsigned int j;
4956d07c 2044
52a11cbf
RH
2045 for (i = cfun->eh->last_region_number; i > 0; --i)
2046 {
2047 struct eh_region *region = cfun->eh->region_array[i];
2048 rtx seq;
5c701bb1 2049 bool clobbers_hard_regs = false;
4956d07c 2050
52a11cbf
RH
2051 /* Mind we don't process a region more than once. */
2052 if (!region || region->region_number != i)
2053 continue;
1418bb67 2054
52a11cbf
RH
2055 if (region->type != ERT_CLEANUP
2056 && region->type != ERT_TRY
2057 && region->type != ERT_ALLOWED_EXCEPTIONS)
2058 continue;
12670d88 2059
52a11cbf 2060 start_sequence ();
4956d07c 2061
52a11cbf
RH
2062 region->landing_pad = gen_label_rtx ();
2063 emit_label (region->landing_pad);
4956d07c 2064
52a11cbf
RH
2065#ifdef HAVE_exception_receiver
2066 if (HAVE_exception_receiver)
2067 emit_insn (gen_exception_receiver ());
2068 else
2069#endif
2070#ifdef HAVE_nonlocal_goto_receiver
2071 if (HAVE_nonlocal_goto_receiver)
2072 emit_insn (gen_nonlocal_goto_receiver ());
2073 else
2074#endif
2075 { /* Nothing */ }
4956d07c 2076
52a11cbf
RH
2077 /* If the eh_return data registers are call-saved, then we
2078 won't have considered them clobbered from the call that
2079 threw. Kill them now. */
2080 for (j = 0; ; ++j)
2081 {
2082 unsigned r = EH_RETURN_DATA_REGNO (j);
2083 if (r == INVALID_REGNUM)
2084 break;
2085 if (! call_used_regs[r])
5c701bb1
JS
2086 {
2087 emit_insn (gen_rtx_CLOBBER (VOIDmode, gen_rtx_REG (Pmode, r)));
2088 clobbers_hard_regs = true;
2089 }
2090 }
2091
2092 if (clobbers_hard_regs)
2093 {
2094 /* @@@ This is a kludge. Not all machine descriptions define a
2095 blockage insn, but we must not allow the code we just generated
2096 to be reordered by scheduling. So emit an ASM_INPUT to act as
2ba84f36 2097 blockage insn. */
5c701bb1 2098 emit_insn (gen_rtx_ASM_INPUT (VOIDmode, ""));
52a11cbf 2099 }
e701eb4d 2100
52a11cbf
RH
2101 emit_move_insn (cfun->eh->exc_ptr,
2102 gen_rtx_REG (Pmode, EH_RETURN_DATA_REGNO (0)));
2103 emit_move_insn (cfun->eh->filter,
9e800206 2104 gen_rtx_REG (word_mode, EH_RETURN_DATA_REGNO (1)));
9a0d1e1b 2105
52a11cbf
RH
2106 seq = get_insns ();
2107 end_sequence ();
5816cb14 2108
52a11cbf
RH
2109 emit_insns_before (seq, region->post_landing_pad);
2110 }
4956d07c
MS
2111}
2112
52a11cbf
RH
2113\f
2114struct sjlj_lp_info
2115{
2116 int directly_reachable;
2117 int action_index;
2118 int dispatch_index;
2119 int call_site_index;
2120};
4956d07c 2121
52a11cbf
RH
2122static bool
2123sjlj_find_directly_reachable_regions (lp_info)
2124 struct sjlj_lp_info *lp_info;
4956d07c 2125{
52a11cbf
RH
2126 rtx insn;
2127 bool found_one = false;
4956d07c 2128
52a11cbf
RH
2129 for (insn = get_insns (); insn ; insn = NEXT_INSN (insn))
2130 {
2131 struct eh_region *region;
98ce21b3 2132 enum reachable_code rc;
52a11cbf
RH
2133 tree type_thrown;
2134 rtx note;
4956d07c 2135
52a11cbf
RH
2136 if (! INSN_P (insn))
2137 continue;
0d3453df 2138
52a11cbf
RH
2139 note = find_reg_note (insn, REG_EH_REGION, NULL_RTX);
2140 if (!note || INTVAL (XEXP (note, 0)) <= 0)
2141 continue;
5dfa7520 2142
52a11cbf 2143 region = cfun->eh->region_array[INTVAL (XEXP (note, 0))];
5dfa7520 2144
52a11cbf
RH
2145 type_thrown = NULL_TREE;
2146 if (region->type == ERT_THROW)
2147 {
2148 type_thrown = region->u.throw.type;
2149 region = region->outer;
2150 }
12670d88 2151
52a11cbf
RH
2152 /* Find the first containing region that might handle the exception.
2153 That's the landing pad to which we will transfer control. */
98ce21b3 2154 rc = RNL_NOT_CAUGHT;
52a11cbf 2155 for (; region; region = region->outer)
98ce21b3
RH
2156 {
2157 rc = reachable_next_level (region, type_thrown, 0);
2158 if (rc != RNL_NOT_CAUGHT)
2159 break;
2160 }
2161 if (rc == RNL_MAYBE_CAUGHT || rc == RNL_CAUGHT)
52a11cbf
RH
2162 {
2163 lp_info[region->region_number].directly_reachable = 1;
2164 found_one = true;
2165 }
2166 }
4956d07c 2167
52a11cbf
RH
2168 return found_one;
2169}
e701eb4d
JM
2170
2171static void
52a11cbf
RH
2172sjlj_assign_call_site_values (dispatch_label, lp_info)
2173 rtx dispatch_label;
2174 struct sjlj_lp_info *lp_info;
e701eb4d 2175{
52a11cbf
RH
2176 htab_t ar_hash;
2177 int i, index;
2178
2179 /* First task: build the action table. */
2180
2181 VARRAY_UCHAR_INIT (cfun->eh->action_record_data, 64, "action_record_data");
2182 ar_hash = htab_create (31, action_record_hash, action_record_eq, free);
2183
2184 for (i = cfun->eh->last_region_number; i > 0; --i)
2185 if (lp_info[i].directly_reachable)
e6cfb550 2186 {
52a11cbf
RH
2187 struct eh_region *r = cfun->eh->region_array[i];
2188 r->landing_pad = dispatch_label;
2189 lp_info[i].action_index = collect_one_action_chain (ar_hash, r);
2190 if (lp_info[i].action_index != -1)
2191 cfun->uses_eh_lsda = 1;
e6cfb550 2192 }
e701eb4d 2193
52a11cbf 2194 htab_delete (ar_hash);
76fc91c7 2195
52a11cbf
RH
2196 /* Next: assign dispatch values. In dwarf2 terms, this would be the
2197 landing pad label for the region. For sjlj though, there is one
2198 common landing pad from which we dispatch to the post-landing pads.
76fc91c7 2199
52a11cbf
RH
2200 A region receives a dispatch index if it is directly reachable
2201 and requires in-function processing. Regions that share post-landing
eaec9b3d 2202 pads may share dispatch indices. */
52a11cbf
RH
2203 /* ??? Post-landing pad sharing doesn't actually happen at the moment
2204 (see build_post_landing_pads) so we don't bother checking for it. */
4956d07c 2205
52a11cbf
RH
2206 index = 0;
2207 for (i = cfun->eh->last_region_number; i > 0; --i)
98ce21b3 2208 if (lp_info[i].directly_reachable)
52a11cbf 2209 lp_info[i].dispatch_index = index++;
76fc91c7 2210
52a11cbf
RH
2211 /* Finally: assign call-site values. If dwarf2 terms, this would be
2212 the region number assigned by convert_to_eh_region_ranges, but
2213 handles no-action and must-not-throw differently. */
76fc91c7 2214
52a11cbf
RH
2215 call_site_base = 1;
2216 for (i = cfun->eh->last_region_number; i > 0; --i)
2217 if (lp_info[i].directly_reachable)
2218 {
2219 int action = lp_info[i].action_index;
2220
2221 /* Map must-not-throw to otherwise unused call-site index 0. */
2222 if (action == -2)
2223 index = 0;
2224 /* Map no-action to otherwise unused call-site index -1. */
2225 else if (action == -1)
2226 index = -1;
2227 /* Otherwise, look it up in the table. */
2228 else
2229 index = add_call_site (GEN_INT (lp_info[i].dispatch_index), action);
2230
2231 lp_info[i].call_site_index = index;
2232 }
4956d07c 2233}
27a36778 2234
52a11cbf
RH
2235static void
2236sjlj_mark_call_sites (lp_info)
2237 struct sjlj_lp_info *lp_info;
27a36778 2238{
52a11cbf
RH
2239 int last_call_site = -2;
2240 rtx insn, mem;
2241
52a11cbf 2242 for (insn = get_insns (); insn ; insn = NEXT_INSN (insn))
27a36778 2243 {
52a11cbf
RH
2244 struct eh_region *region;
2245 int this_call_site;
2246 rtx note, before, p;
27a36778 2247
52a11cbf
RH
2248 /* Reset value tracking at extended basic block boundaries. */
2249 if (GET_CODE (insn) == CODE_LABEL)
2250 last_call_site = -2;
27a36778 2251
52a11cbf
RH
2252 if (! INSN_P (insn))
2253 continue;
27a36778 2254
52a11cbf
RH
2255 note = find_reg_note (insn, REG_EH_REGION, NULL_RTX);
2256 if (!note)
2257 {
2258 /* Calls (and trapping insns) without notes are outside any
2259 exception handling region in this function. Mark them as
2260 no action. */
2261 if (GET_CODE (insn) == CALL_INSN
2262 || (flag_non_call_exceptions
2263 && may_trap_p (PATTERN (insn))))
2264 this_call_site = -1;
2265 else
2266 continue;
2267 }
2268 else
2269 {
2270 /* Calls that are known to not throw need not be marked. */
2271 if (INTVAL (XEXP (note, 0)) <= 0)
2272 continue;
27a36778 2273
52a11cbf
RH
2274 region = cfun->eh->region_array[INTVAL (XEXP (note, 0))];
2275 this_call_site = lp_info[region->region_number].call_site_index;
2276 }
27a36778 2277
52a11cbf
RH
2278 if (this_call_site == last_call_site)
2279 continue;
2280
2281 /* Don't separate a call from it's argument loads. */
2282 before = insn;
2283 if (GET_CODE (insn) == CALL_INSN)
833366d6 2284 before = find_first_parameter_load (insn, NULL_RTX);
4956d07c 2285
52a11cbf 2286 start_sequence ();
fd2c57a9
AH
2287 mem = adjust_address (cfun->eh->sjlj_fc, TYPE_MODE (integer_type_node),
2288 sjlj_fc_call_site_ofs);
52a11cbf
RH
2289 emit_move_insn (mem, GEN_INT (this_call_site));
2290 p = get_insns ();
2291 end_sequence ();
12670d88 2292
52a11cbf
RH
2293 emit_insns_before (p, before);
2294 last_call_site = this_call_site;
2295 }
2296}
4956d07c 2297
52a11cbf
RH
2298/* Construct the SjLj_Function_Context. */
2299
2300static void
2301sjlj_emit_function_enter (dispatch_label)
2302 rtx dispatch_label;
4956d07c 2303{
52a11cbf 2304 rtx fn_begin, fc, mem, seq;
4956d07c 2305
52a11cbf 2306 fc = cfun->eh->sjlj_fc;
4956d07c 2307
52a11cbf 2308 start_sequence ();
8a4451aa 2309
8979edec
JL
2310 /* We're storing this libcall's address into memory instead of
2311 calling it directly. Thus, we must call assemble_external_libcall
2312 here, as we can not depend on emit_library_call to do it for us. */
2313 assemble_external_libcall (eh_personality_libfunc);
f4ef873c 2314 mem = adjust_address (fc, Pmode, sjlj_fc_personality_ofs);
52a11cbf
RH
2315 emit_move_insn (mem, eh_personality_libfunc);
2316
f4ef873c 2317 mem = adjust_address (fc, Pmode, sjlj_fc_lsda_ofs);
52a11cbf
RH
2318 if (cfun->uses_eh_lsda)
2319 {
2320 char buf[20];
2321 ASM_GENERATE_INTERNAL_LABEL (buf, "LLSDA", sjlj_funcdef_number);
2322 emit_move_insn (mem, gen_rtx_SYMBOL_REF (Pmode, ggc_strdup (buf)));
8a4451aa 2323 }
52a11cbf
RH
2324 else
2325 emit_move_insn (mem, const0_rtx);
3f2c5d1a 2326
52a11cbf
RH
2327#ifdef DONT_USE_BUILTIN_SETJMP
2328 {
2329 rtx x, note;
9defc9b7 2330 x = emit_library_call_value (setjmp_libfunc, NULL_RTX, LCT_RETURNS_TWICE,
52a11cbf
RH
2331 TYPE_MODE (integer_type_node), 1,
2332 plus_constant (XEXP (fc, 0),
2333 sjlj_fc_jbuf_ofs), Pmode);
2334
2335 note = emit_note (NULL, NOTE_INSN_EXPECTED_VALUE);
2336 NOTE_EXPECTED_VALUE (note) = gen_rtx_EQ (VOIDmode, x, const0_rtx);
2337
2338 emit_cmp_and_jump_insns (x, const0_rtx, NE, 0,
a06ef755 2339 TYPE_MODE (integer_type_node), 0, dispatch_label);
52a11cbf
RH
2340 }
2341#else
2342 expand_builtin_setjmp_setup (plus_constant (XEXP (fc, 0), sjlj_fc_jbuf_ofs),
2343 dispatch_label);
4956d07c 2344#endif
4956d07c 2345
52a11cbf
RH
2346 emit_library_call (unwind_sjlj_register_libfunc, LCT_NORMAL, VOIDmode,
2347 1, XEXP (fc, 0), Pmode);
12670d88 2348
52a11cbf
RH
2349 seq = get_insns ();
2350 end_sequence ();
4956d07c 2351
52a11cbf
RH
2352 /* ??? Instead of doing this at the beginning of the function,
2353 do this in a block that is at loop level 0 and dominates all
2354 can_throw_internal instructions. */
4956d07c 2355
52a11cbf
RH
2356 for (fn_begin = get_insns (); ; fn_begin = NEXT_INSN (fn_begin))
2357 if (GET_CODE (fn_begin) == NOTE
2358 && NOTE_LINE_NUMBER (fn_begin) == NOTE_INSN_FUNCTION_BEG)
2359 break;
2360 emit_insns_after (seq, fn_begin);
4956d07c
MS
2361}
2362
52a11cbf
RH
2363/* Call back from expand_function_end to know where we should put
2364 the call to unwind_sjlj_unregister_libfunc if needed. */
12670d88 2365
52a11cbf
RH
2366void
2367sjlj_emit_function_exit_after (after)
2368 rtx after;
2369{
2370 cfun->eh->sjlj_exit_after = after;
2371}
4956d07c
MS
2372
2373static void
52a11cbf
RH
2374sjlj_emit_function_exit ()
2375{
2376 rtx seq;
4956d07c 2377
52a11cbf 2378 start_sequence ();
ce152ef8 2379
52a11cbf
RH
2380 emit_library_call (unwind_sjlj_unregister_libfunc, LCT_NORMAL, VOIDmode,
2381 1, XEXP (cfun->eh->sjlj_fc, 0), Pmode);
e6cfb550 2382
52a11cbf
RH
2383 seq = get_insns ();
2384 end_sequence ();
4956d07c 2385
52a11cbf
RH
2386 /* ??? Really this can be done in any block at loop level 0 that
2387 post-dominates all can_throw_internal instructions. This is
2388 the last possible moment. */
9a0d1e1b 2389
52a11cbf 2390 emit_insns_after (seq, cfun->eh->sjlj_exit_after);
9a0d1e1b
AM
2391}
2392
52a11cbf
RH
2393static void
2394sjlj_emit_dispatch_table (dispatch_label, lp_info)
2395 rtx dispatch_label;
2396 struct sjlj_lp_info *lp_info;
ce152ef8 2397{
52a11cbf
RH
2398 int i, first_reachable;
2399 rtx mem, dispatch, seq, fc;
2400
2401 fc = cfun->eh->sjlj_fc;
2402
2403 start_sequence ();
2404
2405 emit_label (dispatch_label);
3f2c5d1a 2406
52a11cbf
RH
2407#ifndef DONT_USE_BUILTIN_SETJMP
2408 expand_builtin_setjmp_receiver (dispatch_label);
2409#endif
2410
2411 /* Load up dispatch index, exc_ptr and filter values from the
2412 function context. */
f4ef873c
RK
2413 mem = adjust_address (fc, TYPE_MODE (integer_type_node),
2414 sjlj_fc_call_site_ofs);
52a11cbf
RH
2415 dispatch = copy_to_reg (mem);
2416
f4ef873c 2417 mem = adjust_address (fc, word_mode, sjlj_fc_data_ofs);
52a11cbf
RH
2418 if (word_mode != Pmode)
2419 {
2420#ifdef POINTERS_EXTEND_UNSIGNED
2421 mem = convert_memory_address (Pmode, mem);
2422#else
2423 mem = convert_to_mode (Pmode, mem, 0);
2424#endif
2425 }
2426 emit_move_insn (cfun->eh->exc_ptr, mem);
2427
f4ef873c 2428 mem = adjust_address (fc, word_mode, sjlj_fc_data_ofs + UNITS_PER_WORD);
52a11cbf 2429 emit_move_insn (cfun->eh->filter, mem);
4956d07c 2430
52a11cbf
RH
2431 /* Jump to one of the directly reachable regions. */
2432 /* ??? This really ought to be using a switch statement. */
2433
2434 first_reachable = 0;
2435 for (i = cfun->eh->last_region_number; i > 0; --i)
a1622f83 2436 {
98ce21b3 2437 if (! lp_info[i].directly_reachable)
52a11cbf 2438 continue;
a1622f83 2439
52a11cbf
RH
2440 if (! first_reachable)
2441 {
2442 first_reachable = i;
2443 continue;
2444 }
e6cfb550 2445
a06ef755
RK
2446 emit_cmp_and_jump_insns (dispatch, GEN_INT (lp_info[i].dispatch_index),
2447 EQ, NULL_RTX, TYPE_MODE (integer_type_node), 0,
52a11cbf 2448 cfun->eh->region_array[i]->post_landing_pad);
a1622f83 2449 }
9a0d1e1b 2450
52a11cbf
RH
2451 seq = get_insns ();
2452 end_sequence ();
4956d07c 2453
52a11cbf
RH
2454 emit_insns_before (seq, (cfun->eh->region_array[first_reachable]
2455 ->post_landing_pad));
ce152ef8
AM
2456}
2457
52a11cbf
RH
2458static void
2459sjlj_build_landing_pads ()
ce152ef8 2460{
52a11cbf 2461 struct sjlj_lp_info *lp_info;
ce152ef8 2462
52a11cbf
RH
2463 lp_info = (struct sjlj_lp_info *) xcalloc (cfun->eh->last_region_number + 1,
2464 sizeof (struct sjlj_lp_info));
ce152ef8 2465
52a11cbf
RH
2466 if (sjlj_find_directly_reachable_regions (lp_info))
2467 {
2468 rtx dispatch_label = gen_label_rtx ();
ce152ef8 2469
52a11cbf
RH
2470 cfun->eh->sjlj_fc
2471 = assign_stack_local (TYPE_MODE (sjlj_fc_type_node),
2472 int_size_in_bytes (sjlj_fc_type_node),
2473 TYPE_ALIGN (sjlj_fc_type_node));
4956d07c 2474
52a11cbf
RH
2475 sjlj_assign_call_site_values (dispatch_label, lp_info);
2476 sjlj_mark_call_sites (lp_info);
a1622f83 2477
52a11cbf
RH
2478 sjlj_emit_function_enter (dispatch_label);
2479 sjlj_emit_dispatch_table (dispatch_label, lp_info);
2480 sjlj_emit_function_exit ();
2481 }
a1622f83 2482
52a11cbf 2483 free (lp_info);
4956d07c 2484}
ce152ef8 2485
ce152ef8 2486void
52a11cbf 2487finish_eh_generation ()
ce152ef8 2488{
52a11cbf
RH
2489 /* Nothing to do if no regions created. */
2490 if (cfun->eh->region_tree == NULL)
ce152ef8
AM
2491 return;
2492
52a11cbf
RH
2493 /* The object here is to provide find_basic_blocks with detailed
2494 information (via reachable_handlers) on how exception control
2495 flows within the function. In this first pass, we can include
2496 type information garnered from ERT_THROW and ERT_ALLOWED_EXCEPTIONS
2497 regions, and hope that it will be useful in deleting unreachable
2498 handlers. Subsequently, we will generate landing pads which will
2499 connect many of the handlers, and then type information will not
2500 be effective. Still, this is a win over previous implementations. */
2501
4793dca1 2502 rebuild_jump_labels (get_insns ());
52a11cbf 2503 find_basic_blocks (get_insns (), max_reg_num (), 0);
0068fd96 2504 cleanup_cfg (CLEANUP_PRE_LOOP);
52a11cbf
RH
2505
2506 /* These registers are used by the landing pads. Make sure they
2507 have been generated. */
86c99549
RH
2508 get_exception_pointer (cfun);
2509 get_exception_filter (cfun);
52a11cbf
RH
2510
2511 /* Construct the landing pads. */
2512
2513 assign_filter_values ();
2514 build_post_landing_pads ();
2515 connect_post_landing_pads ();
2516 if (USING_SJLJ_EXCEPTIONS)
2517 sjlj_build_landing_pads ();
2518 else
2519 dw2_build_landing_pads ();
ce152ef8 2520
52a11cbf 2521 cfun->eh->built_landing_pads = 1;
ce152ef8 2522
52a11cbf
RH
2523 /* We've totally changed the CFG. Start over. */
2524 find_exception_handler_labels ();
4793dca1 2525 rebuild_jump_labels (get_insns ());
52a11cbf 2526 find_basic_blocks (get_insns (), max_reg_num (), 0);
0068fd96 2527 cleanup_cfg (CLEANUP_PRE_LOOP);
ce152ef8 2528}
4956d07c 2529\f
6a58eee9
RH
2530static hashval_t
2531ehl_hash (pentry)
2532 const PTR pentry;
2533{
2534 struct ehl_map_entry *entry = (struct ehl_map_entry *) pentry;
2535
2536 /* 2^32 * ((sqrt(5) - 1) / 2) */
2537 const hashval_t scaled_golden_ratio = 0x9e3779b9;
2538 return CODE_LABEL_NUMBER (entry->label) * scaled_golden_ratio;
2539}
2540
2541static int
2542ehl_eq (pentry, pdata)
2543 const PTR pentry;
2544 const PTR pdata;
2545{
2546 struct ehl_map_entry *entry = (struct ehl_map_entry *) pentry;
2547 struct ehl_map_entry *data = (struct ehl_map_entry *) pdata;
2548
2549 return entry->label == data->label;
2550}
2551
52a11cbf 2552/* This section handles removing dead code for flow. */
154bba13 2553
6a58eee9 2554/* Remove LABEL from exception_handler_label_map. */
154bba13 2555
52a11cbf
RH
2556static void
2557remove_exception_handler_label (label)
2558 rtx label;
154bba13 2559{
6a58eee9 2560 struct ehl_map_entry **slot, tmp;
100d81d4 2561
6a58eee9 2562 /* If exception_handler_label_map was not built yet,
655dd289 2563 there is nothing to do. */
6a58eee9 2564 if (exception_handler_label_map == NULL)
655dd289
JJ
2565 return;
2566
6a58eee9
RH
2567 tmp.label = label;
2568 slot = (struct ehl_map_entry **)
2569 htab_find_slot (exception_handler_label_map, &tmp, NO_INSERT);
2570 if (! slot)
2571 abort ();
154bba13 2572
6a58eee9 2573 htab_clear_slot (exception_handler_label_map, (void **) slot);
154bba13
TT
2574}
2575
52a11cbf 2576/* Splice REGION from the region tree etc. */
12670d88 2577
f19c9228 2578static void
52a11cbf
RH
2579remove_eh_handler (region)
2580 struct eh_region *region;
4956d07c 2581{
52a11cbf
RH
2582 struct eh_region **pp, *p;
2583 rtx lab;
4956d07c 2584
52a11cbf
RH
2585 /* For the benefit of efficiently handling REG_EH_REGION notes,
2586 replace this region in the region array with its containing
2587 region. Note that previous region deletions may result in
6a58eee9
RH
2588 multiple copies of this region in the array, so we have a
2589 list of alternate numbers by which we are known. */
2590
2591 cfun->eh->region_array[region->region_number] = region->outer;
2592 if (region->aka)
2593 {
2594 int i;
2595 EXECUTE_IF_SET_IN_BITMAP (region->aka, 0, i,
2596 { cfun->eh->region_array[i] = region->outer; });
2597 }
2598
2599 if (region->outer)
2600 {
2601 if (!region->outer->aka)
2602 region->outer->aka = BITMAP_XMALLOC ();
2603 if (region->aka)
2604 bitmap_a_or_b (region->outer->aka, region->outer->aka, region->aka);
2605 bitmap_set_bit (region->outer->aka, region->region_number);
2606 }
52a11cbf
RH
2607
2608 if (cfun->eh->built_landing_pads)
2609 lab = region->landing_pad;
2610 else
2611 lab = region->label;
2612 if (lab)
2613 remove_exception_handler_label (lab);
2614
2615 if (region->outer)
2616 pp = &region->outer->inner;
2617 else
2618 pp = &cfun->eh->region_tree;
2619 for (p = *pp; p != region; pp = &p->next_peer, p = *pp)
2620 continue;
12670d88 2621
52a11cbf 2622 if (region->inner)
4956d07c 2623 {
52a11cbf
RH
2624 for (p = region->inner; p->next_peer ; p = p->next_peer)
2625 p->outer = region->outer;
2626 p->next_peer = region->next_peer;
2627 p->outer = region->outer;
2628 *pp = region->inner;
4956d07c 2629 }
52a11cbf
RH
2630 else
2631 *pp = region->next_peer;
f19c9228 2632
52a11cbf
RH
2633 if (region->type == ERT_CATCH)
2634 {
2635 struct eh_region *try, *next, *prev;
f19c9228 2636
52a11cbf
RH
2637 for (try = region->next_peer;
2638 try->type == ERT_CATCH;
2639 try = try->next_peer)
2640 continue;
2641 if (try->type != ERT_TRY)
2642 abort ();
f19c9228 2643
52a11cbf
RH
2644 next = region->u.catch.next_catch;
2645 prev = region->u.catch.prev_catch;
f19c9228 2646
52a11cbf
RH
2647 if (next)
2648 next->u.catch.prev_catch = prev;
2649 else
2650 try->u.try.last_catch = prev;
2651 if (prev)
2652 prev->u.catch.next_catch = next;
2653 else
2654 {
2655 try->u.try.catch = next;
2656 if (! next)
2657 remove_eh_handler (try);
2658 }
2659 }
988cea7d 2660
6a58eee9 2661 free_region (region);
4956d07c
MS
2662}
2663
52a11cbf
RH
2664/* LABEL heads a basic block that is about to be deleted. If this
2665 label corresponds to an exception region, we may be able to
2666 delete the region. */
4956d07c
MS
2667
2668void
52a11cbf
RH
2669maybe_remove_eh_handler (label)
2670 rtx label;
4956d07c 2671{
6a58eee9
RH
2672 struct ehl_map_entry **slot, tmp;
2673 struct eh_region *region;
4956d07c 2674
52a11cbf
RH
2675 /* ??? After generating landing pads, it's not so simple to determine
2676 if the region data is completely unused. One must examine the
2677 landing pad and the post landing pad, and whether an inner try block
2678 is referencing the catch handlers directly. */
2679 if (cfun->eh->built_landing_pads)
4956d07c
MS
2680 return;
2681
6a58eee9
RH
2682 tmp.label = label;
2683 slot = (struct ehl_map_entry **)
2684 htab_find_slot (exception_handler_label_map, &tmp, NO_INSERT);
2685 if (! slot)
2686 return;
2687 region = (*slot)->region;
2688 if (! region)
2689 return;
2690
2691 /* Flow will want to remove MUST_NOT_THROW regions as unreachable
2692 because there is no path to the fallback call to terminate.
2693 But the region continues to affect call-site data until there
2694 are no more contained calls, which we don't see here. */
2695 if (region->type == ERT_MUST_NOT_THROW)
87ff9c8e 2696 {
6a58eee9
RH
2697 htab_clear_slot (exception_handler_label_map, (void **) slot);
2698 region->label = NULL_RTX;
87ff9c8e 2699 }
6a58eee9
RH
2700 else
2701 remove_eh_handler (region);
2702}
2703
2704/* Invokes CALLBACK for every exception handler label. Only used by old
2705 loop hackery; should not be used by new code. */
2706
2707void
2708for_each_eh_label (callback)
2709 void (*callback) PARAMS ((rtx));
2710{
2711 htab_traverse (exception_handler_label_map, for_each_eh_label_1,
2712 (void *)callback);
87ff9c8e
RH
2713}
2714
6a58eee9
RH
2715static int
2716for_each_eh_label_1 (pentry, data)
2717 PTR *pentry;
2718 PTR data;
2719{
2720 struct ehl_map_entry *entry = *(struct ehl_map_entry **)pentry;
2721 void (*callback) PARAMS ((rtx)) = (void (*) PARAMS ((rtx))) data;
2722
2723 (*callback) (entry->label);
2724 return 1;
2725}
52a11cbf
RH
2726\f
2727/* This section describes CFG exception edges for flow. */
87ff9c8e 2728
52a11cbf
RH
2729/* For communicating between calls to reachable_next_level. */
2730struct reachable_info
87ff9c8e 2731{
52a11cbf
RH
2732 tree types_caught;
2733 tree types_allowed;
2734 rtx handlers;
2735};
87ff9c8e 2736
52a11cbf
RH
2737/* A subroutine of reachable_next_level. Return true if TYPE, or a
2738 base class of TYPE, is in HANDLED. */
87ff9c8e 2739
52a11cbf
RH
2740static int
2741check_handled (handled, type)
2742 tree handled, type;
87ff9c8e 2743{
52a11cbf
RH
2744 tree t;
2745
2746 /* We can check for exact matches without front-end help. */
2747 if (! lang_eh_type_covers)
f54a7f6f 2748 {
52a11cbf
RH
2749 for (t = handled; t ; t = TREE_CHAIN (t))
2750 if (TREE_VALUE (t) == type)
2751 return 1;
2752 }
2753 else
2754 {
2755 for (t = handled; t ; t = TREE_CHAIN (t))
2756 if ((*lang_eh_type_covers) (TREE_VALUE (t), type))
2757 return 1;
f54a7f6f 2758 }
52a11cbf
RH
2759
2760 return 0;
87ff9c8e
RH
2761}
2762
52a11cbf
RH
2763/* A subroutine of reachable_next_level. If we are collecting a list
2764 of handlers, add one. After landing pad generation, reference
2765 it instead of the handlers themselves. Further, the handlers are
3f2c5d1a 2766 all wired together, so by referencing one, we've got them all.
52a11cbf
RH
2767 Before landing pad generation we reference each handler individually.
2768
2769 LP_REGION contains the landing pad; REGION is the handler. */
87ff9c8e
RH
2770
2771static void
52a11cbf
RH
2772add_reachable_handler (info, lp_region, region)
2773 struct reachable_info *info;
2774 struct eh_region *lp_region;
2775 struct eh_region *region;
87ff9c8e 2776{
52a11cbf
RH
2777 if (! info)
2778 return;
2779
2780 if (cfun->eh->built_landing_pads)
87ff9c8e 2781 {
52a11cbf
RH
2782 if (! info->handlers)
2783 info->handlers = alloc_INSN_LIST (lp_region->landing_pad, NULL_RTX);
87ff9c8e 2784 }
52a11cbf
RH
2785 else
2786 info->handlers = alloc_INSN_LIST (region->label, info->handlers);
87ff9c8e
RH
2787}
2788
3f2c5d1a 2789/* Process one level of exception regions for reachability.
52a11cbf
RH
2790 If TYPE_THROWN is non-null, then it is the *exact* type being
2791 propagated. If INFO is non-null, then collect handler labels
2792 and caught/allowed type information between invocations. */
87ff9c8e 2793
52a11cbf
RH
2794static enum reachable_code
2795reachable_next_level (region, type_thrown, info)
2796 struct eh_region *region;
2797 tree type_thrown;
2798 struct reachable_info *info;
87ff9c8e 2799{
52a11cbf
RH
2800 switch (region->type)
2801 {
2802 case ERT_CLEANUP:
2803 /* Before landing-pad generation, we model control flow
2804 directly to the individual handlers. In this way we can
2805 see that catch handler types may shadow one another. */
2806 add_reachable_handler (info, region, region);
2807 return RNL_MAYBE_CAUGHT;
2808
2809 case ERT_TRY:
2810 {
2811 struct eh_region *c;
2812 enum reachable_code ret = RNL_NOT_CAUGHT;
fa51b01b 2813
52a11cbf
RH
2814 for (c = region->u.try.catch; c ; c = c->u.catch.next_catch)
2815 {
2816 /* A catch-all handler ends the search. */
2817 /* ??? _Unwind_ForcedUnwind will want outer cleanups
2818 to be run as well. */
6d41a92f 2819 if (c->u.catch.type_list == NULL)
52a11cbf
RH
2820 {
2821 add_reachable_handler (info, region, c);
2822 return RNL_CAUGHT;
2823 }
2824
2825 if (type_thrown)
2826 {
a8154559 2827 /* If we have at least one type match, end the search. */
6d41a92f 2828 tree tp_node = c->u.catch.type_list;
3f2c5d1a 2829
6d41a92f 2830 for (; tp_node; tp_node = TREE_CHAIN (tp_node))
52a11cbf 2831 {
6d41a92f
OH
2832 tree type = TREE_VALUE (tp_node);
2833
2834 if (type == type_thrown
2835 || (lang_eh_type_covers
2836 && (*lang_eh_type_covers) (type, type_thrown)))
2837 {
2838 add_reachable_handler (info, region, c);
2839 return RNL_CAUGHT;
2840 }
52a11cbf
RH
2841 }
2842
2843 /* If we have definitive information of a match failure,
2844 the catch won't trigger. */
2845 if (lang_eh_type_covers)
2846 return RNL_NOT_CAUGHT;
2847 }
2848
6d41a92f
OH
2849 /* At this point, we either don't know what type is thrown or
2850 don't have front-end assistance to help deciding if it is
2851 covered by one of the types in the list for this region.
3f2c5d1a 2852
6d41a92f
OH
2853 We'd then like to add this region to the list of reachable
2854 handlers since it is indeed potentially reachable based on the
3f2c5d1a
RS
2855 information we have.
2856
6d41a92f
OH
2857 Actually, this handler is for sure not reachable if all the
2858 types it matches have already been caught. That is, it is only
2859 potentially reachable if at least one of the types it catches
2860 has not been previously caught. */
2861
52a11cbf
RH
2862 if (! info)
2863 ret = RNL_MAYBE_CAUGHT;
6d41a92f 2864 else
52a11cbf 2865 {
6d41a92f
OH
2866 tree tp_node = c->u.catch.type_list;
2867 bool maybe_reachable = false;
52a11cbf 2868
6d41a92f
OH
2869 /* Compute the potential reachability of this handler and
2870 update the list of types caught at the same time. */
2871 for (; tp_node; tp_node = TREE_CHAIN (tp_node))
2872 {
2873 tree type = TREE_VALUE (tp_node);
2874
2875 if (! check_handled (info->types_caught, type))
2876 {
2877 info->types_caught
2878 = tree_cons (NULL, type, info->types_caught);
3f2c5d1a 2879
6d41a92f
OH
2880 maybe_reachable = true;
2881 }
2882 }
3f2c5d1a 2883
6d41a92f
OH
2884 if (maybe_reachable)
2885 {
2886 add_reachable_handler (info, region, c);
3f2c5d1a 2887
6d41a92f
OH
2888 /* ??? If the catch type is a base class of every allowed
2889 type, then we know we can stop the search. */
2890 ret = RNL_MAYBE_CAUGHT;
2891 }
52a11cbf
RH
2892 }
2893 }
87ff9c8e 2894
52a11cbf
RH
2895 return ret;
2896 }
87ff9c8e 2897
52a11cbf
RH
2898 case ERT_ALLOWED_EXCEPTIONS:
2899 /* An empty list of types definitely ends the search. */
2900 if (region->u.allowed.type_list == NULL_TREE)
2901 {
2902 add_reachable_handler (info, region, region);
2903 return RNL_CAUGHT;
2904 }
87ff9c8e 2905
52a11cbf
RH
2906 /* Collect a list of lists of allowed types for use in detecting
2907 when a catch may be transformed into a catch-all. */
2908 if (info)
2909 info->types_allowed = tree_cons (NULL_TREE,
2910 region->u.allowed.type_list,
2911 info->types_allowed);
3f2c5d1a 2912
684d9f3b 2913 /* If we have definitive information about the type hierarchy,
52a11cbf
RH
2914 then we can tell if the thrown type will pass through the
2915 filter. */
2916 if (type_thrown && lang_eh_type_covers)
2917 {
2918 if (check_handled (region->u.allowed.type_list, type_thrown))
2919 return RNL_NOT_CAUGHT;
2920 else
2921 {
2922 add_reachable_handler (info, region, region);
2923 return RNL_CAUGHT;
2924 }
2925 }
21cd906e 2926
52a11cbf
RH
2927 add_reachable_handler (info, region, region);
2928 return RNL_MAYBE_CAUGHT;
21cd906e 2929
52a11cbf
RH
2930 case ERT_CATCH:
2931 /* Catch regions are handled by their controling try region. */
2932 return RNL_NOT_CAUGHT;
21cd906e 2933
52a11cbf
RH
2934 case ERT_MUST_NOT_THROW:
2935 /* Here we end our search, since no exceptions may propagate.
2936 If we've touched down at some landing pad previous, then the
2937 explicit function call we generated may be used. Otherwise
2938 the call is made by the runtime. */
2939 if (info && info->handlers)
21cd906e 2940 {
52a11cbf
RH
2941 add_reachable_handler (info, region, region);
2942 return RNL_CAUGHT;
21cd906e 2943 }
52a11cbf
RH
2944 else
2945 return RNL_BLOCKED;
21cd906e 2946
52a11cbf
RH
2947 case ERT_THROW:
2948 case ERT_FIXUP:
3f2c5d1a 2949 case ERT_UNKNOWN:
52a11cbf
RH
2950 /* Shouldn't see these here. */
2951 break;
21cd906e 2952 }
fa51b01b 2953
52a11cbf 2954 abort ();
fa51b01b 2955}
4956d07c 2956
52a11cbf
RH
2957/* Retrieve a list of labels of exception handlers which can be
2958 reached by a given insn. */
4956d07c 2959
52a11cbf
RH
2960rtx
2961reachable_handlers (insn)
4956d07c
MS
2962 rtx insn;
2963{
52a11cbf
RH
2964 struct reachable_info info;
2965 struct eh_region *region;
2966 tree type_thrown;
2967 int region_number;
fb13d4d0 2968
52a11cbf
RH
2969 if (GET_CODE (insn) == JUMP_INSN
2970 && GET_CODE (PATTERN (insn)) == RESX)
2971 region_number = XINT (PATTERN (insn), 0);
2972 else
1ef1bf06
AM
2973 {
2974 rtx note = find_reg_note (insn, REG_EH_REGION, NULL_RTX);
52a11cbf
RH
2975 if (!note || INTVAL (XEXP (note, 0)) <= 0)
2976 return NULL;
2977 region_number = INTVAL (XEXP (note, 0));
27a36778 2978 }
4956d07c 2979
52a11cbf 2980 memset (&info, 0, sizeof (info));
4956d07c 2981
52a11cbf 2982 region = cfun->eh->region_array[region_number];
fb13d4d0 2983
52a11cbf 2984 type_thrown = NULL_TREE;
7f206d8f
RH
2985 if (GET_CODE (insn) == JUMP_INSN
2986 && GET_CODE (PATTERN (insn)) == RESX)
2987 {
2988 /* A RESX leaves a region instead of entering it. Thus the
2989 region itself may have been deleted out from under us. */
2990 if (region == NULL)
2991 return NULL;
2992 region = region->outer;
2993 }
2994 else if (region->type == ERT_THROW)
52a11cbf
RH
2995 {
2996 type_thrown = region->u.throw.type;
2997 region = region->outer;
2998 }
fac62ecf 2999
52a11cbf
RH
3000 for (; region; region = region->outer)
3001 if (reachable_next_level (region, type_thrown, &info) >= RNL_CAUGHT)
3002 break;
fb13d4d0 3003
52a11cbf 3004 return info.handlers;
fb13d4d0
JM
3005}
3006
52a11cbf
RH
3007/* Determine if the given INSN can throw an exception that is caught
3008 within the function. */
4956d07c 3009
52a11cbf
RH
3010bool
3011can_throw_internal (insn)
4956d07c 3012 rtx insn;
4956d07c 3013{
52a11cbf
RH
3014 struct eh_region *region;
3015 tree type_thrown;
3016 rtx note;
e6cfb550 3017
52a11cbf
RH
3018 if (! INSN_P (insn))
3019 return false;
12670d88 3020
52a11cbf
RH
3021 if (GET_CODE (insn) == INSN
3022 && GET_CODE (PATTERN (insn)) == SEQUENCE)
3023 insn = XVECEXP (PATTERN (insn), 0, 0);
4956d07c 3024
52a11cbf
RH
3025 if (GET_CODE (insn) == CALL_INSN
3026 && GET_CODE (PATTERN (insn)) == CALL_PLACEHOLDER)
4956d07c 3027 {
52a11cbf
RH
3028 int i;
3029 for (i = 0; i < 3; ++i)
4956d07c 3030 {
52a11cbf
RH
3031 rtx sub = XEXP (PATTERN (insn), i);
3032 for (; sub ; sub = NEXT_INSN (sub))
3033 if (can_throw_internal (sub))
3034 return true;
4956d07c 3035 }
52a11cbf 3036 return false;
4956d07c
MS
3037 }
3038
52a11cbf
RH
3039 /* Every insn that might throw has an EH_REGION note. */
3040 note = find_reg_note (insn, REG_EH_REGION, NULL_RTX);
3041 if (!note || INTVAL (XEXP (note, 0)) <= 0)
3042 return false;
4956d07c 3043
52a11cbf 3044 region = cfun->eh->region_array[INTVAL (XEXP (note, 0))];
4956d07c 3045
52a11cbf
RH
3046 type_thrown = NULL_TREE;
3047 if (region->type == ERT_THROW)
3048 {
3049 type_thrown = region->u.throw.type;
3050 region = region->outer;
3051 }
4956d07c 3052
52a11cbf
RH
3053 /* If this exception is ignored by each and every containing region,
3054 then control passes straight out. The runtime may handle some
3055 regions, which also do not require processing internally. */
3056 for (; region; region = region->outer)
3057 {
3058 enum reachable_code how = reachable_next_level (region, type_thrown, 0);
3059 if (how == RNL_BLOCKED)
3060 return false;
3061 if (how != RNL_NOT_CAUGHT)
3062 return true;
4956d07c 3063 }
4956d07c 3064
52a11cbf
RH
3065 return false;
3066}
4956d07c 3067
52a11cbf
RH
3068/* Determine if the given INSN can throw an exception that is
3069 visible outside the function. */
4956d07c 3070
52a11cbf
RH
3071bool
3072can_throw_external (insn)
3073 rtx insn;
4956d07c 3074{
52a11cbf
RH
3075 struct eh_region *region;
3076 tree type_thrown;
3077 rtx note;
4956d07c 3078
52a11cbf
RH
3079 if (! INSN_P (insn))
3080 return false;
3081
3082 if (GET_CODE (insn) == INSN
3083 && GET_CODE (PATTERN (insn)) == SEQUENCE)
3084 insn = XVECEXP (PATTERN (insn), 0, 0);
3085
3086 if (GET_CODE (insn) == CALL_INSN
3087 && GET_CODE (PATTERN (insn)) == CALL_PLACEHOLDER)
4956d07c 3088 {
52a11cbf
RH
3089 int i;
3090 for (i = 0; i < 3; ++i)
4956d07c 3091 {
52a11cbf
RH
3092 rtx sub = XEXP (PATTERN (insn), i);
3093 for (; sub ; sub = NEXT_INSN (sub))
3094 if (can_throw_external (sub))
3095 return true;
4956d07c 3096 }
52a11cbf 3097 return false;
4956d07c 3098 }
52a11cbf
RH
3099
3100 note = find_reg_note (insn, REG_EH_REGION, NULL_RTX);
3101 if (!note)
3102 {
3103 /* Calls (and trapping insns) without notes are outside any
3104 exception handling region in this function. We have to
3105 assume it might throw. Given that the front end and middle
3106 ends mark known NOTHROW functions, this isn't so wildly
3107 inaccurate. */
3108 return (GET_CODE (insn) == CALL_INSN
3109 || (flag_non_call_exceptions
3110 && may_trap_p (PATTERN (insn))));
3111 }
3112 if (INTVAL (XEXP (note, 0)) <= 0)
3113 return false;
3114
3115 region = cfun->eh->region_array[INTVAL (XEXP (note, 0))];
3116
3117 type_thrown = NULL_TREE;
3118 if (region->type == ERT_THROW)
3119 {
3120 type_thrown = region->u.throw.type;
3121 region = region->outer;
3122 }
3123
3124 /* If the exception is caught or blocked by any containing region,
3125 then it is not seen by any calling function. */
3126 for (; region ; region = region->outer)
3127 if (reachable_next_level (region, type_thrown, NULL) >= RNL_CAUGHT)
3128 return false;
3129
3130 return true;
4956d07c 3131}
1ef1bf06 3132
52a11cbf 3133/* True if nothing in this function can throw outside this function. */
6814a8a0 3134
52a11cbf
RH
3135bool
3136nothrow_function_p ()
1ef1bf06
AM
3137{
3138 rtx insn;
1ef1bf06 3139
52a11cbf
RH
3140 if (! flag_exceptions)
3141 return true;
1ef1bf06 3142
1ef1bf06 3143 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
52a11cbf
RH
3144 if (can_throw_external (insn))
3145 return false;
3146 for (insn = current_function_epilogue_delay_list; insn;
3147 insn = XEXP (insn, 1))
3148 if (can_throw_external (insn))
3149 return false;
4da896b2 3150
52a11cbf 3151 return true;
1ef1bf06 3152}
52a11cbf 3153
ca55abae 3154\f
52a11cbf 3155/* Various hooks for unwind library. */
ca55abae
JM
3156
3157/* Do any necessary initialization to access arbitrary stack frames.
3158 On the SPARC, this means flushing the register windows. */
3159
3160void
3161expand_builtin_unwind_init ()
3162{
3163 /* Set this so all the registers get saved in our frame; we need to be
30f7a378 3164 able to copy the saved values for any registers from frames we unwind. */
ca55abae
JM
3165 current_function_has_nonlocal_label = 1;
3166
3167#ifdef SETUP_FRAME_ADDRESSES
3168 SETUP_FRAME_ADDRESSES ();
3169#endif
3170}
3171
52a11cbf
RH
3172rtx
3173expand_builtin_eh_return_data_regno (arglist)
3174 tree arglist;
3175{
3176 tree which = TREE_VALUE (arglist);
3177 unsigned HOST_WIDE_INT iwhich;
3178
3179 if (TREE_CODE (which) != INTEGER_CST)
3180 {
3181 error ("argument of `__builtin_eh_return_regno' must be constant");
3182 return constm1_rtx;
3183 }
3184
3185 iwhich = tree_low_cst (which, 1);
3186 iwhich = EH_RETURN_DATA_REGNO (iwhich);
3187 if (iwhich == INVALID_REGNUM)
3188 return constm1_rtx;
3189
3190#ifdef DWARF_FRAME_REGNUM
3191 iwhich = DWARF_FRAME_REGNUM (iwhich);
3192#else
3193 iwhich = DBX_REGISTER_NUMBER (iwhich);
3194#endif
3195
3f2c5d1a 3196 return GEN_INT (iwhich);
52a11cbf
RH
3197}
3198
ca55abae
JM
3199/* Given a value extracted from the return address register or stack slot,
3200 return the actual address encoded in that value. */
3201
3202rtx
3203expand_builtin_extract_return_addr (addr_tree)
3204 tree addr_tree;
3205{
3206 rtx addr = expand_expr (addr_tree, NULL_RTX, Pmode, 0);
52a11cbf
RH
3207
3208 /* First mask out any unwanted bits. */
3209#ifdef MASK_RETURN_ADDR
22273300 3210 expand_and (Pmode, addr, MASK_RETURN_ADDR, addr);
52a11cbf
RH
3211#endif
3212
3213 /* Then adjust to find the real return address. */
3214#if defined (RETURN_ADDR_OFFSET)
3215 addr = plus_constant (addr, RETURN_ADDR_OFFSET);
3216#endif
3217
3218 return addr;
ca55abae
JM
3219}
3220
3221/* Given an actual address in addr_tree, do any necessary encoding
3222 and return the value to be stored in the return address register or
3223 stack slot so the epilogue will return to that address. */
3224
3225rtx
3226expand_builtin_frob_return_addr (addr_tree)
3227 tree addr_tree;
3228{
4b6c1672 3229 rtx addr = expand_expr (addr_tree, NULL_RTX, ptr_mode, 0);
52a11cbf 3230
be128cd9 3231#ifdef POINTERS_EXTEND_UNSIGNED
4b6c1672
RK
3232 if (GET_MODE (addr) != Pmode)
3233 addr = convert_memory_address (Pmode, addr);
be128cd9
RK
3234#endif
3235
ca55abae 3236#ifdef RETURN_ADDR_OFFSET
52a11cbf 3237 addr = force_reg (Pmode, addr);
ca55abae
JM
3238 addr = plus_constant (addr, -RETURN_ADDR_OFFSET);
3239#endif
52a11cbf 3240
ca55abae
JM
3241 return addr;
3242}
3243
52a11cbf
RH
3244/* Set up the epilogue with the magic bits we'll need to return to the
3245 exception handler. */
ca55abae 3246
52a11cbf
RH
3247void
3248expand_builtin_eh_return (stackadj_tree, handler_tree)
3249 tree stackadj_tree, handler_tree;
ca55abae 3250{
52a11cbf 3251 rtx stackadj, handler;
ca55abae 3252
52a11cbf
RH
3253 stackadj = expand_expr (stackadj_tree, cfun->eh->ehr_stackadj, VOIDmode, 0);
3254 handler = expand_expr (handler_tree, cfun->eh->ehr_handler, VOIDmode, 0);
ca55abae 3255
be128cd9 3256#ifdef POINTERS_EXTEND_UNSIGNED
4b6c1672
RK
3257 if (GET_MODE (stackadj) != Pmode)
3258 stackadj = convert_memory_address (Pmode, stackadj);
3259
3260 if (GET_MODE (handler) != Pmode)
3261 handler = convert_memory_address (Pmode, handler);
be128cd9
RK
3262#endif
3263
52a11cbf
RH
3264 if (! cfun->eh->ehr_label)
3265 {
3266 cfun->eh->ehr_stackadj = copy_to_reg (stackadj);
3267 cfun->eh->ehr_handler = copy_to_reg (handler);
3268 cfun->eh->ehr_label = gen_label_rtx ();
3269 }
ca55abae 3270 else
ca55abae 3271 {
52a11cbf
RH
3272 if (stackadj != cfun->eh->ehr_stackadj)
3273 emit_move_insn (cfun->eh->ehr_stackadj, stackadj);
3274 if (handler != cfun->eh->ehr_handler)
3275 emit_move_insn (cfun->eh->ehr_handler, handler);
ca55abae
JM
3276 }
3277
52a11cbf 3278 emit_jump (cfun->eh->ehr_label);
a1622f83
AM
3279}
3280
71038426
RH
3281void
3282expand_eh_return ()
ca55abae 3283{
52a11cbf 3284 rtx sa, ra, around_label;
ca55abae 3285
52a11cbf 3286 if (! cfun->eh->ehr_label)
71038426 3287 return;
ca55abae 3288
52a11cbf
RH
3289 sa = EH_RETURN_STACKADJ_RTX;
3290 if (! sa)
71038426 3291 {
52a11cbf 3292 error ("__builtin_eh_return not supported on this target");
71038426
RH
3293 return;
3294 }
ca55abae 3295
52a11cbf 3296 current_function_calls_eh_return = 1;
ca55abae 3297
52a11cbf
RH
3298 around_label = gen_label_rtx ();
3299 emit_move_insn (sa, const0_rtx);
3300 emit_jump (around_label);
ca55abae 3301
52a11cbf
RH
3302 emit_label (cfun->eh->ehr_label);
3303 clobber_return_register ();
ca55abae 3304
52a11cbf
RH
3305#ifdef HAVE_eh_return
3306 if (HAVE_eh_return)
3307 emit_insn (gen_eh_return (cfun->eh->ehr_stackadj, cfun->eh->ehr_handler));
3308 else
71038426 3309#endif
52a11cbf
RH
3310 {
3311 ra = EH_RETURN_HANDLER_RTX;
3312 if (! ra)
3313 {
3314 error ("__builtin_eh_return not supported on this target");
3315 ra = gen_reg_rtx (Pmode);
3316 }
71038426 3317
52a11cbf 3318 emit_move_insn (sa, cfun->eh->ehr_stackadj);
be128cd9 3319 emit_move_insn (ra, cfun->eh->ehr_handler);
52a11cbf 3320 }
71038426 3321
52a11cbf 3322 emit_label (around_label);
71038426 3323}
77d33a84 3324\f
/* In the following functions, we represent entries in the action table
   as 1-based indices.  Special cases are:

	 0:	null action record, non-null landing pad; implies cleanups
	-1:	null action record, null landing pad; implies no action
	-2:	no call-site entry; implies must_not_throw
	-3:	we have yet to process outer regions

   Further, no special cases apply to the "next" field of the record.
   For next, 0 means end of list.  */

struct action_record
{
  int offset;			/* 1-based index into action_record_data.  */
  int filter;			/* Type filter value for this action.  */
  int next;			/* Index of the next record, or 0 for none.  */
};
77d33a84 3342
52a11cbf
RH
3343static int
3344action_record_eq (pentry, pdata)
3345 const PTR pentry;
3346 const PTR pdata;
3347{
3348 const struct action_record *entry = (const struct action_record *) pentry;
3349 const struct action_record *data = (const struct action_record *) pdata;
3350 return entry->filter == data->filter && entry->next == data->next;
3351}
77d33a84 3352
52a11cbf
RH
3353static hashval_t
3354action_record_hash (pentry)
3355 const PTR pentry;
3356{
3357 const struct action_record *entry = (const struct action_record *) pentry;
3358 return entry->next * 1009 + entry->filter;
3359}
77d33a84 3360
52a11cbf
RH
3361static int
3362add_action_record (ar_hash, filter, next)
3363 htab_t ar_hash;
3364 int filter, next;
77d33a84 3365{
52a11cbf
RH
3366 struct action_record **slot, *new, tmp;
3367
3368 tmp.filter = filter;
3369 tmp.next = next;
3370 slot = (struct action_record **) htab_find_slot (ar_hash, &tmp, INSERT);
77d33a84 3371
52a11cbf 3372 if ((new = *slot) == NULL)
77d33a84 3373 {
52a11cbf
RH
3374 new = (struct action_record *) xmalloc (sizeof (*new));
3375 new->offset = VARRAY_ACTIVE_SIZE (cfun->eh->action_record_data) + 1;
3376 new->filter = filter;
3377 new->next = next;
3378 *slot = new;
3379
3380 /* The filter value goes in untouched. The link to the next
3381 record is a "self-relative" byte offset, or zero to indicate
3382 that there is no next record. So convert the absolute 1 based
eaec9b3d 3383 indices we've been carrying around into a displacement. */
52a11cbf
RH
3384
3385 push_sleb128 (&cfun->eh->action_record_data, filter);
3386 if (next)
3387 next -= VARRAY_ACTIVE_SIZE (cfun->eh->action_record_data) + 1;
3388 push_sleb128 (&cfun->eh->action_record_data, next);
77d33a84 3389 }
77d33a84 3390
52a11cbf
RH
3391 return new->offset;
3392}
77d33a84 3393
52a11cbf
RH
3394static int
3395collect_one_action_chain (ar_hash, region)
3396 htab_t ar_hash;
3397 struct eh_region *region;
77d33a84 3398{
52a11cbf
RH
3399 struct eh_region *c;
3400 int next;
77d33a84 3401
52a11cbf
RH
3402 /* If we've reached the top of the region chain, then we have
3403 no actions, and require no landing pad. */
3404 if (region == NULL)
3405 return -1;
3406
3407 switch (region->type)
77d33a84 3408 {
52a11cbf
RH
3409 case ERT_CLEANUP:
3410 /* A cleanup adds a zero filter to the beginning of the chain, but
3411 there are special cases to look out for. If there are *only*
3412 cleanups along a path, then it compresses to a zero action.
3413 Further, if there are multiple cleanups along a path, we only
3414 need to represent one of them, as that is enough to trigger
3415 entry to the landing pad at runtime. */
3416 next = collect_one_action_chain (ar_hash, region->outer);
3417 if (next <= 0)
3418 return 0;
3419 for (c = region->outer; c ; c = c->outer)
3420 if (c->type == ERT_CLEANUP)
3421 return next;
3422 return add_action_record (ar_hash, 0, next);
3423
3424 case ERT_TRY:
3425 /* Process the associated catch regions in reverse order.
3426 If there's a catch-all handler, then we don't need to
3427 search outer regions. Use a magic -3 value to record
a1f300c0 3428 that we haven't done the outer search. */
52a11cbf
RH
3429 next = -3;
3430 for (c = region->u.try.last_catch; c ; c = c->u.catch.prev_catch)
3431 {
6d41a92f
OH
3432 if (c->u.catch.type_list == NULL)
3433 {
3434 /* Retrieve the filter from the head of the filter list
3435 where we have stored it (see assign_filter_values). */
3f2c5d1a 3436 int filter
6d41a92f
OH
3437 = TREE_INT_CST_LOW (TREE_VALUE (c->u.catch.filter_list));
3438
3439 next = add_action_record (ar_hash, filter, 0);
3440 }
52a11cbf
RH
3441 else
3442 {
6d41a92f
OH
3443 /* Once the outer search is done, trigger an action record for
3444 each filter we have. */
3445 tree flt_node;
3446
52a11cbf
RH
3447 if (next == -3)
3448 {
3449 next = collect_one_action_chain (ar_hash, region->outer);
949f197f
RH
3450
3451 /* If there is no next action, terminate the chain. */
3452 if (next == -1)
52a11cbf 3453 next = 0;
949f197f
RH
3454 /* If all outer actions are cleanups or must_not_throw,
3455 we'll have no action record for it, since we had wanted
3456 to encode these states in the call-site record directly.
3457 Add a cleanup action to the chain to catch these. */
3458 else if (next <= 0)
3459 next = add_action_record (ar_hash, 0, 0);
52a11cbf 3460 }
3f2c5d1a 3461
6d41a92f
OH
3462 flt_node = c->u.catch.filter_list;
3463 for (; flt_node; flt_node = TREE_CHAIN (flt_node))
3464 {
3465 int filter = TREE_INT_CST_LOW (TREE_VALUE (flt_node));
3466 next = add_action_record (ar_hash, filter, next);
3467 }
52a11cbf
RH
3468 }
3469 }
3470 return next;
3471
3472 case ERT_ALLOWED_EXCEPTIONS:
3473 /* An exception specification adds its filter to the
3474 beginning of the chain. */
3475 next = collect_one_action_chain (ar_hash, region->outer);
3476 return add_action_record (ar_hash, region->u.allowed.filter,
3477 next < 0 ? 0 : next);
3478
3479 case ERT_MUST_NOT_THROW:
3480 /* A must-not-throw region with no inner handlers or cleanups
3481 requires no call-site entry. Note that this differs from
3482 the no handler or cleanup case in that we do require an lsda
3483 to be generated. Return a magic -2 value to record this. */
3484 return -2;
3485
3486 case ERT_CATCH:
3487 case ERT_THROW:
3488 /* CATCH regions are handled in TRY above. THROW regions are
3489 for optimization information only and produce no output. */
3490 return collect_one_action_chain (ar_hash, region->outer);
3491
3492 default:
3493 abort ();
77d33a84
AM
3494 }
3495}
3496
52a11cbf
RH
3497static int
3498add_call_site (landing_pad, action)
3499 rtx landing_pad;
3500 int action;
77d33a84 3501{
52a11cbf
RH
3502 struct call_site_record *data = cfun->eh->call_site_data;
3503 int used = cfun->eh->call_site_data_used;
3504 int size = cfun->eh->call_site_data_size;
77d33a84 3505
52a11cbf
RH
3506 if (used >= size)
3507 {
3508 size = (size ? size * 2 : 64);
3509 data = (struct call_site_record *)
3510 xrealloc (data, sizeof (*data) * size);
3511 cfun->eh->call_site_data = data;
3512 cfun->eh->call_site_data_size = size;
3513 }
77d33a84 3514
52a11cbf
RH
3515 data[used].landing_pad = landing_pad;
3516 data[used].action = action;
77d33a84 3517
52a11cbf 3518 cfun->eh->call_site_data_used = used + 1;
77d33a84 3519
52a11cbf 3520 return used + call_site_base;
77d33a84
AM
3521}
3522
52a11cbf
RH
3523/* Turn REG_EH_REGION notes back into NOTE_INSN_EH_REGION notes.
3524 The new note numbers will not refer to region numbers, but
3525 instead to call site entries. */
77d33a84 3526
52a11cbf
RH
3527void
3528convert_to_eh_region_ranges ()
77d33a84 3529{
52a11cbf
RH
3530 rtx insn, iter, note;
3531 htab_t ar_hash;
3532 int last_action = -3;
3533 rtx last_action_insn = NULL_RTX;
3534 rtx last_landing_pad = NULL_RTX;
3535 rtx first_no_action_insn = NULL_RTX;
ae0ed63a 3536 int call_site = 0;
77d33a84 3537
52a11cbf
RH
3538 if (USING_SJLJ_EXCEPTIONS || cfun->eh->region_tree == NULL)
3539 return;
77d33a84 3540
52a11cbf 3541 VARRAY_UCHAR_INIT (cfun->eh->action_record_data, 64, "action_record_data");
77d33a84 3542
52a11cbf 3543 ar_hash = htab_create (31, action_record_hash, action_record_eq, free);
77d33a84 3544
52a11cbf
RH
3545 for (iter = get_insns (); iter ; iter = NEXT_INSN (iter))
3546 if (INSN_P (iter))
3547 {
3548 struct eh_region *region;
3549 int this_action;
3550 rtx this_landing_pad;
77d33a84 3551
52a11cbf
RH
3552 insn = iter;
3553 if (GET_CODE (insn) == INSN
3554 && GET_CODE (PATTERN (insn)) == SEQUENCE)
3555 insn = XVECEXP (PATTERN (insn), 0, 0);
1ef1bf06 3556
52a11cbf
RH
3557 note = find_reg_note (insn, REG_EH_REGION, NULL_RTX);
3558 if (!note)
3559 {
3560 if (! (GET_CODE (insn) == CALL_INSN
3561 || (flag_non_call_exceptions
3562 && may_trap_p (PATTERN (insn)))))
3563 continue;
3564 this_action = -1;
3565 region = NULL;
3566 }
3567 else
3568 {
3569 if (INTVAL (XEXP (note, 0)) <= 0)
3570 continue;
3571 region = cfun->eh->region_array[INTVAL (XEXP (note, 0))];
3572 this_action = collect_one_action_chain (ar_hash, region);
3573 }
3574
3575 /* Existence of catch handlers, or must-not-throw regions
3576 implies that an lsda is needed (even if empty). */
3577 if (this_action != -1)
3578 cfun->uses_eh_lsda = 1;
3579
3580 /* Delay creation of region notes for no-action regions
3581 until we're sure that an lsda will be required. */
3582 else if (last_action == -3)
3583 {
3584 first_no_action_insn = iter;
3585 last_action = -1;
3586 }
1ef1bf06 3587
52a11cbf
RH
3588 /* Cleanups and handlers may share action chains but not
3589 landing pads. Collect the landing pad for this region. */
3590 if (this_action >= 0)
3591 {
3592 struct eh_region *o;
3593 for (o = region; ! o->landing_pad ; o = o->outer)
3594 continue;
3595 this_landing_pad = o->landing_pad;
3596 }
3597 else
3598 this_landing_pad = NULL_RTX;
1ef1bf06 3599
52a11cbf
RH
3600 /* Differing actions or landing pads implies a change in call-site
3601 info, which implies some EH_REGION note should be emitted. */
3602 if (last_action != this_action
3603 || last_landing_pad != this_landing_pad)
3604 {
3605 /* If we'd not seen a previous action (-3) or the previous
3606 action was must-not-throw (-2), then we do not need an
3607 end note. */
3608 if (last_action >= -1)
3609 {
3610 /* If we delayed the creation of the begin, do it now. */
3611 if (first_no_action_insn)
3612 {
3613 call_site = add_call_site (NULL_RTX, 0);
3614 note = emit_note_before (NOTE_INSN_EH_REGION_BEG,
3615 first_no_action_insn);
3616 NOTE_EH_HANDLER (note) = call_site;
3617 first_no_action_insn = NULL_RTX;
3618 }
3619
3620 note = emit_note_after (NOTE_INSN_EH_REGION_END,
3621 last_action_insn);
3622 NOTE_EH_HANDLER (note) = call_site;
3623 }
3624
3625 /* If the new action is must-not-throw, then no region notes
3626 are created. */
3627 if (this_action >= -1)
3628 {
3f2c5d1a 3629 call_site = add_call_site (this_landing_pad,
52a11cbf
RH
3630 this_action < 0 ? 0 : this_action);
3631 note = emit_note_before (NOTE_INSN_EH_REGION_BEG, iter);
3632 NOTE_EH_HANDLER (note) = call_site;
3633 }
3634
3635 last_action = this_action;
3636 last_landing_pad = this_landing_pad;
3637 }
3638 last_action_insn = iter;
3639 }
1ef1bf06 3640
52a11cbf 3641 if (last_action >= -1 && ! first_no_action_insn)
1ef1bf06 3642 {
52a11cbf
RH
3643 note = emit_note_after (NOTE_INSN_EH_REGION_END, last_action_insn);
3644 NOTE_EH_HANDLER (note) = call_site;
1ef1bf06
AM
3645 }
3646
52a11cbf
RH
3647 htab_delete (ar_hash);
3648}
1ef1bf06 3649
52a11cbf
RH
3650\f
3651static void
3652push_uleb128 (data_area, value)
3653 varray_type *data_area;
3654 unsigned int value;
3655{
3656 do
3657 {
3658 unsigned char byte = value & 0x7f;
3659 value >>= 7;
3660 if (value)
3661 byte |= 0x80;
3662 VARRAY_PUSH_UCHAR (*data_area, byte);
3663 }
3664 while (value);
3665}
1ef1bf06 3666
52a11cbf
RH
3667static void
3668push_sleb128 (data_area, value)
3669 varray_type *data_area;
3670 int value;
3671{
3672 unsigned char byte;
3673 int more;
1ef1bf06 3674
52a11cbf 3675 do
1ef1bf06 3676 {
52a11cbf
RH
3677 byte = value & 0x7f;
3678 value >>= 7;
3679 more = ! ((value == 0 && (byte & 0x40) == 0)
3680 || (value == -1 && (byte & 0x40) != 0));
3681 if (more)
3682 byte |= 0x80;
3683 VARRAY_PUSH_UCHAR (*data_area, byte);
1ef1bf06 3684 }
52a11cbf
RH
3685 while (more);
3686}
1ef1bf06 3687
52a11cbf 3688\f
52a11cbf
RH
3689#ifndef HAVE_AS_LEB128
3690static int
3691dw2_size_of_call_site_table ()
1ef1bf06 3692{
52a11cbf
RH
3693 int n = cfun->eh->call_site_data_used;
3694 int size = n * (4 + 4 + 4);
3695 int i;
1ef1bf06 3696
52a11cbf
RH
3697 for (i = 0; i < n; ++i)
3698 {
3699 struct call_site_record *cs = &cfun->eh->call_site_data[i];
3700 size += size_of_uleb128 (cs->action);
3701 }
fac62ecf 3702
52a11cbf
RH
3703 return size;
3704}
3705
3706static int
3707sjlj_size_of_call_site_table ()
3708{
3709 int n = cfun->eh->call_site_data_used;
3710 int size = 0;
3711 int i;
77d33a84 3712
52a11cbf 3713 for (i = 0; i < n; ++i)
1ef1bf06 3714 {
52a11cbf
RH
3715 struct call_site_record *cs = &cfun->eh->call_site_data[i];
3716 size += size_of_uleb128 (INTVAL (cs->landing_pad));
3717 size += size_of_uleb128 (cs->action);
1ef1bf06 3718 }
52a11cbf
RH
3719
3720 return size;
3721}
3722#endif
3723
3724static void
3725dw2_output_call_site_table ()
3726{
83182544 3727 const char *const function_start_lab
52a11cbf
RH
3728 = IDENTIFIER_POINTER (current_function_func_begin_label);
3729 int n = cfun->eh->call_site_data_used;
3730 int i;
3731
3732 for (i = 0; i < n; ++i)
1ef1bf06 3733 {
52a11cbf
RH
3734 struct call_site_record *cs = &cfun->eh->call_site_data[i];
3735 char reg_start_lab[32];
3736 char reg_end_lab[32];
3737 char landing_pad_lab[32];
3738
3739 ASM_GENERATE_INTERNAL_LABEL (reg_start_lab, "LEHB", call_site_base + i);
3740 ASM_GENERATE_INTERNAL_LABEL (reg_end_lab, "LEHE", call_site_base + i);
3741
3742 if (cs->landing_pad)
3743 ASM_GENERATE_INTERNAL_LABEL (landing_pad_lab, "L",
3744 CODE_LABEL_NUMBER (cs->landing_pad));
3745
3746 /* ??? Perhaps use insn length scaling if the assembler supports
3747 generic arithmetic. */
3748 /* ??? Perhaps use attr_length to choose data1 or data2 instead of
3749 data4 if the function is small enough. */
3750#ifdef HAVE_AS_LEB128
3751 dw2_asm_output_delta_uleb128 (reg_start_lab, function_start_lab,
3752 "region %d start", i);
3753 dw2_asm_output_delta_uleb128 (reg_end_lab, reg_start_lab,
3754 "length");
3755 if (cs->landing_pad)
3756 dw2_asm_output_delta_uleb128 (landing_pad_lab, function_start_lab,
3757 "landing pad");
3758 else
3759 dw2_asm_output_data_uleb128 (0, "landing pad");
3760#else
3761 dw2_asm_output_delta (4, reg_start_lab, function_start_lab,
3762 "region %d start", i);
3763 dw2_asm_output_delta (4, reg_end_lab, reg_start_lab, "length");
3764 if (cs->landing_pad)
3765 dw2_asm_output_delta (4, landing_pad_lab, function_start_lab,
3766 "landing pad");
3767 else
3768 dw2_asm_output_data (4, 0, "landing pad");
3769#endif
3770 dw2_asm_output_data_uleb128 (cs->action, "action");
1ef1bf06
AM
3771 }
3772
52a11cbf
RH
3773 call_site_base += n;
3774}
3775
3776static void
3777sjlj_output_call_site_table ()
3778{
3779 int n = cfun->eh->call_site_data_used;
3780 int i;
1ef1bf06 3781
52a11cbf 3782 for (i = 0; i < n; ++i)
1ef1bf06 3783 {
52a11cbf 3784 struct call_site_record *cs = &cfun->eh->call_site_data[i];
4da896b2 3785
52a11cbf
RH
3786 dw2_asm_output_data_uleb128 (INTVAL (cs->landing_pad),
3787 "region %d landing pad", i);
3788 dw2_asm_output_data_uleb128 (cs->action, "action");
3789 }
4da896b2 3790
52a11cbf 3791 call_site_base += n;
1ef1bf06
AM
3792}
3793
52a11cbf
RH
3794void
3795output_function_exception_table ()
3796{
2a1ee410 3797 int tt_format, cs_format, lp_format, i, n;
52a11cbf
RH
3798#ifdef HAVE_AS_LEB128
3799 char ttype_label[32];
3800 char cs_after_size_label[32];
3801 char cs_end_label[32];
3802#else
3803 int call_site_len;
3804#endif
3805 int have_tt_data;
3806 int funcdef_number;
ae0ed63a 3807 int tt_format_size = 0;
1ef1bf06 3808
52a11cbf
RH
3809 /* Not all functions need anything. */
3810 if (! cfun->uses_eh_lsda)
3811 return;
fac62ecf 3812
52a11cbf
RH
3813 funcdef_number = (USING_SJLJ_EXCEPTIONS
3814 ? sjlj_funcdef_number
3815 : current_funcdef_number);
1ef1bf06 3816
2a1ee410
RH
3817#ifdef IA64_UNWIND_INFO
3818 fputs ("\t.personality\t", asm_out_file);
3819 output_addr_const (asm_out_file, eh_personality_libfunc);
3820 fputs ("\n\t.handlerdata\n", asm_out_file);
3821 /* Note that varasm still thinks we're in the function's code section.
3822 The ".endp" directive that will immediately follow will take us back. */
3823#else
07c9d2eb 3824 (*targetm.asm_out.exception_section) ();
2a1ee410 3825#endif
52a11cbf
RH
3826
3827 have_tt_data = (VARRAY_ACTIVE_SIZE (cfun->eh->ttype_data) > 0
3828 || VARRAY_ACTIVE_SIZE (cfun->eh->ehspec_data) > 0);
3829
b627d6fe
RH
3830 /* Indicate the format of the @TType entries. */
3831 if (! have_tt_data)
3832 tt_format = DW_EH_PE_omit;
3833 else
3834 {
3835 tt_format = ASM_PREFERRED_EH_DATA_FORMAT (/*code=*/0, /*global=*/1);
3836#ifdef HAVE_AS_LEB128
3837 ASM_GENERATE_INTERNAL_LABEL (ttype_label, "LLSDATT", funcdef_number);
3838#endif
3839 tt_format_size = size_of_encoded_value (tt_format);
3840
7a900ebc 3841 assemble_align (tt_format_size * BITS_PER_UNIT);
b627d6fe 3842 }
52a11cbf
RH
3843
3844 ASM_OUTPUT_INTERNAL_LABEL (asm_out_file, "LLSDA", funcdef_number);
3845
3846 /* The LSDA header. */
3847
3848 /* Indicate the format of the landing pad start pointer. An omitted
3849 field implies @LPStart == @Start. */
3850 /* Currently we always put @LPStart == @Start. This field would
3851 be most useful in moving the landing pads completely out of
3852 line to another section, but it could also be used to minimize
3853 the size of uleb128 landing pad offsets. */
2a1ee410
RH
3854 lp_format = DW_EH_PE_omit;
3855 dw2_asm_output_data (1, lp_format, "@LPStart format (%s)",
3856 eh_data_format_name (lp_format));
52a11cbf
RH
3857
3858 /* @LPStart pointer would go here. */
3859
2a1ee410
RH
3860 dw2_asm_output_data (1, tt_format, "@TType format (%s)",
3861 eh_data_format_name (tt_format));
52a11cbf
RH
3862
3863#ifndef HAVE_AS_LEB128
3864 if (USING_SJLJ_EXCEPTIONS)
3865 call_site_len = sjlj_size_of_call_site_table ();
3866 else
3867 call_site_len = dw2_size_of_call_site_table ();
3868#endif
3869
3870 /* A pc-relative 4-byte displacement to the @TType data. */
3871 if (have_tt_data)
3872 {
3873#ifdef HAVE_AS_LEB128
3874 char ttype_after_disp_label[32];
3f2c5d1a 3875 ASM_GENERATE_INTERNAL_LABEL (ttype_after_disp_label, "LLSDATTD",
52a11cbf
RH
3876 funcdef_number);
3877 dw2_asm_output_delta_uleb128 (ttype_label, ttype_after_disp_label,
3878 "@TType base offset");
3879 ASM_OUTPUT_LABEL (asm_out_file, ttype_after_disp_label);
3880#else
3881 /* Ug. Alignment queers things. */
b627d6fe 3882 unsigned int before_disp, after_disp, last_disp, disp;
52a11cbf 3883
52a11cbf
RH
3884 before_disp = 1 + 1;
3885 after_disp = (1 + size_of_uleb128 (call_site_len)
3886 + call_site_len
3887 + VARRAY_ACTIVE_SIZE (cfun->eh->action_record_data)
b627d6fe
RH
3888 + (VARRAY_ACTIVE_SIZE (cfun->eh->ttype_data)
3889 * tt_format_size));
52a11cbf
RH
3890
3891 disp = after_disp;
3892 do
1ef1bf06 3893 {
52a11cbf
RH
3894 unsigned int disp_size, pad;
3895
3896 last_disp = disp;
3897 disp_size = size_of_uleb128 (disp);
3898 pad = before_disp + disp_size + after_disp;
b627d6fe
RH
3899 if (pad % tt_format_size)
3900 pad = tt_format_size - (pad % tt_format_size);
52a11cbf
RH
3901 else
3902 pad = 0;
3903 disp = after_disp + pad;
1ef1bf06 3904 }
52a11cbf
RH
3905 while (disp != last_disp);
3906
3907 dw2_asm_output_data_uleb128 (disp, "@TType base offset");
3908#endif
1ef1bf06 3909 }
1ef1bf06 3910
52a11cbf
RH
3911 /* Indicate the format of the call-site offsets. */
3912#ifdef HAVE_AS_LEB128
2a1ee410 3913 cs_format = DW_EH_PE_uleb128;
52a11cbf 3914#else
2a1ee410 3915 cs_format = DW_EH_PE_udata4;
52a11cbf 3916#endif
2a1ee410
RH
3917 dw2_asm_output_data (1, cs_format, "call-site format (%s)",
3918 eh_data_format_name (cs_format));
52a11cbf
RH
3919
3920#ifdef HAVE_AS_LEB128
3921 ASM_GENERATE_INTERNAL_LABEL (cs_after_size_label, "LLSDACSB",
3922 funcdef_number);
3923 ASM_GENERATE_INTERNAL_LABEL (cs_end_label, "LLSDACSE",
3924 funcdef_number);
3925 dw2_asm_output_delta_uleb128 (cs_end_label, cs_after_size_label,
3926 "Call-site table length");
3927 ASM_OUTPUT_LABEL (asm_out_file, cs_after_size_label);
3928 if (USING_SJLJ_EXCEPTIONS)
3929 sjlj_output_call_site_table ();
3930 else
3931 dw2_output_call_site_table ();
3932 ASM_OUTPUT_LABEL (asm_out_file, cs_end_label);
3933#else
3934 dw2_asm_output_data_uleb128 (call_site_len,"Call-site table length");
3935 if (USING_SJLJ_EXCEPTIONS)
3936 sjlj_output_call_site_table ();
3937 else
3938 dw2_output_call_site_table ();
3939#endif
3940
3941 /* ??? Decode and interpret the data for flag_debug_asm. */
3942 n = VARRAY_ACTIVE_SIZE (cfun->eh->action_record_data);
3943 for (i = 0; i < n; ++i)
3944 dw2_asm_output_data (1, VARRAY_UCHAR (cfun->eh->action_record_data, i),
3945 (i ? NULL : "Action record table"));
1ef1bf06 3946
52a11cbf 3947 if (have_tt_data)
7a900ebc 3948 assemble_align (tt_format_size * BITS_PER_UNIT);
1ef1bf06 3949
52a11cbf
RH
3950 i = VARRAY_ACTIVE_SIZE (cfun->eh->ttype_data);
3951 while (i-- > 0)
1ef1bf06 3952 {
52a11cbf 3953 tree type = VARRAY_TREE (cfun->eh->ttype_data, i);
225b9cb9 3954 rtx value;
52a11cbf
RH
3955
3956 if (type == NULL_TREE)
3957 type = integer_zero_node;
3958 else
3959 type = lookup_type_for_runtime (type);
3960
225b9cb9
RH
3961 value = expand_expr (type, NULL_RTX, VOIDmode, EXPAND_INITIALIZER);
3962 if (tt_format == DW_EH_PE_absptr || tt_format == DW_EH_PE_aligned)
3963 assemble_integer (value, tt_format_size,
3964 tt_format_size * BITS_PER_UNIT, 1);
3965 else
3966 dw2_asm_output_encoded_addr_rtx (tt_format, value, NULL);
1ef1bf06 3967 }
52a11cbf
RH
3968
3969#ifdef HAVE_AS_LEB128
3970 if (have_tt_data)
3971 ASM_OUTPUT_LABEL (asm_out_file, ttype_label);
3972#endif
3973
3974 /* ??? Decode and interpret the data for flag_debug_asm. */
3975 n = VARRAY_ACTIVE_SIZE (cfun->eh->ehspec_data);
3976 for (i = 0; i < n; ++i)
3977 dw2_asm_output_data (1, VARRAY_UCHAR (cfun->eh->ehspec_data, i),
3978 (i ? NULL : "Exception specification table"));
3979
3980 function_section (current_function_decl);
3981
3982 if (USING_SJLJ_EXCEPTIONS)
3983 sjlj_funcdef_number += 1;
1ef1bf06 3984}