]> git.ipfire.org Git - thirdparty/gcc.git/blame - gcc/except.c
gcc.c (cpp_options): Define __NO_INLINE__ unless we are honoring "inline" keyword.
[thirdparty/gcc.git] / gcc / except.c
CommitLineData
12670d88 1/* Implements exception handling.
ef58a523 2 Copyright (C) 1989, 1992, 1993, 1994, 1995, 1996, 1997, 1998,
c913b6f1 3 1999, 2000, 2001 Free Software Foundation, Inc.
4956d07c
MS
4 Contributed by Mike Stump <mrs@cygnus.com>.
5
6This file is part of GNU CC.
7
8GNU CC is free software; you can redistribute it and/or modify
9it under the terms of the GNU General Public License as published by
10the Free Software Foundation; either version 2, or (at your option)
11any later version.
12
13GNU CC is distributed in the hope that it will be useful,
14but WITHOUT ANY WARRANTY; without even the implied warranty of
15MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
16GNU General Public License for more details.
17
18You should have received a copy of the GNU General Public License
19along with GNU CC; see the file COPYING. If not, write to
20the Free Software Foundation, 59 Temple Place - Suite 330,
21Boston, MA 02111-1307, USA. */
22
23
12670d88
RK
24/* An exception is an event that can be signaled from within a
25 function. This event can then be "caught" or "trapped" by the
26 callers of this function. This potentially allows program flow to
956d6950 27 be transferred to any arbitrary code associated with a function call
12670d88
RK
28 several levels up the stack.
29
30 The intended use for this mechanism is for signaling "exceptional
31 events" in an out-of-band fashion, hence its name. The C++ language
32 (and many other OO-styled or functional languages) practically
33 requires such a mechanism, as otherwise it becomes very difficult
34 or even impossible to signal failure conditions in complex
35 situations. The traditional C++ example is when an error occurs in
36 the process of constructing an object; without such a mechanism, it
37 is impossible to signal that the error occurs without adding global
38 state variables and error checks around every object construction.
39
40 The act of causing this event to occur is referred to as "throwing
41 an exception". (Alternate terms include "raising an exception" or
42 "signaling an exception".) The term "throw" is used because control
43 is returned to the callers of the function that is signaling the
44 exception, and thus there is the concept of "throwing" the
45 exception up the call stack.
46
52a11cbf 47 [ Add updated documentation on how to use this. ] */
4956d07c
MS
48
49
50#include "config.h"
670ee920 51#include "system.h"
4956d07c
MS
52#include "rtl.h"
53#include "tree.h"
54#include "flags.h"
4956d07c 55#include "function.h"
4956d07c 56#include "expr.h"
4956d07c 57#include "insn-config.h"
52a11cbf
RH
58#include "except.h"
59#include "integrate.h"
60#include "hard-reg-set.h"
61#include "basic-block.h"
4956d07c 62#include "output.h"
52a11cbf
RH
63#include "dwarf2asm.h"
64#include "dwarf2out.h"
10f0ad3d 65#include "toplev.h"
52a11cbf 66#include "hashtab.h"
2b12ffe0 67#include "intl.h"
87ff9c8e 68#include "ggc.h"
b1474bb7 69#include "tm_p.h"
4956d07c 70
52a11cbf
RH
71
72/* Provide defaults for stuff that may not be defined when using
73 sjlj exceptions. */
74#ifndef EH_RETURN_STACKADJ_RTX
75#define EH_RETURN_STACKADJ_RTX 0
76#endif
77#ifndef EH_RETURN_HANDLER_RTX
78#define EH_RETURN_HANDLER_RTX 0
79#endif
80#ifndef EH_RETURN_DATA_REGNO
81#define EH_RETURN_DATA_REGNO(N) INVALID_REGNUM
461fc4de
RH
82#endif
83
27a36778 84
52a11cbf
RH
85/* Nonzero means enable synchronous exceptions for non-call instructions. */
86int flag_non_call_exceptions;
27a36778 87
52a11cbf
RH
88/* Protect cleanup actions with must-not-throw regions, with a call
89 to the given failure handler. */
90tree protect_cleanup_actions;
27a36778 91
52a11cbf
RH
92/* Return true if type A catches type B. */
93int (*lang_eh_type_covers) PARAMS ((tree a, tree b));
27a36778 94
52a11cbf
RH
95/* Map a type to a runtime object to match type. */
96tree (*lang_eh_runtime_type) PARAMS ((tree));
4956d07c 97
52a11cbf 98/* A list of labels used for exception handlers. */
4956d07c
MS
99rtx exception_handler_labels;
100
52a11cbf
RH
101static int call_site_base;
102static int sjlj_funcdef_number;
103static htab_t type_to_runtime_map;
104
105/* Describe the SjLj_Function_Context structure. */
106static tree sjlj_fc_type_node;
107static int sjlj_fc_call_site_ofs;
108static int sjlj_fc_data_ofs;
109static int sjlj_fc_personality_ofs;
110static int sjlj_fc_lsda_ofs;
111static int sjlj_fc_jbuf_ofs;
112\f
/* Describes one exception region.  Regions form a tree via the
   outer/inner/next_peer links, mirroring the nesting of EH constructs
   in the source.  */
struct eh_region
{
  /* The immediately surrounding region.  */
  struct eh_region *outer;

  /* The list of immediately contained regions.  */
  struct eh_region *inner;
  struct eh_region *next_peer;

  /* An identifier for this region.  */
  int region_number;

  /* Each region does exactly one thing.  */
  enum eh_region_type
  {
    ERT_CLEANUP = 1,
    ERT_TRY,
    ERT_CATCH,
    ERT_ALLOWED_EXCEPTIONS,
    ERT_MUST_NOT_THROW,
    ERT_THROW,
    ERT_FIXUP
  } type;

  /* Holds the action to perform based on the preceding type.  */
  union {
    /* A list of catch blocks, a surrounding try block,
       and the label for continuing after a catch.  */
    struct {
      struct eh_region *catch;
      struct eh_region *last_catch;
      struct eh_region *prev_try;
      rtx continue_label;
    } try;

    /* The list through the catch handlers, the type object
       matched, and a pointer to the generated code.  */
    struct {
      struct eh_region *next_catch;
      struct eh_region *prev_catch;
      tree type;
      int filter;
    } catch;

    /* A tree_list of allowed types.  */
    struct {
      tree type_list;
      int filter;
    } allowed;

    /* The type given by a call to "throw foo();", or discovered
       for a throw.  */
    struct {
      tree type;
    } throw;

    /* Retain the cleanup expression even after expansion so that
       we can match up fixup regions.  */
    struct {
      tree exp;
    } cleanup;

    /* The real region (by expression and by pointer) that fixup code
       should live in.  */
    struct {
      tree cleanup_exp;
      struct eh_region *real_region;
    } fixup;
  } u;

  /* Entry point for this region's handler before landing pads are built.  */
  rtx label;

  /* Entry point for this region's handler from the runtime eh library.  */
  rtx landing_pad;

  /* Entry point for this region's handler from an inner region.  */
  rtx post_landing_pad;

  /* The RESX insn for handing off control to the next outermost handler,
     if appropriate.  */
  rtx resume;
};
71038426 197
52a11cbf
RH
/* Used to save exception status for each function.  Hangs off of
   cfun->eh; allocated by init_eh_for_function and released by
   free_eh_status.  */
struct eh_status
{
  /* The tree of all regions for this function.  */
  struct eh_region *region_tree;

  /* The same information as an indexable array.  */
  struct eh_region **region_array;

  /* The most recently open region.  */
  struct eh_region *cur_region;

  /* This is the region for which we are processing catch blocks.  */
  struct eh_region *try_region;

  /* A stack (TREE_LIST) of lists of handlers.  The TREE_VALUE of each
     node is itself a TREE_CHAINed list of handlers for regions that
     are not yet closed.  The TREE_VALUE of each entry contains the
     handler for the corresponding entry on the ehstack.  */
  tree protect_list;

  /* Pseudo registers holding the dispatch filter value and the pointer
     to the exception object; created on demand by get_exception_filter
     and get_exception_pointer.  */
  rtx filter;
  rtx exc_ptr;

  int built_landing_pads;
  int last_region_number;

  varray_type ttype_data;
  varray_type ehspec_data;
  varray_type action_record_data;

  /* The call-site table under construction; a growable array of
     landing-pad/action pairs.  */
  struct call_site_record
  {
    rtx landing_pad;
    int action;
  } *call_site_data;
  int call_site_data_used;
  int call_site_data_size;

  /* NOTE(review): ehr_* appear to support the EH-return expansion;
     they are set outside this chunk -- confirm against the rest of
     the file.  */
  rtx ehr_stackadj;
  rtx ehr_handler;
  rtx ehr_label;

  /* SjLj function context object and exit-insertion point.  */
  rtx sjlj_fc;
  rtx sjlj_exit_after;
};
e6cfb550 244
52a11cbf
RH
245\f
246static void mark_eh_region PARAMS ((struct eh_region *));
247
248static int t2r_eq PARAMS ((const PTR,
249 const PTR));
250static hashval_t t2r_hash PARAMS ((const PTR));
251static int t2r_mark_1 PARAMS ((PTR *, PTR));
252static void t2r_mark PARAMS ((PTR));
253static void add_type_for_runtime PARAMS ((tree));
254static tree lookup_type_for_runtime PARAMS ((tree));
255
256static struct eh_region *expand_eh_region_end PARAMS ((void));
257
47c84870
JM
258static rtx get_exception_filter PARAMS ((void));
259
52a11cbf
RH
260static void collect_eh_region_array PARAMS ((void));
261static void resolve_fixup_regions PARAMS ((void));
262static void remove_fixup_regions PARAMS ((void));
263static void convert_from_eh_region_ranges_1 PARAMS ((rtx *, int *, int));
264
265static struct eh_region *duplicate_eh_region_1 PARAMS ((struct eh_region *,
266 struct inline_remap *));
267static void duplicate_eh_region_2 PARAMS ((struct eh_region *,
268 struct eh_region **));
269static int ttypes_filter_eq PARAMS ((const PTR,
270 const PTR));
271static hashval_t ttypes_filter_hash PARAMS ((const PTR));
272static int ehspec_filter_eq PARAMS ((const PTR,
273 const PTR));
274static hashval_t ehspec_filter_hash PARAMS ((const PTR));
275static int add_ttypes_entry PARAMS ((htab_t, tree));
276static int add_ehspec_entry PARAMS ((htab_t, htab_t,
277 tree));
278static void assign_filter_values PARAMS ((void));
279static void build_post_landing_pads PARAMS ((void));
280static void connect_post_landing_pads PARAMS ((void));
281static void dw2_build_landing_pads PARAMS ((void));
282
283struct sjlj_lp_info;
284static bool sjlj_find_directly_reachable_regions
285 PARAMS ((struct sjlj_lp_info *));
286static void sjlj_assign_call_site_values
287 PARAMS ((rtx, struct sjlj_lp_info *));
288static void sjlj_mark_call_sites
289 PARAMS ((struct sjlj_lp_info *));
290static void sjlj_emit_function_enter PARAMS ((rtx));
291static void sjlj_emit_function_exit PARAMS ((void));
292static void sjlj_emit_dispatch_table
293 PARAMS ((rtx, struct sjlj_lp_info *));
294static void sjlj_build_landing_pads PARAMS ((void));
295
296static void remove_exception_handler_label PARAMS ((rtx));
297static void remove_eh_handler PARAMS ((struct eh_region *));
298
299struct reachable_info;
300
301/* The return value of reachable_next_level. */
302enum reachable_code
303{
304 /* The given exception is not processed by the given region. */
305 RNL_NOT_CAUGHT,
306 /* The given exception may need processing by the given region. */
307 RNL_MAYBE_CAUGHT,
308 /* The given exception is completely processed by the given region. */
309 RNL_CAUGHT,
310 /* The given exception is completely processed by the runtime. */
311 RNL_BLOCKED
312};
e6cfb550 313
52a11cbf
RH
314static int check_handled PARAMS ((tree, tree));
315static void add_reachable_handler
316 PARAMS ((struct reachable_info *, struct eh_region *,
317 struct eh_region *));
318static enum reachable_code reachable_next_level
319 PARAMS ((struct eh_region *, tree, struct reachable_info *));
320
321static int action_record_eq PARAMS ((const PTR,
322 const PTR));
323static hashval_t action_record_hash PARAMS ((const PTR));
324static int add_action_record PARAMS ((htab_t, int, int));
325static int collect_one_action_chain PARAMS ((htab_t,
326 struct eh_region *));
327static int add_call_site PARAMS ((rtx, int));
328
329static void push_uleb128 PARAMS ((varray_type *,
330 unsigned int));
331static void push_sleb128 PARAMS ((varray_type *, int));
332static const char *eh_data_format_name PARAMS ((int));
333#ifndef HAVE_AS_LEB128
334static int dw2_size_of_call_site_table PARAMS ((void));
335static int sjlj_size_of_call_site_table PARAMS ((void));
336#endif
337static void dw2_output_call_site_table PARAMS ((void));
338static void sjlj_output_call_site_table PARAMS ((void));
e6cfb550 339
52a11cbf
RH
340\f
341/* Routine to see if exception handling is turned on.
342 DO_WARN is non-zero if we want to inform the user that exception
343 handling is turned off.
4956d07c 344
52a11cbf
RH
345 This is used to ensure that -fexceptions has been specified if the
346 compiler tries to use any exception-specific functions. */
4956d07c 347
52a11cbf
RH
348int
349doing_eh (do_warn)
350 int do_warn;
351{
352 if (! flag_exceptions)
353 {
354 static int warned = 0;
355 if (! warned && do_warn)
356 {
357 error ("exception handling disabled, use -fexceptions to enable");
358 warned = 1;
359 }
360 return 0;
361 }
362 return 1;
4956d07c
MS
363}
364
52a11cbf
RH
365\f
/* One-time global initialization of the exception machinery.
   Registers GC roots and, when using setjmp/longjmp exceptions,
   builds the SjLj_Function_Context record type and caches its
   field offsets for later rtl generation.  */

void
init_eh ()
{
  ggc_add_rtx_root (&exception_handler_labels, 1);
  ggc_add_tree_root (&protect_cleanup_actions, 1);

  if (! flag_exceptions)
    return;

  /* Hash table mapping front-end types to runtime type objects.  */
  type_to_runtime_map = htab_create (31, t2r_hash, t2r_eq, NULL);
  ggc_add_root (&type_to_runtime_map, 1, sizeof (htab_t), t2r_mark);

  /* Create the SjLj_Function_Context structure.  This should match
     the definition in unwind-sjlj.c.  */
  if (USING_SJLJ_EXCEPTIONS)
    {
      tree f_jbuf, f_per, f_lsda, f_prev, f_cs, f_data, tmp;

      sjlj_fc_type_node = make_lang_type (RECORD_TYPE);
      ggc_add_tree_root (&sjlj_fc_type_node, 1);

      f_prev = build_decl (FIELD_DECL, get_identifier ("__prev"),
			   build_pointer_type (sjlj_fc_type_node));
      DECL_FIELD_CONTEXT (f_prev) = sjlj_fc_type_node;

      f_cs = build_decl (FIELD_DECL, get_identifier ("__call_site"),
			 integer_type_node);
      DECL_FIELD_CONTEXT (f_cs) = sjlj_fc_type_node;

      /* A four-element array of word-mode data.  */
      tmp = build_index_type (build_int_2 (4 - 1, 0));
      tmp = build_array_type (type_for_mode (word_mode, 1), tmp);
      f_data = build_decl (FIELD_DECL, get_identifier ("__data"), tmp);
      DECL_FIELD_CONTEXT (f_data) = sjlj_fc_type_node;

      f_per = build_decl (FIELD_DECL, get_identifier ("__personality"),
			  ptr_type_node);
      DECL_FIELD_CONTEXT (f_per) = sjlj_fc_type_node;

      f_lsda = build_decl (FIELD_DECL, get_identifier ("__lsda"),
			   ptr_type_node);
      DECL_FIELD_CONTEXT (f_lsda) = sjlj_fc_type_node;

#ifdef DONT_USE_BUILTIN_SETJMP
#ifdef JMP_BUF_SIZE
      tmp = build_int_2 (JMP_BUF_SIZE - 1, 0);
#else
      /* Should be large enough for most systems, if it is not,
	 JMP_BUF_SIZE should be defined with the proper value.  It will
	 also tend to be larger than necessary for most systems, a more
	 optimal port will define JMP_BUF_SIZE.  */
      tmp = build_int_2 (FIRST_PSEUDO_REGISTER + 2 - 1, 0);
#endif
#else
      /* This is 2 for builtin_setjmp, plus whatever the target requires
	 via STACK_SAVEAREA_MODE (SAVE_NONLOCAL).  */
      tmp = build_int_2 ((GET_MODE_SIZE (STACK_SAVEAREA_MODE (SAVE_NONLOCAL))
			  / GET_MODE_SIZE (Pmode)) + 2 - 1, 0);
#endif
      tmp = build_index_type (tmp);
      tmp = build_array_type (ptr_type_node, tmp);
      f_jbuf = build_decl (FIELD_DECL, get_identifier ("__jbuf"), tmp);
#ifdef DONT_USE_BUILTIN_SETJMP
      /* We don't know what the alignment requirements of the
	 runtime's jmp_buf has.  Overestimate.  */
      DECL_ALIGN (f_jbuf) = BIGGEST_ALIGNMENT;
      DECL_USER_ALIGN (f_jbuf) = 1;
#endif
      DECL_FIELD_CONTEXT (f_jbuf) = sjlj_fc_type_node;

      /* Chain the fields in declaration order and lay out the record.  */
      TYPE_FIELDS (sjlj_fc_type_node) = f_prev;
      TREE_CHAIN (f_prev) = f_cs;
      TREE_CHAIN (f_cs) = f_data;
      TREE_CHAIN (f_data) = f_per;
      TREE_CHAIN (f_per) = f_lsda;
      TREE_CHAIN (f_lsda) = f_jbuf;

      layout_type (sjlj_fc_type_node);

      /* Cache the interesting field offsets so that we have
	 easy access from rtl.  */
      sjlj_fc_call_site_ofs
	= (tree_low_cst (DECL_FIELD_OFFSET (f_cs), 1)
	   + tree_low_cst (DECL_FIELD_BIT_OFFSET (f_cs), 1) / BITS_PER_UNIT);
      sjlj_fc_data_ofs
	= (tree_low_cst (DECL_FIELD_OFFSET (f_data), 1)
	   + tree_low_cst (DECL_FIELD_BIT_OFFSET (f_data), 1) / BITS_PER_UNIT);
      sjlj_fc_personality_ofs
	= (tree_low_cst (DECL_FIELD_OFFSET (f_per), 1)
	   + tree_low_cst (DECL_FIELD_BIT_OFFSET (f_per), 1) / BITS_PER_UNIT);
      sjlj_fc_lsda_ofs
	= (tree_low_cst (DECL_FIELD_OFFSET (f_lsda), 1)
	   + tree_low_cst (DECL_FIELD_BIT_OFFSET (f_lsda), 1) / BITS_PER_UNIT);
      sjlj_fc_jbuf_ofs
	= (tree_low_cst (DECL_FIELD_OFFSET (f_jbuf), 1)
	   + tree_low_cst (DECL_FIELD_BIT_OFFSET (f_jbuf), 1) / BITS_PER_UNIT);
    }
}
463
52a11cbf
RH
/* Allocate the zero-initialized per-function EH state for the current
   function (cfun).  Freed by free_eh_status.  */

void
init_eh_for_function ()
{
  cfun->eh = (struct eh_status *) xcalloc (1, sizeof (struct eh_status));
}
469
52a11cbf 470/* Mark EH for GC. */
4956d07c
MS
471
472static void
52a11cbf
RH
473mark_eh_region (region)
474 struct eh_region *region;
4956d07c 475{
52a11cbf
RH
476 if (! region)
477 return;
4956d07c 478
52a11cbf
RH
479 switch (region->type)
480 {
481 case ERT_CLEANUP:
482 ggc_mark_tree (region->u.cleanup.exp);
483 break;
484 case ERT_TRY:
485 ggc_mark_rtx (region->u.try.continue_label);
486 break;
487 case ERT_CATCH:
488 ggc_mark_tree (region->u.catch.type);
489 break;
490 case ERT_ALLOWED_EXCEPTIONS:
491 ggc_mark_tree (region->u.allowed.type_list);
492 break;
493 case ERT_MUST_NOT_THROW:
494 break;
495 case ERT_THROW:
496 ggc_mark_tree (region->u.throw.type);
497 break;
498 case ERT_FIXUP:
499 ggc_mark_tree (region->u.fixup.cleanup_exp);
500 break;
501 default:
502 abort ();
503 }
4956d07c 504
52a11cbf 505 ggc_mark_rtx (region->label);
47c84870 506 ggc_mark_rtx (region->resume);
52a11cbf
RH
507 ggc_mark_rtx (region->landing_pad);
508 ggc_mark_rtx (region->post_landing_pad);
4956d07c
MS
509}
510
52a11cbf
RH
/* Mark the per-function EH state EH for garbage collection.  Called
   with a possibly-null pointer.  */

void
mark_eh_status (eh)
     struct eh_status *eh;
{
  int i;

  if (eh == 0)
    return;

  /* If we've called collect_eh_region_array, use it.  Otherwise walk
     the tree non-recursively.  */
  if (eh->region_array)
    {
      for (i = eh->last_region_number; i > 0; --i)
	{
	  struct eh_region *r = eh->region_array[i];
	  /* Only mark each region once (the array may alias entries;
	     the region_number check filters duplicates).  */
	  if (r && r->region_number == i)
	    mark_eh_region (r);
	}
    }
  else if (eh->region_tree)
    {
      struct eh_region *r = eh->region_tree;
      while (1)
	{
	  mark_eh_region (r);
	  if (r->inner)
	    r = r->inner;
	  else if (r->next_peer)
	    r = r->next_peer;
	  else
	    {
	      /* Climb out until we find an ancestor with an unvisited
		 peer; reaching the root ends the walk.  */
	      do {
		r = r->outer;
		if (r == NULL)
		  goto tree_done;
	      } while (r->next_peer == NULL);
	      r = r->next_peer;
	    }
	}
    tree_done:;
    }

  ggc_mark_tree (eh->protect_list);
  ggc_mark_rtx (eh->filter);
  ggc_mark_rtx (eh->exc_ptr);
  ggc_mark_tree_varray (eh->ttype_data);

  if (eh->call_site_data)
    {
      for (i = eh->call_site_data_used - 1; i >= 0; --i)
	ggc_mark_rtx (eh->call_site_data[i].landing_pad);
    }

  ggc_mark_rtx (eh->ehr_stackadj);
  ggc_mark_rtx (eh->ehr_handler);
  ggc_mark_rtx (eh->ehr_label);

  ggc_mark_rtx (eh->sjlj_fc);
  ggc_mark_rtx (eh->sjlj_exit_after);
}
9a0d1e1b 572
52a11cbf
RH
/* Release all memory held by the EH state of function F and clear
   F->eh.  Frees either the region array or, if it was never built,
   the raw region tree.  */

void
free_eh_status (f)
     struct function *f;
{
  struct eh_status *eh = f->eh;

  if (eh->region_array)
    {
      int i;
      for (i = eh->last_region_number; i > 0; --i)
	{
	  struct eh_region *r = eh->region_array[i];
	  /* Mind we don't free a region struct more than once.  */
	  if (r && r->region_number == i)
	    free (r);
	}
      free (eh->region_array);
    }
  else if (eh->region_tree)
    {
      /* Post-order-ish walk that frees each node; NEXT is always
	 captured before the node it lives in is released.  */
      struct eh_region *next, *r = eh->region_tree;
      while (1)
	{
	  if (r->inner)
	    r = r->inner;
	  else if (r->next_peer)
	    {
	      next = r->next_peer;
	      free (r);
	      r = next;
	    }
	  else
	    {
	      do {
	        next = r->outer;
	        free (r);
	        r = next;
	        if (r == NULL)
	          goto tree_done;
	      } while (r->next_peer == NULL);
	      next = r->next_peer;
	      free (r);
	      r = next;
	    }
	}
    tree_done:;
    }

  VARRAY_FREE (eh->ttype_data);
  VARRAY_FREE (eh->ehspec_data);
  VARRAY_FREE (eh->action_record_data);
  if (eh->call_site_data)
    free (eh->call_site_data);

  free (eh);
  f->eh = NULL;
}
630
52a11cbf
RH
631\f
/* Start an exception handling region.  All instructions emitted
   after this point are considered to be part of the region until
   expand_eh_region_end is invoked.  */

void
expand_eh_region_start ()
{
  struct eh_region *new_region;
  struct eh_region *cur_region;
  rtx note;

  if (! doing_eh (0))
    return;

  /* Insert a new blank region as a leaf in the tree.  */
  new_region = (struct eh_region *) xcalloc (1, sizeof (*new_region));
  cur_region = cfun->eh->cur_region;
  new_region->outer = cur_region;
  if (cur_region)
    {
      new_region->next_peer = cur_region->inner;
      cur_region->inner = new_region;
    }
  else
    {
      /* No enclosing region: chain onto the list of root regions.  */
      new_region->next_peer = cfun->eh->region_tree;
      cfun->eh->region_tree = new_region;
    }
  cfun->eh->cur_region = new_region;

  /* Create a note marking the start of this region.  */
  new_region->region_number = ++cfun->eh->last_region_number;
  note = emit_note (NULL, NOTE_INSN_EH_REGION_BEG);
  NOTE_EH_HANDLER (note) = new_region->region_number;
}
667
52a11cbf 668/* Common code to end a region. Returns the region just ended. */
9f8e6243 669
52a11cbf
RH
670static struct eh_region *
671expand_eh_region_end ()
9f8e6243 672{
52a11cbf
RH
673 struct eh_region *cur_region = cfun->eh->cur_region;
674 rtx note;
675
676 /* Create a nute marking the end of this region. */
6496a589 677 note = emit_note (NULL, NOTE_INSN_EH_REGION_END);
52a11cbf
RH
678 NOTE_EH_HANDLER (note) = cur_region->region_number;
679
680 /* Pop. */
681 cfun->eh->cur_region = cur_region->outer;
682
52a11cbf 683 return cur_region;
9f8e6243
AM
684}
685
52a11cbf
RH
/* End an exception handling region for a cleanup.  HANDLER is an
   expression to expand for the cleanup.  */

void
expand_eh_region_end_cleanup (handler)
     tree handler;
{
  struct eh_region *region;
  rtx around_label;
  rtx data_save[2];

  if (! doing_eh (0))
    return;

  region = expand_eh_region_end ();
  region->type = ERT_CLEANUP;
  region->label = gen_label_rtx ();
  region->u.cleanup.exp = handler;

  /* Normal control flow jumps over the cleanup code.  */
  around_label = gen_label_rtx ();
  emit_jump (around_label);

  emit_label (region->label);

  if (protect_cleanup_actions)
    expand_eh_region_start ();

  /* In case this cleanup involves an inline destructor with a try block in
     it, we need to save the EH return data registers around it.  */
  data_save[0] = gen_reg_rtx (Pmode);
  emit_move_insn (data_save[0], get_exception_pointer ());
  data_save[1] = gen_reg_rtx (word_mode);
  emit_move_insn (data_save[1], get_exception_filter ());

  expand_expr (handler, const0_rtx, VOIDmode, 0);

  /* Restore the EH data after the cleanup body.  */
  emit_move_insn (cfun->eh->exc_ptr, data_save[0]);
  emit_move_insn (cfun->eh->filter, data_save[1]);

  if (protect_cleanup_actions)
    expand_eh_region_end_must_not_throw (protect_cleanup_actions);

  /* We need any stack adjustment complete before the around_label.  */
  do_pending_stack_adjust ();

  /* We delay the generation of the _Unwind_Resume until we generate
     landing pads.  We emit a marker here so as to get good control
     flow data in the meantime.  */
  region->resume
    = emit_jump_insn (gen_rtx_RESX (VOIDmode, region->region_number));
  emit_barrier ();

  emit_label (around_label);
}
740
52a11cbf
RH
/* End an exception handling region for a try block, and prepares
   for subsequent calls to expand_start_catch.  */

void
expand_start_all_catch ()
{
  struct eh_region *region;

  if (! doing_eh (1))
    return;

  region = expand_eh_region_end ();
  region->type = ERT_TRY;
  region->u.try.prev_try = cfun->eh->try_region;
  region->u.try.continue_label = gen_label_rtx ();

  cfun->eh->try_region = region;

  /* Fall-through from the try body jumps past the handlers; the
     continue label itself is emitted by expand_end_all_catch.  */
  emit_jump (region->u.try.continue_label);
}
9a0d1e1b 761
52a11cbf
RH
/* Begin a catch clause.  TYPE is the type caught, or null if this is
   a catch-all clause.  */

void
expand_start_catch (type)
     tree type;
{
  struct eh_region *t, *c, *l;

  if (! doing_eh (0))
    return;

  if (type)
    add_type_for_runtime (type);
  expand_eh_region_start ();

  t = cfun->eh->try_region;
  c = cfun->eh->cur_region;
  c->type = ERT_CATCH;
  c->u.catch.type = type;
  c->label = gen_label_rtx ();

  /* Append this handler to the try region's chain of catches.  */
  l = t->u.try.last_catch;
  c->u.catch.prev_catch = l;
  if (l)
    l->u.catch.next_catch = c;
  else
    t->u.try.catch = c;
  t->u.try.last_catch = c;

  emit_label (c->label);
}
794
52a11cbf 795/* End a catch clause. Control will resume after the try/catch block. */
9a0d1e1b 796
52a11cbf
RH
797void
798expand_end_catch ()
9a0d1e1b 799{
52a11cbf
RH
800 struct eh_region *try_region, *catch_region;
801
802 if (! doing_eh (0))
803 return;
804
805 catch_region = expand_eh_region_end ();
806 try_region = cfun->eh->try_region;
807
808 emit_jump (try_region->u.try.continue_label);
9a0d1e1b
AM
809}
810
52a11cbf 811/* End a sequence of catch handlers for a try block. */
9a0d1e1b 812
52a11cbf
RH
813void
814expand_end_all_catch ()
9a0d1e1b 815{
52a11cbf
RH
816 struct eh_region *try_region;
817
818 if (! doing_eh (0))
819 return;
820
821 try_region = cfun->eh->try_region;
822 cfun->eh->try_region = try_region->u.try.prev_try;
823
824 emit_label (try_region->u.try.continue_label);
9a0d1e1b
AM
825}
826
52a11cbf
RH
/* End an exception region for an exception type filter.  ALLOWED is a
   TREE_LIST of types to be matched by the runtime.  FAILURE is an
   expression to invoke if a mismatch occurs.  */

void
expand_eh_region_end_allowed (allowed, failure)
     tree allowed, failure;
{
  struct eh_region *region;
  rtx around_label;

  if (! doing_eh (0))
    return;

  region = expand_eh_region_end ();
  region->type = ERT_ALLOWED_EXCEPTIONS;
  region->u.allowed.type_list = allowed;
  region->label = gen_label_rtx ();

  /* Register each allowed type with the runtime type map.  */
  for (; allowed ; allowed = TREE_CHAIN (allowed))
    add_type_for_runtime (TREE_VALUE (allowed));

  /* We must emit the call to FAILURE here, so that if this function
     throws a different exception, that it will be processed by the
     correct region.  */

  around_label = gen_label_rtx ();
  emit_jump (around_label);

  emit_label (region->label);
  expand_expr (failure, const0_rtx, VOIDmode, EXPAND_NORMAL);

  emit_label (around_label);
}
861
52a11cbf
RH
/* End an exception region for a must-not-throw filter.  FAILURE is an
   expression to invoke if an uncaught exception propagates this far.

   This is conceptually identical to expand_eh_region_end_allowed with
   an empty allowed list (if you passed "std::terminate" instead of
   "__cxa_call_unexpected"), but they are represented differently in
   the C++ LSDA.  */

void
expand_eh_region_end_must_not_throw (failure)
     tree failure;
{
  struct eh_region *region;
  rtx around_label;

  if (! doing_eh (0))
    return;

  region = expand_eh_region_end ();
  region->type = ERT_MUST_NOT_THROW;
  region->label = gen_label_rtx ();

  /* We must emit the call to FAILURE here, so that if this function
     throws a different exception, that it will be processed by the
     correct region.  */

  around_label = gen_label_rtx ();
  emit_jump (around_label);

  emit_label (region->label);
  expand_expr (failure, const0_rtx, VOIDmode, EXPAND_NORMAL);

  emit_label (around_label);
}
896
52a11cbf
RH
897/* End an exception region for a throw. No handling goes on here,
898 but it's the easiest way for the front-end to indicate what type
899 is being thrown. */
6814a8a0 900
52a11cbf
RH
901void
902expand_eh_region_end_throw (type)
903 tree type;
e6cfb550 904{
52a11cbf
RH
905 struct eh_region *region;
906
907 if (! doing_eh (0))
908 return;
909
910 region = expand_eh_region_end ();
911 region->type = ERT_THROW;
912 region->u.throw.type = type;
e6cfb550
AM
913}
914
52a11cbf
RH
915/* End a fixup region. Within this region the cleanups for the immediately
916 enclosing region are _not_ run. This is used for goto cleanup to avoid
917 destroying an object twice.
12670d88 918
52a11cbf
RH
919 This would be an extraordinarily simple prospect, were it not for the
920 fact that we don't actually know what the immediately enclosing region
921 is. This surprising fact is because expand_cleanups is currently
922 generating a sequence that it will insert somewhere else. We collect
923 the proper notion of "enclosing" in convert_from_eh_region_ranges. */
4956d07c 924
52a11cbf
RH
925void
926expand_eh_region_end_fixup (handler)
927 tree handler;
4956d07c 928{
52a11cbf
RH
929 struct eh_region *fixup;
930
931 if (! doing_eh (0))
932 return;
933
934 fixup = expand_eh_region_end ();
935 fixup->type = ERT_FIXUP;
936 fixup->u.fixup.cleanup_exp = handler;
4956d07c
MS
937}
938
47c84870 939/* Return an rtl expression for a pointer to the exception object
52a11cbf 940 within a handler. */
4956d07c
MS
941
942rtx
52a11cbf 943get_exception_pointer ()
4956d07c 944{
52a11cbf
RH
945 rtx exc_ptr = cfun->eh->exc_ptr;
946 if (! exc_ptr)
947 {
948 exc_ptr = gen_reg_rtx (Pmode);
949 cfun->eh->exc_ptr = exc_ptr;
950 }
951 return exc_ptr;
952}
4956d07c 953
47c84870
JM
954/* Return an rtl expression for the exception dispatch filter
955 within a handler. */
956
957static rtx
958get_exception_filter ()
959{
960 rtx filter = cfun->eh->filter;
961 if (! filter)
962 {
041c9d5a 963 filter = gen_reg_rtx (word_mode);
47c84870
JM
964 cfun->eh->filter = filter;
965 }
966 return filter;
967}
52a11cbf
RH
968\f
/* Begin a region that will contain entries created with
   add_partial_entry.  */

void
begin_protect_partials ()
{
  /* Push room for a new list.  */
  cfun->eh->protect_list
    = tree_cons (NULL_TREE, NULL_TREE, cfun->eh->protect_list);
}
979
27a36778
MS
/* Start a new exception region for a region of code that has a
   cleanup action and push the HANDLER for the region onto
   protect_list.  All of the regions created with add_partial_entry
   will be ended when end_protect_partials is invoked.  */

void
add_partial_entry (handler)
     tree handler;
{
  expand_eh_region_start ();

  /* ??? This comment was old before the most recent rewrite.  We
     really ought to fix the callers at some point.  */
  /* For backwards compatibility, we allow callers to omit calls to
     begin_protect_partials for the outermost region.  So, we must
     explicitly do so here.  */
  if (!cfun->eh->protect_list)
    begin_protect_partials ();

  /* Add this entry to the front of the list.  */
  TREE_VALUE (cfun->eh->protect_list)
    = tree_cons (NULL_TREE, handler, TREE_VALUE (cfun->eh->protect_list));
}
1003
52a11cbf 1004/* End all the pending exception regions on protect_list. */
27a36778 1005
52a11cbf
RH
void
end_protect_partials ()
{
  tree t;

  /* ??? This comment was old before the most recent rewrite.  We
     really ought to fix the callers at some point.  */
  /* For backwards compatibility, we allow callers to omit the call to
     begin_protect_partials for the outermost region.  So,
     PROTECT_LIST may be NULL.  */
  if (!cfun->eh->protect_list)
    return;

  /* Pop the topmost entry.  */
  t = TREE_VALUE (cfun->eh->protect_list);
  cfun->eh->protect_list = TREE_CHAIN (cfun->eh->protect_list);

  /* End all the exception regions.  One region was started by
     add_partial_entry for each handler on the list.  */
  for (; t; t = TREE_CHAIN (t))
    expand_eh_region_end_cleanup (TREE_VALUE (t));
}
1027
52a11cbf
RH
1028\f
1029/* This section is for the exception handling specific optimization pass. */
154bba13 1030
52a11cbf
RH
1031/* Random access the exception region tree. It's just as simple to
1032 collect the regions this way as in expand_eh_region_start, but
1033 without having to realloc memory. */
154bba13 1034
52a11cbf
RH
static void
collect_eh_region_array ()
{
  struct eh_region **array, *i;

  i = cfun->eh->region_tree;
  if (! i)
    return;

  /* Zero-filled, so region numbers with no region map to NULL.  */
  array = xcalloc (cfun->eh->last_region_number + 1, sizeof (*array));
  cfun->eh->region_array = array;

  /* Depth-first walk of the region tree using the inner/next_peer/outer
     links; terminates by returning when we step up past the root.  */
  while (1)
    {
      array[i->region_number] = i;

      /* If there are sub-regions, process them.  */
      if (i->inner)
	i = i->inner;
      /* If there are peers, process them.  */
      else if (i->next_peer)
	i = i->next_peer;
      /* Otherwise, step back up the tree to the next peer.  */
      else
	{
	  do {
	    i = i->outer;
	    if (i == NULL)
	      return;
	  } while (i->next_peer == NULL);
	  i = i->next_peer;
	}
    }
}
1069
52a11cbf
RH
1070static void
1071resolve_fixup_regions ()
27a36778 1072{
52a11cbf 1073 int i, j, n = cfun->eh->last_region_number;
27a36778 1074
52a11cbf
RH
1075 for (i = 1; i <= n; ++i)
1076 {
1077 struct eh_region *fixup = cfun->eh->region_array[i];
1078 struct eh_region *cleanup;
27a36778 1079
52a11cbf
RH
1080 if (! fixup || fixup->type != ERT_FIXUP)
1081 continue;
27a36778 1082
52a11cbf
RH
1083 for (j = 1; j <= n; ++j)
1084 {
1085 cleanup = cfun->eh->region_array[j];
1086 if (cleanup->type == ERT_CLEANUP
1087 && cleanup->u.cleanup.exp == fixup->u.fixup.cleanup_exp)
1088 break;
1089 }
1090 if (j > n)
1091 abort ();
27a36778 1092
52a11cbf
RH
1093 fixup->u.fixup.real_region = cleanup->outer;
1094 }
27a36778 1095}
27a36778 1096
52a11cbf
RH
1097/* Now that we've discovered what region actually encloses a fixup,
1098 we can shuffle pointers and remove them from the tree. */
27a36778
MS
1099
static void
remove_fixup_regions ()
{
  int i;

  for (i = cfun->eh->last_region_number; i > 0; --i)
    {
      struct eh_region *fixup = cfun->eh->region_array[i];

      if (! fixup)
	continue;

      /* Allow GC to maybe free some memory.  */
      if (fixup->type == ERT_CLEANUP)
	fixup->u.cleanup.exp = NULL_TREE;

      if (fixup->type != ERT_FIXUP)
	continue;

      /* Re-parent the fixup's children onto its real region before the
	 fixup itself is removed from the tree.  */
      if (fixup->inner)
	{
	  struct eh_region *parent, *p, **pp;

	  parent = fixup->u.fixup.real_region;

	  /* Fix up the children's parent pointers; find the end of
	     the list.  */
	  for (p = fixup->inner; ; p = p->next_peer)
	    {
	      p->outer = parent;
	      if (! p->next_peer)
		break;
	    }

	  /* In the tree of cleanups, only outer-inner ordering matters.
	     So link the children back in anywhere at the correct level.  */
	  if (parent)
	    pp = &parent->inner;
	  else
	    pp = &cfun->eh->region_tree;
	  p->next_peer = *pp;
	  *pp = fixup->inner;
	  fixup->inner = NULL;
	}

      remove_eh_handler (fixup);
    }
}
1148
52a11cbf
RH
1149/* Turn NOTE_INSN_EH_REGION notes into REG_EH_REGION notes for each
1150 can_throw instruction in the region. */
27a36778
MS
1151
static void
convert_from_eh_region_ranges_1 (pinsns, orig_sp, cur)
     rtx *pinsns;
     int *orig_sp;
     int cur;
{
  /* SP is a stack of enclosing region numbers; CUR is the current one.  */
  int *sp = orig_sp;
  rtx insn, next;

  for (insn = *pinsns; insn ; insn = next)
    {
      next = NEXT_INSN (insn);
      if (GET_CODE (insn) == NOTE)
	{
	  int kind = NOTE_LINE_NUMBER (insn);
	  if (kind == NOTE_INSN_EH_REGION_BEG
	      || kind == NOTE_INSN_EH_REGION_END)
	    {
	      if (kind == NOTE_INSN_EH_REGION_BEG)
		{
		  struct eh_region *r;

		  *sp++ = cur;
		  cur = NOTE_EH_HANDLER (insn);

		  /* Fixup and catch regions are not themselves the
		     region that handles a throw; substitute the real
		     enclosing region's number.  */
		  r = cfun->eh->region_array[cur];
		  if (r->type == ERT_FIXUP)
		    {
		      r = r->u.fixup.real_region;
		      cur = r ? r->region_number : 0;
		    }
		  else if (r->type == ERT_CATCH)
		    {
		      r = r->outer;
		      cur = r ? r->region_number : 0;
		    }
		}
	      else
		cur = *--sp;

	      /* Removing the first insn of a CALL_PLACEHOLDER sequence
		 requires extra care to adjust sequence start.  */
	      if (insn == *pinsns)
		*pinsns = next;
	      remove_insn (insn);
	      continue;
	    }
	}
      else if (INSN_P (insn))
	{
	  if (cur > 0
	      && ! find_reg_note (insn, REG_EH_REGION, NULL_RTX)
	      /* Calls can always potentially throw exceptions, unless
		 they have a REG_EH_REGION note with a value of 0 or less.
		 Which should be the only possible kind so far.  */
	      && (GET_CODE (insn) == CALL_INSN
		  /* If we wanted exceptions for non-call insns, then
		     any may_trap_p instruction could throw.  */
		  || (flag_non_call_exceptions
		      && may_trap_p (PATTERN (insn)))))
	    {
	      REG_NOTES (insn) = alloc_EXPR_LIST (REG_EH_REGION, GEN_INT (cur),
						  REG_NOTES (insn));
	    }

	  /* Recurse into the three insn sequences of a CALL_PLACEHOLDER,
	     sharing the same region stack.  */
	  if (GET_CODE (insn) == CALL_INSN
	      && GET_CODE (PATTERN (insn)) == CALL_PLACEHOLDER)
	    {
	      convert_from_eh_region_ranges_1 (&XEXP (PATTERN (insn), 0),
					       sp, cur);
	      convert_from_eh_region_ranges_1 (&XEXP (PATTERN (insn), 1),
					       sp, cur);
	      convert_from_eh_region_ranges_1 (&XEXP (PATTERN (insn), 2),
					       sp, cur);
	    }
	}
    }

  /* All BEG notes must have been matched by END notes.  */
  if (sp != orig_sp)
    abort ();
}
27a36778 1233
52a11cbf
RH
/* Convert the NOTE_INSN_EH_REGION markers in the current function's
   insn stream into per-insn REG_EH_REGION notes, then remove the
   now-resolved fixup regions from the region tree.  */

void
convert_from_eh_region_ranges ()
{
  int *stack;
  rtx insns;

  collect_eh_region_array ();
  resolve_fixup_regions ();

  /* Worst-case stack depth is one entry per region.  */
  stack = xmalloc (sizeof (int) * (cfun->eh->last_region_number + 1));
  insns = get_insns ();
  convert_from_eh_region_ranges_1 (&insns, stack, 0);
  free (stack);

  remove_fixup_regions ();
}
1250
52a11cbf
RH
/* Rebuild the global exception_handler_labels list from the current
   function's region tree, using landing pad labels once those have
   been built and plain region labels before that.  */

void
find_exception_handler_labels ()
{
  rtx list = NULL_RTX;
  int i;

  free_EXPR_LIST_list (&exception_handler_labels);

  if (cfun->eh->region_tree == NULL)
    return;

  for (i = cfun->eh->last_region_number; i > 0; --i)
    {
      struct eh_region *region = cfun->eh->region_array[i];
      rtx lab;

      if (! region)
	continue;
      if (cfun->eh->built_landing_pads)
	lab = region->landing_pad;
      else
	lab = region->label;

      if (lab)
	list = alloc_EXPR_LIST (0, lab, list);
    }

  /* For sjlj exceptions, need the return label to remain live until
     after landing pad generation.  */
  if (USING_SJLJ_EXCEPTIONS && ! cfun->eh->built_landing_pads)
    list = alloc_EXPR_LIST (0, return_label, list);

  exception_handler_labels = list;
}
1285
52a11cbf
RH
1286\f
1287static struct eh_region *
1288duplicate_eh_region_1 (o, map)
1289 struct eh_region *o;
1290 struct inline_remap *map;
4956d07c 1291{
52a11cbf
RH
1292 struct eh_region *n
1293 = (struct eh_region *) xcalloc (1, sizeof (struct eh_region));
4956d07c 1294
52a11cbf
RH
1295 n->region_number = o->region_number + cfun->eh->last_region_number;
1296 n->type = o->type;
4956d07c 1297
52a11cbf
RH
1298 switch (n->type)
1299 {
1300 case ERT_CLEANUP:
1301 case ERT_MUST_NOT_THROW:
1302 break;
27a36778 1303
52a11cbf
RH
1304 case ERT_TRY:
1305 if (o->u.try.continue_label)
1306 n->u.try.continue_label
1307 = get_label_from_map (map,
1308 CODE_LABEL_NUMBER (o->u.try.continue_label));
1309 break;
27a36778 1310
52a11cbf
RH
1311 case ERT_CATCH:
1312 n->u.catch.type = o->u.catch.type;
1313 break;
27a36778 1314
52a11cbf
RH
1315 case ERT_ALLOWED_EXCEPTIONS:
1316 n->u.allowed.type_list = o->u.allowed.type_list;
1317 break;
1318
1319 case ERT_THROW:
1320 n->u.throw.type = o->u.throw.type;
1321
1322 default:
1323 abort ();
1324 }
1325
1326 if (o->label)
1327 n->label = get_label_from_map (map, CODE_LABEL_NUMBER (o->label));
47c84870 1328 if (o->resume)
e7b9b18e 1329 {
47c84870
JM
1330 n->resume = map->insn_map[INSN_UID (o->resume)];
1331 if (n->resume == NULL)
52a11cbf 1332 abort ();
27a36778 1333 }
4956d07c 1334
52a11cbf 1335 return n;
4956d07c
MS
1336}
1337
52a11cbf
RH
/* Second pass of region duplication: now that every region has been
   copied (N_ARRAY maps old region numbers to new regions), fix up the
   pointers from region O's copy to other regions.  */

static void
duplicate_eh_region_2 (o, n_array)
     struct eh_region *o;
     struct eh_region **n_array;
{
  struct eh_region *n = n_array[o->region_number];

  switch (n->type)
    {
    case ERT_TRY:
      n->u.try.catch = n_array[o->u.try.catch->region_number];
      n->u.try.last_catch = n_array[o->u.try.last_catch->region_number];
      break;

    case ERT_CATCH:
      if (o->u.catch.next_catch)
	n->u.catch.next_catch = n_array[o->u.catch.next_catch->region_number];
      if (o->u.catch.prev_catch)
	n->u.catch.prev_catch = n_array[o->u.catch.prev_catch->region_number];
      break;

    default:
      break;
    }

  /* Tree linkage is common to all region types.  */
  if (o->outer)
    n->outer = n_array[o->outer->region_number];
  if (o->inner)
    n->inner = n_array[o->inner->region_number];
  if (o->next_peer)
    n->next_peer = n_array[o->next_peer->region_number];
}
1370
/* Copy the EH region tree of inlined function IFUN into the current
   function, splicing it under the current region (if any).  Returns
   the region number offset applied to the copied regions.  */

int
duplicate_eh_regions (ifun, map)
     struct function *ifun;
     struct inline_remap *map;
{
  int ifun_last_region_number = ifun->eh->last_region_number;
  struct eh_region **n_array, *root, *cur;
  int i;

  if (ifun_last_region_number == 0)
    return 0;

  n_array = xcalloc (ifun_last_region_number + 1, sizeof (*n_array));

  /* Two passes: copy each region, then patch inter-region pointers.  */
  for (i = 1; i <= ifun_last_region_number; ++i)
    {
      cur = ifun->eh->region_array[i];
      if (!cur || cur->region_number != i)
	continue;
      n_array[i] = duplicate_eh_region_1 (cur, map);
    }
  for (i = 1; i <= ifun_last_region_number; ++i)
    {
      cur = ifun->eh->region_array[i];
      if (!cur || cur->region_number != i)
	continue;
      duplicate_eh_region_2 (cur, n_array);
    }

  /* Splice the copied tree in as the last peer under the current
     region, or under the function's region tree root.  */
  root = n_array[ifun->eh->region_tree->region_number];
  cur = cfun->eh->cur_region;
  if (cur)
    {
      struct eh_region *p = cur->inner;
      if (p)
	{
	  while (p->next_peer)
	    p = p->next_peer;
	  p->next_peer = root;
	}
      else
	cur->inner = root;

      /* NOTE(review): this loop assumes every n_array[i] in 1..n is
	 non-NULL; entries skipped above would be dereferenced here —
	 confirm region numbers are always dense for inlined functions.  */
      for (i = 1; i <= ifun_last_region_number; ++i)
	if (n_array[i]->outer == NULL)
	  n_array[i]->outer = cur;
    }
  else
    {
      struct eh_region *p = cfun->eh->region_tree;
      if (p)
	{
	  while (p->next_peer)
	    p = p->next_peer;
	  p->next_peer = root;
	}
      else
	cfun->eh->region_tree = root;
    }

  free (n_array);

  i = cfun->eh->last_region_number;
  cfun->eh->last_region_number = i + ifun_last_region_number;
  return i;
}
1437
52a11cbf
RH
1438\f
1439/* ??? Move from tree.c to tree.h. */
1440#define TYPE_HASH(TYPE) ((HOST_WIDE_INT) (TYPE) & 0777777)
9762d48d 1441
52a11cbf
RH
1442static int
1443t2r_eq (pentry, pdata)
1444 const PTR pentry;
1445 const PTR pdata;
9762d48d 1446{
52a11cbf
RH
1447 tree entry = (tree) pentry;
1448 tree data = (tree) pdata;
9762d48d 1449
52a11cbf 1450 return TREE_PURPOSE (entry) == data;
9762d48d
JM
1451}
1452
52a11cbf
RH
1453static hashval_t
1454t2r_hash (pentry)
1455 const PTR pentry;
1456{
1457 tree entry = (tree) pentry;
1458 return TYPE_HASH (TREE_PURPOSE (entry));
1459}
9762d48d 1460
52a11cbf
RH
1461static int
1462t2r_mark_1 (slot, data)
1463 PTR *slot;
1464 PTR data ATTRIBUTE_UNUSED;
9762d48d 1465{
52a11cbf
RH
1466 tree contents = (tree) *slot;
1467 ggc_mark_tree (contents);
1468 return 1;
1469}
9762d48d 1470
52a11cbf
RH
static void
t2r_mark (addr)
     PTR addr;
{
  /* ADDR points at the type_to_runtime_map htab; mark every entry
     it holds so the GC keeps the trees alive.  */
  htab_traverse (*(htab_t *)addr, t2r_mark_1, NULL);
}
9762d48d 1477
52a11cbf
RH
/* Record in type_to_runtime_map the runtime representation of TYPE,
   as computed by the language hook, unless already present.  */

static void
add_type_for_runtime (type)
     tree type;
{
  tree *slot;

  slot = (tree *) htab_find_slot_with_hash (type_to_runtime_map, type,
					    TYPE_HASH (type), INSERT);
  if (*slot == NULL)
    {
      tree runtime = (*lang_eh_runtime_type) (type);
      *slot = tree_cons (type, runtime, NULL_TREE);
    }
}
1492
/* Return the runtime representation previously recorded for TYPE by
   add_type_for_runtime.  */

static tree
lookup_type_for_runtime (type)
     tree type;
{
  tree *slot;

  slot = (tree *) htab_find_slot_with_hash (type_to_runtime_map, type,
					    TYPE_HASH (type), NO_INSERT);

  /* We should have always inserted the data earlier.  */
  return TREE_VALUE (*slot);
}
9762d48d 1505
52a11cbf
RH
1506\f
/* Represent an entry in @TTypes for either catch actions
   or exception filter actions.  */
struct ttypes_filter
{
  tree t;	/* A type node, or for ehspec entries a type list.  */
  int filter;	/* Assigned filter value: 1-based table index for ttypes,
		   negative byte index for ehspec (see add_ehspec_entry).  */
};
b37f006b 1514
52a11cbf
RH
1515/* Compare ENTRY (a ttypes_filter entry in the hash table) with DATA
1516 (a tree) for a @TTypes type node we are thinking about adding. */
b37f006b 1517
52a11cbf
RH
1518static int
1519ttypes_filter_eq (pentry, pdata)
1520 const PTR pentry;
1521 const PTR pdata;
1522{
1523 const struct ttypes_filter *entry = (const struct ttypes_filter *) pentry;
1524 tree data = (tree) pdata;
b37f006b 1525
52a11cbf 1526 return entry->t == data;
9762d48d
JM
1527}
1528
52a11cbf
RH
1529static hashval_t
1530ttypes_filter_hash (pentry)
1531 const PTR pentry;
1532{
1533 const struct ttypes_filter *entry = (const struct ttypes_filter *) pentry;
1534 return TYPE_HASH (entry->t);
1535}
4956d07c 1536
52a11cbf
RH
1537/* Compare ENTRY with DATA (both struct ttypes_filter) for a @TTypes
1538 exception specification list we are thinking about adding. */
1539/* ??? Currently we use the type lists in the order given. Someone
1540 should put these in some canonical order. */
1541
1542static int
1543ehspec_filter_eq (pentry, pdata)
1544 const PTR pentry;
1545 const PTR pdata;
4956d07c 1546{
52a11cbf
RH
1547 const struct ttypes_filter *entry = (const struct ttypes_filter *) pentry;
1548 const struct ttypes_filter *data = (const struct ttypes_filter *) pdata;
1549
1550 return type_list_equal (entry->t, data->t);
4956d07c
MS
1551}
1552
52a11cbf 1553/* Hash function for exception specification lists. */
4956d07c 1554
52a11cbf
RH
1555static hashval_t
1556ehspec_filter_hash (pentry)
1557 const PTR pentry;
4956d07c 1558{
52a11cbf
RH
1559 const struct ttypes_filter *entry = (const struct ttypes_filter *) pentry;
1560 hashval_t h = 0;
1561 tree list;
1562
1563 for (list = entry->t; list ; list = TREE_CHAIN (list))
1564 h = (h << 5) + (h >> 27) + TYPE_HASH (TREE_VALUE (list));
1565 return h;
4956d07c
MS
1566}
1567
52a11cbf
RH
1568/* Add TYPE to cfun->eh->ttype_data, using TYPES_HASH to speed
1569 up the search. Return the filter value to be used. */
4956d07c 1570
52a11cbf
RH
static int
add_ttypes_entry (ttypes_hash, type)
     htab_t ttypes_hash;
     tree type;
{
  struct ttypes_filter **slot, *n;

  slot = (struct ttypes_filter **)
    htab_find_slot_with_hash (ttypes_hash, type, TYPE_HASH (type), INSERT);

  if ((n = *slot) == NULL)
    {
      /* Filter value is a 1 based table index.  */

      n = (struct ttypes_filter *) xmalloc (sizeof (*n));
      n->t = type;
      n->filter = VARRAY_ACTIVE_SIZE (cfun->eh->ttype_data) + 1;
      *slot = n;

      /* Record the type in the function's @TTypes table as well.  */
      VARRAY_PUSH_TREE (cfun->eh->ttype_data, type);
    }

  return n->filter;
}
1595
52a11cbf
RH
1596/* Add LIST to cfun->eh->ehspec_data, using EHSPEC_HASH and TYPES_HASH
1597 to speed up the search. Return the filter value to be used. */
1598
static int
add_ehspec_entry (ehspec_hash, ttypes_hash, list)
     htab_t ehspec_hash;
     htab_t ttypes_hash;
     tree list;
{
  struct ttypes_filter **slot, *n;
  struct ttypes_filter dummy;

  /* Probe with a stack dummy so we only allocate on a miss.  */
  dummy.t = list;
  slot = (struct ttypes_filter **)
    htab_find_slot (ehspec_hash, &dummy, INSERT);

  if ((n = *slot) == NULL)
    {
      /* Filter value is a -1 based byte index into a uleb128 buffer.  */

      n = (struct ttypes_filter *) xmalloc (sizeof (*n));
      n->t = list;
      n->filter = -(VARRAY_ACTIVE_SIZE (cfun->eh->ehspec_data) + 1);
      *slot = n;

      /* Look up each type in the list and encode its filter
	 value as a uleb128.  Terminate the list with 0.  */
      for (; list ; list = TREE_CHAIN (list))
	push_uleb128 (&cfun->eh->ehspec_data,
		      add_ttypes_entry (ttypes_hash, TREE_VALUE (list)));
      VARRAY_PUSH_UCHAR (cfun->eh->ehspec_data, 0);
    }

  return n->filter;
}
1631
52a11cbf
RH
1632/* Generate the action filter values to be used for CATCH and
1633 ALLOWED_EXCEPTIONS regions. When using dwarf2 exception regions,
1634 we use lots of landing pads, and so every type or list can share
1635 the same filter value, which saves table space. */
1636
static void
assign_filter_values ()
{
  int i;
  htab_t ttypes, ehspec;

  VARRAY_TREE_INIT (cfun->eh->ttype_data, 16, "ttype_data");
  VARRAY_UCHAR_INIT (cfun->eh->ehspec_data, 64, "ehspec_data");

  /* Temporary tables used only to deduplicate filter values.  */
  ttypes = htab_create (31, ttypes_filter_hash, ttypes_filter_eq, free);
  ehspec = htab_create (31, ehspec_filter_hash, ehspec_filter_eq, free);

  for (i = cfun->eh->last_region_number; i > 0; --i)
    {
      struct eh_region *r = cfun->eh->region_array[i];

      /* Mind we don't process a region more than once.  */
      if (!r || r->region_number != i)
	continue;

      switch (r->type)
	{
	case ERT_CATCH:
	  r->u.catch.filter = add_ttypes_entry (ttypes, r->u.catch.type);
	  break;

	case ERT_ALLOWED_EXCEPTIONS:
	  r->u.allowed.filter
	    = add_ehspec_entry (ehspec, ttypes, r->u.allowed.type_list);
	  break;

	default:
	  break;
	}
    }

  htab_delete (ttypes);
  htab_delete (ehspec);
}
1676
/* Emit the post-landing-pad code for every region: the label reached
   after the runtime has transferred control here, plus the filter
   dispatch (for try/allowed regions) and a RESX marker for rethrow.  */

static void
build_post_landing_pads ()
{
  int i;

  for (i = cfun->eh->last_region_number; i > 0; --i)
    {
      struct eh_region *region = cfun->eh->region_array[i];
      rtx seq;

      /* Mind we don't process a region more than once.  */
      if (!region || region->region_number != i)
	continue;

      switch (region->type)
	{
	case ERT_TRY:
	  /* ??? Collect the set of all non-overlapping catch handlers
	       all the way up the chain until blocked by a cleanup.  */
	  /* ??? Outer try regions can share landing pads with inner
	     try regions if the types are completely non-overlapping,
	     and there are no intervening cleanups.  */

	  region->post_landing_pad = gen_label_rtx ();

	  start_sequence ();

	  emit_label (region->post_landing_pad);

	  /* ??? It is mighty inconvenient to call back into the
	     switch statement generation code in expand_end_case.
	     Rapid prototyping sez a sequence of ifs.  */
	  {
	    struct eh_region *c;
	    for (c = region->u.try.catch; c ; c = c->u.catch.next_catch)
	      {
		/* ??? _Unwind_ForcedUnwind wants no match here.  */
		if (c->u.catch.type == NULL)
		  emit_jump (c->label);
		else
		  emit_cmp_and_jump_insns (cfun->eh->filter,
					   GEN_INT (c->u.catch.filter),
					   EQ, NULL_RTX, word_mode,
					   0, 0, c->label);
	      }
	  }

	  /* We delay the generation of the _Unwind_Resume until we generate
	     landing pads.  We emit a marker here so as to get good control
	     flow data in the meantime.  */
	  region->resume
	    = emit_jump_insn (gen_rtx_RESX (VOIDmode, region->region_number));
	  emit_barrier ();

	  seq = get_insns ();
	  end_sequence ();

	  emit_insns_before (seq, region->u.try.catch->label);
	  break;

	case ERT_ALLOWED_EXCEPTIONS:
	  region->post_landing_pad = gen_label_rtx ();

	  start_sequence ();

	  emit_label (region->post_landing_pad);

	  /* Jump to the handler only if the filter matches the
	     allowed-exceptions specification.  */
	  emit_cmp_and_jump_insns (cfun->eh->filter,
				   GEN_INT (region->u.allowed.filter),
				   EQ, NULL_RTX, word_mode, 0, 0,
				   region->label);

	  /* We delay the generation of the _Unwind_Resume until we generate
	     landing pads.  We emit a marker here so as to get good control
	     flow data in the meantime.  */
	  region->resume
	    = emit_jump_insn (gen_rtx_RESX (VOIDmode, region->region_number));
	  emit_barrier ();

	  seq = get_insns ();
	  end_sequence ();

	  emit_insns_before (seq, region->label);
	  break;

	case ERT_CLEANUP:
	case ERT_MUST_NOT_THROW:
	  /* These regions need no dispatch; the pad is the label itself.  */
	  region->post_landing_pad = region->label;
	  break;

	case ERT_CATCH:
	case ERT_THROW:
	  /* Nothing to do.  */
	  break;

	default:
	  abort ();
	}
    }
}
1e4ceb6f 1777
47c84870
JM
1778/* Replace RESX patterns with jumps to the next handler if any, or calls to
1779 _Unwind_Resume otherwise. */
1780
static void
connect_post_landing_pads ()
{
  int i;

  for (i = cfun->eh->last_region_number; i > 0; --i)
    {
      struct eh_region *region = cfun->eh->region_array[i];
      struct eh_region *outer;
      rtx seq;

      /* Mind we don't process a region more than once.  */
      if (!region || region->region_number != i)
	continue;

      /* If there is no RESX, or it has been deleted by flow, there's
	 nothing to fix up.  */
      if (! region->resume || INSN_DELETED_P (region->resume))
	continue;

      /* Search for another landing pad in this function.  */
      for (outer = region->outer; outer ; outer = outer->outer)
	if (outer->post_landing_pad)
	  break;

      start_sequence ();

      /* Rethrow either continues at the enclosing pad or leaves the
	 function entirely via the unwinder.  */
      if (outer)
	emit_jump (outer->post_landing_pad);
      else
	emit_library_call (unwind_resume_libfunc, LCT_THROW,
			   VOIDmode, 1, cfun->eh->exc_ptr, Pmode);

      seq = get_insns ();
      end_sequence ();
      emit_insns_before (seq, region->resume);

      /* Leave the RESX to be deleted by flow.  */
    }
}
1820}
1821
1822\f
1823static void
1824dw2_build_landing_pads ()
4956d07c 1825{
52a11cbf 1826 int i, j;
4956d07c 1827
52a11cbf
RH
1828 for (i = cfun->eh->last_region_number; i > 0; --i)
1829 {
1830 struct eh_region *region = cfun->eh->region_array[i];
1831 rtx seq;
4956d07c 1832
52a11cbf
RH
1833 /* Mind we don't process a region more than once. */
1834 if (!region || region->region_number != i)
1835 continue;
1418bb67 1836
52a11cbf
RH
1837 if (region->type != ERT_CLEANUP
1838 && region->type != ERT_TRY
1839 && region->type != ERT_ALLOWED_EXCEPTIONS)
1840 continue;
12670d88 1841
52a11cbf 1842 start_sequence ();
4956d07c 1843
52a11cbf
RH
1844 region->landing_pad = gen_label_rtx ();
1845 emit_label (region->landing_pad);
4956d07c 1846
52a11cbf
RH
1847#ifdef HAVE_exception_receiver
1848 if (HAVE_exception_receiver)
1849 emit_insn (gen_exception_receiver ());
1850 else
1851#endif
1852#ifdef HAVE_nonlocal_goto_receiver
1853 if (HAVE_nonlocal_goto_receiver)
1854 emit_insn (gen_nonlocal_goto_receiver ());
1855 else
1856#endif
1857 { /* Nothing */ }
4956d07c 1858
52a11cbf
RH
1859 /* If the eh_return data registers are call-saved, then we
1860 won't have considered them clobbered from the call that
1861 threw. Kill them now. */
1862 for (j = 0; ; ++j)
1863 {
1864 unsigned r = EH_RETURN_DATA_REGNO (j);
1865 if (r == INVALID_REGNUM)
1866 break;
1867 if (! call_used_regs[r])
1868 emit_insn (gen_rtx_CLOBBER (VOIDmode, gen_rtx_REG (Pmode, r)));
1869 }
e701eb4d 1870
52a11cbf
RH
1871 emit_move_insn (cfun->eh->exc_ptr,
1872 gen_rtx_REG (Pmode, EH_RETURN_DATA_REGNO (0)));
1873 emit_move_insn (cfun->eh->filter,
1874 gen_rtx_REG (Pmode, EH_RETURN_DATA_REGNO (1)));
9a0d1e1b 1875
52a11cbf
RH
1876 seq = get_insns ();
1877 end_sequence ();
5816cb14 1878
52a11cbf
RH
1879 emit_insns_before (seq, region->post_landing_pad);
1880 }
4956d07c
MS
1881}
1882
52a11cbf
RH
1883\f
/* Per-region bookkeeping for building the single shared sjlj landing
   pad (filled in by the sjlj_* routines below).  */
struct sjlj_lp_info
{
  int directly_reachable;	/* Some insn may throw into this region.  */
  int action_index;		/* Index into the action record table.  */
  int dispatch_index;		/* Dispatch value used at the common pad.  */
  int call_site_index;		/* Value stored at each call site.  */
};
4956d07c 1891
52a11cbf
RH
/* Mark as directly reachable every region that some throwing insn can
   transfer control to.  Returns true if any such region exists.  */

static bool
sjlj_find_directly_reachable_regions (lp_info)
     struct sjlj_lp_info *lp_info;
{
  rtx insn;
  bool found_one = false;

  for (insn = get_insns (); insn ; insn = NEXT_INSN (insn))
    {
      struct eh_region *region;
      tree type_thrown;
      rtx note;

      if (! INSN_P (insn))
	continue;

      /* Only insns with a positive REG_EH_REGION note can throw.  */
      note = find_reg_note (insn, REG_EH_REGION, NULL_RTX);
      if (!note || INTVAL (XEXP (note, 0)) <= 0)
	continue;

      region = cfun->eh->region_array[INTVAL (XEXP (note, 0))];

      type_thrown = NULL_TREE;
      if (region->type == ERT_THROW)
	{
	  type_thrown = region->u.throw.type;
	  region = region->outer;
	}

      /* Find the first containing region that might handle the exception.
	 That's the landing pad to which we will transfer control.  */
      for (; region; region = region->outer)
	if (reachable_next_level (region, type_thrown, 0) != RNL_NOT_CAUGHT)
	  break;

      if (region)
	{
	  lp_info[region->region_number].directly_reachable = 1;
	  found_one = true;
	}
    }

  return found_one;
}
e701eb4d
JM
1936
/* Fill in the action, dispatch, and call-site indices in LP_INFO for
   every directly reachable region; DISPATCH_LABEL is the one common
   sjlj landing pad all of them share.  */

static void
sjlj_assign_call_site_values (dispatch_label, lp_info)
     rtx dispatch_label;
     struct sjlj_lp_info *lp_info;
{
  htab_t ar_hash;
  int i, index;

  /* First task: build the action table.  */

  VARRAY_UCHAR_INIT (cfun->eh->action_record_data, 64, "action_record_data");
  ar_hash = htab_create (31, action_record_hash, action_record_eq, free);

  for (i = cfun->eh->last_region_number; i > 0; --i)
    if (lp_info[i].directly_reachable)
      {
	struct eh_region *r = cfun->eh->region_array[i];
	r->landing_pad = dispatch_label;
	lp_info[i].action_index = collect_one_action_chain (ar_hash, r);
	if (lp_info[i].action_index != -1)
	  cfun->uses_eh_lsda = 1;
      }

  htab_delete (ar_hash);

  /* Next: assign dispatch values.  In dwarf2 terms, this would be the
     landing pad label for the region.  For sjlj though, there is one
     common landing pad from which we dispatch to the post-landing pads.

     A region receives a dispatch index if it is directly reachable
     and requires in-function processing.  Regions that share post-landing
     pads may share dispatch indices.  */
  /* ??? Post-landing pad sharing doesn't actually happen at the moment
     (see build_post_landing_pads) so we don't bother checking for it.  */

  index = 0;
  for (i = cfun->eh->last_region_number; i > 0; --i)
    if (lp_info[i].directly_reachable
	&& lp_info[i].action_index >= 0)
      lp_info[i].dispatch_index = index++;

  /* Finally: assign call-site values.  In dwarf2 terms, this would be
     the region number assigned by convert_to_eh_region_ranges, but
     handles no-action and must-not-throw differently.  */

  call_site_base = 1;
  for (i = cfun->eh->last_region_number; i > 0; --i)
    if (lp_info[i].directly_reachable)
      {
	int action = lp_info[i].action_index;

	/* Map must-not-throw to otherwise unused call-site index 0.  */
	if (action == -2)
	  index = 0;
	/* Map no-action to otherwise unused call-site index -1.  */
	else if (action == -1)
	  index = -1;
	/* Otherwise, look it up in the table.  */
	else
	  index = add_call_site (GEN_INT (lp_info[i].dispatch_index), action);

	lp_info[i].call_site_index = index;
      }
}
27a36778 2001
52a11cbf
RH
/* Before every insn that can throw, store its call-site index into the
   SjLj function context so the runtime knows where the throw occurred.
   Consecutive insns with the same index share a single store.  */

static void
sjlj_mark_call_sites (lp_info)
     struct sjlj_lp_info *lp_info;
{
  int last_call_site = -2;
  rtx insn, mem;

  /* The call_site field of the function context, as a memory ref.  */
  mem = change_address (cfun->eh->sjlj_fc, TYPE_MODE (integer_type_node),
			plus_constant (XEXP (cfun->eh->sjlj_fc, 0),
				       sjlj_fc_call_site_ofs));

  for (insn = get_insns (); insn ; insn = NEXT_INSN (insn))
    {
      struct eh_region *region;
      int this_call_site;
      rtx note, before, p;

      /* Reset value tracking at extended basic block boundaries.  */
      if (GET_CODE (insn) == CODE_LABEL)
	last_call_site = -2;

      if (! INSN_P (insn))
	continue;

      note = find_reg_note (insn, REG_EH_REGION, NULL_RTX);
      if (!note)
	{
	  /* Calls (and trapping insns) without notes are outside any
	     exception handling region in this function.  Mark them as
	     no action.  */
	  if (GET_CODE (insn) == CALL_INSN
	      || (flag_non_call_exceptions
		  && may_trap_p (PATTERN (insn))))
	    this_call_site = -1;
	  else
	    continue;
	}
      else
	{
	  /* Calls that are known to not throw need not be marked.  */
	  if (INTVAL (XEXP (note, 0)) <= 0)
	    continue;

	  region = cfun->eh->region_array[INTVAL (XEXP (note, 0))];
	  this_call_site = lp_info[region->region_number].call_site_index;
	}

      if (this_call_site == last_call_site)
	continue;

      /* Don't separate a call from its argument loads.  */
      before = insn;
      if (GET_CODE (insn) == CALL_INSN)
	{
	  HARD_REG_SET parm_regs;
	  int nparm_regs;

	  /* Since different machines initialize their parameter registers
	     in different orders, assume nothing.  Collect the set of all
	     parameter registers.  */
	  CLEAR_HARD_REG_SET (parm_regs);
	  nparm_regs = 0;
	  for (p = CALL_INSN_FUNCTION_USAGE (insn); p ; p = XEXP (p, 1))
	    if (GET_CODE (XEXP (p, 0)) == USE
		&& GET_CODE (XEXP (XEXP (p, 0), 0)) == REG)
	      {
		if (REGNO (XEXP (XEXP (p, 0), 0)) >= FIRST_PSEUDO_REGISTER)
		  abort ();

		SET_HARD_REG_BIT (parm_regs, REGNO (XEXP (XEXP (p, 0), 0)));
		nparm_regs++;
	      }

	  /* Search backward for the first set of a register in this set.  */
	  while (nparm_regs)
	    {
	      before = PREV_INSN (before);

	      /* Given that we've done no other optimizations yet,
		 the arguments should be immediately available.  */
	      if (GET_CODE (before) == CODE_LABEL)
		abort ();

	      p = single_set (before);
	      if (p && GET_CODE (SET_DEST (p)) == REG
		  && REGNO (SET_DEST (p)) < FIRST_PSEUDO_REGISTER
		  && TEST_HARD_REG_BIT (parm_regs, REGNO (SET_DEST (p))))
		{
		  CLEAR_HARD_REG_BIT (parm_regs, REGNO (SET_DEST (p)));
		  nparm_regs--;
		}
	    }
	}

      start_sequence ();
      emit_move_insn (mem, GEN_INT (this_call_site));
      p = get_insns ();
      end_sequence ();

      emit_insns_before (p, before);
      last_call_site = this_call_site;
    }
}
4956d07c 2105
52a11cbf
RH
2106/* Construct the SjLj_Function_Context. */
2107
2108static void
2109sjlj_emit_function_enter (dispatch_label)
2110 rtx dispatch_label;
4956d07c 2111{
52a11cbf 2112 rtx fn_begin, fc, mem, seq;
4956d07c 2113
52a11cbf 2114 fc = cfun->eh->sjlj_fc;
4956d07c 2115
52a11cbf 2116 start_sequence ();
8a4451aa 2117
52a11cbf
RH
2118 mem = change_address (fc, Pmode,
2119 plus_constant (XEXP (fc, 0), sjlj_fc_personality_ofs));
2120 emit_move_insn (mem, eh_personality_libfunc);
2121
2122 mem = change_address (fc, Pmode,
2123 plus_constant (XEXP (fc, 0), sjlj_fc_lsda_ofs));
2124 if (cfun->uses_eh_lsda)
2125 {
2126 char buf[20];
2127 ASM_GENERATE_INTERNAL_LABEL (buf, "LLSDA", sjlj_funcdef_number);
2128 emit_move_insn (mem, gen_rtx_SYMBOL_REF (Pmode, ggc_strdup (buf)));
8a4451aa 2129 }
52a11cbf
RH
2130 else
2131 emit_move_insn (mem, const0_rtx);
2132
2133#ifdef DONT_USE_BUILTIN_SETJMP
2134 {
2135 rtx x, note;
2136 x = emit_library_call_value (setjmp_libfunc, NULL_RTX, LCT_NORMAL,
2137 TYPE_MODE (integer_type_node), 1,
2138 plus_constant (XEXP (fc, 0),
2139 sjlj_fc_jbuf_ofs), Pmode);
2140
2141 note = emit_note (NULL, NOTE_INSN_EXPECTED_VALUE);
2142 NOTE_EXPECTED_VALUE (note) = gen_rtx_EQ (VOIDmode, x, const0_rtx);
2143
2144 emit_cmp_and_jump_insns (x, const0_rtx, NE, 0,
2145 TYPE_MODE (integer_type_node), 0, 0,
2146 dispatch_label);
2147 }
2148#else
2149 expand_builtin_setjmp_setup (plus_constant (XEXP (fc, 0), sjlj_fc_jbuf_ofs),
2150 dispatch_label);
4956d07c 2151#endif
4956d07c 2152
52a11cbf
RH
2153 emit_library_call (unwind_sjlj_register_libfunc, LCT_NORMAL, VOIDmode,
2154 1, XEXP (fc, 0), Pmode);
12670d88 2155
52a11cbf
RH
2156 seq = get_insns ();
2157 end_sequence ();
4956d07c 2158
52a11cbf
RH
2159 /* ??? Instead of doing this at the beginning of the function,
2160 do this in a block that is at loop level 0 and dominates all
2161 can_throw_internal instructions. */
4956d07c 2162
52a11cbf
RH
2163 for (fn_begin = get_insns (); ; fn_begin = NEXT_INSN (fn_begin))
2164 if (GET_CODE (fn_begin) == NOTE
2165 && NOTE_LINE_NUMBER (fn_begin) == NOTE_INSN_FUNCTION_BEG)
2166 break;
2167 emit_insns_after (seq, fn_begin);
4956d07c
MS
2168}
2169
52a11cbf
RH
2170/* Call back from expand_function_end to know where we should put
2171 the call to unwind_sjlj_unregister_libfunc if needed. */
12670d88 2172
52a11cbf
RH
2173void
2174sjlj_emit_function_exit_after (after)
2175 rtx after;
2176{
2177 cfun->eh->sjlj_exit_after = after;
2178}
4956d07c
MS
2179
2180static void
52a11cbf
RH
2181sjlj_emit_function_exit ()
2182{
2183 rtx seq;
4956d07c 2184
52a11cbf 2185 start_sequence ();
ce152ef8 2186
52a11cbf
RH
2187 emit_library_call (unwind_sjlj_unregister_libfunc, LCT_NORMAL, VOIDmode,
2188 1, XEXP (cfun->eh->sjlj_fc, 0), Pmode);
e6cfb550 2189
52a11cbf
RH
2190 seq = get_insns ();
2191 end_sequence ();
4956d07c 2192
52a11cbf
RH
2193 /* ??? Really this can be done in any block at loop level 0 that
2194 post-dominates all can_throw_internal instructions. This is
2195 the last possible moment. */
9a0d1e1b 2196
52a11cbf 2197 emit_insns_after (seq, cfun->eh->sjlj_exit_after);
9a0d1e1b
AM
2198}
2199
52a11cbf
RH
2200static void
2201sjlj_emit_dispatch_table (dispatch_label, lp_info)
2202 rtx dispatch_label;
2203 struct sjlj_lp_info *lp_info;
ce152ef8 2204{
52a11cbf
RH
2205 int i, first_reachable;
2206 rtx mem, dispatch, seq, fc;
2207
2208 fc = cfun->eh->sjlj_fc;
2209
2210 start_sequence ();
2211
2212 emit_label (dispatch_label);
ce152ef8 2213
52a11cbf
RH
2214#ifndef DONT_USE_BUILTIN_SETJMP
2215 expand_builtin_setjmp_receiver (dispatch_label);
2216#endif
2217
2218 /* Load up dispatch index, exc_ptr and filter values from the
2219 function context. */
2220 mem = change_address (fc, TYPE_MODE (integer_type_node),
2221 plus_constant (XEXP (fc, 0), sjlj_fc_call_site_ofs));
2222 dispatch = copy_to_reg (mem);
2223
2224 mem = change_address (fc, word_mode,
2225 plus_constant (XEXP (fc, 0), sjlj_fc_data_ofs));
2226 if (word_mode != Pmode)
2227 {
2228#ifdef POINTERS_EXTEND_UNSIGNED
2229 mem = convert_memory_address (Pmode, mem);
2230#else
2231 mem = convert_to_mode (Pmode, mem, 0);
2232#endif
2233 }
2234 emit_move_insn (cfun->eh->exc_ptr, mem);
2235
2236 mem = change_address (fc, word_mode,
2237 plus_constant (XEXP (fc, 0),
2238 sjlj_fc_data_ofs + UNITS_PER_WORD));
2239 emit_move_insn (cfun->eh->filter, mem);
4956d07c 2240
52a11cbf
RH
2241 /* Jump to one of the directly reachable regions. */
2242 /* ??? This really ought to be using a switch statement. */
2243
2244 first_reachable = 0;
2245 for (i = cfun->eh->last_region_number; i > 0; --i)
a1622f83 2246 {
52a11cbf
RH
2247 if (! lp_info[i].directly_reachable
2248 || lp_info[i].action_index < 0)
2249 continue;
a1622f83 2250
52a11cbf
RH
2251 if (! first_reachable)
2252 {
2253 first_reachable = i;
2254 continue;
2255 }
e6cfb550 2256
52a11cbf
RH
2257 emit_cmp_and_jump_insns (dispatch,
2258 GEN_INT (lp_info[i].dispatch_index), EQ,
2259 NULL_RTX, TYPE_MODE (integer_type_node), 0, 0,
2260 cfun->eh->region_array[i]->post_landing_pad);
a1622f83 2261 }
9a0d1e1b 2262
52a11cbf
RH
2263 seq = get_insns ();
2264 end_sequence ();
4956d07c 2265
52a11cbf
RH
2266 emit_insns_before (seq, (cfun->eh->region_array[first_reachable]
2267 ->post_landing_pad));
ce152ef8
AM
2268}
2269
52a11cbf
RH
2270static void
2271sjlj_build_landing_pads ()
ce152ef8 2272{
52a11cbf 2273 struct sjlj_lp_info *lp_info;
ce152ef8 2274
52a11cbf
RH
2275 lp_info = (struct sjlj_lp_info *) xcalloc (cfun->eh->last_region_number + 1,
2276 sizeof (struct sjlj_lp_info));
ce152ef8 2277
52a11cbf
RH
2278 if (sjlj_find_directly_reachable_regions (lp_info))
2279 {
2280 rtx dispatch_label = gen_label_rtx ();
ce152ef8 2281
52a11cbf
RH
2282 cfun->eh->sjlj_fc
2283 = assign_stack_local (TYPE_MODE (sjlj_fc_type_node),
2284 int_size_in_bytes (sjlj_fc_type_node),
2285 TYPE_ALIGN (sjlj_fc_type_node));
4956d07c 2286
52a11cbf
RH
2287 sjlj_assign_call_site_values (dispatch_label, lp_info);
2288 sjlj_mark_call_sites (lp_info);
a1622f83 2289
52a11cbf
RH
2290 sjlj_emit_function_enter (dispatch_label);
2291 sjlj_emit_dispatch_table (dispatch_label, lp_info);
2292 sjlj_emit_function_exit ();
2293 }
a1622f83 2294
52a11cbf 2295 free (lp_info);
4956d07c 2296}
ce152ef8 2297
ce152ef8 2298void
52a11cbf 2299finish_eh_generation ()
ce152ef8 2300{
52a11cbf
RH
2301 /* Nothing to do if no regions created. */
2302 if (cfun->eh->region_tree == NULL)
ce152ef8
AM
2303 return;
2304
52a11cbf
RH
2305 /* The object here is to provide find_basic_blocks with detailed
2306 information (via reachable_handlers) on how exception control
2307 flows within the function. In this first pass, we can include
2308 type information garnered from ERT_THROW and ERT_ALLOWED_EXCEPTIONS
2309 regions, and hope that it will be useful in deleting unreachable
2310 handlers. Subsequently, we will generate landing pads which will
2311 connect many of the handlers, and then type information will not
2312 be effective. Still, this is a win over previous implementations. */
2313
2314 jump_optimize_minimal (get_insns ());
2315 find_basic_blocks (get_insns (), max_reg_num (), 0);
2316 cleanup_cfg ();
2317
2318 /* These registers are used by the landing pads. Make sure they
2319 have been generated. */
2320 get_exception_pointer ();
47c84870 2321 get_exception_filter ();
52a11cbf
RH
2322
2323 /* Construct the landing pads. */
2324
2325 assign_filter_values ();
2326 build_post_landing_pads ();
2327 connect_post_landing_pads ();
2328 if (USING_SJLJ_EXCEPTIONS)
2329 sjlj_build_landing_pads ();
2330 else
2331 dw2_build_landing_pads ();
ce152ef8 2332
52a11cbf 2333 cfun->eh->built_landing_pads = 1;
ce152ef8 2334
52a11cbf
RH
2335 /* We've totally changed the CFG. Start over. */
2336 find_exception_handler_labels ();
2337 jump_optimize_minimal (get_insns ());
2338 find_basic_blocks (get_insns (), max_reg_num (), 0);
2339 cleanup_cfg ();
ce152ef8 2340}
4956d07c 2341\f
52a11cbf 2342/* This section handles removing dead code for flow. */
154bba13 2343
52a11cbf 2344/* Remove LABEL from the exception_handler_labels list. */
154bba13 2345
52a11cbf
RH
2346static void
2347remove_exception_handler_label (label)
2348 rtx label;
154bba13 2349{
52a11cbf 2350 rtx *pl, l;
100d81d4 2351
52a11cbf
RH
2352 for (pl = &exception_handler_labels, l = *pl;
2353 XEXP (l, 0) != label;
2354 pl = &XEXP (l, 1), l = *pl)
2355 continue;
154bba13 2356
52a11cbf
RH
2357 *pl = XEXP (l, 1);
2358 free_EXPR_LIST_node (l);
154bba13
TT
2359}
2360
52a11cbf 2361/* Splice REGION from the region tree etc. */
12670d88 2362
f19c9228 2363static void
52a11cbf
RH
2364remove_eh_handler (region)
2365 struct eh_region *region;
4956d07c 2366{
52a11cbf
RH
2367 struct eh_region **pp, *p;
2368 rtx lab;
2369 int i;
4956d07c 2370
52a11cbf
RH
2371 /* For the benefit of efficiently handling REG_EH_REGION notes,
2372 replace this region in the region array with its containing
2373 region. Note that previous region deletions may result in
2374 multiple copies of this region in the array, so we have to
2375 search the whole thing. */
2376 for (i = cfun->eh->last_region_number; i > 0; --i)
2377 if (cfun->eh->region_array[i] == region)
2378 cfun->eh->region_array[i] = region->outer;
2379
2380 if (cfun->eh->built_landing_pads)
2381 lab = region->landing_pad;
2382 else
2383 lab = region->label;
2384 if (lab)
2385 remove_exception_handler_label (lab);
2386
2387 if (region->outer)
2388 pp = &region->outer->inner;
2389 else
2390 pp = &cfun->eh->region_tree;
2391 for (p = *pp; p != region; pp = &p->next_peer, p = *pp)
2392 continue;
12670d88 2393
52a11cbf 2394 if (region->inner)
4956d07c 2395 {
52a11cbf
RH
2396 for (p = region->inner; p->next_peer ; p = p->next_peer)
2397 p->outer = region->outer;
2398 p->next_peer = region->next_peer;
2399 p->outer = region->outer;
2400 *pp = region->inner;
4956d07c 2401 }
52a11cbf
RH
2402 else
2403 *pp = region->next_peer;
f19c9228 2404
52a11cbf
RH
2405 if (region->type == ERT_CATCH)
2406 {
2407 struct eh_region *try, *next, *prev;
f19c9228 2408
52a11cbf
RH
2409 for (try = region->next_peer;
2410 try->type == ERT_CATCH;
2411 try = try->next_peer)
2412 continue;
2413 if (try->type != ERT_TRY)
2414 abort ();
f19c9228 2415
52a11cbf
RH
2416 next = region->u.catch.next_catch;
2417 prev = region->u.catch.prev_catch;
f19c9228 2418
52a11cbf
RH
2419 if (next)
2420 next->u.catch.prev_catch = prev;
2421 else
2422 try->u.try.last_catch = prev;
2423 if (prev)
2424 prev->u.catch.next_catch = next;
2425 else
2426 {
2427 try->u.try.catch = next;
2428 if (! next)
2429 remove_eh_handler (try);
2430 }
2431 }
988cea7d 2432
52a11cbf 2433 free (region);
4956d07c
MS
2434}
2435
52a11cbf
RH
2436/* LABEL heads a basic block that is about to be deleted. If this
2437 label corresponds to an exception region, we may be able to
2438 delete the region. */
4956d07c
MS
2439
2440void
52a11cbf
RH
2441maybe_remove_eh_handler (label)
2442 rtx label;
4956d07c 2443{
52a11cbf 2444 int i;
4956d07c 2445
52a11cbf
RH
2446 /* ??? After generating landing pads, it's not so simple to determine
2447 if the region data is completely unused. One must examine the
2448 landing pad and the post landing pad, and whether an inner try block
2449 is referencing the catch handlers directly. */
2450 if (cfun->eh->built_landing_pads)
4956d07c
MS
2451 return;
2452
52a11cbf 2453 for (i = cfun->eh->last_region_number; i > 0; --i)
87ff9c8e 2454 {
52a11cbf
RH
2455 struct eh_region *region = cfun->eh->region_array[i];
2456 if (region && region->label == label)
87ff9c8e 2457 {
52a11cbf
RH
2458 /* Flow will want to remove MUST_NOT_THROW regions as unreachable
2459 because there is no path to the fallback call to terminate.
2460 But the region continues to affect call-site data until there
2461 are no more contained calls, which we don't see here. */
2462 if (region->type == ERT_MUST_NOT_THROW)
2463 {
2464 remove_exception_handler_label (region->label);
2465 region->label = NULL_RTX;
2466 }
2467 else
2468 remove_eh_handler (region);
2469 break;
87ff9c8e 2470 }
87ff9c8e
RH
2471 }
2472}
2473
52a11cbf
RH
2474\f
2475/* This section describes CFG exception edges for flow. */
87ff9c8e 2476
52a11cbf
RH
2477/* For communicating between calls to reachable_next_level. */
2478struct reachable_info
87ff9c8e 2479{
52a11cbf
RH
2480 tree types_caught;
2481 tree types_allowed;
2482 rtx handlers;
2483};
87ff9c8e 2484
52a11cbf
RH
2485/* A subroutine of reachable_next_level. Return true if TYPE, or a
2486 base class of TYPE, is in HANDLED. */
87ff9c8e 2487
52a11cbf
RH
2488static int
2489check_handled (handled, type)
2490 tree handled, type;
87ff9c8e 2491{
52a11cbf
RH
2492 tree t;
2493
2494 /* We can check for exact matches without front-end help. */
2495 if (! lang_eh_type_covers)
f54a7f6f 2496 {
52a11cbf
RH
2497 for (t = handled; t ; t = TREE_CHAIN (t))
2498 if (TREE_VALUE (t) == type)
2499 return 1;
2500 }
2501 else
2502 {
2503 for (t = handled; t ; t = TREE_CHAIN (t))
2504 if ((*lang_eh_type_covers) (TREE_VALUE (t), type))
2505 return 1;
f54a7f6f 2506 }
52a11cbf
RH
2507
2508 return 0;
87ff9c8e
RH
2509}
2510
52a11cbf
RH
2511/* A subroutine of reachable_next_level. If we are collecting a list
2512 of handlers, add one. After landing pad generation, reference
2513 it instead of the handlers themselves. Further, the handlers are
2514 all wired together, so by referencing one, we've got them all.
2515 Before landing pad generation we reference each handler individually.
2516
2517 LP_REGION contains the landing pad; REGION is the handler. */
87ff9c8e
RH
2518
2519static void
52a11cbf
RH
2520add_reachable_handler (info, lp_region, region)
2521 struct reachable_info *info;
2522 struct eh_region *lp_region;
2523 struct eh_region *region;
87ff9c8e 2524{
52a11cbf
RH
2525 if (! info)
2526 return;
2527
2528 if (cfun->eh->built_landing_pads)
87ff9c8e 2529 {
52a11cbf
RH
2530 if (! info->handlers)
2531 info->handlers = alloc_INSN_LIST (lp_region->landing_pad, NULL_RTX);
87ff9c8e 2532 }
52a11cbf
RH
2533 else
2534 info->handlers = alloc_INSN_LIST (region->label, info->handlers);
87ff9c8e
RH
2535}
2536
52a11cbf
RH
2537/* Process one level of exception regions for reachability.
2538 If TYPE_THROWN is non-null, then it is the *exact* type being
2539 propagated. If INFO is non-null, then collect handler labels
2540 and caught/allowed type information between invocations. */
87ff9c8e 2541
52a11cbf
RH
2542static enum reachable_code
2543reachable_next_level (region, type_thrown, info)
2544 struct eh_region *region;
2545 tree type_thrown;
2546 struct reachable_info *info;
87ff9c8e 2547{
52a11cbf
RH
2548 switch (region->type)
2549 {
2550 case ERT_CLEANUP:
2551 /* Before landing-pad generation, we model control flow
2552 directly to the individual handlers. In this way we can
2553 see that catch handler types may shadow one another. */
2554 add_reachable_handler (info, region, region);
2555 return RNL_MAYBE_CAUGHT;
2556
2557 case ERT_TRY:
2558 {
2559 struct eh_region *c;
2560 enum reachable_code ret = RNL_NOT_CAUGHT;
fa51b01b 2561
52a11cbf
RH
2562 for (c = region->u.try.catch; c ; c = c->u.catch.next_catch)
2563 {
2564 /* A catch-all handler ends the search. */
2565 /* ??? _Unwind_ForcedUnwind will want outer cleanups
2566 to be run as well. */
2567 if (c->u.catch.type == NULL)
2568 {
2569 add_reachable_handler (info, region, c);
2570 return RNL_CAUGHT;
2571 }
2572
2573 if (type_thrown)
2574 {
2575 /* If we have a type match, end the search. */
2576 if (c->u.catch.type == type_thrown
2577 || (lang_eh_type_covers
2578 && (*lang_eh_type_covers) (c->u.catch.type,
2579 type_thrown)))
2580 {
2581 add_reachable_handler (info, region, c);
2582 return RNL_CAUGHT;
2583 }
2584
2585 /* If we have definitive information of a match failure,
2586 the catch won't trigger. */
2587 if (lang_eh_type_covers)
2588 return RNL_NOT_CAUGHT;
2589 }
2590
2591 if (! info)
2592 ret = RNL_MAYBE_CAUGHT;
2593
2594 /* A type must not have been previously caught. */
2595 else if (! check_handled (info->types_caught, c->u.catch.type))
2596 {
2597 add_reachable_handler (info, region, c);
2598 info->types_caught = tree_cons (NULL, c->u.catch.type,
2599 info->types_caught);
2600
2601 /* ??? If the catch type is a base class of every allowed
2602 type, then we know we can stop the search. */
2603 ret = RNL_MAYBE_CAUGHT;
2604 }
2605 }
87ff9c8e 2606
52a11cbf
RH
2607 return ret;
2608 }
87ff9c8e 2609
52a11cbf
RH
2610 case ERT_ALLOWED_EXCEPTIONS:
2611 /* An empty list of types definitely ends the search. */
2612 if (region->u.allowed.type_list == NULL_TREE)
2613 {
2614 add_reachable_handler (info, region, region);
2615 return RNL_CAUGHT;
2616 }
87ff9c8e 2617
52a11cbf
RH
2618 /* Collect a list of lists of allowed types for use in detecting
2619 when a catch may be transformed into a catch-all. */
2620 if (info)
2621 info->types_allowed = tree_cons (NULL_TREE,
2622 region->u.allowed.type_list,
2623 info->types_allowed);
2624
2625 /* If we have definitive information about the type heirarchy,
2626 then we can tell if the thrown type will pass through the
2627 filter. */
2628 if (type_thrown && lang_eh_type_covers)
2629 {
2630 if (check_handled (region->u.allowed.type_list, type_thrown))
2631 return RNL_NOT_CAUGHT;
2632 else
2633 {
2634 add_reachable_handler (info, region, region);
2635 return RNL_CAUGHT;
2636 }
2637 }
21cd906e 2638
52a11cbf
RH
2639 add_reachable_handler (info, region, region);
2640 return RNL_MAYBE_CAUGHT;
21cd906e 2641
52a11cbf
RH
2642 case ERT_CATCH:
2643 /* Catch regions are handled by their controling try region. */
2644 return RNL_NOT_CAUGHT;
21cd906e 2645
52a11cbf
RH
2646 case ERT_MUST_NOT_THROW:
2647 /* Here we end our search, since no exceptions may propagate.
2648 If we've touched down at some landing pad previous, then the
2649 explicit function call we generated may be used. Otherwise
2650 the call is made by the runtime. */
2651 if (info && info->handlers)
21cd906e 2652 {
52a11cbf
RH
2653 add_reachable_handler (info, region, region);
2654 return RNL_CAUGHT;
21cd906e 2655 }
52a11cbf
RH
2656 else
2657 return RNL_BLOCKED;
21cd906e 2658
52a11cbf
RH
2659 case ERT_THROW:
2660 case ERT_FIXUP:
2661 /* Shouldn't see these here. */
2662 break;
21cd906e 2663 }
fa51b01b 2664
52a11cbf 2665 abort ();
fa51b01b 2666}
4956d07c 2667
52a11cbf
RH
2668/* Retrieve a list of labels of exception handlers which can be
2669 reached by a given insn. */
4956d07c 2670
52a11cbf
RH
2671rtx
2672reachable_handlers (insn)
4956d07c
MS
2673 rtx insn;
2674{
52a11cbf
RH
2675 struct reachable_info info;
2676 struct eh_region *region;
2677 tree type_thrown;
2678 int region_number;
fb13d4d0 2679
52a11cbf
RH
2680 if (GET_CODE (insn) == JUMP_INSN
2681 && GET_CODE (PATTERN (insn)) == RESX)
2682 region_number = XINT (PATTERN (insn), 0);
2683 else
1ef1bf06
AM
2684 {
2685 rtx note = find_reg_note (insn, REG_EH_REGION, NULL_RTX);
52a11cbf
RH
2686 if (!note || INTVAL (XEXP (note, 0)) <= 0)
2687 return NULL;
2688 region_number = INTVAL (XEXP (note, 0));
27a36778 2689 }
4956d07c 2690
52a11cbf 2691 memset (&info, 0, sizeof (info));
4956d07c 2692
52a11cbf 2693 region = cfun->eh->region_array[region_number];
fb13d4d0 2694
52a11cbf
RH
2695 type_thrown = NULL_TREE;
2696 if (region->type == ERT_THROW)
2697 {
2698 type_thrown = region->u.throw.type;
2699 region = region->outer;
2700 }
47c84870
JM
2701 else if (GET_CODE (insn) == JUMP_INSN
2702 && GET_CODE (PATTERN (insn)) == RESX)
2703 region = region->outer;
fac62ecf 2704
52a11cbf
RH
2705 for (; region; region = region->outer)
2706 if (reachable_next_level (region, type_thrown, &info) >= RNL_CAUGHT)
2707 break;
fb13d4d0 2708
52a11cbf 2709 return info.handlers;
fb13d4d0
JM
2710}
2711
52a11cbf
RH
2712/* Determine if the given INSN can throw an exception that is caught
2713 within the function. */
4956d07c 2714
52a11cbf
RH
2715bool
2716can_throw_internal (insn)
4956d07c 2717 rtx insn;
4956d07c 2718{
52a11cbf
RH
2719 struct eh_region *region;
2720 tree type_thrown;
2721 rtx note;
e6cfb550 2722
52a11cbf
RH
2723 if (! INSN_P (insn))
2724 return false;
12670d88 2725
52a11cbf
RH
2726 if (GET_CODE (insn) == INSN
2727 && GET_CODE (PATTERN (insn)) == SEQUENCE)
2728 insn = XVECEXP (PATTERN (insn), 0, 0);
4956d07c 2729
52a11cbf
RH
2730 if (GET_CODE (insn) == CALL_INSN
2731 && GET_CODE (PATTERN (insn)) == CALL_PLACEHOLDER)
4956d07c 2732 {
52a11cbf
RH
2733 int i;
2734 for (i = 0; i < 3; ++i)
4956d07c 2735 {
52a11cbf
RH
2736 rtx sub = XEXP (PATTERN (insn), i);
2737 for (; sub ; sub = NEXT_INSN (sub))
2738 if (can_throw_internal (sub))
2739 return true;
4956d07c 2740 }
52a11cbf 2741 return false;
4956d07c
MS
2742 }
2743
52a11cbf
RH
2744 /* Every insn that might throw has an EH_REGION note. */
2745 note = find_reg_note (insn, REG_EH_REGION, NULL_RTX);
2746 if (!note || INTVAL (XEXP (note, 0)) <= 0)
2747 return false;
4956d07c 2748
52a11cbf 2749 region = cfun->eh->region_array[INTVAL (XEXP (note, 0))];
4956d07c 2750
52a11cbf
RH
2751 type_thrown = NULL_TREE;
2752 if (region->type == ERT_THROW)
2753 {
2754 type_thrown = region->u.throw.type;
2755 region = region->outer;
2756 }
4956d07c 2757
52a11cbf
RH
2758 /* If this exception is ignored by each and every containing region,
2759 then control passes straight out. The runtime may handle some
2760 regions, which also do not require processing internally. */
2761 for (; region; region = region->outer)
2762 {
2763 enum reachable_code how = reachable_next_level (region, type_thrown, 0);
2764 if (how == RNL_BLOCKED)
2765 return false;
2766 if (how != RNL_NOT_CAUGHT)
2767 return true;
4956d07c 2768 }
4956d07c 2769
52a11cbf
RH
2770 return false;
2771}
4956d07c 2772
52a11cbf
RH
2773/* Determine if the given INSN can throw an exception that is
2774 visible outside the function. */
4956d07c 2775
52a11cbf
RH
2776bool
2777can_throw_external (insn)
2778 rtx insn;
4956d07c 2779{
52a11cbf
RH
2780 struct eh_region *region;
2781 tree type_thrown;
2782 rtx note;
4956d07c 2783
52a11cbf
RH
2784 if (! INSN_P (insn))
2785 return false;
2786
2787 if (GET_CODE (insn) == INSN
2788 && GET_CODE (PATTERN (insn)) == SEQUENCE)
2789 insn = XVECEXP (PATTERN (insn), 0, 0);
2790
2791 if (GET_CODE (insn) == CALL_INSN
2792 && GET_CODE (PATTERN (insn)) == CALL_PLACEHOLDER)
4956d07c 2793 {
52a11cbf
RH
2794 int i;
2795 for (i = 0; i < 3; ++i)
4956d07c 2796 {
52a11cbf
RH
2797 rtx sub = XEXP (PATTERN (insn), i);
2798 for (; sub ; sub = NEXT_INSN (sub))
2799 if (can_throw_external (sub))
2800 return true;
4956d07c 2801 }
52a11cbf 2802 return false;
4956d07c 2803 }
52a11cbf
RH
2804
2805 note = find_reg_note (insn, REG_EH_REGION, NULL_RTX);
2806 if (!note)
2807 {
2808 /* Calls (and trapping insns) without notes are outside any
2809 exception handling region in this function. We have to
2810 assume it might throw. Given that the front end and middle
2811 ends mark known NOTHROW functions, this isn't so wildly
2812 inaccurate. */
2813 return (GET_CODE (insn) == CALL_INSN
2814 || (flag_non_call_exceptions
2815 && may_trap_p (PATTERN (insn))));
2816 }
2817 if (INTVAL (XEXP (note, 0)) <= 0)
2818 return false;
2819
2820 region = cfun->eh->region_array[INTVAL (XEXP (note, 0))];
2821
2822 type_thrown = NULL_TREE;
2823 if (region->type == ERT_THROW)
2824 {
2825 type_thrown = region->u.throw.type;
2826 region = region->outer;
2827 }
2828
2829 /* If the exception is caught or blocked by any containing region,
2830 then it is not seen by any calling function. */
2831 for (; region ; region = region->outer)
2832 if (reachable_next_level (region, type_thrown, NULL) >= RNL_CAUGHT)
2833 return false;
2834
2835 return true;
4956d07c 2836}
1ef1bf06 2837
52a11cbf 2838/* True if nothing in this function can throw outside this function. */
6814a8a0 2839
52a11cbf
RH
2840bool
2841nothrow_function_p ()
1ef1bf06
AM
2842{
2843 rtx insn;
1ef1bf06 2844
52a11cbf
RH
2845 if (! flag_exceptions)
2846 return true;
1ef1bf06 2847
1ef1bf06 2848 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
52a11cbf
RH
2849 if (can_throw_external (insn))
2850 return false;
2851 for (insn = current_function_epilogue_delay_list; insn;
2852 insn = XEXP (insn, 1))
2853 if (can_throw_external (insn))
2854 return false;
4da896b2 2855
52a11cbf 2856 return true;
1ef1bf06 2857}
52a11cbf 2858
ca55abae 2859\f
52a11cbf 2860/* Various hooks for unwind library. */
ca55abae
JM
2861
2862/* Do any necessary initialization to access arbitrary stack frames.
2863 On the SPARC, this means flushing the register windows. */
2864
2865void
2866expand_builtin_unwind_init ()
2867{
2868 /* Set this so all the registers get saved in our frame; we need to be
2869 able to copy the saved values for any registers from frames we unwind. */
2870 current_function_has_nonlocal_label = 1;
2871
2872#ifdef SETUP_FRAME_ADDRESSES
2873 SETUP_FRAME_ADDRESSES ();
2874#endif
2875}
2876
52a11cbf
RH
2877rtx
2878expand_builtin_eh_return_data_regno (arglist)
2879 tree arglist;
2880{
2881 tree which = TREE_VALUE (arglist);
2882 unsigned HOST_WIDE_INT iwhich;
2883
2884 if (TREE_CODE (which) != INTEGER_CST)
2885 {
2886 error ("argument of `__builtin_eh_return_regno' must be constant");
2887 return constm1_rtx;
2888 }
2889
2890 iwhich = tree_low_cst (which, 1);
2891 iwhich = EH_RETURN_DATA_REGNO (iwhich);
2892 if (iwhich == INVALID_REGNUM)
2893 return constm1_rtx;
2894
2895#ifdef DWARF_FRAME_REGNUM
2896 iwhich = DWARF_FRAME_REGNUM (iwhich);
2897#else
2898 iwhich = DBX_REGISTER_NUMBER (iwhich);
2899#endif
2900
2901 return GEN_INT (iwhich);
2902}
2903
ca55abae
JM
2904/* Given a value extracted from the return address register or stack slot,
2905 return the actual address encoded in that value. */
2906
2907rtx
2908expand_builtin_extract_return_addr (addr_tree)
2909 tree addr_tree;
2910{
2911 rtx addr = expand_expr (addr_tree, NULL_RTX, Pmode, 0);
52a11cbf
RH
2912
2913 /* First mask out any unwanted bits. */
2914#ifdef MASK_RETURN_ADDR
2915 expand_and (addr, MASK_RETURN_ADDR, addr);
2916#endif
2917
2918 /* Then adjust to find the real return address. */
2919#if defined (RETURN_ADDR_OFFSET)
2920 addr = plus_constant (addr, RETURN_ADDR_OFFSET);
2921#endif
2922
2923 return addr;
ca55abae
JM
2924}
2925
2926/* Given an actual address in addr_tree, do any necessary encoding
2927 and return the value to be stored in the return address register or
2928 stack slot so the epilogue will return to that address. */
2929
2930rtx
2931expand_builtin_frob_return_addr (addr_tree)
2932 tree addr_tree;
2933{
2934 rtx addr = expand_expr (addr_tree, NULL_RTX, Pmode, 0);
52a11cbf 2935
ca55abae 2936#ifdef RETURN_ADDR_OFFSET
52a11cbf 2937 addr = force_reg (Pmode, addr);
ca55abae
JM
2938 addr = plus_constant (addr, -RETURN_ADDR_OFFSET);
2939#endif
52a11cbf 2940
ca55abae
JM
2941 return addr;
2942}
2943
52a11cbf
RH
2944/* Set up the epilogue with the magic bits we'll need to return to the
2945 exception handler. */
ca55abae 2946
52a11cbf
RH
2947void
2948expand_builtin_eh_return (stackadj_tree, handler_tree)
2949 tree stackadj_tree, handler_tree;
ca55abae 2950{
52a11cbf 2951 rtx stackadj, handler;
ca55abae 2952
52a11cbf
RH
2953 stackadj = expand_expr (stackadj_tree, cfun->eh->ehr_stackadj, VOIDmode, 0);
2954 handler = expand_expr (handler_tree, cfun->eh->ehr_handler, VOIDmode, 0);
ca55abae 2955
52a11cbf
RH
2956 if (! cfun->eh->ehr_label)
2957 {
2958 cfun->eh->ehr_stackadj = copy_to_reg (stackadj);
2959 cfun->eh->ehr_handler = copy_to_reg (handler);
2960 cfun->eh->ehr_label = gen_label_rtx ();
2961 }
ca55abae 2962 else
ca55abae 2963 {
52a11cbf
RH
2964 if (stackadj != cfun->eh->ehr_stackadj)
2965 emit_move_insn (cfun->eh->ehr_stackadj, stackadj);
2966 if (handler != cfun->eh->ehr_handler)
2967 emit_move_insn (cfun->eh->ehr_handler, handler);
ca55abae
JM
2968 }
2969
52a11cbf 2970 emit_jump (cfun->eh->ehr_label);
a1622f83
AM
2971}
2972
71038426
RH
2973void
2974expand_eh_return ()
ca55abae 2975{
52a11cbf 2976 rtx sa, ra, around_label;
ca55abae 2977
52a11cbf 2978 if (! cfun->eh->ehr_label)
71038426 2979 return;
ca55abae 2980
52a11cbf
RH
2981 sa = EH_RETURN_STACKADJ_RTX;
2982 if (! sa)
71038426 2983 {
52a11cbf 2984 error ("__builtin_eh_return not supported on this target");
71038426
RH
2985 return;
2986 }
ca55abae 2987
52a11cbf 2988 current_function_calls_eh_return = 1;
ca55abae 2989
52a11cbf
RH
2990 around_label = gen_label_rtx ();
2991 emit_move_insn (sa, const0_rtx);
2992 emit_jump (around_label);
ca55abae 2993
52a11cbf
RH
2994 emit_label (cfun->eh->ehr_label);
2995 clobber_return_register ();
ca55abae 2996
52a11cbf
RH
2997#ifdef HAVE_eh_return
2998 if (HAVE_eh_return)
2999 emit_insn (gen_eh_return (cfun->eh->ehr_stackadj, cfun->eh->ehr_handler));
3000 else
71038426 3001#endif
52a11cbf
RH
3002 {
3003 ra = EH_RETURN_HANDLER_RTX;
3004 if (! ra)
3005 {
3006 error ("__builtin_eh_return not supported on this target");
3007 ra = gen_reg_rtx (Pmode);
3008 }
71038426 3009
52a11cbf
RH
3010 emit_move_insn (sa, cfun->eh->ehr_stackadj);
3011 emit_move_insn (ra, cfun->eh->ehr_handler);
3012 }
71038426 3013
52a11cbf 3014 emit_label (around_label);
71038426 3015}
77d33a84 3016\f
52a11cbf
RH
3017struct action_record
3018{
3019 int offset;
3020 int filter;
3021 int next;
3022};
77d33a84 3023
52a11cbf
RH
3024static int
3025action_record_eq (pentry, pdata)
3026 const PTR pentry;
3027 const PTR pdata;
3028{
3029 const struct action_record *entry = (const struct action_record *) pentry;
3030 const struct action_record *data = (const struct action_record *) pdata;
3031 return entry->filter == data->filter && entry->next == data->next;
3032}
77d33a84 3033
52a11cbf
RH
3034static hashval_t
3035action_record_hash (pentry)
3036 const PTR pentry;
3037{
3038 const struct action_record *entry = (const struct action_record *) pentry;
3039 return entry->next * 1009 + entry->filter;
3040}
77d33a84 3041
/* Enter a (FILTER, NEXT) action pair into AR_HASH, reusing an existing
   entry when an identical pair has been seen before.  A new record is
   appended to cfun->eh->action_record_data in sleb128 form.  Returns
   the 1-based offset of the record within that array.  */

static int
add_action_record (ar_hash, filter, next)
     htab_t ar_hash;
     int filter, next;
{
  struct action_record **slot, *new, tmp;

  tmp.filter = filter;
  tmp.next = next;
  slot = (struct action_record **) htab_find_slot (ar_hash, &tmp, INSERT);

  if ((new = *slot) == NULL)
    {
      new = (struct action_record *) xmalloc (sizeof (*new));
      new->offset = VARRAY_ACTIVE_SIZE (cfun->eh->action_record_data) + 1;
      new->filter = filter;
      new->next = next;
      *slot = new;

      /* The filter value goes in untouched.  The link to the next
	 record is a "self-relative" byte offset, or zero to indicate
	 that there is no next record.  So convert the absolute 1 based
	 indices we've been carrying around into a displacement.  */

      push_sleb128 (&cfun->eh->action_record_data, filter);
      if (next)
	next -= VARRAY_ACTIVE_SIZE (cfun->eh->action_record_data) + 1;
      push_sleb128 (&cfun->eh->action_record_data, next);
    }

  return new->offset;
}
77d33a84 3074
/* Compute the chain of action records applicable to REGION, creating
   any new records via AR_HASH.  Returns the 1-based offset of the first
   record of the chain, or one of the magic values: 0 for a path that
   compresses to a bare cleanup, -1 when no landing pad is required,
   -2 for a must-not-throw region (lsda needed, but no call-site
   entry), -3 only internally while scanning catches.  */

static int
collect_one_action_chain (ar_hash, region)
     htab_t ar_hash;
     struct eh_region *region;
{
  struct eh_region *c;
  int next;

  /* If we've reached the top of the region chain, then we have
     no actions, and require no landing pad.  */
  if (region == NULL)
    return -1;

  switch (region->type)
    {
    case ERT_CLEANUP:
      /* A cleanup adds a zero filter to the beginning of the chain, but
	 there are special cases to look out for.  If there are *only*
	 cleanups along a path, then it compresses to a zero action.
	 Further, if there are multiple cleanups along a path, we only
	 need to represent one of them, as that is enough to trigger
	 entry to the landing pad at runtime.  */
      next = collect_one_action_chain (ar_hash, region->outer);
      if (next <= 0)
	return 0;
      for (c = region->outer; c ; c = c->outer)
	if (c->type == ERT_CLEANUP)
	  return next;
      return add_action_record (ar_hash, 0, next);

    case ERT_TRY:
      /* Process the associated catch regions in reverse order.
	 If there's a catch-all handler, then we don't need to
	 search outer regions.  Use a magic -3 value to record
	 that we haven't done the outer search.  */
      next = -3;
      for (c = region->u.try.last_catch; c ; c = c->u.catch.prev_catch)
	{
	  if (c->u.catch.type == NULL)
	    /* Catch-all: terminates the chain, no outer search needed.  */
	    next = add_action_record (ar_hash, c->u.catch.filter, 0);
	  else
	    {
	      /* Do the outer search lazily, at most once.  */
	      if (next == -3)
		{
		  next = collect_one_action_chain (ar_hash, region->outer);
		  if (next < 0)
		    next = 0;
		}
	      next = add_action_record (ar_hash, c->u.catch.filter, next);
	    }
	}
      return next;

    case ERT_ALLOWED_EXCEPTIONS:
      /* An exception specification adds its filter to the
	 beginning of the chain.  */
      next = collect_one_action_chain (ar_hash, region->outer);
      return add_action_record (ar_hash, region->u.allowed.filter,
				next < 0 ? 0 : next);

    case ERT_MUST_NOT_THROW:
      /* A must-not-throw region with no inner handlers or cleanups
	 requires no call-site entry.  Note that this differs from
	 the no handler or cleanup case in that we do require an lsda
	 to be generated.  Return a magic -2 value to record this.  */
      return -2;

    case ERT_CATCH:
    case ERT_THROW:
      /* CATCH regions are handled in TRY above.  THROW regions are
	 for optimization information only and produce no output.  */
      return collect_one_action_chain (ar_hash, region->outer);

    default:
      abort ();
    }
}
3152
/* Append an entry with LANDING_PAD and ACTION to the function's
   call-site table, growing the table geometrically as needed.
   Returns the call-site number of the new entry, biased by
   call_site_base.  */

static int
add_call_site (landing_pad, action)
     rtx landing_pad;
     int action;
{
  struct call_site_record *data = cfun->eh->call_site_data;
  int used = cfun->eh->call_site_data_used;
  int size = cfun->eh->call_site_data_size;

  if (used >= size)
    {
      /* Double the capacity; start with 64 entries.  */
      size = (size ? size * 2 : 64);
      data = (struct call_site_record *)
	xrealloc (data, sizeof (*data) * size);
      cfun->eh->call_site_data = data;
      cfun->eh->call_site_data_size = size;
    }

  data[used].landing_pad = landing_pad;
  data[used].action = action;

  cfun->eh->call_site_data_used = used + 1;

  return used + call_site_base;
}
3178
52a11cbf
RH
3179/* Turn REG_EH_REGION notes back into NOTE_INSN_EH_REGION notes.
3180 The new note numbers will not refer to region numbers, but
3181 instead to call site entries. */
77d33a84 3182
52a11cbf
RH
3183void
3184convert_to_eh_region_ranges ()
77d33a84 3185{
52a11cbf
RH
3186 rtx insn, iter, note;
3187 htab_t ar_hash;
3188 int last_action = -3;
3189 rtx last_action_insn = NULL_RTX;
3190 rtx last_landing_pad = NULL_RTX;
3191 rtx first_no_action_insn = NULL_RTX;
3192 int call_site;
77d33a84 3193
52a11cbf
RH
3194 if (USING_SJLJ_EXCEPTIONS || cfun->eh->region_tree == NULL)
3195 return;
77d33a84 3196
52a11cbf 3197 VARRAY_UCHAR_INIT (cfun->eh->action_record_data, 64, "action_record_data");
77d33a84 3198
52a11cbf 3199 ar_hash = htab_create (31, action_record_hash, action_record_eq, free);
77d33a84 3200
52a11cbf
RH
3201 for (iter = get_insns (); iter ; iter = NEXT_INSN (iter))
3202 if (INSN_P (iter))
3203 {
3204 struct eh_region *region;
3205 int this_action;
3206 rtx this_landing_pad;
77d33a84 3207
52a11cbf
RH
3208 insn = iter;
3209 if (GET_CODE (insn) == INSN
3210 && GET_CODE (PATTERN (insn)) == SEQUENCE)
3211 insn = XVECEXP (PATTERN (insn), 0, 0);
1ef1bf06 3212
52a11cbf
RH
3213 note = find_reg_note (insn, REG_EH_REGION, NULL_RTX);
3214 if (!note)
3215 {
3216 if (! (GET_CODE (insn) == CALL_INSN
3217 || (flag_non_call_exceptions
3218 && may_trap_p (PATTERN (insn)))))
3219 continue;
3220 this_action = -1;
3221 region = NULL;
3222 }
3223 else
3224 {
3225 if (INTVAL (XEXP (note, 0)) <= 0)
3226 continue;
3227 region = cfun->eh->region_array[INTVAL (XEXP (note, 0))];
3228 this_action = collect_one_action_chain (ar_hash, region);
3229 }
3230
3231 /* Existence of catch handlers, or must-not-throw regions
3232 implies that an lsda is needed (even if empty). */
3233 if (this_action != -1)
3234 cfun->uses_eh_lsda = 1;
3235
3236 /* Delay creation of region notes for no-action regions
3237 until we're sure that an lsda will be required. */
3238 else if (last_action == -3)
3239 {
3240 first_no_action_insn = iter;
3241 last_action = -1;
3242 }
1ef1bf06 3243
52a11cbf
RH
3244 /* Cleanups and handlers may share action chains but not
3245 landing pads. Collect the landing pad for this region. */
3246 if (this_action >= 0)
3247 {
3248 struct eh_region *o;
3249 for (o = region; ! o->landing_pad ; o = o->outer)
3250 continue;
3251 this_landing_pad = o->landing_pad;
3252 }
3253 else
3254 this_landing_pad = NULL_RTX;
1ef1bf06 3255
52a11cbf
RH
3256 /* Differing actions or landing pads implies a change in call-site
3257 info, which implies some EH_REGION note should be emitted. */
3258 if (last_action != this_action
3259 || last_landing_pad != this_landing_pad)
3260 {
3261 /* If we'd not seen a previous action (-3) or the previous
3262 action was must-not-throw (-2), then we do not need an
3263 end note. */
3264 if (last_action >= -1)
3265 {
3266 /* If we delayed the creation of the begin, do it now. */
3267 if (first_no_action_insn)
3268 {
3269 call_site = add_call_site (NULL_RTX, 0);
3270 note = emit_note_before (NOTE_INSN_EH_REGION_BEG,
3271 first_no_action_insn);
3272 NOTE_EH_HANDLER (note) = call_site;
3273 first_no_action_insn = NULL_RTX;
3274 }
3275
3276 note = emit_note_after (NOTE_INSN_EH_REGION_END,
3277 last_action_insn);
3278 NOTE_EH_HANDLER (note) = call_site;
3279 }
3280
3281 /* If the new action is must-not-throw, then no region notes
3282 are created. */
3283 if (this_action >= -1)
3284 {
3285 call_site = add_call_site (this_landing_pad,
3286 this_action < 0 ? 0 : this_action);
3287 note = emit_note_before (NOTE_INSN_EH_REGION_BEG, iter);
3288 NOTE_EH_HANDLER (note) = call_site;
3289 }
3290
3291 last_action = this_action;
3292 last_landing_pad = this_landing_pad;
3293 }
3294 last_action_insn = iter;
3295 }
1ef1bf06 3296
52a11cbf 3297 if (last_action >= -1 && ! first_no_action_insn)
1ef1bf06 3298 {
52a11cbf
RH
3299 note = emit_note_after (NOTE_INSN_EH_REGION_END, last_action_insn);
3300 NOTE_EH_HANDLER (note) = call_site;
1ef1bf06
AM
3301 }
3302
52a11cbf
RH
3303 htab_delete (ar_hash);
3304}
1ef1bf06 3305
52a11cbf
RH
3306\f
3307static void
3308push_uleb128 (data_area, value)
3309 varray_type *data_area;
3310 unsigned int value;
3311{
3312 do
3313 {
3314 unsigned char byte = value & 0x7f;
3315 value >>= 7;
3316 if (value)
3317 byte |= 0x80;
3318 VARRAY_PUSH_UCHAR (*data_area, byte);
3319 }
3320 while (value);
3321}
1ef1bf06 3322
/* Append VALUE to *DATA_AREA in signed LEB128 form: seven data bits
   per byte, low-order groups first.  Emission stops once the remaining
   bits are pure sign extension of the sign bit (bit 6) of the byte
   just produced.  */

static void
push_sleb128 (data_area, value)
     varray_type *data_area;
     int value;
{
  unsigned char byte;
  int more;

  do
    {
      byte = value & 0x7f;
      /* NOTE(review): assumes arithmetic right shift of negative
	 values, so that VALUE converges to -1 — implementation-defined
	 in ISO C, but true on all targets GCC supports.  */
      value >>= 7;
      more = ! ((value == 0 && (byte & 0x40) == 0)
		|| (value == -1 && (byte & 0x40) != 0));
      if (more)
	byte |= 0x80;
      VARRAY_PUSH_UCHAR (*data_area, byte);
    }
  while (more);
}
1ef1bf06 3343
/* DWARF exception-header pointer encodings used in the LSDA.  The low
   nibble selects the value format, the high nibble the base to which
   the value is relative.  */

#define DW_EH_PE_absptr 0x00
#define DW_EH_PE_omit 0xff

#define DW_EH_PE_uleb128 0x01
#define DW_EH_PE_udata2 0x02
#define DW_EH_PE_udata4 0x03
#define DW_EH_PE_udata8 0x04
#define DW_EH_PE_sleb128 0x09
#define DW_EH_PE_sdata2 0x0A
#define DW_EH_PE_sdata4 0x0B
#define DW_EH_PE_sdata8 0x0C
#define DW_EH_PE_signed 0x08

#define DW_EH_PE_pcrel 0x10
#define DW_EH_PE_textrel 0x20
#define DW_EH_PE_datarel 0x30
#define DW_EH_PE_funcrel 0x40

/* Return a human-readable name for pointer encoding FORMAT, for use
   in assembly output comments.  Aborts on an unrecognized encoding.  */

static const char *
eh_data_format_name (format)
     int format;
{
  switch (format)
    {
    case DW_EH_PE_absptr: return "absolute";
    case DW_EH_PE_omit: return "omit";

    case DW_EH_PE_uleb128: return "uleb128";
    case DW_EH_PE_udata2: return "udata2";
    case DW_EH_PE_udata4: return "udata4";
    case DW_EH_PE_udata8: return "udata8";
    case DW_EH_PE_sleb128: return "sleb128";
    case DW_EH_PE_sdata2: return "sdata2";
    case DW_EH_PE_sdata4: return "sdata4";
    case DW_EH_PE_sdata8: return "sdata8";

    case DW_EH_PE_uleb128 | DW_EH_PE_pcrel: return "pcrel uleb128";
    case DW_EH_PE_udata2 | DW_EH_PE_pcrel: return "pcrel udata2";
    case DW_EH_PE_udata4 | DW_EH_PE_pcrel: return "pcrel udata4";
    case DW_EH_PE_udata8 | DW_EH_PE_pcrel: return "pcrel udata8";
    case DW_EH_PE_sleb128 | DW_EH_PE_pcrel: return "pcrel sleb128";
    case DW_EH_PE_sdata2 | DW_EH_PE_pcrel: return "pcrel sdata2";
    case DW_EH_PE_sdata4 | DW_EH_PE_pcrel: return "pcrel sdata4";
    case DW_EH_PE_sdata8 | DW_EH_PE_pcrel: return "pcrel sdata8";

    case DW_EH_PE_uleb128 | DW_EH_PE_textrel: return "textrel uleb128";
    case DW_EH_PE_udata2 | DW_EH_PE_textrel: return "textrel udata2";
    case DW_EH_PE_udata4 | DW_EH_PE_textrel: return "textrel udata4";
    case DW_EH_PE_udata8 | DW_EH_PE_textrel: return "textrel udata8";
    case DW_EH_PE_sleb128 | DW_EH_PE_textrel: return "textrel sleb128";
    case DW_EH_PE_sdata2 | DW_EH_PE_textrel: return "textrel sdata2";
    case DW_EH_PE_sdata4 | DW_EH_PE_textrel: return "textrel sdata4";
    case DW_EH_PE_sdata8 | DW_EH_PE_textrel: return "textrel sdata8";

    case DW_EH_PE_uleb128 | DW_EH_PE_datarel: return "datarel uleb128";
    case DW_EH_PE_udata2 | DW_EH_PE_datarel: return "datarel udata2";
    case DW_EH_PE_udata4 | DW_EH_PE_datarel: return "datarel udata4";
    case DW_EH_PE_udata8 | DW_EH_PE_datarel: return "datarel udata8";
    case DW_EH_PE_sleb128 | DW_EH_PE_datarel: return "datarel sleb128";
    case DW_EH_PE_sdata2 | DW_EH_PE_datarel: return "datarel sdata2";
    case DW_EH_PE_sdata4 | DW_EH_PE_datarel: return "datarel sdata4";
    case DW_EH_PE_sdata8 | DW_EH_PE_datarel: return "datarel sdata8";

    case DW_EH_PE_uleb128 | DW_EH_PE_funcrel: return "funcrel uleb128";
    case DW_EH_PE_udata2 | DW_EH_PE_funcrel: return "funcrel udata2";
    case DW_EH_PE_udata4 | DW_EH_PE_funcrel: return "funcrel udata4";
    case DW_EH_PE_udata8 | DW_EH_PE_funcrel: return "funcrel udata8";
    case DW_EH_PE_sleb128 | DW_EH_PE_funcrel: return "funcrel sleb128";
    case DW_EH_PE_sdata2 | DW_EH_PE_funcrel: return "funcrel sdata2";
    case DW_EH_PE_sdata4 | DW_EH_PE_funcrel: return "funcrel sdata4";
    case DW_EH_PE_sdata8 | DW_EH_PE_funcrel: return "funcrel sdata8";

    default:
      abort ();
    }
}
3421
/* Without .uleb128 assembler support, the call-site table sizes must be
   computed by hand so the @TType base offset can be emitted numerically
   in output_function_exception_table.  */
#ifndef HAVE_AS_LEB128
/* Return the byte size of the dwarf2 call-site table: three 4-byte
   fields plus a uleb128 action entry per record.  */
static int
dw2_size_of_call_site_table ()
{
  int n = cfun->eh->call_site_data_used;
  int size = n * (4 + 4 + 4);
  int i;

  for (i = 0; i < n; ++i)
    {
      struct call_site_record *cs = &cfun->eh->call_site_data[i];
      size += size_of_uleb128 (cs->action);
    }

  return size;
}

/* Return the byte size of the sjlj call-site table: a uleb128 landing
   pad dispatch index plus a uleb128 action entry per record.  */
static int
sjlj_size_of_call_site_table ()
{
  int n = cfun->eh->call_site_data_used;
  int size = 0;
  int i;

  for (i = 0; i < n; ++i)
    {
      struct call_site_record *cs = &cfun->eh->call_site_data[i];
      size += size_of_uleb128 (INTVAL (cs->landing_pad));
      size += size_of_uleb128 (cs->action);
    }

  return size;
}
#endif
3456
/* Emit the dwarf2 call-site table: for each record, the region start
   and length (relative to the function start label), the landing pad
   offset (or 0 for none), and the action record offset.  Advances
   call_site_base past the emitted entries.  */
static void
dw2_output_call_site_table ()
{
  const char *function_start_lab
    = IDENTIFIER_POINTER (current_function_func_begin_label);
  int n = cfun->eh->call_site_data_used;
  int i;

  for (i = 0; i < n; ++i)
    {
      struct call_site_record *cs = &cfun->eh->call_site_data[i];
      char reg_start_lab[32];
      char reg_end_lab[32];
      char landing_pad_lab[32];

      ASM_GENERATE_INTERNAL_LABEL (reg_start_lab, "LEHB", call_site_base + i);
      ASM_GENERATE_INTERNAL_LABEL (reg_end_lab, "LEHE", call_site_base + i);

      if (cs->landing_pad)
	ASM_GENERATE_INTERNAL_LABEL (landing_pad_lab, "L",
				     CODE_LABEL_NUMBER (cs->landing_pad));

      /* ??? Perhaps use insn length scaling if the assembler supports
	 generic arithmetic.  */
      /* ??? Perhaps use attr_length to choose data1 or data2 instead of
	 data4 if the function is small enough.  */
#ifdef HAVE_AS_LEB128
      dw2_asm_output_delta_uleb128 (reg_start_lab, function_start_lab,
				    "region %d start", i);
      dw2_asm_output_delta_uleb128 (reg_end_lab, reg_start_lab,
				    "length");
      if (cs->landing_pad)
	dw2_asm_output_delta_uleb128 (landing_pad_lab, function_start_lab,
				      "landing pad");
      else
	dw2_asm_output_data_uleb128 (0, "landing pad");
#else
      dw2_asm_output_delta (4, reg_start_lab, function_start_lab,
			    "region %d start", i);
      dw2_asm_output_delta (4, reg_end_lab, reg_start_lab, "length");
      if (cs->landing_pad)
	dw2_asm_output_delta (4, landing_pad_lab, function_start_lab,
			      "landing pad");
      else
	dw2_asm_output_data (4, 0, "landing pad");
#endif
      dw2_asm_output_data_uleb128 (cs->action, "action");
    }

  call_site_base += n;
}
3508
/* Emit the sjlj call-site table: for each record, the landing pad
   dispatch index and the action record offset, both as uleb128.
   Advances call_site_base past the emitted entries.  */
static void
sjlj_output_call_site_table ()
{
  int n = cfun->eh->call_site_data_used;
  int i;

  for (i = 0; i < n; ++i)
    {
      struct call_site_record *cs = &cfun->eh->call_site_data[i];

      dw2_asm_output_data_uleb128 (INTVAL (cs->landing_pad),
				   "region %d landing pad", i);
      dw2_asm_output_data_uleb128 (cs->action, "action");
    }

  call_site_base += n;
}
3526
/* Emit the language-specific data area (LSDA) for the current function:
   the header describing the encodings in use, the call-site table, the
   action record table, the @TType (runtime type) table, and the
   exception specification table.  Emits nothing for functions that do
   not require an lsda.  */
void
output_function_exception_table ()
{
  int format, i, n;
#ifdef HAVE_AS_LEB128
  char ttype_label[32];
  char cs_after_size_label[32];
  char cs_end_label[32];
#else
  int call_site_len;
#endif
  int have_tt_data;
  int funcdef_number;

  /* Not all functions need anything.  */
  if (! cfun->uses_eh_lsda)
    return;

  /* sjlj numbering is kept separately, since only functions that
     actually emit an lsda consume a slot.  */
  funcdef_number = (USING_SJLJ_EXCEPTIONS
		    ? sjlj_funcdef_number
		    : current_funcdef_number);

  exception_section ();

  have_tt_data = (VARRAY_ACTIVE_SIZE (cfun->eh->ttype_data) > 0
		  || VARRAY_ACTIVE_SIZE (cfun->eh->ehspec_data) > 0);

  if (have_tt_data)
    assemble_eh_align (GET_MODE_ALIGNMENT (ptr_mode));

  ASM_OUTPUT_INTERNAL_LABEL (asm_out_file, "LLSDA", funcdef_number);

  /* The LSDA header.  */

  /* Indicate the format of the landing pad start pointer.  An omitted
     field implies @LPStart == @Start.  */
  /* Currently we always put @LPStart == @Start.  This field would
     be most useful in moving the landing pads completely out of
     line to another section, but it could also be used to minimize
     the size of uleb128 landing pad offsets.  */
  format = DW_EH_PE_omit;
  dw2_asm_output_data (1, format, "@LPStart format (%s)",
		       eh_data_format_name (format));

  /* @LPStart pointer would go here.  */

  /* Indicate the format of the @TType entries.  */
  if (! have_tt_data)
    format = DW_EH_PE_omit;
  else
    {
      /* ??? Define a ASM_PREFERRED_DATA_FORMAT to say what
	 sort of dynamic-relocation-free reference to emit.  */
      format = 0;
#ifdef HAVE_AS_LEB128
      ASM_GENERATE_INTERNAL_LABEL (ttype_label, "LLSDATT", funcdef_number);
#endif
    }
  dw2_asm_output_data (1, format, "@TType format (%s)",
		       eh_data_format_name (format));

#ifndef HAVE_AS_LEB128
  /* Without assembler leb128 support, all lengths must be precomputed.  */
  if (USING_SJLJ_EXCEPTIONS)
    call_site_len = sjlj_size_of_call_site_table ();
  else
    call_site_len = dw2_size_of_call_site_table ();
#endif

  /* A pc-relative 4-byte displacement to the @TType data.  */
  if (have_tt_data)
    {
#ifdef HAVE_AS_LEB128
      char ttype_after_disp_label[32];
      ASM_GENERATE_INTERNAL_LABEL (ttype_after_disp_label, "LLSDATTD",
				   funcdef_number);
      dw2_asm_output_delta_uleb128 (ttype_label, ttype_after_disp_label,
				    "@TType base offset");
      ASM_OUTPUT_LABEL (asm_out_file, ttype_after_disp_label);
#else
      /* Ug.  Alignment queers things.  The displacement's own uleb128
	 size affects the padding needed to align the ttype table, which
	 in turn affects the displacement; iterate to a fixed point.  */
      unsigned int before_disp, after_disp, last_disp, disp, align;

      align = POINTER_SIZE / BITS_PER_UNIT;
      before_disp = 1 + 1;
      after_disp = (1 + size_of_uleb128 (call_site_len)
		    + call_site_len
		    + VARRAY_ACTIVE_SIZE (cfun->eh->action_record_data)
		    + VARRAY_ACTIVE_SIZE (cfun->eh->ttype_data) * align);

      disp = after_disp;
      do
	{
	  unsigned int disp_size, pad;

	  last_disp = disp;
	  disp_size = size_of_uleb128 (disp);
	  pad = before_disp + disp_size + after_disp;
	  if (pad % align)
	    pad = align - (pad % align);
	  else
	    pad = 0;
	  disp = after_disp + pad;
	}
      while (disp != last_disp);

      dw2_asm_output_data_uleb128 (disp, "@TType base offset");
#endif
    }

  /* Indicate the format of the call-site offsets.  */
#ifdef HAVE_AS_LEB128
  format = DW_EH_PE_uleb128;
#else
  format = DW_EH_PE_udata4;
#endif
  dw2_asm_output_data (1, format, "call-site format (%s)",
		       eh_data_format_name (format));

#ifdef HAVE_AS_LEB128
  ASM_GENERATE_INTERNAL_LABEL (cs_after_size_label, "LLSDACSB",
			       funcdef_number);
  ASM_GENERATE_INTERNAL_LABEL (cs_end_label, "LLSDACSE",
			       funcdef_number);
  dw2_asm_output_delta_uleb128 (cs_end_label, cs_after_size_label,
				"Call-site table length");
  ASM_OUTPUT_LABEL (asm_out_file, cs_after_size_label);
  if (USING_SJLJ_EXCEPTIONS)
    sjlj_output_call_site_table ();
  else
    dw2_output_call_site_table ();
  ASM_OUTPUT_LABEL (asm_out_file, cs_end_label);
#else
  dw2_asm_output_data_uleb128 (call_site_len, "Call-site table length");
  if (USING_SJLJ_EXCEPTIONS)
    sjlj_output_call_site_table ();
  else
    dw2_output_call_site_table ();
#endif

  /* ??? Decode and interpret the data for flag_debug_asm.  */
  n = VARRAY_ACTIVE_SIZE (cfun->eh->action_record_data);
  for (i = 0; i < n; ++i)
    dw2_asm_output_data (1, VARRAY_UCHAR (cfun->eh->action_record_data, i),
			 (i ? NULL : "Action record table"));

  if (have_tt_data)
    assemble_eh_align (GET_MODE_ALIGNMENT (ptr_mode));

  /* The @TType table is emitted in reverse order, as the runtime
     indexes it with negative filter values.  */
  i = VARRAY_ACTIVE_SIZE (cfun->eh->ttype_data);
  while (i-- > 0)
    {
      tree type = VARRAY_TREE (cfun->eh->ttype_data, i);

      if (type == NULL_TREE)
	type = integer_zero_node;
      else
	type = lookup_type_for_runtime (type);

      /* ??? Handle ASM_PREFERRED_DATA_FORMAT.  */
      output_constant (type, GET_MODE_SIZE (ptr_mode));
    }

#ifdef HAVE_AS_LEB128
  if (have_tt_data)
    ASM_OUTPUT_LABEL (asm_out_file, ttype_label);
#endif

  /* ??? Decode and interpret the data for flag_debug_asm.  */
  n = VARRAY_ACTIVE_SIZE (cfun->eh->ehspec_data);
  for (i = 0; i < n; ++i)
    dw2_asm_output_data (1, VARRAY_UCHAR (cfun->eh->ehspec_data, i),
			 (i ? NULL : "Exception specification table"));

  function_section (current_function_decl);

  if (USING_SJLJ_EXCEPTIONS)
    sjlj_funcdef_number += 1;
}