]> git.ipfire.org Git - thirdparty/gcc.git/blame - gcc/except.c
ia64.c (group_barrier_needed_p): Don't allow calls and jumps to be bundled together.
[thirdparty/gcc.git] / gcc / except.c
CommitLineData
12670d88 1/* Implements exception handling.
ef58a523 2 Copyright (C) 1989, 1992, 1993, 1994, 1995, 1996, 1997, 1998,
c913b6f1 3 1999, 2000, 2001 Free Software Foundation, Inc.
4956d07c
MS
4 Contributed by Mike Stump <mrs@cygnus.com>.
5
6This file is part of GNU CC.
7
8GNU CC is free software; you can redistribute it and/or modify
9it under the terms of the GNU General Public License as published by
10the Free Software Foundation; either version 2, or (at your option)
11any later version.
12
13GNU CC is distributed in the hope that it will be useful,
14but WITHOUT ANY WARRANTY; without even the implied warranty of
15MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
16GNU General Public License for more details.
17
18You should have received a copy of the GNU General Public License
19along with GNU CC; see the file COPYING. If not, write to
20the Free Software Foundation, 59 Temple Place - Suite 330,
21Boston, MA 02111-1307, USA. */
22
23
12670d88
RK
24/* An exception is an event that can be signaled from within a
25 function. This event can then be "caught" or "trapped" by the
26 callers of this function. This potentially allows program flow to
956d6950 27 be transferred to any arbitrary code associated with a function call
12670d88
RK
28 several levels up the stack.
29
30 The intended use for this mechanism is for signaling "exceptional
31 events" in an out-of-band fashion, hence its name. The C++ language
32 (and many other OO-styled or functional languages) practically
33 requires such a mechanism, as otherwise it becomes very difficult
34 or even impossible to signal failure conditions in complex
35 situations. The traditional C++ example is when an error occurs in
36 the process of constructing an object; without such a mechanism, it
37 is impossible to signal that the error occurs without adding global
38 state variables and error checks around every object construction.
39
40 The act of causing this event to occur is referred to as "throwing
41 an exception". (Alternate terms include "raising an exception" or
42 "signaling an exception".) The term "throw" is used because control
43 is returned to the callers of the function that is signaling the
44 exception, and thus there is the concept of "throwing" the
45 exception up the call stack.
46
52a11cbf 47 [ Add updated documentation on how to use this. ] */
4956d07c
MS
48
49
50#include "config.h"
670ee920 51#include "system.h"
4956d07c
MS
52#include "rtl.h"
53#include "tree.h"
54#include "flags.h"
4956d07c 55#include "function.h"
4956d07c 56#include "expr.h"
4956d07c 57#include "insn-config.h"
52a11cbf
RH
58#include "except.h"
59#include "integrate.h"
60#include "hard-reg-set.h"
61#include "basic-block.h"
4956d07c 62#include "output.h"
52a11cbf
RH
63#include "dwarf2asm.h"
64#include "dwarf2out.h"
10f0ad3d 65#include "toplev.h"
52a11cbf 66#include "hashtab.h"
2b12ffe0 67#include "intl.h"
87ff9c8e 68#include "ggc.h"
b1474bb7 69#include "tm_p.h"
4956d07c 70
52a11cbf
RH
71
/* Provide defaults for stuff that may not be defined when using
   sjlj exceptions.  */
#ifndef EH_RETURN_STACKADJ_RTX
#define EH_RETURN_STACKADJ_RTX 0
#endif
#ifndef EH_RETURN_HANDLER_RTX
#define EH_RETURN_HANDLER_RTX 0
#endif
#ifndef EH_RETURN_DATA_REGNO
#define EH_RETURN_DATA_REGNO(N) INVALID_REGNUM
#endif


/* Nonzero means enable synchronous exceptions for non-call instructions.  */
int flag_non_call_exceptions;

/* Protect cleanup actions with must-not-throw regions, with a call
   to the given failure handler.  */
tree protect_cleanup_actions;

/* Return true if type A catches type B.  Hook supplied by the
   language front end.  */
int (*lang_eh_type_covers) PARAMS ((tree a, tree b));

/* Map a type to a runtime object to match type.  Hook supplied by
   the language front end.  */
tree (*lang_eh_runtime_type) PARAMS ((tree));

/* A list of labels used for exception handlers.  */
rtx exception_handler_labels;

static int call_site_base;
static int sjlj_funcdef_number;
/* Hash table mapping front-end types to runtime type objects; built
   lazily via add_type_for_runtime.  */
static htab_t type_to_runtime_map;

/* Describe the SjLj_Function_Context structure.  The type node and
   the byte offsets of its fields are cached by init_eh for use when
   generating rtl.  */
static tree sjlj_fc_type_node;
static int sjlj_fc_call_site_ofs;
static int sjlj_fc_data_ofs;
static int sjlj_fc_personality_ofs;
static int sjlj_fc_lsda_ofs;
static int sjlj_fc_jbuf_ofs;
112\f
/* Describes one exception region.  */
struct eh_region
{
  /* The immediately surrounding region.  */
  struct eh_region *outer;

  /* The list of immediately contained regions.  */
  struct eh_region *inner;
  struct eh_region *next_peer;

  /* An identifier for this region.  */
  int region_number;

  /* Each region does exactly one thing.  */
  enum eh_region_type
  {
    ERT_CLEANUP = 1,
    ERT_TRY,
    ERT_CATCH,
    ERT_ALLOWED_EXCEPTIONS,
    ERT_MUST_NOT_THROW,
    ERT_THROW,
    ERT_FIXUP
  } type;

  /* Holds the action to perform based on the preceding type.  */
  union {
    /* A list of catch blocks, a surrounding try block,
       and the label for continuing after a catch.  */
    struct {
      struct eh_region *catch;
      struct eh_region *last_catch;
      struct eh_region *prev_try;
      rtx continue_label;
    } try;

    /* The list through the catch handlers, the type object
       matched, and a pointer to the generated code.  */
    struct {
      struct eh_region *next_catch;
      struct eh_region *prev_catch;
      tree type;
      int filter;
    } catch;

    /* A tree_list of allowed types.  */
    struct {
      tree type_list;
      int filter;
    } allowed;

    /* The type given by a call to "throw foo();", or discovered
       for a throw.  */
    struct {
      tree type;
    } throw;

    /* Retain the cleanup expression even after expansion so that
       we can match up fixup regions.  */
    struct {
      tree exp;
    } cleanup;

    /* The real region (by expression and by pointer) that fixup code
       should live in.  */
    struct {
      tree cleanup_exp;
      struct eh_region *real_region;
    } fixup;
  } u;

  /* Entry point for this region's handler before landing pads are built.  */
  rtx label;

  /* Entry point for this region's handler from the runtime eh library.  */
  rtx landing_pad;

  /* Entry point for this region's handler from an inner region.  */
  rtx post_landing_pad;

  /* The RESX insn for handing off control to the next outermost handler,
     if appropriate.  */
  rtx resume;
};
71038426 197
52a11cbf
RH
/* Used to save exception status for each function.  */
struct eh_status
{
  /* The tree of all regions for this function.  */
  struct eh_region *region_tree;

  /* The same information as an indexable array.  */
  struct eh_region **region_array;

  /* The most recently open region.  */
  struct eh_region *cur_region;

  /* This is the region for which we are processing catch blocks.  */
  struct eh_region *try_region;

  /* A stack (TREE_LIST) of lists of handlers.  The TREE_VALUE of each
     node is itself a TREE_CHAINed list of handlers for regions that
     are not yet closed.  The TREE_VALUE of each entry contains the
     handler for the corresponding entry on the ehstack.  */
  tree protect_list;

  /* Pseudo-registers holding the exception selector filter value and
     the pointer to the exception object (see get_exception_filter and
     get_exception_pointer).  */
  rtx filter;
  rtx exc_ptr;

  int built_landing_pads;
  int last_region_number;

  varray_type ttype_data;
  varray_type ehspec_data;
  varray_type action_record_data;

  /* Call-site table accumulated for this function, with the usual
     used/size pair for a growable array.  */
  struct call_site_record
  {
    rtx landing_pad;
    int action;
  } *call_site_data;
  int call_site_data_used;
  int call_site_data_size;

  /* rtl for expanding __builtin_eh_return.  */
  rtx ehr_stackadj;
  rtx ehr_handler;
  rtx ehr_label;

  /* rtl specific to setjmp/longjmp exception handling.  */
  rtx sjlj_fc;
  rtx sjlj_exit_after;
};
e6cfb550 244
52a11cbf
RH
245\f
/* Garbage collection support.  */
static void mark_eh_region			PARAMS ((struct eh_region *));

/* Management of the type-to-runtime-object hash table.  */
static int t2r_eq				PARAMS ((const PTR,
							 const PTR));
static hashval_t t2r_hash			PARAMS ((const PTR));
static int t2r_mark_1				PARAMS ((PTR *, PTR));
static void t2r_mark				PARAMS ((PTR));
static void add_type_for_runtime		PARAMS ((tree));
static tree lookup_type_for_runtime		PARAMS ((tree));

static struct eh_region *expand_eh_region_end	PARAMS ((void));

static rtx get_exception_filter			PARAMS ((void));

/* Region-tree bookkeeping for the EH optimization pass.  */
static void collect_eh_region_array		PARAMS ((void));
static void resolve_fixup_regions		PARAMS ((void));
static void remove_fixup_regions		PARAMS ((void));
static void convert_from_eh_region_ranges_1	PARAMS ((rtx *, int *, int));

/* Duplication of the region tree for function inlining.  */
static struct eh_region *duplicate_eh_region_1	PARAMS ((struct eh_region *,
						     struct inline_remap *));
static void duplicate_eh_region_2		PARAMS ((struct eh_region *,
						     struct eh_region **));

/* Construction of the ttypes and exception-specification tables.  */
static int ttypes_filter_eq			PARAMS ((const PTR,
							 const PTR));
static hashval_t ttypes_filter_hash		PARAMS ((const PTR));
static int ehspec_filter_eq			PARAMS ((const PTR,
							 const PTR));
static hashval_t ehspec_filter_hash		PARAMS ((const PTR));
static int add_ttypes_entry			PARAMS ((htab_t, tree));
static int add_ehspec_entry			PARAMS ((htab_t, htab_t,
							 tree));
static void assign_filter_values		PARAMS ((void));

/* Landing-pad construction, dwarf2 flavor.  */
static void build_post_landing_pads		PARAMS ((void));
static void connect_post_landing_pads		PARAMS ((void));
static void dw2_build_landing_pads		PARAMS ((void));

/* Landing-pad construction, setjmp/longjmp flavor.  */
struct sjlj_lp_info;
static bool sjlj_find_directly_reachable_regions
     PARAMS ((struct sjlj_lp_info *));
static void sjlj_assign_call_site_values
     PARAMS ((rtx, struct sjlj_lp_info *));
static void sjlj_mark_call_sites
     PARAMS ((struct sjlj_lp_info *));
static void sjlj_emit_function_enter		PARAMS ((rtx));
static void sjlj_emit_function_exit		PARAMS ((void));
static void sjlj_emit_dispatch_table
     PARAMS ((rtx, struct sjlj_lp_info *));
static void sjlj_build_landing_pads		PARAMS ((void));

static void remove_exception_handler_label	PARAMS ((rtx));
static void remove_eh_handler			PARAMS ((struct eh_region *));

struct reachable_info;

/* The return value of reachable_next_level.  */
enum reachable_code
{
  /* The given exception is not processed by the given region.  */
  RNL_NOT_CAUGHT,
  /* The given exception may need processing by the given region.  */
  RNL_MAYBE_CAUGHT,
  /* The given exception is completely processed by the given region.  */
  RNL_CAUGHT,
  /* The given exception is completely processed by the runtime.  */
  RNL_BLOCKED
};

static int check_handled			PARAMS ((tree, tree));
static void add_reachable_handler
     PARAMS ((struct reachable_info *, struct eh_region *,
	      struct eh_region *));
static enum reachable_code reachable_next_level
     PARAMS ((struct eh_region *, tree, struct reachable_info *));

/* Action-record table construction.  */
static int action_record_eq			PARAMS ((const PTR,
							 const PTR));
static hashval_t action_record_hash		PARAMS ((const PTR));
static int add_action_record			PARAMS ((htab_t, int, int));
static int collect_one_action_chain		PARAMS ((htab_t,
							 struct eh_region *));
static int add_call_site			PARAMS ((rtx, int));

/* Exception-table output.  */
static void push_uleb128			PARAMS ((varray_type *,
							 unsigned int));
static void push_sleb128			PARAMS ((varray_type *, int));
static const char *eh_data_format_name		PARAMS ((int));
#ifndef HAVE_AS_LEB128
static int dw2_size_of_call_site_table		PARAMS ((void));
static int sjlj_size_of_call_site_table		PARAMS ((void));
#endif
static void dw2_output_call_site_table		PARAMS ((void));
static void sjlj_output_call_site_table		PARAMS ((void));
e6cfb550 339
52a11cbf
RH
340\f
341/* Routine to see if exception handling is turned on.
342 DO_WARN is non-zero if we want to inform the user that exception
343 handling is turned off.
4956d07c 344
52a11cbf
RH
345 This is used to ensure that -fexceptions has been specified if the
346 compiler tries to use any exception-specific functions. */
4956d07c 347
52a11cbf
RH
348int
349doing_eh (do_warn)
350 int do_warn;
351{
352 if (! flag_exceptions)
353 {
354 static int warned = 0;
355 if (! warned && do_warn)
356 {
357 error ("exception handling disabled, use -fexceptions to enable");
358 warned = 1;
359 }
360 return 0;
361 }
362 return 1;
4956d07c
MS
363}
364
52a11cbf
RH
365\f
/* One-time initialization of global exception state: register GC roots
   and, when setjmp/longjmp exceptions are in use, lay out the function
   context record and cache its field offsets.  */

void
init_eh ()
{
  /* These roots are registered unconditionally so they are safe even
     when exceptions are disabled.  */
  ggc_add_rtx_root (&exception_handler_labels, 1);
  ggc_add_tree_root (&protect_cleanup_actions, 1);

  if (! flag_exceptions)
    return;

  type_to_runtime_map = htab_create (31, t2r_hash, t2r_eq, NULL);
  ggc_add_root (&type_to_runtime_map, 1, sizeof (htab_t), t2r_mark);

  /* Create the SjLj_Function_Context structure.  This should match
     the definition in unwind-sjlj.c.  */
  if (USING_SJLJ_EXCEPTIONS)
    {
      tree f_jbuf, f_per, f_lsda, f_prev, f_cs, f_data, tmp;

      sjlj_fc_type_node = make_lang_type (RECORD_TYPE);
      ggc_add_tree_root (&sjlj_fc_type_node, 1);

      /* struct SjLj_Function_Context *__prev;  */
      f_prev = build_decl (FIELD_DECL, get_identifier ("__prev"),
			   build_pointer_type (sjlj_fc_type_node));
      DECL_FIELD_CONTEXT (f_prev) = sjlj_fc_type_node;

      /* int __call_site;  */
      f_cs = build_decl (FIELD_DECL, get_identifier ("__call_site"),
			 integer_type_node);
      DECL_FIELD_CONTEXT (f_cs) = sjlj_fc_type_node;

      /* word __data[4];  */
      tmp = build_index_type (build_int_2 (4 - 1, 0));
      tmp = build_array_type (type_for_mode (word_mode, 1), tmp);
      f_data = build_decl (FIELD_DECL, get_identifier ("__data"), tmp);
      DECL_FIELD_CONTEXT (f_data) = sjlj_fc_type_node;

      /* void *__personality;  */
      f_per = build_decl (FIELD_DECL, get_identifier ("__personality"),
			  ptr_type_node);
      DECL_FIELD_CONTEXT (f_per) = sjlj_fc_type_node;

      /* void *__lsda;  */
      f_lsda = build_decl (FIELD_DECL, get_identifier ("__lsda"),
			   ptr_type_node);
      DECL_FIELD_CONTEXT (f_lsda) = sjlj_fc_type_node;

      /* Pick the number of words in the jump buffer.  */
#ifdef DONT_USE_BUILTIN_SETJMP
#ifdef JMP_BUF_SIZE
      tmp = build_int_2 (JMP_BUF_SIZE - 1, 0);
#else
      /* Should be large enough for most systems, if it is not,
	 JMP_BUF_SIZE should be defined with the proper value.  It will
	 also tend to be larger than necessary for most systems, a more
	 optimal port will define JMP_BUF_SIZE.  */
      tmp = build_int_2 (FIRST_PSEUDO_REGISTER + 2 - 1, 0);
#endif
#else
      /* This is 2 for builtin_setjmp, plus whatever the target requires
	 via STACK_SAVEAREA_MODE (SAVE_NONLOCAL).  */
      tmp = build_int_2 ((GET_MODE_SIZE (STACK_SAVEAREA_MODE (SAVE_NONLOCAL))
			  / GET_MODE_SIZE (Pmode)) + 2 - 1, 0);
#endif
      /* void *__jbuf[N];  */
      tmp = build_index_type (tmp);
      tmp = build_array_type (ptr_type_node, tmp);
      f_jbuf = build_decl (FIELD_DECL, get_identifier ("__jbuf"), tmp);
#ifdef DONT_USE_BUILTIN_SETJMP
      /* We don't know what the alignment requirements of the
	 runtime's jmp_buf has.  Overestimate.  */
      DECL_ALIGN (f_jbuf) = BIGGEST_ALIGNMENT;
      DECL_USER_ALIGN (f_jbuf) = 1;
#endif
      DECL_FIELD_CONTEXT (f_jbuf) = sjlj_fc_type_node;

      /* Chain the fields together and lay out the record.  */
      TYPE_FIELDS (sjlj_fc_type_node) = f_prev;
      TREE_CHAIN (f_prev) = f_cs;
      TREE_CHAIN (f_cs) = f_data;
      TREE_CHAIN (f_data) = f_per;
      TREE_CHAIN (f_per) = f_lsda;
      TREE_CHAIN (f_lsda) = f_jbuf;

      layout_type (sjlj_fc_type_node);

      /* Cache the interesting field offsets so that we have
	 easy access from rtl.  */
      sjlj_fc_call_site_ofs
	= (tree_low_cst (DECL_FIELD_OFFSET (f_cs), 1)
	   + tree_low_cst (DECL_FIELD_BIT_OFFSET (f_cs), 1) / BITS_PER_UNIT);
      sjlj_fc_data_ofs
	= (tree_low_cst (DECL_FIELD_OFFSET (f_data), 1)
	   + tree_low_cst (DECL_FIELD_BIT_OFFSET (f_data), 1) / BITS_PER_UNIT);
      sjlj_fc_personality_ofs
	= (tree_low_cst (DECL_FIELD_OFFSET (f_per), 1)
	   + tree_low_cst (DECL_FIELD_BIT_OFFSET (f_per), 1) / BITS_PER_UNIT);
      sjlj_fc_lsda_ofs
	= (tree_low_cst (DECL_FIELD_OFFSET (f_lsda), 1)
	   + tree_low_cst (DECL_FIELD_BIT_OFFSET (f_lsda), 1) / BITS_PER_UNIT);
      sjlj_fc_jbuf_ofs
	= (tree_low_cst (DECL_FIELD_OFFSET (f_jbuf), 1)
	   + tree_low_cst (DECL_FIELD_BIT_OFFSET (f_jbuf), 1) / BITS_PER_UNIT);
    }
}
463
52a11cbf
RH
/* Allocate the zero-initialized per-function EH state for the current
   function.  */

void
init_eh_for_function ()
{
  cfun->eh = (struct eh_status *) xcalloc (1, sizeof (struct eh_status));
}
469
/* Mark EH for GC.  Marks the tree/rtx data reachable from REGION so
   the garbage collector keeps it; which union member is live depends
   on the region type.  */

static void
mark_eh_region (region)
     struct eh_region *region;
{
  if (! region)
    return;

  switch (region->type)
    {
    case ERT_CLEANUP:
      ggc_mark_tree (region->u.cleanup.exp);
      break;
    case ERT_TRY:
      ggc_mark_rtx (region->u.try.continue_label);
      break;
    case ERT_CATCH:
      ggc_mark_tree (region->u.catch.type);
      break;
    case ERT_ALLOWED_EXCEPTIONS:
      ggc_mark_tree (region->u.allowed.type_list);
      break;
    case ERT_MUST_NOT_THROW:
      /* Nothing in the union needs marking.  */
      break;
    case ERT_THROW:
      ggc_mark_tree (region->u.throw.type);
      break;
    case ERT_FIXUP:
      ggc_mark_tree (region->u.fixup.cleanup_exp);
      break;
    default:
      abort ();
    }

  /* These rtx fields exist for every region type.  */
  ggc_mark_rtx (region->label);
  ggc_mark_rtx (region->resume);
  ggc_mark_rtx (region->landing_pad);
  ggc_mark_rtx (region->post_landing_pad);
}
510
52a11cbf
RH
/* Mark the per-function EH state EH for GC: every region (via the
   region array if built, else a non-recursive walk of the region
   tree), then the remaining rtx/tree roots.  */

void
mark_eh_status (eh)
     struct eh_status *eh;
{
  int i;

  if (eh == 0)
    return;

  /* If we've called collect_eh_region_array, use it.  Otherwise walk
     the tree non-recursively.  */
  if (eh->region_array)
    {
      for (i = eh->last_region_number; i > 0; --i)
	{
	  struct eh_region *r = eh->region_array[i];
	  /* Several array slots may point at one region; mark it only
	     from its canonical slot.  */
	  if (r && r->region_number == i)
	    mark_eh_region (r);
	}
    }
  else if (eh->region_tree)
    {
      struct eh_region *r = eh->region_tree;
      while (1)
	{
	  mark_eh_region (r);
	  if (r->inner)
	    r = r->inner;
	  else if (r->next_peer)
	    r = r->next_peer;
	  else
	    {
	      /* Climb back up to the nearest ancestor that still has
		 an unvisited peer.  */
	      do {
		r = r->outer;
		if (r == NULL)
		  goto tree_done;
	      } while (r->next_peer == NULL);
	      r = r->next_peer;
	    }
	}
    tree_done:;
    }

  ggc_mark_tree (eh->protect_list);
  ggc_mark_rtx (eh->filter);
  ggc_mark_rtx (eh->exc_ptr);
  ggc_mark_tree_varray (eh->ttype_data);

  if (eh->call_site_data)
    {
      for (i = eh->call_site_data_used - 1; i >= 0; --i)
	ggc_mark_rtx (eh->call_site_data[i].landing_pad);
    }

  ggc_mark_rtx (eh->ehr_stackadj);
  ggc_mark_rtx (eh->ehr_handler);
  ggc_mark_rtx (eh->ehr_label);

  ggc_mark_rtx (eh->sjlj_fc);
  ggc_mark_rtx (eh->sjlj_exit_after);
}
9a0d1e1b 572
52a11cbf
RH
/* Release all heap storage owned by the EH state of function F,
   including every region structure and the side tables, and clear
   F->eh.  */

void
free_eh_status (f)
     struct function *f;
{
  struct eh_status *eh = f->eh;

  if (eh->region_array)
    {
      int i;
      for (i = eh->last_region_number; i > 0; --i)
	{
	  struct eh_region *r = eh->region_array[i];
	  /* Mind we don't free a region struct more than once.  */
	  if (r && r->region_number == i)
	    free (r);
	}
      free (eh->region_array);
    }
  else if (eh->region_tree)
    {
      /* Non-recursive walk that frees each node only once all of its
	 children have been freed.  */
      struct eh_region *next, *r = eh->region_tree;
      while (1)
	{
	  if (r->inner)
	    r = r->inner;
	  else if (r->next_peer)
	    {
	      next = r->next_peer;
	      free (r);
	      r = next;
	    }
	  else
	    {
	      do {
		next = r->outer;
		free (r);
		r = next;
		if (r == NULL)
		  goto tree_done;
	      } while (r->next_peer == NULL);
	      next = r->next_peer;
	      free (r);
	      r = next;
	    }
	}
    tree_done:;
    }

  VARRAY_FREE (eh->ttype_data);
  VARRAY_FREE (eh->ehspec_data);
  VARRAY_FREE (eh->action_record_data);
  if (eh->call_site_data)
    free (eh->call_site_data);

  free (eh);
  f->eh = NULL;
}
630
52a11cbf
RH
631\f
/* Start an exception handling region.  All instructions emitted
   after this point are considered to be part of the region until
   expand_eh_region_end is invoked.  */

void
expand_eh_region_start ()
{
  struct eh_region *new_region;
  struct eh_region *cur_region;
  rtx note;

  if (! doing_eh (0))
    return;

  /* Insert a new blank region as a leaf in the tree.  */
  new_region = (struct eh_region *) xcalloc (1, sizeof (*new_region));
  cur_region = cfun->eh->cur_region;
  new_region->outer = cur_region;
  if (cur_region)
    {
      new_region->next_peer = cur_region->inner;
      cur_region->inner = new_region;
    }
  else
    {
      /* No region is open: this becomes a new root-level region.  */
      new_region->next_peer = cfun->eh->region_tree;
      cfun->eh->region_tree = new_region;
    }
  cfun->eh->cur_region = new_region;

  /* Create a note marking the start of this region.  */
  new_region->region_number = ++cfun->eh->last_region_number;
  note = emit_note (NULL, NOTE_INSN_EH_REGION_BEG);
  NOTE_EH_HANDLER (note) = new_region->region_number;
}
667
/* Common code to end a region.  Returns the region just ended.  */

static struct eh_region *
expand_eh_region_end ()
{
  struct eh_region *cur_region = cfun->eh->cur_region;
  rtx note;

  /* Create a note marking the end of this region.  */
  note = emit_note (NULL, NOTE_INSN_EH_REGION_END);
  NOTE_EH_HANDLER (note) = cur_region->region_number;

  /* Pop.  */
  cfun->eh->cur_region = cur_region->outer;

  return cur_region;
}
685
52a11cbf
RH
/* End an exception handling region for a cleanup.  HANDLER is an
   expression to expand for the cleanup.  */

void
expand_eh_region_end_cleanup (handler)
     tree handler;
{
  struct eh_region *region;
  rtx around_label;
  rtx data_save[2];

  if (! doing_eh (0))
    return;

  region = expand_eh_region_end ();
  region->type = ERT_CLEANUP;
  region->label = gen_label_rtx ();
  region->u.cleanup.exp = handler;

  /* Normal (non-exceptional) control flow jumps over the cleanup
     body that follows.  */
  around_label = gen_label_rtx ();
  emit_jump (around_label);

  emit_label (region->label);

  if (protect_cleanup_actions)
    expand_eh_region_start ();

  /* In case this cleanup involves an inline destructor with a try block in
     it, we need to save the EH return data registers around it.  */
  data_save[0] = gen_reg_rtx (Pmode);
  emit_move_insn (data_save[0], get_exception_pointer ());
  data_save[1] = gen_reg_rtx (word_mode);
  emit_move_insn (data_save[1], get_exception_filter ());

  /* Expand the cleanup expression itself.  */
  expand_expr (handler, const0_rtx, VOIDmode, 0);

  emit_move_insn (cfun->eh->exc_ptr, data_save[0]);
  emit_move_insn (cfun->eh->filter, data_save[1]);

  if (protect_cleanup_actions)
    expand_eh_region_end_must_not_throw (protect_cleanup_actions);

  /* We need any stack adjustment complete before the around_label.  */
  do_pending_stack_adjust ();

  /* We delay the generation of the _Unwind_Resume until we generate
     landing pads.  We emit a marker here so as to get good control
     flow data in the meantime.  */
  region->resume
    = emit_jump_insn (gen_rtx_RESX (VOIDmode, region->region_number));
  emit_barrier ();

  emit_label (around_label);
}
740
52a11cbf
RH
/* End an exception handling region for a try block, and prepares
   for subsequent calls to expand_start_catch.  */

void
expand_start_all_catch ()
{
  struct eh_region *region;

  if (! doing_eh (1))
    return;

  region = expand_eh_region_end ();
  region->type = ERT_TRY;
  region->u.try.prev_try = cfun->eh->try_region;
  region->u.try.continue_label = gen_label_rtx ();

  /* Push this try block; subsequent catch clauses attach to it.  */
  cfun->eh->try_region = region;

  /* Fall-through control flow skips over the handlers.  */
  emit_jump (region->u.try.continue_label);
}
9a0d1e1b 761
52a11cbf
RH
/* Begin a catch clause.  TYPE is the type caught, or null if this is
   a catch-all clause.  */

void
expand_start_catch (type)
     tree type;
{
  struct eh_region *t, *c, *l;

  if (! doing_eh (0))
    return;

  if (type)
    add_type_for_runtime (type);
  expand_eh_region_start ();

  /* The region just opened becomes the catch region.  */
  t = cfun->eh->try_region;
  c = cfun->eh->cur_region;
  c->type = ERT_CATCH;
  c->u.catch.type = type;
  c->label = gen_label_rtx ();

  /* Append this handler to the end of the try block's catch list.  */
  l = t->u.try.last_catch;
  c->u.catch.prev_catch = l;
  if (l)
    l->u.catch.next_catch = c;
  else
    t->u.try.catch = c;
  t->u.try.last_catch = c;

  emit_label (c->label);
}
794
52a11cbf 795/* End a catch clause. Control will resume after the try/catch block. */
9a0d1e1b 796
52a11cbf
RH
797void
798expand_end_catch ()
9a0d1e1b 799{
52a11cbf
RH
800 struct eh_region *try_region, *catch_region;
801
802 if (! doing_eh (0))
803 return;
804
805 catch_region = expand_eh_region_end ();
806 try_region = cfun->eh->try_region;
807
808 emit_jump (try_region->u.try.continue_label);
9a0d1e1b
AM
809}
810
52a11cbf 811/* End a sequence of catch handlers for a try block. */
9a0d1e1b 812
52a11cbf
RH
813void
814expand_end_all_catch ()
9a0d1e1b 815{
52a11cbf
RH
816 struct eh_region *try_region;
817
818 if (! doing_eh (0))
819 return;
820
821 try_region = cfun->eh->try_region;
822 cfun->eh->try_region = try_region->u.try.prev_try;
823
824 emit_label (try_region->u.try.continue_label);
9a0d1e1b
AM
825}
826
52a11cbf
RH
/* End an exception region for an exception type filter.  ALLOWED is a
   TREE_LIST of types to be matched by the runtime.  FAILURE is an
   expression to invoke if a mismatch occurs.  */

void
expand_eh_region_end_allowed (allowed, failure)
     tree allowed, failure;
{
  struct eh_region *region;
  rtx around_label;

  if (! doing_eh (0))
    return;

  region = expand_eh_region_end ();
  region->type = ERT_ALLOWED_EXCEPTIONS;
  region->u.allowed.type_list = allowed;
  region->label = gen_label_rtx ();

  /* Register each allowed type with the runtime type table.  */
  for (; allowed ; allowed = TREE_CHAIN (allowed))
    add_type_for_runtime (TREE_VALUE (allowed));

  /* We must emit the call to FAILURE here, so that if this function
     throws a different exception, that it will be processed by the
     correct region.  */

  around_label = gen_label_rtx ();
  emit_jump (around_label);

  emit_label (region->label);
  expand_expr (failure, const0_rtx, VOIDmode, EXPAND_NORMAL);

  emit_label (around_label);
}
861
52a11cbf
RH
/* End an exception region for a must-not-throw filter.  FAILURE is an
   expression invoke if an uncaught exception propagates this far.

   This is conceptually identical to expand_eh_region_end_allowed with
   an empty allowed list (if you passed "std::terminate" instead of
   "__cxa_call_unexpected"), but they are represented differently in
   the C++ LSDA.  */

void
expand_eh_region_end_must_not_throw (failure)
     tree failure;
{
  struct eh_region *region;
  rtx around_label;

  if (! doing_eh (0))
    return;

  region = expand_eh_region_end ();
  region->type = ERT_MUST_NOT_THROW;
  region->label = gen_label_rtx ();

  /* We must emit the call to FAILURE here, so that if this function
     throws a different exception, that it will be processed by the
     correct region.  */

  around_label = gen_label_rtx ();
  emit_jump (around_label);

  emit_label (region->label);
  expand_expr (failure, const0_rtx, VOIDmode, EXPAND_NORMAL);

  emit_label (around_label);
}
896
52a11cbf
RH
897/* End an exception region for a throw. No handling goes on here,
898 but it's the easiest way for the front-end to indicate what type
899 is being thrown. */
6814a8a0 900
52a11cbf
RH
901void
902expand_eh_region_end_throw (type)
903 tree type;
e6cfb550 904{
52a11cbf
RH
905 struct eh_region *region;
906
907 if (! doing_eh (0))
908 return;
909
910 region = expand_eh_region_end ();
911 region->type = ERT_THROW;
912 region->u.throw.type = type;
e6cfb550
AM
913}
914
52a11cbf
RH
915/* End a fixup region. Within this region the cleanups for the immediately
916 enclosing region are _not_ run. This is used for goto cleanup to avoid
917 destroying an object twice.
12670d88 918
52a11cbf
RH
919 This would be an extraordinarily simple prospect, were it not for the
920 fact that we don't actually know what the immediately enclosing region
921 is. This surprising fact is because expand_cleanups is currently
922 generating a sequence that it will insert somewhere else. We collect
923 the proper notion of "enclosing" in convert_from_eh_region_ranges. */
4956d07c 924
52a11cbf
RH
925void
926expand_eh_region_end_fixup (handler)
927 tree handler;
4956d07c 928{
52a11cbf
RH
929 struct eh_region *fixup;
930
931 if (! doing_eh (0))
932 return;
933
934 fixup = expand_eh_region_end ();
935 fixup->type = ERT_FIXUP;
936 fixup->u.fixup.cleanup_exp = handler;
4956d07c
MS
937}
938
47c84870 939/* Return an rtl expression for a pointer to the exception object
52a11cbf 940 within a handler. */
4956d07c
MS
941
942rtx
52a11cbf 943get_exception_pointer ()
4956d07c 944{
52a11cbf
RH
945 rtx exc_ptr = cfun->eh->exc_ptr;
946 if (! exc_ptr)
947 {
948 exc_ptr = gen_reg_rtx (Pmode);
949 cfun->eh->exc_ptr = exc_ptr;
950 }
951 return exc_ptr;
952}
4956d07c 953
47c84870
JM
954/* Return an rtl expression for the exception dispatch filter
955 within a handler. */
956
957static rtx
958get_exception_filter ()
959{
960 rtx filter = cfun->eh->filter;
961 if (! filter)
962 {
041c9d5a 963 filter = gen_reg_rtx (word_mode);
47c84870
JM
964 cfun->eh->filter = filter;
965 }
966 return filter;
967}
52a11cbf
RH
968\f
/* Begin a region that will contain entries created with
   add_partial_entry.  */

void
begin_protect_partials ()
{
  /* Push room for a new list.  */
  cfun->eh->protect_list
    = tree_cons (NULL_TREE, NULL_TREE, cfun->eh->protect_list);
}
979
27a36778
MS
/* Start a new exception region for a region of code that has a
   cleanup action and push the HANDLER for the region onto
   protect_list.  All of the regions created with add_partial_entry
   will be ended when end_protect_partials is invoked.  */

void
add_partial_entry (handler)
     tree handler;
{
  expand_eh_region_start ();

  /* ??? This comment was old before the most recent rewrite.  We
     really ought to fix the callers at some point.  */
  /* For backwards compatibility, we allow callers to omit calls to
     begin_protect_partials for the outermost region.  So, we must
     explicitly do so here.  */
  if (!cfun->eh->protect_list)
    begin_protect_partials ();

  /* Add this entry to the front of the list.  */
  TREE_VALUE (cfun->eh->protect_list)
    = tree_cons (NULL_TREE, handler, TREE_VALUE (cfun->eh->protect_list));
}
1003
52a11cbf 1004/* End all the pending exception regions on protect_list. */
27a36778 1005
52a11cbf
RH
1006void
1007end_protect_partials ()
27a36778 1008{
52a11cbf 1009 tree t;
638e6ebc 1010
52a11cbf
RH
1011 /* ??? This comment was old before the most recent rewrite. We
1012 really ought to fix the callers at some point. */
1013 /* For backwards compatibility, we allow callers to omit the call to
1014 begin_protect_partials for the outermost region. So,
1015 PROTECT_LIST may be NULL. */
1016 if (!cfun->eh->protect_list)
1017 return;
bb727b5a 1018
52a11cbf
RH
1019 /* Pop the topmost entry. */
1020 t = TREE_VALUE (cfun->eh->protect_list);
1021 cfun->eh->protect_list = TREE_CHAIN (cfun->eh->protect_list);
bb727b5a 1022
52a11cbf
RH
1023 /* End all the exception regions. */
1024 for (; t; t = TREE_CHAIN (t))
1025 expand_eh_region_end_cleanup (TREE_VALUE (t));
154bba13
TT
1026}
1027
52a11cbf
RH
1028\f
1029/* This section is for the exception handling specific optimization pass. */
154bba13 1030
52a11cbf
RH
1031/* Random access the exception region tree. It's just as simple to
1032 collect the regions this way as in expand_eh_region_start, but
1033 without having to realloc memory. */
154bba13 1034
52a11cbf
RH
1035static void
1036collect_eh_region_array ()
154bba13 1037{
52a11cbf 1038 struct eh_region **array, *i;
154bba13 1039
52a11cbf
RH
1040 i = cfun->eh->region_tree;
1041 if (! i)
1042 return;
154bba13 1043
52a11cbf
RH
1044 array = xcalloc (cfun->eh->last_region_number + 1, sizeof (*array));
1045 cfun->eh->region_array = array;
154bba13 1046
52a11cbf
RH
1047 while (1)
1048 {
1049 array[i->region_number] = i;
1050
1051 /* If there are sub-regions, process them. */
1052 if (i->inner)
1053 i = i->inner;
1054 /* If there are peers, process them. */
1055 else if (i->next_peer)
1056 i = i->next_peer;
1057 /* Otherwise, step back up the tree to the next peer. */
1058 else
1059 {
1060 do {
1061 i = i->outer;
1062 if (i == NULL)
1063 return;
1064 } while (i->next_peer == NULL);
1065 i = i->next_peer;
1066 }
1067 }
27a36778
MS
1068}
1069
52a11cbf
RH
1070static void
1071resolve_fixup_regions ()
27a36778 1072{
52a11cbf 1073 int i, j, n = cfun->eh->last_region_number;
27a36778 1074
52a11cbf
RH
1075 for (i = 1; i <= n; ++i)
1076 {
1077 struct eh_region *fixup = cfun->eh->region_array[i];
1078 struct eh_region *cleanup;
27a36778 1079
52a11cbf
RH
1080 if (! fixup || fixup->type != ERT_FIXUP)
1081 continue;
27a36778 1082
52a11cbf
RH
1083 for (j = 1; j <= n; ++j)
1084 {
1085 cleanup = cfun->eh->region_array[j];
1086 if (cleanup->type == ERT_CLEANUP
1087 && cleanup->u.cleanup.exp == fixup->u.fixup.cleanup_exp)
1088 break;
1089 }
1090 if (j > n)
1091 abort ();
27a36778 1092
52a11cbf
RH
1093 fixup->u.fixup.real_region = cleanup->outer;
1094 }
27a36778 1095}
27a36778 1096
52a11cbf
RH
1097/* Now that we've discovered what region actually encloses a fixup,
1098 we can shuffle pointers and remove them from the tree. */
27a36778
MS
1099
1100static void
52a11cbf 1101remove_fixup_regions ()
27a36778 1102{
52a11cbf 1103 int i;
45053eaf
RH
1104 rtx insn, note;
1105 struct eh_region *fixup;
27a36778 1106
45053eaf
RH
1107 /* Walk the insn chain and adjust the REG_EH_REGION numbers
1108 for instructions referencing fixup regions. This is only
1109 strictly necessary for fixup regions with no parent, but
1110 doesn't hurt to do it for all regions. */
1111 for (insn = get_insns(); insn ; insn = NEXT_INSN (insn))
1112 if (INSN_P (insn)
1113 && (note = find_reg_note (insn, REG_EH_REGION, NULL))
1114 && INTVAL (XEXP (note, 0)) > 0
1115 && (fixup = cfun->eh->region_array[INTVAL (XEXP (note, 0))])
1116 && fixup->type == ERT_FIXUP)
1117 {
1118 if (fixup->u.fixup.real_region)
1119 XEXP (note, 1) = GEN_INT (fixup->u.fixup.real_region->region_number);
1120 else
1121 remove_note (insn, note);
1122 }
1123
1124 /* Remove the fixup regions from the tree. */
52a11cbf
RH
1125 for (i = cfun->eh->last_region_number; i > 0; --i)
1126 {
45053eaf 1127 fixup = cfun->eh->region_array[i];
52a11cbf
RH
1128 if (! fixup)
1129 continue;
27a36778 1130
52a11cbf
RH
1131 /* Allow GC to maybe free some memory. */
1132 if (fixup->type == ERT_CLEANUP)
1133 fixup->u.cleanup.exp = NULL_TREE;
27a36778 1134
52a11cbf
RH
1135 if (fixup->type != ERT_FIXUP)
1136 continue;
27a36778 1137
52a11cbf
RH
1138 if (fixup->inner)
1139 {
1140 struct eh_region *parent, *p, **pp;
27a36778 1141
52a11cbf 1142 parent = fixup->u.fixup.real_region;
27a36778 1143
52a11cbf
RH
1144 /* Fix up the children's parent pointers; find the end of
1145 the list. */
1146 for (p = fixup->inner; ; p = p->next_peer)
1147 {
1148 p->outer = parent;
1149 if (! p->next_peer)
1150 break;
1151 }
27a36778 1152
52a11cbf
RH
1153 /* In the tree of cleanups, only outer-inner ordering matters.
1154 So link the children back in anywhere at the correct level. */
1155 if (parent)
1156 pp = &parent->inner;
1157 else
1158 pp = &cfun->eh->region_tree;
1159 p->next_peer = *pp;
1160 *pp = fixup->inner;
1161 fixup->inner = NULL;
1162 }
27a36778 1163
52a11cbf
RH
1164 remove_eh_handler (fixup);
1165 }
27a36778
MS
1166}
1167
52a11cbf
RH
1168/* Turn NOTE_INSN_EH_REGION notes into REG_EH_REGION notes for each
1169 can_throw instruction in the region. */
27a36778
MS
1170
1171static void
52a11cbf
RH
1172convert_from_eh_region_ranges_1 (pinsns, orig_sp, cur)
1173 rtx *pinsns;
1174 int *orig_sp;
1175 int cur;
27a36778 1176{
52a11cbf
RH
1177 int *sp = orig_sp;
1178 rtx insn, next;
27a36778 1179
52a11cbf
RH
1180 for (insn = *pinsns; insn ; insn = next)
1181 {
1182 next = NEXT_INSN (insn);
1183 if (GET_CODE (insn) == NOTE)
1184 {
1185 int kind = NOTE_LINE_NUMBER (insn);
1186 if (kind == NOTE_INSN_EH_REGION_BEG
1187 || kind == NOTE_INSN_EH_REGION_END)
1188 {
1189 if (kind == NOTE_INSN_EH_REGION_BEG)
1190 {
1191 struct eh_region *r;
27a36778 1192
52a11cbf
RH
1193 *sp++ = cur;
1194 cur = NOTE_EH_HANDLER (insn);
27a36778 1195
52a11cbf
RH
1196 r = cfun->eh->region_array[cur];
1197 if (r->type == ERT_FIXUP)
1198 {
1199 r = r->u.fixup.real_region;
1200 cur = r ? r->region_number : 0;
1201 }
1202 else if (r->type == ERT_CATCH)
1203 {
1204 r = r->outer;
1205 cur = r ? r->region_number : 0;
1206 }
1207 }
1208 else
1209 cur = *--sp;
1210
1211 /* Removing the first insn of a CALL_PLACEHOLDER sequence
1212 requires extra care to adjust sequence start. */
1213 if (insn == *pinsns)
1214 *pinsns = next;
1215 remove_insn (insn);
1216 continue;
1217 }
1218 }
1219 else if (INSN_P (insn))
1220 {
1221 if (cur > 0
1222 && ! find_reg_note (insn, REG_EH_REGION, NULL_RTX)
1223 /* Calls can always potentially throw exceptions, unless
1224 they have a REG_EH_REGION note with a value of 0 or less.
1225 Which should be the only possible kind so far. */
1226 && (GET_CODE (insn) == CALL_INSN
1227 /* If we wanted exceptions for non-call insns, then
1228 any may_trap_p instruction could throw. */
1229 || (flag_non_call_exceptions
1230 && may_trap_p (PATTERN (insn)))))
1231 {
1232 REG_NOTES (insn) = alloc_EXPR_LIST (REG_EH_REGION, GEN_INT (cur),
1233 REG_NOTES (insn));
1234 }
27a36778 1235
52a11cbf
RH
1236 if (GET_CODE (insn) == CALL_INSN
1237 && GET_CODE (PATTERN (insn)) == CALL_PLACEHOLDER)
1238 {
1239 convert_from_eh_region_ranges_1 (&XEXP (PATTERN (insn), 0),
1240 sp, cur);
1241 convert_from_eh_region_ranges_1 (&XEXP (PATTERN (insn), 1),
1242 sp, cur);
1243 convert_from_eh_region_ranges_1 (&XEXP (PATTERN (insn), 2),
1244 sp, cur);
1245 }
1246 }
1247 }
27a36778 1248
52a11cbf
RH
1249 if (sp != orig_sp)
1250 abort ();
1251}
27a36778 1252
52a11cbf
RH
1253void
1254convert_from_eh_region_ranges ()
1255{
1256 int *stack;
1257 rtx insns;
27a36778 1258
52a11cbf
RH
1259 collect_eh_region_array ();
1260 resolve_fixup_regions ();
27a36778 1261
52a11cbf
RH
1262 stack = xmalloc (sizeof (int) * (cfun->eh->last_region_number + 1));
1263 insns = get_insns ();
1264 convert_from_eh_region_ranges_1 (&insns, stack, 0);
1265 free (stack);
27a36778 1266
52a11cbf 1267 remove_fixup_regions ();
27a36778
MS
1268}
1269
52a11cbf
RH
1270void
1271find_exception_handler_labels ()
27a36778 1272{
52a11cbf
RH
1273 rtx list = NULL_RTX;
1274 int i;
27a36778 1275
52a11cbf 1276 free_EXPR_LIST_list (&exception_handler_labels);
27a36778 1277
52a11cbf
RH
1278 if (cfun->eh->region_tree == NULL)
1279 return;
27a36778 1280
52a11cbf
RH
1281 for (i = cfun->eh->last_region_number; i > 0; --i)
1282 {
1283 struct eh_region *region = cfun->eh->region_array[i];
1284 rtx lab;
27a36778 1285
52a11cbf
RH
1286 if (! region)
1287 continue;
1288 if (cfun->eh->built_landing_pads)
1289 lab = region->landing_pad;
1290 else
1291 lab = region->label;
27a36778 1292
52a11cbf
RH
1293 if (lab)
1294 list = alloc_EXPR_LIST (0, lab, list);
27a36778
MS
1295 }
1296
52a11cbf
RH
1297 /* For sjlj exceptions, need the return label to remain live until
1298 after landing pad generation. */
1299 if (USING_SJLJ_EXCEPTIONS && ! cfun->eh->built_landing_pads)
1300 list = alloc_EXPR_LIST (0, return_label, list);
27a36778 1301
52a11cbf 1302 exception_handler_labels = list;
27a36778
MS
1303}
1304
52a11cbf
RH
1305\f
1306static struct eh_region *
1307duplicate_eh_region_1 (o, map)
1308 struct eh_region *o;
1309 struct inline_remap *map;
4956d07c 1310{
52a11cbf
RH
1311 struct eh_region *n
1312 = (struct eh_region *) xcalloc (1, sizeof (struct eh_region));
4956d07c 1313
52a11cbf
RH
1314 n->region_number = o->region_number + cfun->eh->last_region_number;
1315 n->type = o->type;
4956d07c 1316
52a11cbf
RH
1317 switch (n->type)
1318 {
1319 case ERT_CLEANUP:
1320 case ERT_MUST_NOT_THROW:
1321 break;
27a36778 1322
52a11cbf
RH
1323 case ERT_TRY:
1324 if (o->u.try.continue_label)
1325 n->u.try.continue_label
1326 = get_label_from_map (map,
1327 CODE_LABEL_NUMBER (o->u.try.continue_label));
1328 break;
27a36778 1329
52a11cbf
RH
1330 case ERT_CATCH:
1331 n->u.catch.type = o->u.catch.type;
1332 break;
27a36778 1333
52a11cbf
RH
1334 case ERT_ALLOWED_EXCEPTIONS:
1335 n->u.allowed.type_list = o->u.allowed.type_list;
1336 break;
1337
1338 case ERT_THROW:
1339 n->u.throw.type = o->u.throw.type;
1340
1341 default:
1342 abort ();
1343 }
1344
1345 if (o->label)
1346 n->label = get_label_from_map (map, CODE_LABEL_NUMBER (o->label));
47c84870 1347 if (o->resume)
e7b9b18e 1348 {
47c84870
JM
1349 n->resume = map->insn_map[INSN_UID (o->resume)];
1350 if (n->resume == NULL)
52a11cbf 1351 abort ();
27a36778 1352 }
4956d07c 1353
52a11cbf 1354 return n;
4956d07c
MS
1355}
1356
52a11cbf
RH
1357static void
1358duplicate_eh_region_2 (o, n_array)
1359 struct eh_region *o;
1360 struct eh_region **n_array;
4c581243 1361{
52a11cbf 1362 struct eh_region *n = n_array[o->region_number];
4c581243 1363
52a11cbf
RH
1364 switch (n->type)
1365 {
1366 case ERT_TRY:
1367 n->u.try.catch = n_array[o->u.try.catch->region_number];
1368 n->u.try.last_catch = n_array[o->u.try.last_catch->region_number];
1369 break;
12670d88 1370
52a11cbf
RH
1371 case ERT_CATCH:
1372 if (o->u.catch.next_catch)
1373 n->u.catch.next_catch = n_array[o->u.catch.next_catch->region_number];
1374 if (o->u.catch.prev_catch)
1375 n->u.catch.prev_catch = n_array[o->u.catch.prev_catch->region_number];
1376 break;
12670d88 1377
52a11cbf
RH
1378 default:
1379 break;
1380 }
4956d07c 1381
52a11cbf
RH
1382 if (o->outer)
1383 n->outer = n_array[o->outer->region_number];
1384 if (o->inner)
1385 n->inner = n_array[o->inner->region_number];
1386 if (o->next_peer)
1387 n->next_peer = n_array[o->next_peer->region_number];
1388}
1389
1390int
1391duplicate_eh_regions (ifun, map)
1392 struct function *ifun;
1393 struct inline_remap *map;
4956d07c 1394{
52a11cbf
RH
1395 int ifun_last_region_number = ifun->eh->last_region_number;
1396 struct eh_region **n_array, *root, *cur;
1397 int i;
4956d07c 1398
52a11cbf
RH
1399 if (ifun_last_region_number == 0)
1400 return 0;
4956d07c 1401
52a11cbf 1402 n_array = xcalloc (ifun_last_region_number + 1, sizeof (*n_array));
4956d07c 1403
52a11cbf 1404 for (i = 1; i <= ifun_last_region_number; ++i)
27a36778 1405 {
52a11cbf
RH
1406 cur = ifun->eh->region_array[i];
1407 if (!cur || cur->region_number != i)
1408 continue;
1409 n_array[i] = duplicate_eh_region_1 (cur, map);
27a36778 1410 }
52a11cbf 1411 for (i = 1; i <= ifun_last_region_number; ++i)
27a36778 1412 {
52a11cbf
RH
1413 cur = ifun->eh->region_array[i];
1414 if (!cur || cur->region_number != i)
1415 continue;
1416 duplicate_eh_region_2 (cur, n_array);
1417 }
27a36778 1418
52a11cbf
RH
1419 root = n_array[ifun->eh->region_tree->region_number];
1420 cur = cfun->eh->cur_region;
1421 if (cur)
1422 {
1423 struct eh_region *p = cur->inner;
1424 if (p)
1425 {
1426 while (p->next_peer)
1427 p = p->next_peer;
1428 p->next_peer = root;
1429 }
1430 else
1431 cur->inner = root;
27a36778 1432
52a11cbf
RH
1433 for (i = 1; i <= ifun_last_region_number; ++i)
1434 if (n_array[i]->outer == NULL)
1435 n_array[i]->outer = cur;
1436 }
1437 else
1438 {
1439 struct eh_region *p = cfun->eh->region_tree;
1440 if (p)
1441 {
1442 while (p->next_peer)
1443 p = p->next_peer;
1444 p->next_peer = root;
1445 }
1446 else
1447 cfun->eh->region_tree = root;
27a36778 1448 }
1e4ceb6f 1449
52a11cbf 1450 free (n_array);
1e4ceb6f 1451
52a11cbf
RH
1452 i = cfun->eh->last_region_number;
1453 cfun->eh->last_region_number = i + ifun_last_region_number;
1454 return i;
4956d07c
MS
1455}
1456
52a11cbf
RH
1457\f
1458/* ??? Move from tree.c to tree.h. */
1459#define TYPE_HASH(TYPE) ((HOST_WIDE_INT) (TYPE) & 0777777)
9762d48d 1460
52a11cbf
RH
1461static int
1462t2r_eq (pentry, pdata)
1463 const PTR pentry;
1464 const PTR pdata;
9762d48d 1465{
52a11cbf
RH
1466 tree entry = (tree) pentry;
1467 tree data = (tree) pdata;
9762d48d 1468
52a11cbf 1469 return TREE_PURPOSE (entry) == data;
9762d48d
JM
1470}
1471
52a11cbf
RH
1472static hashval_t
1473t2r_hash (pentry)
1474 const PTR pentry;
1475{
1476 tree entry = (tree) pentry;
1477 return TYPE_HASH (TREE_PURPOSE (entry));
1478}
9762d48d 1479
52a11cbf
RH
1480static int
1481t2r_mark_1 (slot, data)
1482 PTR *slot;
1483 PTR data ATTRIBUTE_UNUSED;
9762d48d 1484{
52a11cbf
RH
1485 tree contents = (tree) *slot;
1486 ggc_mark_tree (contents);
1487 return 1;
1488}
9762d48d 1489
52a11cbf
RH
1490static void
1491t2r_mark (addr)
1492 PTR addr;
1493{
1494 htab_traverse (*(htab_t *)addr, t2r_mark_1, NULL);
1495}
9762d48d 1496
52a11cbf
RH
1497static void
1498add_type_for_runtime (type)
1499 tree type;
1500{
1501 tree *slot;
9762d48d 1502
52a11cbf
RH
1503 slot = (tree *) htab_find_slot_with_hash (type_to_runtime_map, type,
1504 TYPE_HASH (type), INSERT);
1505 if (*slot == NULL)
1506 {
1507 tree runtime = (*lang_eh_runtime_type) (type);
1508 *slot = tree_cons (type, runtime, NULL_TREE);
1509 }
1510}
1511
1512static tree
1513lookup_type_for_runtime (type)
1514 tree type;
1515{
1516 tree *slot;
b37f006b 1517
52a11cbf
RH
1518 slot = (tree *) htab_find_slot_with_hash (type_to_runtime_map, type,
1519 TYPE_HASH (type), NO_INSERT);
b37f006b 1520
52a11cbf
RH
1521 /* We should have always inserrted the data earlier. */
1522 return TREE_VALUE (*slot);
1523}
9762d48d 1524
52a11cbf
RH
1525\f
1526/* Represent an entry in @TTypes for either catch actions
1527 or exception filter actions. */
1528struct ttypes_filter
1529{
1530 tree t;
1531 int filter;
1532};
b37f006b 1533
52a11cbf
RH
1534/* Compare ENTRY (a ttypes_filter entry in the hash table) with DATA
1535 (a tree) for a @TTypes type node we are thinking about adding. */
b37f006b 1536
52a11cbf
RH
1537static int
1538ttypes_filter_eq (pentry, pdata)
1539 const PTR pentry;
1540 const PTR pdata;
1541{
1542 const struct ttypes_filter *entry = (const struct ttypes_filter *) pentry;
1543 tree data = (tree) pdata;
b37f006b 1544
52a11cbf 1545 return entry->t == data;
9762d48d
JM
1546}
1547
52a11cbf
RH
1548static hashval_t
1549ttypes_filter_hash (pentry)
1550 const PTR pentry;
1551{
1552 const struct ttypes_filter *entry = (const struct ttypes_filter *) pentry;
1553 return TYPE_HASH (entry->t);
1554}
4956d07c 1555
52a11cbf
RH
1556/* Compare ENTRY with DATA (both struct ttypes_filter) for a @TTypes
1557 exception specification list we are thinking about adding. */
1558/* ??? Currently we use the type lists in the order given. Someone
1559 should put these in some canonical order. */
1560
1561static int
1562ehspec_filter_eq (pentry, pdata)
1563 const PTR pentry;
1564 const PTR pdata;
4956d07c 1565{
52a11cbf
RH
1566 const struct ttypes_filter *entry = (const struct ttypes_filter *) pentry;
1567 const struct ttypes_filter *data = (const struct ttypes_filter *) pdata;
1568
1569 return type_list_equal (entry->t, data->t);
4956d07c
MS
1570}
1571
52a11cbf 1572/* Hash function for exception specification lists. */
4956d07c 1573
52a11cbf
RH
1574static hashval_t
1575ehspec_filter_hash (pentry)
1576 const PTR pentry;
4956d07c 1577{
52a11cbf
RH
1578 const struct ttypes_filter *entry = (const struct ttypes_filter *) pentry;
1579 hashval_t h = 0;
1580 tree list;
1581
1582 for (list = entry->t; list ; list = TREE_CHAIN (list))
1583 h = (h << 5) + (h >> 27) + TYPE_HASH (TREE_VALUE (list));
1584 return h;
4956d07c
MS
1585}
1586
52a11cbf
RH
1587/* Add TYPE to cfun->eh->ttype_data, using TYPES_HASH to speed
1588 up the search. Return the filter value to be used. */
4956d07c 1589
52a11cbf
RH
1590static int
1591add_ttypes_entry (ttypes_hash, type)
1592 htab_t ttypes_hash;
1593 tree type;
4956d07c 1594{
52a11cbf 1595 struct ttypes_filter **slot, *n;
4956d07c 1596
52a11cbf
RH
1597 slot = (struct ttypes_filter **)
1598 htab_find_slot_with_hash (ttypes_hash, type, TYPE_HASH (type), INSERT);
1599
1600 if ((n = *slot) == NULL)
4956d07c 1601 {
52a11cbf 1602 /* Filter value is a 1 based table index. */
12670d88 1603
52a11cbf
RH
1604 n = (struct ttypes_filter *) xmalloc (sizeof (*n));
1605 n->t = type;
1606 n->filter = VARRAY_ACTIVE_SIZE (cfun->eh->ttype_data) + 1;
1607 *slot = n;
1608
1609 VARRAY_PUSH_TREE (cfun->eh->ttype_data, type);
4956d07c 1610 }
52a11cbf
RH
1611
1612 return n->filter;
4956d07c
MS
1613}
1614
52a11cbf
RH
1615/* Add LIST to cfun->eh->ehspec_data, using EHSPEC_HASH and TYPES_HASH
1616 to speed up the search. Return the filter value to be used. */
1617
1618static int
1619add_ehspec_entry (ehspec_hash, ttypes_hash, list)
1620 htab_t ehspec_hash;
1621 htab_t ttypes_hash;
1622 tree list;
12670d88 1623{
52a11cbf
RH
1624 struct ttypes_filter **slot, *n;
1625 struct ttypes_filter dummy;
12670d88 1626
52a11cbf
RH
1627 dummy.t = list;
1628 slot = (struct ttypes_filter **)
1629 htab_find_slot (ehspec_hash, &dummy, INSERT);
1630
1631 if ((n = *slot) == NULL)
1632 {
1633 /* Filter value is a -1 based byte index into a uleb128 buffer. */
1634
1635 n = (struct ttypes_filter *) xmalloc (sizeof (*n));
1636 n->t = list;
1637 n->filter = -(VARRAY_ACTIVE_SIZE (cfun->eh->ehspec_data) + 1);
1638 *slot = n;
1639
1640 /* Look up each type in the list and encode its filter
1641 value as a uleb128. Terminate the list with 0. */
1642 for (; list ; list = TREE_CHAIN (list))
1643 push_uleb128 (&cfun->eh->ehspec_data,
1644 add_ttypes_entry (ttypes_hash, TREE_VALUE (list)));
1645 VARRAY_PUSH_UCHAR (cfun->eh->ehspec_data, 0);
1646 }
1647
1648 return n->filter;
12670d88
RK
1649}
1650
52a11cbf
RH
1651/* Generate the action filter values to be used for CATCH and
1652 ALLOWED_EXCEPTIONS regions. When using dwarf2 exception regions,
1653 we use lots of landing pads, and so every type or list can share
1654 the same filter value, which saves table space. */
1655
1656static void
1657assign_filter_values ()
9a0d1e1b 1658{
52a11cbf
RH
1659 int i;
1660 htab_t ttypes, ehspec;
9a9deafc 1661
52a11cbf
RH
1662 VARRAY_TREE_INIT (cfun->eh->ttype_data, 16, "ttype_data");
1663 VARRAY_UCHAR_INIT (cfun->eh->ehspec_data, 64, "ehspec_data");
9a9deafc 1664
52a11cbf
RH
1665 ttypes = htab_create (31, ttypes_filter_hash, ttypes_filter_eq, free);
1666 ehspec = htab_create (31, ehspec_filter_hash, ehspec_filter_eq, free);
9a0d1e1b 1667
52a11cbf
RH
1668 for (i = cfun->eh->last_region_number; i > 0; --i)
1669 {
1670 struct eh_region *r = cfun->eh->region_array[i];
9a0d1e1b 1671
52a11cbf
RH
1672 /* Mind we don't process a region more than once. */
1673 if (!r || r->region_number != i)
1674 continue;
9a0d1e1b 1675
52a11cbf
RH
1676 switch (r->type)
1677 {
1678 case ERT_CATCH:
1679 r->u.catch.filter = add_ttypes_entry (ttypes, r->u.catch.type);
1680 break;
bf71cd2e 1681
52a11cbf
RH
1682 case ERT_ALLOWED_EXCEPTIONS:
1683 r->u.allowed.filter
1684 = add_ehspec_entry (ehspec, ttypes, r->u.allowed.type_list);
1685 break;
bf71cd2e 1686
52a11cbf
RH
1687 default:
1688 break;
1689 }
1690 }
1691
1692 htab_delete (ttypes);
1693 htab_delete (ehspec);
1694}
1695
1696static void
1697build_post_landing_pads ()
1698{
1699 int i;
bf71cd2e 1700
52a11cbf 1701 for (i = cfun->eh->last_region_number; i > 0; --i)
bf71cd2e 1702 {
52a11cbf
RH
1703 struct eh_region *region = cfun->eh->region_array[i];
1704 rtx seq;
bf71cd2e 1705
52a11cbf
RH
1706 /* Mind we don't process a region more than once. */
1707 if (!region || region->region_number != i)
1708 continue;
1709
1710 switch (region->type)
987009bf 1711 {
52a11cbf
RH
1712 case ERT_TRY:
1713 /* ??? Collect the set of all non-overlapping catch handlers
1714 all the way up the chain until blocked by a cleanup. */
1715 /* ??? Outer try regions can share landing pads with inner
1716 try regions if the types are completely non-overlapping,
1717 and there are no interveaning cleanups. */
bf71cd2e 1718
52a11cbf 1719 region->post_landing_pad = gen_label_rtx ();
bf71cd2e 1720
52a11cbf 1721 start_sequence ();
bf71cd2e 1722
52a11cbf 1723 emit_label (region->post_landing_pad);
bf71cd2e 1724
52a11cbf
RH
1725 /* ??? It is mighty inconvenient to call back into the
1726 switch statement generation code in expand_end_case.
1727 Rapid prototyping sez a sequence of ifs. */
1728 {
1729 struct eh_region *c;
1730 for (c = region->u.try.catch; c ; c = c->u.catch.next_catch)
1731 {
1732 /* ??? _Unwind_ForcedUnwind wants no match here. */
1733 if (c->u.catch.type == NULL)
1734 emit_jump (c->label);
1735 else
1736 emit_cmp_and_jump_insns (cfun->eh->filter,
1737 GEN_INT (c->u.catch.filter),
1738 EQ, NULL_RTX, word_mode,
1739 0, 0, c->label);
1740 }
1741 }
bf71cd2e 1742
47c84870
JM
1743 /* We delay the generation of the _Unwind_Resume until we generate
1744 landing pads. We emit a marker here so as to get good control
1745 flow data in the meantime. */
1746 region->resume
1747 = emit_jump_insn (gen_rtx_RESX (VOIDmode, region->region_number));
1748 emit_barrier ();
1749
52a11cbf
RH
1750 seq = get_insns ();
1751 end_sequence ();
e6cfb550 1752
47c84870 1753 emit_insns_before (seq, region->u.try.catch->label);
52a11cbf 1754 break;
bf71cd2e 1755
52a11cbf
RH
1756 case ERT_ALLOWED_EXCEPTIONS:
1757 region->post_landing_pad = gen_label_rtx ();
9a0d1e1b 1758
52a11cbf 1759 start_sequence ();
f54a7f6f 1760
52a11cbf 1761 emit_label (region->post_landing_pad);
f54a7f6f 1762
52a11cbf
RH
1763 emit_cmp_and_jump_insns (cfun->eh->filter,
1764 GEN_INT (region->u.allowed.filter),
1765 EQ, NULL_RTX, word_mode, 0, 0,
1766 region->label);
f54a7f6f 1767
47c84870
JM
1768 /* We delay the generation of the _Unwind_Resume until we generate
1769 landing pads. We emit a marker here so as to get good control
1770 flow data in the meantime. */
1771 region->resume
1772 = emit_jump_insn (gen_rtx_RESX (VOIDmode, region->region_number));
1773 emit_barrier ();
1774
52a11cbf
RH
1775 seq = get_insns ();
1776 end_sequence ();
1777
47c84870 1778 emit_insns_before (seq, region->label);
52a11cbf 1779 break;
f54a7f6f 1780
52a11cbf
RH
1781 case ERT_CLEANUP:
1782 case ERT_MUST_NOT_THROW:
1783 region->post_landing_pad = region->label;
1784 break;
1785
1786 case ERT_CATCH:
1787 case ERT_THROW:
1788 /* Nothing to do. */
1789 break;
1790
1791 default:
1792 abort ();
1793 }
1794 }
1795}
1e4ceb6f 1796
47c84870
JM
1797/* Replace RESX patterns with jumps to the next handler if any, or calls to
1798 _Unwind_Resume otherwise. */
1799
1e4ceb6f 1800static void
52a11cbf 1801connect_post_landing_pads ()
1e4ceb6f 1802{
52a11cbf 1803 int i;
76fc91c7 1804
52a11cbf
RH
1805 for (i = cfun->eh->last_region_number; i > 0; --i)
1806 {
1807 struct eh_region *region = cfun->eh->region_array[i];
1808 struct eh_region *outer;
47c84870 1809 rtx seq;
1e4ceb6f 1810
52a11cbf
RH
1811 /* Mind we don't process a region more than once. */
1812 if (!region || region->region_number != i)
1813 continue;
1e4ceb6f 1814
47c84870
JM
1815 /* If there is no RESX, or it has been deleted by flow, there's
1816 nothing to fix up. */
1817 if (! region->resume || INSN_DELETED_P (region->resume))
52a11cbf 1818 continue;
76fc91c7 1819
52a11cbf
RH
1820 /* Search for another landing pad in this function. */
1821 for (outer = region->outer; outer ; outer = outer->outer)
1822 if (outer->post_landing_pad)
1823 break;
1e4ceb6f 1824
52a11cbf 1825 start_sequence ();
12670d88 1826
52a11cbf
RH
1827 if (outer)
1828 emit_jump (outer->post_landing_pad);
1829 else
9555a122 1830 emit_library_call (unwind_resume_libfunc, LCT_THROW,
52a11cbf 1831 VOIDmode, 1, cfun->eh->exc_ptr, Pmode);
4956d07c 1832
52a11cbf
RH
1833 seq = get_insns ();
1834 end_sequence ();
47c84870
JM
1835 emit_insns_before (seq, region->resume);
1836
1837 /* Leave the RESX to be deleted by flow. */
52a11cbf
RH
1838 }
1839}
1840
1841\f
1842static void
1843dw2_build_landing_pads ()
4956d07c 1844{
52a11cbf 1845 int i, j;
4956d07c 1846
52a11cbf
RH
1847 for (i = cfun->eh->last_region_number; i > 0; --i)
1848 {
1849 struct eh_region *region = cfun->eh->region_array[i];
1850 rtx seq;
4956d07c 1851
52a11cbf
RH
1852 /* Mind we don't process a region more than once. */
1853 if (!region || region->region_number != i)
1854 continue;
1418bb67 1855
52a11cbf
RH
1856 if (region->type != ERT_CLEANUP
1857 && region->type != ERT_TRY
1858 && region->type != ERT_ALLOWED_EXCEPTIONS)
1859 continue;
12670d88 1860
52a11cbf 1861 start_sequence ();
4956d07c 1862
52a11cbf
RH
1863 region->landing_pad = gen_label_rtx ();
1864 emit_label (region->landing_pad);
4956d07c 1865
52a11cbf
RH
1866#ifdef HAVE_exception_receiver
1867 if (HAVE_exception_receiver)
1868 emit_insn (gen_exception_receiver ());
1869 else
1870#endif
1871#ifdef HAVE_nonlocal_goto_receiver
1872 if (HAVE_nonlocal_goto_receiver)
1873 emit_insn (gen_nonlocal_goto_receiver ());
1874 else
1875#endif
1876 { /* Nothing */ }
4956d07c 1877
52a11cbf
RH
1878 /* If the eh_return data registers are call-saved, then we
1879 won't have considered them clobbered from the call that
1880 threw. Kill them now. */
1881 for (j = 0; ; ++j)
1882 {
1883 unsigned r = EH_RETURN_DATA_REGNO (j);
1884 if (r == INVALID_REGNUM)
1885 break;
1886 if (! call_used_regs[r])
1887 emit_insn (gen_rtx_CLOBBER (VOIDmode, gen_rtx_REG (Pmode, r)));
1888 }
e701eb4d 1889
52a11cbf
RH
1890 emit_move_insn (cfun->eh->exc_ptr,
1891 gen_rtx_REG (Pmode, EH_RETURN_DATA_REGNO (0)));
1892 emit_move_insn (cfun->eh->filter,
1893 gen_rtx_REG (Pmode, EH_RETURN_DATA_REGNO (1)));
9a0d1e1b 1894
52a11cbf
RH
1895 seq = get_insns ();
1896 end_sequence ();
5816cb14 1897
52a11cbf
RH
1898 emit_insns_before (seq, region->post_landing_pad);
1899 }
4956d07c
MS
1900}
1901
52a11cbf
RH
1902\f
1903struct sjlj_lp_info
1904{
1905 int directly_reachable;
1906 int action_index;
1907 int dispatch_index;
1908 int call_site_index;
1909};
4956d07c 1910
52a11cbf
RH
1911static bool
1912sjlj_find_directly_reachable_regions (lp_info)
1913 struct sjlj_lp_info *lp_info;
4956d07c 1914{
52a11cbf
RH
1915 rtx insn;
1916 bool found_one = false;
4956d07c 1917
52a11cbf
RH
1918 for (insn = get_insns (); insn ; insn = NEXT_INSN (insn))
1919 {
1920 struct eh_region *region;
1921 tree type_thrown;
1922 rtx note;
4956d07c 1923
52a11cbf
RH
1924 if (! INSN_P (insn))
1925 continue;
0d3453df 1926
52a11cbf
RH
1927 note = find_reg_note (insn, REG_EH_REGION, NULL_RTX);
1928 if (!note || INTVAL (XEXP (note, 0)) <= 0)
1929 continue;
5dfa7520 1930
52a11cbf 1931 region = cfun->eh->region_array[INTVAL (XEXP (note, 0))];
5dfa7520 1932
52a11cbf
RH
1933 type_thrown = NULL_TREE;
1934 if (region->type == ERT_THROW)
1935 {
1936 type_thrown = region->u.throw.type;
1937 region = region->outer;
1938 }
12670d88 1939
52a11cbf
RH
1940 /* Find the first containing region that might handle the exception.
1941 That's the landing pad to which we will transfer control. */
1942 for (; region; region = region->outer)
1943 if (reachable_next_level (region, type_thrown, 0) != RNL_NOT_CAUGHT)
1944 break;
4956d07c 1945
52a11cbf
RH
1946 if (region)
1947 {
1948 lp_info[region->region_number].directly_reachable = 1;
1949 found_one = true;
1950 }
1951 }
4956d07c 1952
52a11cbf
RH
1953 return found_one;
1954}
e701eb4d
JM
1955
1956static void
52a11cbf
RH
1957sjlj_assign_call_site_values (dispatch_label, lp_info)
1958 rtx dispatch_label;
1959 struct sjlj_lp_info *lp_info;
e701eb4d 1960{
52a11cbf
RH
1961 htab_t ar_hash;
1962 int i, index;
1963
1964 /* First task: build the action table. */
1965
1966 VARRAY_UCHAR_INIT (cfun->eh->action_record_data, 64, "action_record_data");
1967 ar_hash = htab_create (31, action_record_hash, action_record_eq, free);
1968
1969 for (i = cfun->eh->last_region_number; i > 0; --i)
1970 if (lp_info[i].directly_reachable)
e6cfb550 1971 {
52a11cbf
RH
1972 struct eh_region *r = cfun->eh->region_array[i];
1973 r->landing_pad = dispatch_label;
1974 lp_info[i].action_index = collect_one_action_chain (ar_hash, r);
1975 if (lp_info[i].action_index != -1)
1976 cfun->uses_eh_lsda = 1;
e6cfb550 1977 }
e701eb4d 1978
52a11cbf 1979 htab_delete (ar_hash);
76fc91c7 1980
52a11cbf
RH
1981 /* Next: assign dispatch values. In dwarf2 terms, this would be the
1982 landing pad label for the region. For sjlj though, there is one
1983 common landing pad from which we dispatch to the post-landing pads.
76fc91c7 1984
52a11cbf
RH
1985 A region receives a dispatch index if it is directly reachable
1986 and requires in-function processing. Regions that share post-landing
1987 pads may share dispatch indicies. */
1988 /* ??? Post-landing pad sharing doesn't actually happen at the moment
1989 (see build_post_landing_pads) so we don't bother checking for it. */
4956d07c 1990
52a11cbf
RH
1991 index = 0;
1992 for (i = cfun->eh->last_region_number; i > 0; --i)
1993 if (lp_info[i].directly_reachable
1994 && lp_info[i].action_index >= 0)
1995 lp_info[i].dispatch_index = index++;
76fc91c7 1996
52a11cbf
RH
1997 /* Finally: assign call-site values. If dwarf2 terms, this would be
1998 the region number assigned by convert_to_eh_region_ranges, but
1999 handles no-action and must-not-throw differently. */
76fc91c7 2000
52a11cbf
RH
2001 call_site_base = 1;
2002 for (i = cfun->eh->last_region_number; i > 0; --i)
2003 if (lp_info[i].directly_reachable)
2004 {
2005 int action = lp_info[i].action_index;
2006
2007 /* Map must-not-throw to otherwise unused call-site index 0. */
2008 if (action == -2)
2009 index = 0;
2010 /* Map no-action to otherwise unused call-site index -1. */
2011 else if (action == -1)
2012 index = -1;
2013 /* Otherwise, look it up in the table. */
2014 else
2015 index = add_call_site (GEN_INT (lp_info[i].dispatch_index), action);
2016
2017 lp_info[i].call_site_index = index;
2018 }
4956d07c 2019}
27a36778 2020
52a11cbf
RH
/* Emit, before each throwing insn, a store of that insn's call-site
   index into the SjLj function context, so the runtime dispatcher can
   tell which region was active when an exception was raised.  LP_INFO
   is indexed by region number and supplies the call-site values.
   Consecutive insns sharing a value are covered by a single store.  */

static void
sjlj_mark_call_sites (lp_info)
     struct sjlj_lp_info *lp_info;
{
  /* Last value stored; -2 is never a valid call-site index, so the
     first throwing insn after a label always gets a fresh store.  */
  int last_call_site = -2;
  rtx insn, mem;

  /* Memory reference for the call_site field of the function context.  */
  mem = change_address (cfun->eh->sjlj_fc, TYPE_MODE (integer_type_node),
			plus_constant (XEXP (cfun->eh->sjlj_fc, 0),
				       sjlj_fc_call_site_ofs));

  for (insn = get_insns (); insn ; insn = NEXT_INSN (insn))
    {
      struct eh_region *region;
      int this_call_site;
      rtx note, before, p;

      /* Reset value tracking at extended basic block boundaries.  */
      if (GET_CODE (insn) == CODE_LABEL)
	last_call_site = -2;

      if (! INSN_P (insn))
	continue;

      note = find_reg_note (insn, REG_EH_REGION, NULL_RTX);
      if (!note)
	{
	  /* Calls (and trapping insns) without notes are outside any
	     exception handling region in this function.  Mark them as
	     no action.  */
	  if (GET_CODE (insn) == CALL_INSN
	      || (flag_non_call_exceptions
		  && may_trap_p (PATTERN (insn))))
	    this_call_site = -1;
	  else
	    continue;
	}
      else
	{
	  /* Calls that are known to not throw need not be marked.  */
	  if (INTVAL (XEXP (note, 0)) <= 0)
	    continue;

	  region = cfun->eh->region_array[INTVAL (XEXP (note, 0))];
	  this_call_site = lp_info[region->region_number].call_site_index;
	}

      if (this_call_site == last_call_site)
	continue;

      /* Don't separate a call from its argument loads.  */
      before = insn;
      if (GET_CODE (insn) == CALL_INSN)
	{
	  HARD_REG_SET parm_regs;
	  int nparm_regs;

	  /* Since different machines initialize their parameter registers
	     in different orders, assume nothing.  Collect the set of all
	     parameter registers.  */
	  CLEAR_HARD_REG_SET (parm_regs);
	  nparm_regs = 0;
	  for (p = CALL_INSN_FUNCTION_USAGE (insn); p ; p = XEXP (p, 1))
	    if (GET_CODE (XEXP (p, 0)) == USE
		&& GET_CODE (XEXP (XEXP (p, 0), 0)) == REG)
	      {
		if (REGNO (XEXP (XEXP (p, 0), 0)) >= FIRST_PSEUDO_REGISTER)
		  abort ();

		/* We only care about registers which can hold function
		   arguments.  */
		if (! FUNCTION_ARG_REGNO_P (REGNO (XEXP (XEXP (p, 0), 0))))
		  continue;

		SET_HARD_REG_BIT (parm_regs, REGNO (XEXP (XEXP (p, 0), 0)));
		nparm_regs++;
	      }

	  /* Search backward for the first set of a register in this set.  */
	  while (nparm_regs)
	    {
	      before = PREV_INSN (before);

	      /* Given that we've done no other optimizations yet,
		 the arguments should be immediately available.  */
	      if (GET_CODE (before) == CODE_LABEL)
		abort ();

	      p = single_set (before);
	      if (p && GET_CODE (SET_DEST (p)) == REG
		  && REGNO (SET_DEST (p)) < FIRST_PSEUDO_REGISTER
		  && TEST_HARD_REG_BIT (parm_regs, REGNO (SET_DEST (p))))
		{
		  CLEAR_HARD_REG_BIT (parm_regs, REGNO (SET_DEST (p)));
		  nparm_regs--;
		}
	    }
	}

      /* Emit the store ahead of the argument setup found above, so the
	 call-site value is in place before the insn can throw.  */
      start_sequence ();
      emit_move_insn (mem, GEN_INT (this_call_site));
      p = get_insns ();
      end_sequence ();

      emit_insns_before (p, before);
      last_call_site = this_call_site;
    }
}
4956d07c 2129
52a11cbf
RH
/* Construct the SjLj_Function_Context.  Emits, just after the
   NOTE_INSN_FUNCTION_BEG note: stores of the personality routine and
   (if used) LSDA address into the context, a setjmp of the context's
   jump buffer that branches to DISPATCH_LABEL on the nonzero return,
   and a call registering the context with the runtime unwinder.  */

static void
sjlj_emit_function_enter (dispatch_label)
     rtx dispatch_label;
{
  rtx fn_begin, fc, mem, seq;

  fc = cfun->eh->sjlj_fc;

  start_sequence ();

  /* Record the personality routine in the function context.  */
  mem = change_address (fc, Pmode,
			plus_constant (XEXP (fc, 0), sjlj_fc_personality_ofs));
  emit_move_insn (mem, eh_personality_libfunc);

  /* Record the language-specific data area, or zero if none.  */
  mem = change_address (fc, Pmode,
			plus_constant (XEXP (fc, 0), sjlj_fc_lsda_ofs));
  if (cfun->uses_eh_lsda)
    {
      char buf[20];
      ASM_GENERATE_INTERNAL_LABEL (buf, "LLSDA", sjlj_funcdef_number);
      emit_move_insn (mem, gen_rtx_SYMBOL_REF (Pmode, ggc_strdup (buf)));
    }
  else
    emit_move_insn (mem, const0_rtx);

#ifdef DONT_USE_BUILTIN_SETJMP
  {
    rtx x, note;
    /* Call the target's ordinary setjmp on the jump buffer embedded
       in the function context.  */
    x = emit_library_call_value (setjmp_libfunc, NULL_RTX, LCT_NORMAL,
				 TYPE_MODE (integer_type_node), 1,
				 plus_constant (XEXP (fc, 0),
						sjlj_fc_jbuf_ofs), Pmode);

    /* Hint that the direct (zero) return from setjmp is the expected
       path.  */
    note = emit_note (NULL, NOTE_INSN_EXPECTED_VALUE);
    NOTE_EXPECTED_VALUE (note) = gen_rtx_EQ (VOIDmode, x, const0_rtx);

    emit_cmp_and_jump_insns (x, const0_rtx, NE, 0,
			     TYPE_MODE (integer_type_node), 0, 0,
			     dispatch_label);
  }
#else
  expand_builtin_setjmp_setup (plus_constant (XEXP (fc, 0), sjlj_fc_jbuf_ofs),
			       dispatch_label);
#endif

  emit_library_call (unwind_sjlj_register_libfunc, LCT_NORMAL, VOIDmode,
		     1, XEXP (fc, 0), Pmode);

  seq = get_insns ();
  end_sequence ();

  /* ??? Instead of doing this at the beginning of the function,
     do this in a block that is at loop level 0 and dominates all
     can_throw_internal instructions.  */

  /* Place the sequence after the NOTE_INSN_FUNCTION_BEG note.  */
  for (fn_begin = get_insns (); ; fn_begin = NEXT_INSN (fn_begin))
    if (GET_CODE (fn_begin) == NOTE
	&& NOTE_LINE_NUMBER (fn_begin) == NOTE_INSN_FUNCTION_BEG)
      break;
  emit_insns_after (seq, fn_begin);
}
2193
52a11cbf
RH
/* Call back from expand_function_end to know where we should put
   the call to unwind_sjlj_unregister_libfunc if needed.  AFTER is the
   insn after which sjlj_emit_function_exit will insert its code.  */

void
sjlj_emit_function_exit_after (after)
     rtx after;
{
  cfun->eh->sjlj_exit_after = after;
}
4956d07c
MS
2203
/* Emit the call that unregisters this function's SjLj context from
   the runtime unwinder, at the point previously recorded by
   sjlj_emit_function_exit_after.  */

static void
sjlj_emit_function_exit ()
{
  rtx seq;

  start_sequence ();

  emit_library_call (unwind_sjlj_unregister_libfunc, LCT_NORMAL, VOIDmode,
		     1, XEXP (cfun->eh->sjlj_fc, 0), Pmode);

  seq = get_insns ();
  end_sequence ();

  /* ??? Really this can be done in any block at loop level 0 that
     post-dominates all can_throw_internal instructions.  This is
     the last possible moment.  */

  emit_insns_after (seq, cfun->eh->sjlj_exit_after);
}
2223
52a11cbf
RH
/* Emit the dispatch code at DISPATCH_LABEL: reload the dispatch
   index, exception pointer and filter value from the function
   context, then compare-and-branch to the post-landing pad of
   whichever directly reachable region (per LP_INFO) the index
   selects.  The whole sequence is placed before the first reachable
   region's post-landing pad, which serves as the fall-through
   target.  */

static void
sjlj_emit_dispatch_table (dispatch_label, lp_info)
     rtx dispatch_label;
     struct sjlj_lp_info *lp_info;
{
  int i, first_reachable;
  rtx mem, dispatch, seq, fc;

  fc = cfun->eh->sjlj_fc;

  start_sequence ();

  emit_label (dispatch_label);

#ifndef DONT_USE_BUILTIN_SETJMP
  expand_builtin_setjmp_receiver (dispatch_label);
#endif

  /* Load up dispatch index, exc_ptr and filter values from the
     function context.  */
  mem = change_address (fc, TYPE_MODE (integer_type_node),
			plus_constant (XEXP (fc, 0), sjlj_fc_call_site_ofs));
  dispatch = copy_to_reg (mem);

  mem = change_address (fc, word_mode,
			plus_constant (XEXP (fc, 0), sjlj_fc_data_ofs));
  if (word_mode != Pmode)
    {
      /* Widen/convert the stored word to a pointer value.  */
#ifdef POINTERS_EXTEND_UNSIGNED
      mem = convert_memory_address (Pmode, mem);
#else
      mem = convert_to_mode (Pmode, mem, 0);
#endif
    }
  emit_move_insn (cfun->eh->exc_ptr, mem);

  mem = change_address (fc, word_mode,
			plus_constant (XEXP (fc, 0),
				       sjlj_fc_data_ofs + UNITS_PER_WORD));
  emit_move_insn (cfun->eh->filter, mem);

  /* Jump to one of the directly reachable regions.  */
  /* ??? This really ought to be using a switch statement.  */

  first_reachable = 0;
  for (i = cfun->eh->last_region_number; i > 0; --i)
    {
      /* Regions without in-function processing never received a
	 dispatch index; skip them.  */
      if (! lp_info[i].directly_reachable
	  || lp_info[i].action_index < 0)
	continue;

      /* The first reachable region is the fall-through target and
	 needs no explicit comparison.  */
      if (! first_reachable)
	{
	  first_reachable = i;
	  continue;
	}

      emit_cmp_and_jump_insns (dispatch,
			       GEN_INT (lp_info[i].dispatch_index), EQ,
			       NULL_RTX, TYPE_MODE (integer_type_node), 0, 0,
			       cfun->eh->region_array[i]->post_landing_pad);
    }

  seq = get_insns ();
  end_sequence ();

  emit_insns_before (seq, (cfun->eh->region_array[first_reachable]
			   ->post_landing_pad));
}
2293
52a11cbf
RH
/* Entry point for SjLj landing pad generation: allocate per-region
   info, and if any region is directly reachable, create the function
   context stack slot, assign call-site values, mark call sites, and
   emit the context setup, dispatch table, and teardown code.  */

static void
sjlj_build_landing_pads ()
{
  struct sjlj_lp_info *lp_info;

  /* One zero-initialized entry per region, plus entry 0 (unused since
     region numbers start at 1).  */
  lp_info = (struct sjlj_lp_info *) xcalloc (cfun->eh->last_region_number + 1,
					     sizeof (struct sjlj_lp_info));

  if (sjlj_find_directly_reachable_regions (lp_info))
    {
      rtx dispatch_label = gen_label_rtx ();

      cfun->eh->sjlj_fc
	= assign_stack_local (TYPE_MODE (sjlj_fc_type_node),
			      int_size_in_bytes (sjlj_fc_type_node),
			      TYPE_ALIGN (sjlj_fc_type_node));

      sjlj_assign_call_site_values (dispatch_label, lp_info);
      sjlj_mark_call_sites (lp_info);

      sjlj_emit_function_enter (dispatch_label);
      sjlj_emit_dispatch_table (dispatch_label, lp_info);
      sjlj_emit_function_exit ();
    }

  free (lp_info);
}
ce152ef8 2321
/* Generate landing pads and record exception handler labels once all
   exception regions have been created.  Rebuilds the CFG both before
   and after, since landing pad generation changes control flow.  */

void
finish_eh_generation ()
{
  /* Nothing to do if no regions created.  */
  if (cfun->eh->region_tree == NULL)
    return;

  /* The object here is to provide find_basic_blocks with detailed
     information (via reachable_handlers) on how exception control
     flows within the function.  In this first pass, we can include
     type information garnered from ERT_THROW and ERT_ALLOWED_EXCEPTIONS
     regions, and hope that it will be useful in deleting unreachable
     handlers.  Subsequently, we will generate landing pads which will
     connect many of the handlers, and then type information will not
     be effective.  Still, this is a win over previous implementations.  */

  jump_optimize_minimal (get_insns ());
  find_basic_blocks (get_insns (), max_reg_num (), 0);
  cleanup_cfg ();

  /* These registers are used by the landing pads.  Make sure they
     have been generated.  */
  get_exception_pointer ();
  get_exception_filter ();

  /* Construct the landing pads.  */

  assign_filter_values ();
  build_post_landing_pads ();
  connect_post_landing_pads ();
  if (USING_SJLJ_EXCEPTIONS)
    sjlj_build_landing_pads ();
  else
    dw2_build_landing_pads ();

  cfun->eh->built_landing_pads = 1;

  /* We've totally changed the CFG.  Start over.  */
  find_exception_handler_labels ();
  jump_optimize_minimal (get_insns ());
  find_basic_blocks (get_insns (), max_reg_num (), 0);
  cleanup_cfg ();
}
4956d07c 2365\f
52a11cbf 2366/* This section handles removing dead code for flow. */
154bba13 2367
52a11cbf 2368/* Remove LABEL from the exception_handler_labels list. */
154bba13 2369
52a11cbf
RH
2370static void
2371remove_exception_handler_label (label)
2372 rtx label;
154bba13 2373{
52a11cbf 2374 rtx *pl, l;
100d81d4 2375
52a11cbf
RH
2376 for (pl = &exception_handler_labels, l = *pl;
2377 XEXP (l, 0) != label;
2378 pl = &XEXP (l, 1), l = *pl)
2379 continue;
154bba13 2380
52a11cbf
RH
2381 *pl = XEXP (l, 1);
2382 free_EXPR_LIST_node (l);
154bba13
TT
2383}
2384
/* Splice REGION from the region tree etc.: purge it from the region
   array and label list, promote its children into its place in the
   tree, unlink it from any catch chain, and free it.  */

static void
remove_eh_handler (region)
     struct eh_region *region;
{
  struct eh_region **pp, *p;
  rtx lab;
  int i;

  /* For the benefit of efficiently handling REG_EH_REGION notes,
     replace this region in the region array with its containing
     region.  Note that previous region deletions may result in
     multiple copies of this region in the array, so we have to
     search the whole thing.  */
  for (i = cfun->eh->last_region_number; i > 0; --i)
    if (cfun->eh->region_array[i] == region)
      cfun->eh->region_array[i] = region->outer;

  /* Drop the region's label -- or its landing pad label, once pads
     have been built -- from the exception_handler_labels list.  */
  if (cfun->eh->built_landing_pads)
    lab = region->landing_pad;
  else
    lab = region->label;
  if (lab)
    remove_exception_handler_label (lab);

  /* Find the pointer (in the parent's child list, or at the tree
     root) that references REGION.  */
  if (region->outer)
    pp = &region->outer->inner;
  else
    pp = &cfun->eh->region_tree;
  for (p = *pp; p != region; pp = &p->next_peer, p = *pp)
    continue;

  /* Splice REGION out, re-parenting its children (if any) onto
     REGION's parent and grafting them into the peer list.  */
  if (region->inner)
    {
      for (p = region->inner; p->next_peer ; p = p->next_peer)
	p->outer = region->outer;
      p->next_peer = region->next_peer;
      p->outer = region->outer;
      *pp = region->inner;
    }
  else
    *pp = region->next_peer;

  /* A catch region must also be unlinked from its try region's catch
     chain; if it was the last catch, the try region dies as well.  */
  if (region->type == ERT_CATCH)
    {
      struct eh_region *try, *next, *prev;

      /* The controlling try region follows the catches in the peer
	 list.  */
      for (try = region->next_peer;
	   try->type == ERT_CATCH;
	   try = try->next_peer)
	continue;
      if (try->type != ERT_TRY)
	abort ();

      next = region->u.catch.next_catch;
      prev = region->u.catch.prev_catch;

      if (next)
	next->u.catch.prev_catch = prev;
      else
	try->u.try.last_catch = prev;
      if (prev)
	prev->u.catch.next_catch = next;
      else
	{
	  try->u.try.catch = next;
	  if (! next)
	    remove_eh_handler (try);
	}
    }

  free (region);
}
2459
52a11cbf
RH
/* LABEL heads a basic block that is about to be deleted.  If this
   label corresponds to an exception region, we may be able to
   delete the region.  */

void
maybe_remove_eh_handler (label)
     rtx label;
{
  int i;

  /* ??? After generating landing pads, it's not so simple to determine
     if the region data is completely unused.  One must examine the
     landing pad and the post landing pad, and whether an inner try block
     is referencing the catch handlers directly.  */
  if (cfun->eh->built_landing_pads)
    return;

  /* Search the region array for a region whose handler is LABEL.  */
  for (i = cfun->eh->last_region_number; i > 0; --i)
    {
      struct eh_region *region = cfun->eh->region_array[i];
      if (region && region->label == label)
	{
	  /* Flow will want to remove MUST_NOT_THROW regions as unreachable
	     because there is no path to the fallback call to terminate.
	     But the region continues to affect call-site data until there
	     are no more contained calls, which we don't see here.  */
	  if (region->type == ERT_MUST_NOT_THROW)
	    {
	      remove_exception_handler_label (region->label);
	      region->label = NULL_RTX;
	    }
	  else
	    remove_eh_handler (region);
	  break;
	}
    }
}
2497
52a11cbf
RH
2498\f
2499/* This section describes CFG exception edges for flow. */
87ff9c8e 2500
52a11cbf
RH
/* For communicating between calls to reachable_next_level.  */
struct reachable_info
{
  /* Types already claimed by an enclosing catch; used to detect
     shadowed handlers.  */
  tree types_caught;
  /* List of allowed-exception type lists encountered so far.  */
  tree types_allowed;
  /* Accumulated list of reachable handler labels.  */
  rtx handlers;
};
87ff9c8e 2508
52a11cbf
RH
2509/* A subroutine of reachable_next_level. Return true if TYPE, or a
2510 base class of TYPE, is in HANDLED. */
87ff9c8e 2511
52a11cbf
RH
2512static int
2513check_handled (handled, type)
2514 tree handled, type;
87ff9c8e 2515{
52a11cbf
RH
2516 tree t;
2517
2518 /* We can check for exact matches without front-end help. */
2519 if (! lang_eh_type_covers)
f54a7f6f 2520 {
52a11cbf
RH
2521 for (t = handled; t ; t = TREE_CHAIN (t))
2522 if (TREE_VALUE (t) == type)
2523 return 1;
2524 }
2525 else
2526 {
2527 for (t = handled; t ; t = TREE_CHAIN (t))
2528 if ((*lang_eh_type_covers) (TREE_VALUE (t), type))
2529 return 1;
f54a7f6f 2530 }
52a11cbf
RH
2531
2532 return 0;
87ff9c8e
RH
2533}
2534
52a11cbf
RH
/* A subroutine of reachable_next_level.  If we are collecting a list
   of handlers, add one.  After landing pad generation, reference
   it instead of the handlers themselves.  Further, the handlers are
   all wired together, so by referencing one, we've got them all.
   Before landing pad generation we reference each handler individually.

   LP_REGION contains the landing pad; REGION is the handler.  */

static void
add_reachable_handler (info, lp_region, region)
     struct reachable_info *info;
     struct eh_region *lp_region;
     struct eh_region *region;
{
  /* No-op when the caller is not collecting handlers.  */
  if (! info)
    return;

  if (cfun->eh->built_landing_pads)
    {
      /* One landing pad reference covers all handlers, so record it
	 only once.  */
      if (! info->handlers)
	info->handlers = alloc_INSN_LIST (lp_region->landing_pad, NULL_RTX);
    }
  else
    info->handlers = alloc_INSN_LIST (region->label, info->handlers);
}
2560
52a11cbf
RH
/* Process one level of exception regions for reachability.
   If TYPE_THROWN is non-null, then it is the *exact* type being
   propagated.  If INFO is non-null, then collect handler labels
   and caught/allowed type information between invocations.  */

static enum reachable_code
reachable_next_level (region, type_thrown, info)
     struct eh_region *region;
     tree type_thrown;
     struct reachable_info *info;
{
  switch (region->type)
    {
    case ERT_CLEANUP:
      /* Before landing-pad generation, we model control flow
	 directly to the individual handlers.  In this way we can
	 see that catch handler types may shadow one another.  */
      add_reachable_handler (info, region, region);
      return RNL_MAYBE_CAUGHT;

    case ERT_TRY:
      {
	struct eh_region *c;
	enum reachable_code ret = RNL_NOT_CAUGHT;

	/* Examine each catch attached to this try in order.  */
	for (c = region->u.try.catch; c ; c = c->u.catch.next_catch)
	  {
	    /* A catch-all handler ends the search.  */
	    /* ??? _Unwind_ForcedUnwind will want outer cleanups
	       to be run as well.  */
	    if (c->u.catch.type == NULL)
	      {
		add_reachable_handler (info, region, c);
		return RNL_CAUGHT;
	      }

	    if (type_thrown)
	      {
		/* If we have a type match, end the search.  */
		if (c->u.catch.type == type_thrown
		    || (lang_eh_type_covers
			&& (*lang_eh_type_covers) (c->u.catch.type,
						   type_thrown)))
		  {
		    add_reachable_handler (info, region, c);
		    return RNL_CAUGHT;
		  }

		/* If we have definitive information of a match failure,
		   the catch won't trigger.  */
		if (lang_eh_type_covers)
		  return RNL_NOT_CAUGHT;
	      }

	    if (! info)
	      ret = RNL_MAYBE_CAUGHT;

	    /* A type must not have been previously caught.  */
	    else if (! check_handled (info->types_caught, c->u.catch.type))
	      {
		add_reachable_handler (info, region, c);
		info->types_caught = tree_cons (NULL, c->u.catch.type,
						info->types_caught);

		/* ??? If the catch type is a base class of every allowed
		   type, then we know we can stop the search.  */
		ret = RNL_MAYBE_CAUGHT;
	      }
	  }

	return ret;
      }

    case ERT_ALLOWED_EXCEPTIONS:
      /* An empty list of types definitely ends the search.  */
      if (region->u.allowed.type_list == NULL_TREE)
	{
	  add_reachable_handler (info, region, region);
	  return RNL_CAUGHT;
	}

      /* Collect a list of lists of allowed types for use in detecting
	 when a catch may be transformed into a catch-all.  */
      if (info)
	info->types_allowed = tree_cons (NULL_TREE,
					 region->u.allowed.type_list,
					 info->types_allowed);

      /* If we have definitive information about the type hierarchy,
	 then we can tell if the thrown type will pass through the
	 filter.  */
      if (type_thrown && lang_eh_type_covers)
	{
	  if (check_handled (region->u.allowed.type_list, type_thrown))
	    return RNL_NOT_CAUGHT;
	  else
	    {
	      add_reachable_handler (info, region, region);
	      return RNL_CAUGHT;
	    }
	}

      add_reachable_handler (info, region, region);
      return RNL_MAYBE_CAUGHT;

    case ERT_CATCH:
      /* Catch regions are handled by their controlling try region.  */
      return RNL_NOT_CAUGHT;

    case ERT_MUST_NOT_THROW:
      /* Here we end our search, since no exceptions may propagate.
	 If we've touched down at some landing pad previous, then the
	 explicit function call we generated may be used.  Otherwise
	 the call is made by the runtime.  */
      if (info && info->handlers)
	{
	  add_reachable_handler (info, region, region);
	  return RNL_CAUGHT;
	}
      else
	return RNL_BLOCKED;

    case ERT_THROW:
    case ERT_FIXUP:
      /* Shouldn't see these here.  */
      break;
    }

  abort ();
}
4956d07c 2691
52a11cbf
RH
/* Retrieve a list of labels of exception handlers which can be
   reached by a given insn.  Returns NULL if the insn cannot throw
   to any region in this function.  */

rtx
reachable_handlers (insn)
     rtx insn;
{
  struct reachable_info info;
  struct eh_region *region;
  tree type_thrown;
  int region_number;

  /* A RESX jump names its region directly; otherwise the region
     comes from the insn's REG_EH_REGION note.  */
  if (GET_CODE (insn) == JUMP_INSN
      && GET_CODE (PATTERN (insn)) == RESX)
    region_number = XINT (PATTERN (insn), 0);
  else
    {
      rtx note = find_reg_note (insn, REG_EH_REGION, NULL_RTX);
      if (!note || INTVAL (XEXP (note, 0)) <= 0)
	return NULL;
      region_number = INTVAL (XEXP (note, 0));
    }

  memset (&info, 0, sizeof (info));

  region = cfun->eh->region_array[region_number];

  /* An ERT_THROW region names the exact type thrown; search starts
     at its enclosing region.  A RESX resumes propagation, so it too
     starts one level out.  */
  type_thrown = NULL_TREE;
  if (region->type == ERT_THROW)
    {
      type_thrown = region->u.throw.type;
      region = region->outer;
    }
  else if (GET_CODE (insn) == JUMP_INSN
	   && GET_CODE (PATTERN (insn)) == RESX)
    region = region->outer;

  /* Walk outward, collecting handlers until the exception is
     definitely caught or blocked.  */
  for (; region; region = region->outer)
    if (reachable_next_level (region, type_thrown, &info) >= RNL_CAUGHT)
      break;

  return info.handlers;
}
2735
52a11cbf
RH
/* Determine if the given INSN can throw an exception that is caught
   within the function.  */

bool
can_throw_internal (insn)
     rtx insn;
{
  struct eh_region *region;
  tree type_thrown;
  rtx note;

  if (! INSN_P (insn))
    return false;

  /* For a SEQUENCE, examine its first element.  */
  if (GET_CODE (insn) == INSN
      && GET_CODE (PATTERN (insn)) == SEQUENCE)
    insn = XVECEXP (PATTERN (insn), 0, 0);

  /* A CALL_PLACEHOLDER carries alternate expansions; any of them
     may throw.  */
  if (GET_CODE (insn) == CALL_INSN
      && GET_CODE (PATTERN (insn)) == CALL_PLACEHOLDER)
    {
      int i;
      for (i = 0; i < 3; ++i)
	{
	  rtx sub = XEXP (PATTERN (insn), i);
	  for (; sub ; sub = NEXT_INSN (sub))
	    if (can_throw_internal (sub))
	      return true;
	}
      return false;
    }

  /* Every insn that might throw has an EH_REGION note.  */
  note = find_reg_note (insn, REG_EH_REGION, NULL_RTX);
  if (!note || INTVAL (XEXP (note, 0)) <= 0)
    return false;

  region = cfun->eh->region_array[INTVAL (XEXP (note, 0))];

  /* An ERT_THROW region names the exact type thrown; search starts
     at its enclosing region.  */
  type_thrown = NULL_TREE;
  if (region->type == ERT_THROW)
    {
      type_thrown = region->u.throw.type;
      region = region->outer;
    }

  /* If this exception is ignored by each and every containing region,
     then control passes straight out.  The runtime may handle some
     regions, which also do not require processing internally.  */
  for (; region; region = region->outer)
    {
      enum reachable_code how = reachable_next_level (region, type_thrown, 0);
      if (how == RNL_BLOCKED)
	return false;
      if (how != RNL_NOT_CAUGHT)
	return true;
    }

  return false;
}
4956d07c 2796
52a11cbf
RH
/* Determine if the given INSN can throw an exception that is
   visible outside the function.  */

bool
can_throw_external (insn)
     rtx insn;
{
  struct eh_region *region;
  tree type_thrown;
  rtx note;

  if (! INSN_P (insn))
    return false;

  /* For a SEQUENCE, examine its first element.  */
  if (GET_CODE (insn) == INSN
      && GET_CODE (PATTERN (insn)) == SEQUENCE)
    insn = XVECEXP (PATTERN (insn), 0, 0);

  /* A CALL_PLACEHOLDER carries alternate expansions; any of them
     may throw.  */
  if (GET_CODE (insn) == CALL_INSN
      && GET_CODE (PATTERN (insn)) == CALL_PLACEHOLDER)
    {
      int i;
      for (i = 0; i < 3; ++i)
	{
	  rtx sub = XEXP (PATTERN (insn), i);
	  for (; sub ; sub = NEXT_INSN (sub))
	    if (can_throw_external (sub))
	      return true;
	}
      return false;
    }

  note = find_reg_note (insn, REG_EH_REGION, NULL_RTX);
  if (!note)
    {
      /* Calls (and trapping insns) without notes are outside any
	 exception handling region in this function.  We have to
	 assume it might throw.  Given that the front end and middle
	 ends mark known NOTHROW functions, this isn't so wildly
	 inaccurate.  */
      return (GET_CODE (insn) == CALL_INSN
	      || (flag_non_call_exceptions
		  && may_trap_p (PATTERN (insn))));
    }
  if (INTVAL (XEXP (note, 0)) <= 0)
    return false;

  region = cfun->eh->region_array[INTVAL (XEXP (note, 0))];

  /* An ERT_THROW region names the exact type thrown; search starts
     at its enclosing region.  */
  type_thrown = NULL_TREE;
  if (region->type == ERT_THROW)
    {
      type_thrown = region->u.throw.type;
      region = region->outer;
    }

  /* If the exception is caught or blocked by any containing region,
     then it is not seen by any calling function.  */
  for (; region ; region = region->outer)
    if (reachable_next_level (region, type_thrown, NULL) >= RNL_CAUGHT)
      return false;

  return true;
}
1ef1bf06 2861
/* True if nothing in this function can throw outside this function.  */

bool
nothrow_function_p ()
{
  rtx insn;

  /* With exceptions disabled nothing can propagate at all.  */
  if (! flag_exceptions)
    return true;

  /* Check the main insn stream, and also any insns queued for the
     epilogue's delay slots.  */
  for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
    if (can_throw_external (insn))
      return false;
  for (insn = current_function_epilogue_delay_list; insn;
       insn = XEXP (insn, 1))
    if (can_throw_external (insn))
      return false;

  return true;
}
52a11cbf 2882
ca55abae 2883\f
52a11cbf 2884/* Various hooks for unwind library. */
ca55abae
JM
2885
/* Do any necessary initialization to access arbitrary stack frames.
   On the SPARC, this means flushing the register windows.  */

void
expand_builtin_unwind_init ()
{
  /* Set this so all the registers get saved in our frame; we need to be
     able to copy the saved values for any registers from frames we unwind.  */
  current_function_has_nonlocal_label = 1;

#ifdef SETUP_FRAME_ADDRESSES
  SETUP_FRAME_ADDRESSES ();
#endif
}
2900
52a11cbf
RH
/* Expand __builtin_eh_return_data_regno: map the constant argument
   in ARGLIST through EH_RETURN_DATA_REGNO and the debug-info register
   numbering, returning the result as a CONST_INT.  Returns
   constm1_rtx for a non-constant argument or an invalid register.  */

rtx
expand_builtin_eh_return_data_regno (arglist)
     tree arglist;
{
  tree which = TREE_VALUE (arglist);
  unsigned HOST_WIDE_INT iwhich;

  if (TREE_CODE (which) != INTEGER_CST)
    {
      error ("argument of `__builtin_eh_return_regno' must be constant");
      return constm1_rtx;
    }

  iwhich = tree_low_cst (which, 1);
  iwhich = EH_RETURN_DATA_REGNO (iwhich);
  if (iwhich == INVALID_REGNUM)
    return constm1_rtx;

  /* Translate the hard register number into the numbering used in
     debug information for this target.  */
#ifdef DWARF_FRAME_REGNUM
  iwhich = DWARF_FRAME_REGNUM (iwhich);
#else
  iwhich = DBX_REGISTER_NUMBER (iwhich);
#endif

  return GEN_INT (iwhich);
}
2927
ca55abae
JM
/* Given a value extracted from the return address register or stack slot,
   return the actual address encoded in that value.  */

rtx
expand_builtin_extract_return_addr (addr_tree)
     tree addr_tree;
{
  rtx addr = expand_expr (addr_tree, NULL_RTX, Pmode, 0);

  /* First mask out any unwanted bits.  */
#ifdef MASK_RETURN_ADDR
  expand_and (addr, MASK_RETURN_ADDR, addr);
#endif

  /* Then adjust to find the real return address.  */
#if defined (RETURN_ADDR_OFFSET)
  addr = plus_constant (addr, RETURN_ADDR_OFFSET);
#endif

  return addr;
}
2949
/* Given an actual address in addr_tree, do any necessary encoding
   and return the value to be stored in the return address register or
   stack slot so the epilogue will return to that address.  */

rtx
expand_builtin_frob_return_addr (addr_tree)
     tree addr_tree;
{
  rtx addr = expand_expr (addr_tree, NULL_RTX, Pmode, 0);

  /* Invert the adjustment applied by
     expand_builtin_extract_return_addr.  */
#ifdef RETURN_ADDR_OFFSET
  addr = force_reg (Pmode, addr);
  addr = plus_constant (addr, -RETURN_ADDR_OFFSET);
#endif

  return addr;
}
2967
52a11cbf
RH
/* Set up the epilogue with the magic bits we'll need to return to the
   exception handler.  STACKADJ_TREE is the stack adjustment and
   HANDLER_TREE the handler address; they are stashed in pseudos (and
   a label created) on first use, for expand_eh_return to consume.  */

void
expand_builtin_eh_return (stackadj_tree, handler_tree)
     tree stackadj_tree, handler_tree;
{
  rtx stackadj, handler;

  stackadj = expand_expr (stackadj_tree, cfun->eh->ehr_stackadj, VOIDmode, 0);
  handler = expand_expr (handler_tree, cfun->eh->ehr_handler, VOIDmode, 0);

  if (! cfun->eh->ehr_label)
    {
      /* First use in this function: copy the values into fresh
	 pseudos and create the label the epilogue code will carry.  */
      cfun->eh->ehr_stackadj = copy_to_reg (stackadj);
      cfun->eh->ehr_handler = copy_to_reg (handler);
      cfun->eh->ehr_label = gen_label_rtx ();
    }
  else
    {
      /* Subsequent uses funnel through the same pseudos.  */
      if (stackadj != cfun->eh->ehr_stackadj)
	emit_move_insn (cfun->eh->ehr_stackadj, stackadj);
      if (handler != cfun->eh->ehr_handler)
	emit_move_insn (cfun->eh->ehr_handler, handler);
    }

  emit_jump (cfun->eh->ehr_label);
}
2996
71038426
RH
/* Emit the epilogue code for __builtin_eh_return, if it was used in
   this function: set EH_RETURN_STACKADJ_RTX and either emit the
   target's eh_return pattern or load EH_RETURN_HANDLER_RTX.  The
   normal-return path jumps around this code with a zero stack
   adjustment.  */

void
expand_eh_return ()
{
  rtx sa, ra, around_label;

  /* Nothing to do unless expand_builtin_eh_return was called.  */
  if (! cfun->eh->ehr_label)
    return;

  sa = EH_RETURN_STACKADJ_RTX;
  if (! sa)
    {
      error ("__builtin_eh_return not supported on this target");
      return;
    }

  current_function_calls_eh_return = 1;

  /* Normal return: zero stack adjustment, skip the EH bits.  */
  around_label = gen_label_rtx ();
  emit_move_insn (sa, const0_rtx);
  emit_jump (around_label);

  emit_label (cfun->eh->ehr_label);
  clobber_return_register ();

#ifdef HAVE_eh_return
  if (HAVE_eh_return)
    emit_insn (gen_eh_return (cfun->eh->ehr_stackadj, cfun->eh->ehr_handler));
  else
#endif
    {
      ra = EH_RETURN_HANDLER_RTX;
      if (! ra)
	{
	  error ("__builtin_eh_return not supported on this target");
	  /* Continue with a dummy register so further expansion
	     doesn't crash.  */
	  ra = gen_reg_rtx (Pmode);
	}

      emit_move_insn (sa, cfun->eh->ehr_stackadj);
      emit_move_insn (ra, cfun->eh->ehr_handler);
    }

  emit_label (around_label);
}
77d33a84 3040\f
52a11cbf
RH
/* An entry in the action record table (cfun->eh->action_record_data),
   uniquified via a hash table during construction.  */
struct action_record
{
  int offset;	/* 1-based offset of this record in the table.  */
  int filter;	/* Type filter value for this action.  */
  int next;	/* 1-based offset of the next record, or 0 for none.  */
};
77d33a84 3047
52a11cbf
RH
3048static int
3049action_record_eq (pentry, pdata)
3050 const PTR pentry;
3051 const PTR pdata;
3052{
3053 const struct action_record *entry = (const struct action_record *) pentry;
3054 const struct action_record *data = (const struct action_record *) pdata;
3055 return entry->filter == data->filter && entry->next == data->next;
3056}
77d33a84 3057
52a11cbf
RH
3058static hashval_t
3059action_record_hash (pentry)
3060 const PTR pentry;
3061{
3062 const struct action_record *entry = (const struct action_record *) pentry;
3063 return entry->next * 1009 + entry->filter;
3064}
77d33a84 3065
52a11cbf
RH
/* Intern the action (FILTER, NEXT) in AR_HASH, appending its encoded
   form to cfun->eh->action_record_data if it was not already present.
   NEXT is the 1-based offset of the chained record, or 0 for none.
   Returns the 1-based offset of the (possibly pre-existing) record.  */

static int
add_action_record (ar_hash, filter, next)
     htab_t ar_hash;
     int filter, next;
{
  struct action_record **slot, *new, tmp;

  tmp.filter = filter;
  tmp.next = next;
  slot = (struct action_record **) htab_find_slot (ar_hash, &tmp, INSERT);

  if ((new = *slot) == NULL)
    {
      new = (struct action_record *) xmalloc (sizeof (*new));
      /* Offsets are 1-based so that 0 can mean "no next record".  */
      new->offset = VARRAY_ACTIVE_SIZE (cfun->eh->action_record_data) + 1;
      new->filter = filter;
      new->next = next;
      *slot = new;

      /* The filter value goes in untouched.  The link to the next
	 record is a "self-relative" byte offset, or zero to indicate
	 that there is no next record.  So convert the absolute 1 based
	 indices we've been carrying around into a displacement.  */

      push_sleb128 (&cfun->eh->action_record_data, filter);
      if (next)
	next -= VARRAY_ACTIVE_SIZE (cfun->eh->action_record_data) + 1;
      push_sleb128 (&cfun->eh->action_record_data, next);
    }

  return new->offset;
}
77d33a84 3098
52a11cbf
RH
/* Compute the chain of actions that applies at REGION, interning the
   records in AR_HASH.  Returns:
     > 0  1-based offset of the first action record in the chain;
       0  only cleanups apply (zero-action landing pad);
      -1  no actions and no landing pad required;
      -2  must-not-throw: an lsda is required but no call-site entry.  */

static int
collect_one_action_chain (ar_hash, region)
     htab_t ar_hash;
     struct eh_region *region;
{
  struct eh_region *c;
  int next;

  /* If we've reached the top of the region chain, then we have
     no actions, and require no landing pad.  */
  if (region == NULL)
    return -1;

  switch (region->type)
    {
    case ERT_CLEANUP:
      /* A cleanup adds a zero filter to the beginning of the chain, but
	 there are special cases to look out for.  If there are *only*
	 cleanups along a path, then it compresses to a zero action.
	 Further, if there are multiple cleanups along a path, we only
	 need to represent one of them, as that is enough to trigger
	 entry to the landing pad at runtime.  */
      next = collect_one_action_chain (ar_hash, region->outer);
      if (next <= 0)
	return 0;
      for (c = region->outer; c ; c = c->outer)
	if (c->type == ERT_CLEANUP)
	  return next;
      return add_action_record (ar_hash, 0, next);

    case ERT_TRY:
      /* Process the associated catch regions in reverse order.
	 If there's a catch-all handler, then we don't need to
	 search outer regions.  Use a magic -3 value to record
	 that we haven't done the outer search.  */
      next = -3;
      for (c = region->u.try.last_catch; c ; c = c->u.catch.prev_catch)
	{
	  if (c->u.catch.type == NULL)
	    next = add_action_record (ar_hash, c->u.catch.filter, 0);
	  else
	    {
	      /* Lazily compute the outer chain the first time a
		 non-catch-all handler is seen.  */
	      if (next == -3)
		{
		  next = collect_one_action_chain (ar_hash, region->outer);
		  if (next < 0)
		    next = 0;
		}
	      next = add_action_record (ar_hash, c->u.catch.filter, next);
	    }
	}
      return next;

    case ERT_ALLOWED_EXCEPTIONS:
      /* An exception specification adds its filter to the
	 beginning of the chain.  */
      next = collect_one_action_chain (ar_hash, region->outer);
      return add_action_record (ar_hash, region->u.allowed.filter,
				next < 0 ? 0 : next);

    case ERT_MUST_NOT_THROW:
      /* A must-not-throw region with no inner handlers or cleanups
	 requires no call-site entry.  Note that this differs from
	 the no handler or cleanup case in that we do require an lsda
	 to be generated.  Return a magic -2 value to record this.  */
      return -2;

    case ERT_CATCH:
    case ERT_THROW:
      /* CATCH regions are handled in TRY above.  THROW regions are
	 for optimization information only and produce no output.  */
      return collect_one_action_chain (ar_hash, region->outer);

    default:
      abort ();
    }
}
3176
52a11cbf
RH
3177static int
3178add_call_site (landing_pad, action)
3179 rtx landing_pad;
3180 int action;
77d33a84 3181{
52a11cbf
RH
3182 struct call_site_record *data = cfun->eh->call_site_data;
3183 int used = cfun->eh->call_site_data_used;
3184 int size = cfun->eh->call_site_data_size;
77d33a84 3185
52a11cbf
RH
3186 if (used >= size)
3187 {
3188 size = (size ? size * 2 : 64);
3189 data = (struct call_site_record *)
3190 xrealloc (data, sizeof (*data) * size);
3191 cfun->eh->call_site_data = data;
3192 cfun->eh->call_site_data_size = size;
3193 }
77d33a84 3194
52a11cbf
RH
3195 data[used].landing_pad = landing_pad;
3196 data[used].action = action;
77d33a84 3197
52a11cbf 3198 cfun->eh->call_site_data_used = used + 1;
77d33a84 3199
52a11cbf 3200 return used + call_site_base;
77d33a84
AM
3201}
3202
52a11cbf
RH
/* Turn REG_EH_REGION notes back into NOTE_INSN_EH_REGION notes.
   The new note numbers will not refer to region numbers, but
   instead to call site entries.  */

void
convert_to_eh_region_ranges ()
{
  rtx insn, iter, note;
  htab_t ar_hash;
  /* LAST_ACTION tracks the action of the region currently open:
     -3 none seen yet, -2 must-not-throw, -1 no-action, >= 0 a real
     action chain (mirroring collect_one_action_chain's protocol).  */
  int last_action = -3;
  rtx last_action_insn = NULL_RTX;
  rtx last_landing_pad = NULL_RTX;
  rtx first_no_action_insn = NULL_RTX;
  int call_site;

  if (USING_SJLJ_EXCEPTIONS || cfun->eh->region_tree == NULL)
    return;

  VARRAY_UCHAR_INIT (cfun->eh->action_record_data, 64, "action_record_data");

  ar_hash = htab_create (31, action_record_hash, action_record_eq, free);

  for (iter = get_insns (); iter ; iter = NEXT_INSN (iter))
    if (INSN_P (iter))
      {
	struct eh_region *region;
	int this_action;
	rtx this_landing_pad;

	/* The note lives on the first insn of a SEQUENCE.  */
	insn = iter;
	if (GET_CODE (insn) == INSN
	    && GET_CODE (PATTERN (insn)) == SEQUENCE)
	  insn = XVECEXP (PATTERN (insn), 0, 0);

	note = find_reg_note (insn, REG_EH_REGION, NULL_RTX);
	if (!note)
	  {
	    /* Insns without a note only matter if they can throw:
	       calls always, other insns only with -fnon-call-exceptions.  */
	    if (! (GET_CODE (insn) == CALL_INSN
		   || (flag_non_call_exceptions
		       && may_trap_p (PATTERN (insn)))))
	      continue;
	    this_action = -1;
	    region = NULL;
	  }
	else
	  {
	    if (INTVAL (XEXP (note, 0)) <= 0)
	      continue;
	    region = cfun->eh->region_array[INTVAL (XEXP (note, 0))];
	    this_action = collect_one_action_chain (ar_hash, region);
	  }

	/* Existence of catch handlers, or must-not-throw regions
	   implies that an lsda is needed (even if empty).  */
	if (this_action != -1)
	  cfun->uses_eh_lsda = 1;

	/* Delay creation of region notes for no-action regions
	   until we're sure that an lsda will be required.  */
	else if (last_action == -3)
	  {
	    first_no_action_insn = iter;
	    last_action = -1;
	  }

	/* Cleanups and handlers may share action chains but not
	   landing pads.  Collect the landing pad for this region.  */
	if (this_action >= 0)
	  {
	    struct eh_region *o;
	    for (o = region; ! o->landing_pad ; o = o->outer)
	      continue;
	    this_landing_pad = o->landing_pad;
	  }
	else
	  this_landing_pad = NULL_RTX;

	/* Differing actions or landing pads implies a change in call-site
	   info, which implies some EH_REGION note should be emitted.  */
	if (last_action != this_action
	    || last_landing_pad != this_landing_pad)
	  {
	    /* If we'd not seen a previous action (-3) or the previous
	       action was must-not-throw (-2), then we do not need an
	       end note.  */
	    if (last_action >= -1)
	      {
		/* If we delayed the creation of the begin, do it now.  */
		if (first_no_action_insn)
		  {
		    call_site = add_call_site (NULL_RTX, 0);
		    note = emit_note_before (NOTE_INSN_EH_REGION_BEG,
					     first_no_action_insn);
		    NOTE_EH_HANDLER (note) = call_site;
		    first_no_action_insn = NULL_RTX;
		  }

		/* Close the open region; CALL_SITE still holds the
		   number assigned at the matching begin note.  */
		note = emit_note_after (NOTE_INSN_EH_REGION_END,
					last_action_insn);
		NOTE_EH_HANDLER (note) = call_site;
	      }

	    /* If the new action is must-not-throw, then no region notes
	       are created.  */
	    if (this_action >= -1)
	      {
		call_site = add_call_site (this_landing_pad,
					   this_action < 0 ? 0 : this_action);
		note = emit_note_before (NOTE_INSN_EH_REGION_BEG, iter);
		NOTE_EH_HANDLER (note) = call_site;
	      }

	    last_action = this_action;
	    last_landing_pad = this_landing_pad;
	  }
	last_action_insn = iter;
      }

  /* Close the region left open at the end of the insn stream, if any.  */
  if (last_action >= -1 && ! first_no_action_insn)
    {
      note = emit_note_after (NOTE_INSN_EH_REGION_END, last_action_insn);
      NOTE_EH_HANDLER (note) = call_site;
    }

  htab_delete (ar_hash);
}
1ef1bf06 3329
52a11cbf
RH
3330\f
3331static void
3332push_uleb128 (data_area, value)
3333 varray_type *data_area;
3334 unsigned int value;
3335{
3336 do
3337 {
3338 unsigned char byte = value & 0x7f;
3339 value >>= 7;
3340 if (value)
3341 byte |= 0x80;
3342 VARRAY_PUSH_UCHAR (*data_area, byte);
3343 }
3344 while (value);
3345}
1ef1bf06 3346
52a11cbf
RH
3347static void
3348push_sleb128 (data_area, value)
3349 varray_type *data_area;
3350 int value;
3351{
3352 unsigned char byte;
3353 int more;
1ef1bf06 3354
52a11cbf 3355 do
1ef1bf06 3356 {
52a11cbf
RH
3357 byte = value & 0x7f;
3358 value >>= 7;
3359 more = ! ((value == 0 && (byte & 0x40) == 0)
3360 || (value == -1 && (byte & 0x40) != 0));
3361 if (more)
3362 byte |= 0x80;
3363 VARRAY_PUSH_UCHAR (*data_area, byte);
1ef1bf06 3364 }
52a11cbf
RH
3365 while (more);
3366}
1ef1bf06 3367
52a11cbf
RH
3368\f
/* DWARF pointer-encoding (DW_EH_PE_*) values used in the LSDA header.
   The low nibble selects the data format, the high nibble the base
   the encoded value is relative to; 0xff means the field is omitted.  */
#define DW_EH_PE_absptr 0x00
#define DW_EH_PE_omit 0xff

#define DW_EH_PE_uleb128 0x01
#define DW_EH_PE_udata2 0x02
#define DW_EH_PE_udata4 0x03
#define DW_EH_PE_udata8 0x04
#define DW_EH_PE_sleb128 0x09
#define DW_EH_PE_sdata2 0x0A
#define DW_EH_PE_sdata4 0x0B
#define DW_EH_PE_sdata8 0x0C
#define DW_EH_PE_signed 0x08

#define DW_EH_PE_pcrel 0x10
#define DW_EH_PE_textrel 0x20
#define DW_EH_PE_datarel 0x30
#define DW_EH_PE_funcrel 0x40
3386
/* Return a human-readable name for the DW_EH_PE_* encoding FORMAT,
   for use in assembler output comments.  Aborts on combinations this
   file never emits.  */

static const char *
eh_data_format_name (format)
     int format;
{
  switch (format)
    {
    case DW_EH_PE_absptr: return "absolute";
    case DW_EH_PE_omit: return "omit";

    case DW_EH_PE_uleb128: return "uleb128";
    case DW_EH_PE_udata2: return "udata2";
    case DW_EH_PE_udata4: return "udata4";
    case DW_EH_PE_udata8: return "udata8";
    case DW_EH_PE_sleb128: return "sleb128";
    case DW_EH_PE_sdata2: return "sdata2";
    case DW_EH_PE_sdata4: return "sdata4";
    case DW_EH_PE_sdata8: return "sdata8";

    case DW_EH_PE_uleb128 | DW_EH_PE_pcrel: return "pcrel uleb128";
    case DW_EH_PE_udata2 | DW_EH_PE_pcrel: return "pcrel udata2";
    case DW_EH_PE_udata4 | DW_EH_PE_pcrel: return "pcrel udata4";
    case DW_EH_PE_udata8 | DW_EH_PE_pcrel: return "pcrel udata8";
    case DW_EH_PE_sleb128 | DW_EH_PE_pcrel: return "pcrel sleb128";
    case DW_EH_PE_sdata2 | DW_EH_PE_pcrel: return "pcrel sdata2";
    case DW_EH_PE_sdata4 | DW_EH_PE_pcrel: return "pcrel sdata4";
    case DW_EH_PE_sdata8 | DW_EH_PE_pcrel: return "pcrel sdata8";

    case DW_EH_PE_uleb128 | DW_EH_PE_textrel: return "textrel uleb128";
    case DW_EH_PE_udata2 | DW_EH_PE_textrel: return "textrel udata2";
    case DW_EH_PE_udata4 | DW_EH_PE_textrel: return "textrel udata4";
    case DW_EH_PE_udata8 | DW_EH_PE_textrel: return "textrel udata8";
    case DW_EH_PE_sleb128 | DW_EH_PE_textrel: return "textrel sleb128";
    case DW_EH_PE_sdata2 | DW_EH_PE_textrel: return "textrel sdata2";
    case DW_EH_PE_sdata4 | DW_EH_PE_textrel: return "textrel sdata4";
    case DW_EH_PE_sdata8 | DW_EH_PE_textrel: return "textrel sdata8";

    case DW_EH_PE_uleb128 | DW_EH_PE_datarel: return "datarel uleb128";
    case DW_EH_PE_udata2 | DW_EH_PE_datarel: return "datarel udata2";
    case DW_EH_PE_udata4 | DW_EH_PE_datarel: return "datarel udata4";
    case DW_EH_PE_udata8 | DW_EH_PE_datarel: return "datarel udata8";
    case DW_EH_PE_sleb128 | DW_EH_PE_datarel: return "datarel sleb128";
    case DW_EH_PE_sdata2 | DW_EH_PE_datarel: return "datarel sdata2";
    case DW_EH_PE_sdata4 | DW_EH_PE_datarel: return "datarel sdata4";
    case DW_EH_PE_sdata8 | DW_EH_PE_datarel: return "datarel sdata8";

    case DW_EH_PE_uleb128 | DW_EH_PE_funcrel: return "funcrel uleb128";
    case DW_EH_PE_udata2 | DW_EH_PE_funcrel: return "funcrel udata2";
    case DW_EH_PE_udata4 | DW_EH_PE_funcrel: return "funcrel udata4";
    case DW_EH_PE_udata8 | DW_EH_PE_funcrel: return "funcrel udata8";
    case DW_EH_PE_sleb128 | DW_EH_PE_funcrel: return "funcrel sleb128";
    case DW_EH_PE_sdata2 | DW_EH_PE_funcrel: return "funcrel sdata2";
    case DW_EH_PE_sdata4 | DW_EH_PE_funcrel: return "funcrel sdata4";
    case DW_EH_PE_sdata8 | DW_EH_PE_funcrel: return "funcrel sdata8";

    default:
      abort ();
    }
}
3445
52a11cbf
RH
#ifndef HAVE_AS_LEB128
/* Size in bytes of the dwarf2 call-site table: three 4-byte fields
   (region start, length, landing pad) plus a uleb128 action offset
   per entry.  Only needed when the assembler cannot compute leb128
   differences itself.  */
static int
dw2_size_of_call_site_table ()
{
  int n = cfun->eh->call_site_data_used;
  int size = n * (4 + 4 + 4);
  int i;

  for (i = 0; i < n; ++i)
    {
      struct call_site_record *cs = &cfun->eh->call_site_data[i];
      size += size_of_uleb128 (cs->action);
    }

  return size;
}

/* Size in bytes of the sjlj call-site table: two uleb128 fields
   (dispatch index stored as a CONST_INT in landing_pad, and action)
   per entry.  */
static int
sjlj_size_of_call_site_table ()
{
  int n = cfun->eh->call_site_data_used;
  int size = 0;
  int i;

  for (i = 0; i < n; ++i)
    {
      struct call_site_record *cs = &cfun->eh->call_site_data[i];
      size += size_of_uleb128 (INTVAL (cs->landing_pad));
      size += size_of_uleb128 (cs->action);
    }

  return size;
}
#endif
3480
/* Output the dwarf2 call-site table: for each entry, the region start
   and length, the landing pad (0 if none), and the action offset, all
   relative to the function start.  Advances call_site_base so the
   next function's entries get fresh label numbers.  */
static void
dw2_output_call_site_table ()
{
  const char *function_start_lab
    = IDENTIFIER_POINTER (current_function_func_begin_label);
  int n = cfun->eh->call_site_data_used;
  int i;

  for (i = 0; i < n; ++i)
    {
      struct call_site_record *cs = &cfun->eh->call_site_data[i];
      char reg_start_lab[32];
      char reg_end_lab[32];
      char landing_pad_lab[32];

      ASM_GENERATE_INTERNAL_LABEL (reg_start_lab, "LEHB", call_site_base + i);
      ASM_GENERATE_INTERNAL_LABEL (reg_end_lab, "LEHE", call_site_base + i);

      if (cs->landing_pad)
	ASM_GENERATE_INTERNAL_LABEL (landing_pad_lab, "L",
				     CODE_LABEL_NUMBER (cs->landing_pad));

      /* ??? Perhaps use insn length scaling if the assembler supports
	 generic arithmetic.  */
      /* ??? Perhaps use attr_length to choose data1 or data2 instead of
	 data4 if the function is small enough.  */
#ifdef HAVE_AS_LEB128
      dw2_asm_output_delta_uleb128 (reg_start_lab, function_start_lab,
				    "region %d start", i);
      dw2_asm_output_delta_uleb128 (reg_end_lab, reg_start_lab,
				    "length");
      if (cs->landing_pad)
	dw2_asm_output_delta_uleb128 (landing_pad_lab, function_start_lab,
				      "landing pad");
      else
	dw2_asm_output_data_uleb128 (0, "landing pad");
#else
      dw2_asm_output_delta (4, reg_start_lab, function_start_lab,
			    "region %d start", i);
      dw2_asm_output_delta (4, reg_end_lab, reg_start_lab, "length");
      if (cs->landing_pad)
	dw2_asm_output_delta (4, landing_pad_lab, function_start_lab,
			      "landing pad");
      else
	dw2_asm_output_data (4, 0, "landing pad");
#endif
      dw2_asm_output_data_uleb128 (cs->action, "action");
    }

  call_site_base += n;
}
3532
3533static void
3534sjlj_output_call_site_table ()
3535{
3536 int n = cfun->eh->call_site_data_used;
3537 int i;
1ef1bf06 3538
52a11cbf 3539 for (i = 0; i < n; ++i)
1ef1bf06 3540 {
52a11cbf 3541 struct call_site_record *cs = &cfun->eh->call_site_data[i];
4da896b2 3542
52a11cbf
RH
3543 dw2_asm_output_data_uleb128 (INTVAL (cs->landing_pad),
3544 "region %d landing pad", i);
3545 dw2_asm_output_data_uleb128 (cs->action, "action");
3546 }
4da896b2 3547
52a11cbf 3548 call_site_base += n;
1ef1bf06
AM
3549}
3550
52a11cbf
RH
/* Emit the function's language-specific data area (LSDA): the header
   describing the encodings used, the call-site table, the action
   record table, the @TType (runtime type) table, and the exception
   specification table.  Emits nothing if no lsda is required.  */
void
output_function_exception_table ()
{
  int format, i, n;
#ifdef HAVE_AS_LEB128
  char ttype_label[32];
  char cs_after_size_label[32];
  char cs_end_label[32];
#else
  int call_site_len;
#endif
  int have_tt_data;
  int funcdef_number;

  /* Not all functions need anything.  */
  if (! cfun->uses_eh_lsda)
    return;

  funcdef_number = (USING_SJLJ_EXCEPTIONS
		    ? sjlj_funcdef_number
		    : current_funcdef_number);

  exception_section ();

  have_tt_data = (VARRAY_ACTIVE_SIZE (cfun->eh->ttype_data) > 0
		  || VARRAY_ACTIVE_SIZE (cfun->eh->ehspec_data) > 0);

  if (have_tt_data)
    assemble_eh_align (GET_MODE_ALIGNMENT (ptr_mode));

  ASM_OUTPUT_INTERNAL_LABEL (asm_out_file, "LLSDA", funcdef_number);

  /* The LSDA header.  */

  /* Indicate the format of the landing pad start pointer.  An omitted
     field implies @LPStart == @Start.  */
  /* Currently we always put @LPStart == @Start.  This field would
     be most useful in moving the landing pads completely out of
     line to another section, but it could also be used to minimize
     the size of uleb128 landing pad offsets.  */
  format = DW_EH_PE_omit;
  dw2_asm_output_data (1, format, "@LPStart format (%s)",
		       eh_data_format_name (format));

  /* @LPStart pointer would go here.  */

  /* Indicate the format of the @TType entries.  */
  if (! have_tt_data)
    format = DW_EH_PE_omit;
  else
    {
      /* ??? Define a ASM_PREFERRED_DATA_FORMAT to say what
	 sort of dynamic-relocation-free reference to emit.  */
      format = 0;
#ifdef HAVE_AS_LEB128
      ASM_GENERATE_INTERNAL_LABEL (ttype_label, "LLSDATT", funcdef_number);
#endif
    }
  dw2_asm_output_data (1, format, "@TType format (%s)",
		       eh_data_format_name (format));

#ifndef HAVE_AS_LEB128
  if (USING_SJLJ_EXCEPTIONS)
    call_site_len = sjlj_size_of_call_site_table ();
  else
    call_site_len = dw2_size_of_call_site_table ();
#endif

  /* A pc-relative 4-byte displacement to the @TType data.  */
  if (have_tt_data)
    {
#ifdef HAVE_AS_LEB128
      char ttype_after_disp_label[32];
      ASM_GENERATE_INTERNAL_LABEL (ttype_after_disp_label, "LLSDATTD",
				   funcdef_number);
      dw2_asm_output_delta_uleb128 (ttype_label, ttype_after_disp_label,
				    "@TType base offset");
      ASM_OUTPUT_LABEL (asm_out_file, ttype_after_disp_label);
#else
      /* Ug.  Alignment queers things.  The size of the uleb128 that
	 encodes the displacement affects the padding needed to align
	 the @TType table, which in turn affects the displacement —
	 so iterate until the value is self-consistent.  */
      unsigned int before_disp, after_disp, last_disp, disp, align;

      align = POINTER_SIZE / BITS_PER_UNIT;
      before_disp = 1 + 1;
      after_disp = (1 + size_of_uleb128 (call_site_len)
		    + call_site_len
		    + VARRAY_ACTIVE_SIZE (cfun->eh->action_record_data)
		    + VARRAY_ACTIVE_SIZE (cfun->eh->ttype_data) * align);

      disp = after_disp;
      do
	{
	  unsigned int disp_size, pad;

	  last_disp = disp;
	  disp_size = size_of_uleb128 (disp);
	  pad = before_disp + disp_size + after_disp;
	  if (pad % align)
	    pad = align - (pad % align);
	  else
	    pad = 0;
	  disp = after_disp + pad;
	}
      while (disp != last_disp);

      dw2_asm_output_data_uleb128 (disp, "@TType base offset");
#endif
    }

  /* Indicate the format of the call-site offsets.  */
#ifdef HAVE_AS_LEB128
  format = DW_EH_PE_uleb128;
#else
  format = DW_EH_PE_udata4;
#endif
  dw2_asm_output_data (1, format, "call-site format (%s)",
		       eh_data_format_name (format));

#ifdef HAVE_AS_LEB128
  ASM_GENERATE_INTERNAL_LABEL (cs_after_size_label, "LLSDACSB",
			       funcdef_number);
  ASM_GENERATE_INTERNAL_LABEL (cs_end_label, "LLSDACSE",
			       funcdef_number);
  dw2_asm_output_delta_uleb128 (cs_end_label, cs_after_size_label,
				"Call-site table length");
  ASM_OUTPUT_LABEL (asm_out_file, cs_after_size_label);
  if (USING_SJLJ_EXCEPTIONS)
    sjlj_output_call_site_table ();
  else
    dw2_output_call_site_table ();
  ASM_OUTPUT_LABEL (asm_out_file, cs_end_label);
#else
  dw2_asm_output_data_uleb128 (call_site_len,"Call-site table length");
  if (USING_SJLJ_EXCEPTIONS)
    sjlj_output_call_site_table ();
  else
    dw2_output_call_site_table ();
#endif

  /* ??? Decode and interpret the data for flag_debug_asm.  */
  n = VARRAY_ACTIVE_SIZE (cfun->eh->action_record_data);
  for (i = 0; i < n; ++i)
    dw2_asm_output_data (1, VARRAY_UCHAR (cfun->eh->action_record_data, i),
			 (i ? NULL : "Action record table"));

  if (have_tt_data)
    assemble_eh_align (GET_MODE_ALIGNMENT (ptr_mode));

  /* The @TType table is emitted in reverse order, as the runtime
     indexes it with negative offsets from its end.  */
  i = VARRAY_ACTIVE_SIZE (cfun->eh->ttype_data);
  while (i-- > 0)
    {
      tree type = VARRAY_TREE (cfun->eh->ttype_data, i);

      if (type == NULL_TREE)
	type = integer_zero_node;
      else
	type = lookup_type_for_runtime (type);

      /* ??? Handle ASM_PREFERRED_DATA_FORMAT.  */
      output_constant (type, GET_MODE_SIZE (ptr_mode));
    }

#ifdef HAVE_AS_LEB128
  if (have_tt_data)
    ASM_OUTPUT_LABEL (asm_out_file, ttype_label);
#endif

  /* ??? Decode and interpret the data for flag_debug_asm.  */
  n = VARRAY_ACTIVE_SIZE (cfun->eh->ehspec_data);
  for (i = 0; i < n; ++i)
    dw2_asm_output_data (1, VARRAY_UCHAR (cfun->eh->ehspec_data, i),
			 (i ? NULL : "Exception specification table"));

  function_section (current_function_decl);

  if (USING_SJLJ_EXCEPTIONS)
    sjlj_funcdef_number += 1;
}