12670d88 1/* Implements exception handling.
3f2c5d1a 2 Copyright (C) 1989, 1992, 1993, 1994, 1995, 1996, 1997, 1998,
c913b6f1 3 1999, 2000, 2001 Free Software Foundation, Inc.
4956d07c
MS
4 Contributed by Mike Stump <mrs@cygnus.com>.
5
1322177d 6This file is part of GCC.
4956d07c 7
1322177d
LB
8GCC is free software; you can redistribute it and/or modify it under
9the terms of the GNU General Public License as published by the Free
10Software Foundation; either version 2, or (at your option) any later
11version.
4956d07c 12
1322177d
LB
13GCC is distributed in the hope that it will be useful, but WITHOUT ANY
14WARRANTY; without even the implied warranty of MERCHANTABILITY or
15FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
16for more details.
4956d07c
MS
17
18You should have received a copy of the GNU General Public License
1322177d
LB
19along with GCC; see the file COPYING. If not, write to the Free
20Software Foundation, 59 Temple Place - Suite 330, Boston, MA
2102111-1307, USA. */
4956d07c
MS
22
23
12670d88
RK
24/* An exception is an event that can be signaled from within a
25 function. This event can then be "caught" or "trapped" by the
26 callers of this function. This potentially allows program flow to
956d6950 27 be transferred to any arbitrary code associated with a function call
12670d88
RK
28 several levels up the stack.
29
30 The intended use for this mechanism is for signaling "exceptional
31 events" in an out-of-band fashion, hence its name. The C++ language
32 (and many other OO-styled or functional languages) practically
33 requires such a mechanism, as otherwise it becomes very difficult
34 or even impossible to signal failure conditions in complex
35 situations. The traditional C++ example is when an error occurs in
36 the process of constructing an object; without such a mechanism, it
37 is impossible to signal that the error occurred without adding global
38 state variables and error checks around every object construction.
39
40 The act of causing this event to occur is referred to as "throwing
41 an exception". (Alternate terms include "raising an exception" or
42 "signaling an exception".) The term "throw" is used because control
43 is returned to the callers of the function that is signaling the
44 exception, and thus there is the concept of "throwing" the
45 exception up the call stack.
46
52a11cbf 47 [ Add updated documentation on how to use this. ] */
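/* As a rough sketch of usage (pending the updated documentation noted
   above), a front end typically brackets a protected construct with a
   region-start call and one of the region-end calls defined below.  For
   a cleanup, the sequence is approximately:

     expand_eh_region_start ();
     ... emit the protected statements ...
     expand_eh_region_end_cleanup (cleanup_expr);

   where CLEANUP_EXPR stands for a hypothetical tree expression for the
   cleanup action; the exact calls made vary by front end.  */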
4956d07c
MS
48
49
50#include "config.h"
670ee920 51#include "system.h"
4956d07c
MS
52#include "rtl.h"
53#include "tree.h"
54#include "flags.h"
4956d07c 55#include "function.h"
4956d07c 56#include "expr.h"
e78d8e51 57#include "libfuncs.h"
4956d07c 58#include "insn-config.h"
52a11cbf
RH
59#include "except.h"
60#include "integrate.h"
61#include "hard-reg-set.h"
62#include "basic-block.h"
4956d07c 63#include "output.h"
52a11cbf
RH
64#include "dwarf2asm.h"
65#include "dwarf2out.h"
2a1ee410 66#include "dwarf2.h"
10f0ad3d 67#include "toplev.h"
52a11cbf 68#include "hashtab.h"
2b12ffe0 69#include "intl.h"
87ff9c8e 70#include "ggc.h"
b1474bb7 71#include "tm_p.h"
07c9d2eb 72#include "target.h"
52a11cbf
RH
73
74/* Provide defaults for stuff that may not be defined when using
75 sjlj exceptions. */
76#ifndef EH_RETURN_STACKADJ_RTX
77#define EH_RETURN_STACKADJ_RTX 0
78#endif
79#ifndef EH_RETURN_HANDLER_RTX
80#define EH_RETURN_HANDLER_RTX 0
81#endif
82#ifndef EH_RETURN_DATA_REGNO
83#define EH_RETURN_DATA_REGNO(N) INVALID_REGNUM
461fc4de
RH
84#endif
85
27a36778 86
52a11cbf
RH
87/* Nonzero means enable synchronous exceptions for non-call instructions. */
88int flag_non_call_exceptions;
27a36778 89
52a11cbf
RH
90/* Protect cleanup actions with must-not-throw regions, with a call
91 to the given failure handler. */
e6855a2d 92tree (*lang_protect_cleanup_actions) PARAMS ((void));
27a36778 93
52a11cbf
RH
94/* Return true if type A catches type B. */
95int (*lang_eh_type_covers) PARAMS ((tree a, tree b));
27a36778 96
52a11cbf
RH
97/* Map a type to a runtime object to match type. */
98tree (*lang_eh_runtime_type) PARAMS ((tree));
4956d07c 99
52a11cbf 100/* A list of labels used for exception handlers. */
4956d07c
MS
101rtx exception_handler_labels;
102
52a11cbf 103static int call_site_base;
ae0ed63a 104static unsigned int sjlj_funcdef_number;
52a11cbf
RH
105static htab_t type_to_runtime_map;
106
107/* Describe the SjLj_Function_Context structure. */
108static tree sjlj_fc_type_node;
109static int sjlj_fc_call_site_ofs;
110static int sjlj_fc_data_ofs;
111static int sjlj_fc_personality_ofs;
112static int sjlj_fc_lsda_ofs;
113static int sjlj_fc_jbuf_ofs;
114\f
115/* Describes one exception region. */
116struct eh_region
117{
118 /* The immediately surrounding region. */
119 struct eh_region *outer;
956d6950 120
52a11cbf
RH
121 /* The list of immediately contained regions. */
122 struct eh_region *inner;
123 struct eh_region *next_peer;
956d6950 124
52a11cbf
RH
125 /* An identifier for this region. */
126 int region_number;
71038426 127
52a11cbf
RH
128 /* Each region does exactly one thing. */
129 enum eh_region_type
130 {
572202a7
RK
131 ERT_UNKNOWN = 0,
132 ERT_CLEANUP,
52a11cbf
RH
133 ERT_TRY,
134 ERT_CATCH,
135 ERT_ALLOWED_EXCEPTIONS,
136 ERT_MUST_NOT_THROW,
137 ERT_THROW,
138 ERT_FIXUP
139 } type;
140
eaec9b3d 141 /* Holds the action to perform based on the preceding type. */
52a11cbf
RH
142 union {
143 /* A list of catch blocks, a surrounding try block,
144 and the label for continuing after a catch. */
145 struct {
146 struct eh_region *catch;
147 struct eh_region *last_catch;
148 struct eh_region *prev_try;
149 rtx continue_label;
150 } try;
151
6d41a92f
OH
152 /* The list through the catch handlers, the list of type objects
153 matched, and the list of associated filters. */
52a11cbf
RH
154 struct {
155 struct eh_region *next_catch;
156 struct eh_region *prev_catch;
6d41a92f
OH
157 tree type_list;
158 tree filter_list;
52a11cbf
RH
159 } catch;
160
161 /* A tree_list of allowed types. */
162 struct {
163 tree type_list;
164 int filter;
165 } allowed;
166
3f2c5d1a 167 /* The type given by a call to "throw foo();", or discovered
52a11cbf
RH
168 for a throw. */
169 struct {
170 tree type;
171 } throw;
172
173 /* Retain the cleanup expression even after expansion so that
174 we can match up fixup regions. */
175 struct {
176 tree exp;
177 } cleanup;
178
179 /* The real region (by expression and by pointer) that fixup code
180 should live in. */
181 struct {
182 tree cleanup_exp;
183 struct eh_region *real_region;
184 } fixup;
185 } u;
186
47c84870
JM
187 /* Entry point for this region's handler before landing pads are built. */
188 rtx label;
52a11cbf 189
47c84870 190 /* Entry point for this region's handler from the runtime eh library. */
52a11cbf
RH
191 rtx landing_pad;
192
47c84870 193 /* Entry point for this region's handler from an inner region. */
52a11cbf 194 rtx post_landing_pad;
47c84870
JM
195
196 /* The RESX insn for handing off control to the next outermost handler,
197 if appropriate. */
198 rtx resume;
52a11cbf 199};
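/* A sketch of how the OUTER, INNER and NEXT_PEER links above form the
   region tree, for a hypothetical nesting (region numbers are purely
   illustrative):

     region 1
       region 2      INNER of 1
       region 3      NEXT_PEER of 2, OUTER == 1
     region 4        NEXT_PEER of 1

   Every region points at its enclosing region through OUTER, at its
   first contained region through INNER, and at its siblings through
   NEXT_PEER.  */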
71038426 200
52a11cbf
RH
201/* Used to save exception status for each function. */
202struct eh_status
203{
204 /* The tree of all regions for this function. */
205 struct eh_region *region_tree;
e6cfb550 206
52a11cbf
RH
207 /* The same information as an indexable array. */
208 struct eh_region **region_array;
e6cfb550 209
52a11cbf
RH
210 /* The most recently open region. */
211 struct eh_region *cur_region;
e6cfb550 212
52a11cbf
RH
213 /* This is the region for which we are processing catch blocks. */
214 struct eh_region *try_region;
71038426 215
52a11cbf
RH
216 /* A stack (TREE_LIST) of lists of handlers. The TREE_VALUE of each
217 node is itself a TREE_CHAINed list of handlers for regions that
218 are not yet closed; the lists are built by add_partial_entry and
219 the regions are closed by end_protect_partials. */
220 tree protect_list;
1e4ceb6f 221
52a11cbf
RH
222 rtx filter;
223 rtx exc_ptr;
4956d07c 224
52a11cbf
RH
225 int built_landing_pads;
226 int last_region_number;
e6cfb550 227
52a11cbf
RH
228 varray_type ttype_data;
229 varray_type ehspec_data;
230 varray_type action_record_data;
6814a8a0 231
52a11cbf
RH
232 struct call_site_record
233 {
234 rtx landing_pad;
235 int action;
236 } *call_site_data;
237 int call_site_data_used;
238 int call_site_data_size;
239
240 rtx ehr_stackadj;
241 rtx ehr_handler;
242 rtx ehr_label;
243
244 rtx sjlj_fc;
245 rtx sjlj_exit_after;
246};
e6cfb550 247
52a11cbf
RH
248\f
249static void mark_eh_region PARAMS ((struct eh_region *));
250
251static int t2r_eq PARAMS ((const PTR,
252 const PTR));
253static hashval_t t2r_hash PARAMS ((const PTR));
254static int t2r_mark_1 PARAMS ((PTR *, PTR));
255static void t2r_mark PARAMS ((PTR));
256static void add_type_for_runtime PARAMS ((tree));
257static tree lookup_type_for_runtime PARAMS ((tree));
258
259static struct eh_region *expand_eh_region_end PARAMS ((void));
260
86c99549 261static rtx get_exception_filter PARAMS ((struct function *));
47c84870 262
52a11cbf
RH
263static void collect_eh_region_array PARAMS ((void));
264static void resolve_fixup_regions PARAMS ((void));
265static void remove_fixup_regions PARAMS ((void));
266static void convert_from_eh_region_ranges_1 PARAMS ((rtx *, int *, int));
267
268static struct eh_region *duplicate_eh_region_1 PARAMS ((struct eh_region *,
269 struct inline_remap *));
270static void duplicate_eh_region_2 PARAMS ((struct eh_region *,
271 struct eh_region **));
272static int ttypes_filter_eq PARAMS ((const PTR,
273 const PTR));
274static hashval_t ttypes_filter_hash PARAMS ((const PTR));
275static int ehspec_filter_eq PARAMS ((const PTR,
276 const PTR));
277static hashval_t ehspec_filter_hash PARAMS ((const PTR));
278static int add_ttypes_entry PARAMS ((htab_t, tree));
279static int add_ehspec_entry PARAMS ((htab_t, htab_t,
280 tree));
281static void assign_filter_values PARAMS ((void));
282static void build_post_landing_pads PARAMS ((void));
283static void connect_post_landing_pads PARAMS ((void));
284static void dw2_build_landing_pads PARAMS ((void));
285
286struct sjlj_lp_info;
287static bool sjlj_find_directly_reachable_regions
288 PARAMS ((struct sjlj_lp_info *));
289static void sjlj_assign_call_site_values
290 PARAMS ((rtx, struct sjlj_lp_info *));
291static void sjlj_mark_call_sites
292 PARAMS ((struct sjlj_lp_info *));
293static void sjlj_emit_function_enter PARAMS ((rtx));
294static void sjlj_emit_function_exit PARAMS ((void));
295static void sjlj_emit_dispatch_table
296 PARAMS ((rtx, struct sjlj_lp_info *));
297static void sjlj_build_landing_pads PARAMS ((void));
298
299static void remove_exception_handler_label PARAMS ((rtx));
300static void remove_eh_handler PARAMS ((struct eh_region *));
301
302struct reachable_info;
303
304/* The return value of reachable_next_level. */
305enum reachable_code
306{
307 /* The given exception is not processed by the given region. */
308 RNL_NOT_CAUGHT,
309 /* The given exception may need processing by the given region. */
310 RNL_MAYBE_CAUGHT,
311 /* The given exception is completely processed by the given region. */
312 RNL_CAUGHT,
313 /* The given exception is completely processed by the runtime. */
314 RNL_BLOCKED
315};
e6cfb550 316
52a11cbf
RH
317static int check_handled PARAMS ((tree, tree));
318static void add_reachable_handler
319 PARAMS ((struct reachable_info *, struct eh_region *,
320 struct eh_region *));
321static enum reachable_code reachable_next_level
322 PARAMS ((struct eh_region *, tree, struct reachable_info *));
323
324static int action_record_eq PARAMS ((const PTR,
325 const PTR));
326static hashval_t action_record_hash PARAMS ((const PTR));
327static int add_action_record PARAMS ((htab_t, int, int));
328static int collect_one_action_chain PARAMS ((htab_t,
329 struct eh_region *));
330static int add_call_site PARAMS ((rtx, int));
331
332static void push_uleb128 PARAMS ((varray_type *,
333 unsigned int));
334static void push_sleb128 PARAMS ((varray_type *, int));
52a11cbf
RH
335#ifndef HAVE_AS_LEB128
336static int dw2_size_of_call_site_table PARAMS ((void));
337static int sjlj_size_of_call_site_table PARAMS ((void));
338#endif
339static void dw2_output_call_site_table PARAMS ((void));
340static void sjlj_output_call_site_table PARAMS ((void));
e6cfb550 341
52a11cbf
RH
342\f
343/* Routine to see if exception handling is turned on.
344 DO_WARN is non-zero if we want to inform the user that exception
3f2c5d1a 345 handling is turned off.
4956d07c 346
52a11cbf
RH
347 This is used to ensure that -fexceptions has been specified if the
348 compiler tries to use any exception-specific functions. */
4956d07c 349
52a11cbf
RH
350int
351doing_eh (do_warn)
352 int do_warn;
353{
354 if (! flag_exceptions)
355 {
356 static int warned = 0;
357 if (! warned && do_warn)
358 {
359 error ("exception handling disabled, use -fexceptions to enable");
360 warned = 1;
361 }
362 return 0;
363 }
364 return 1;
4956d07c
MS
365}
366
52a11cbf
RH
367\f
368void
369init_eh ()
4956d07c 370{
52a11cbf 371 ggc_add_rtx_root (&exception_handler_labels, 1);
4956d07c 372
52a11cbf
RH
373 if (! flag_exceptions)
374 return;
4956d07c 375
52a11cbf
RH
376 type_to_runtime_map = htab_create (31, t2r_hash, t2r_eq, NULL);
377 ggc_add_root (&type_to_runtime_map, 1, sizeof (htab_t), t2r_mark);
4956d07c 378
52a11cbf
RH
379 /* Create the SjLj_Function_Context structure. This should match
380 the definition in unwind-sjlj.c. */
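      /* For orientation, the fields built below correspond roughly to a
	 structure along these lines (a sketch only; the authoritative
	 layout is the one in unwind-sjlj.c, and the jbuf length is
	 target dependent):

	   struct SjLj_Function_Context
	   {
	     struct SjLj_Function_Context *prev;     for __prev
	     int call_site;                          for __call_site
	     unsigned long data[4];                  for __data (word_mode)
	     void *personality;                      for __personality
	     void *lsda;                             for __lsda
	     void *jbuf[];                           for __jbuf
	   };  */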
381 if (USING_SJLJ_EXCEPTIONS)
382 {
383 tree f_jbuf, f_per, f_lsda, f_prev, f_cs, f_data, tmp;
4956d07c 384
52a11cbf
RH
385 sjlj_fc_type_node = make_lang_type (RECORD_TYPE);
386 ggc_add_tree_root (&sjlj_fc_type_node, 1);
9a0d1e1b 387
52a11cbf
RH
388 f_prev = build_decl (FIELD_DECL, get_identifier ("__prev"),
389 build_pointer_type (sjlj_fc_type_node));
390 DECL_FIELD_CONTEXT (f_prev) = sjlj_fc_type_node;
9a0d1e1b 391
52a11cbf
RH
392 f_cs = build_decl (FIELD_DECL, get_identifier ("__call_site"),
393 integer_type_node);
394 DECL_FIELD_CONTEXT (f_cs) = sjlj_fc_type_node;
4956d07c 395
52a11cbf
RH
396 tmp = build_index_type (build_int_2 (4 - 1, 0));
397 tmp = build_array_type (type_for_mode (word_mode, 1), tmp);
398 f_data = build_decl (FIELD_DECL, get_identifier ("__data"), tmp);
399 DECL_FIELD_CONTEXT (f_data) = sjlj_fc_type_node;
9a0d1e1b 400
52a11cbf
RH
401 f_per = build_decl (FIELD_DECL, get_identifier ("__personality"),
402 ptr_type_node);
403 DECL_FIELD_CONTEXT (f_per) = sjlj_fc_type_node;
4956d07c 404
52a11cbf
RH
405 f_lsda = build_decl (FIELD_DECL, get_identifier ("__lsda"),
406 ptr_type_node);
407 DECL_FIELD_CONTEXT (f_lsda) = sjlj_fc_type_node;
6814a8a0 408
52a11cbf
RH
409#ifdef DONT_USE_BUILTIN_SETJMP
410#ifdef JMP_BUF_SIZE
411 tmp = build_int_2 (JMP_BUF_SIZE - 1, 0);
412#else
413 /* This should be large enough for most systems; if it is not,
414 JMP_BUF_SIZE should be defined with the proper value. It will
415 also tend to be larger than necessary for most systems; a port
416 that wants an optimal size should define JMP_BUF_SIZE. */
417 tmp = build_int_2 (FIRST_PSEUDO_REGISTER + 2 - 1, 0);
418#endif
419#else
420 /* This is 2 for builtin_setjmp, plus whatever the target requires
421 via STACK_SAVEAREA_MODE (SAVE_NONLOCAL). */
422 tmp = build_int_2 ((GET_MODE_SIZE (STACK_SAVEAREA_MODE (SAVE_NONLOCAL))
423 / GET_MODE_SIZE (Pmode)) + 2 - 1, 0);
424#endif
425 tmp = build_index_type (tmp);
426 tmp = build_array_type (ptr_type_node, tmp);
427 f_jbuf = build_decl (FIELD_DECL, get_identifier ("__jbuf"), tmp);
428#ifdef DONT_USE_BUILTIN_SETJMP
429 /* We don't know what the alignment requirements of the
430 runtime's jmp_buf are. Overestimate. */
431 DECL_ALIGN (f_jbuf) = BIGGEST_ALIGNMENT;
432 DECL_USER_ALIGN (f_jbuf) = 1;
433#endif
434 DECL_FIELD_CONTEXT (f_jbuf) = sjlj_fc_type_node;
435
436 TYPE_FIELDS (sjlj_fc_type_node) = f_prev;
437 TREE_CHAIN (f_prev) = f_cs;
438 TREE_CHAIN (f_cs) = f_data;
439 TREE_CHAIN (f_data) = f_per;
440 TREE_CHAIN (f_per) = f_lsda;
441 TREE_CHAIN (f_lsda) = f_jbuf;
442
443 layout_type (sjlj_fc_type_node);
444
445 /* Cache the interesting field offsets so that we have
446 easy access from rtl. */
447 sjlj_fc_call_site_ofs
448 = (tree_low_cst (DECL_FIELD_OFFSET (f_cs), 1)
449 + tree_low_cst (DECL_FIELD_BIT_OFFSET (f_cs), 1) / BITS_PER_UNIT);
450 sjlj_fc_data_ofs
451 = (tree_low_cst (DECL_FIELD_OFFSET (f_data), 1)
452 + tree_low_cst (DECL_FIELD_BIT_OFFSET (f_data), 1) / BITS_PER_UNIT);
453 sjlj_fc_personality_ofs
454 = (tree_low_cst (DECL_FIELD_OFFSET (f_per), 1)
455 + tree_low_cst (DECL_FIELD_BIT_OFFSET (f_per), 1) / BITS_PER_UNIT);
456 sjlj_fc_lsda_ofs
457 = (tree_low_cst (DECL_FIELD_OFFSET (f_lsda), 1)
458 + tree_low_cst (DECL_FIELD_BIT_OFFSET (f_lsda), 1) / BITS_PER_UNIT);
459 sjlj_fc_jbuf_ofs
460 = (tree_low_cst (DECL_FIELD_OFFSET (f_jbuf), 1)
461 + tree_low_cst (DECL_FIELD_BIT_OFFSET (f_jbuf), 1) / BITS_PER_UNIT);
462 }
4956d07c
MS
463}
464
52a11cbf
RH
465void
466init_eh_for_function ()
4956d07c 467{
52a11cbf 468 cfun->eh = (struct eh_status *) xcalloc (1, sizeof (struct eh_status));
4956d07c
MS
469}
470
52a11cbf 471/* Mark EH for GC. */
4956d07c
MS
472
473static void
52a11cbf
RH
474mark_eh_region (region)
475 struct eh_region *region;
4956d07c 476{
52a11cbf
RH
477 if (! region)
478 return;
4956d07c 479
52a11cbf
RH
480 switch (region->type)
481 {
572202a7
RK
482 case ERT_UNKNOWN:
483 /* This can happen if a nested function is inside the body of a region
484 and we do a GC as part of processing it. */
485 break;
52a11cbf
RH
486 case ERT_CLEANUP:
487 ggc_mark_tree (region->u.cleanup.exp);
488 break;
489 case ERT_TRY:
490 ggc_mark_rtx (region->u.try.continue_label);
491 break;
492 case ERT_CATCH:
6d41a92f
OH
493 ggc_mark_tree (region->u.catch.type_list);
494 ggc_mark_tree (region->u.catch.filter_list);
52a11cbf
RH
495 break;
496 case ERT_ALLOWED_EXCEPTIONS:
497 ggc_mark_tree (region->u.allowed.type_list);
498 break;
499 case ERT_MUST_NOT_THROW:
500 break;
501 case ERT_THROW:
502 ggc_mark_tree (region->u.throw.type);
503 break;
504 case ERT_FIXUP:
505 ggc_mark_tree (region->u.fixup.cleanup_exp);
506 break;
507 default:
508 abort ();
509 }
4956d07c 510
52a11cbf 511 ggc_mark_rtx (region->label);
47c84870 512 ggc_mark_rtx (region->resume);
52a11cbf
RH
513 ggc_mark_rtx (region->landing_pad);
514 ggc_mark_rtx (region->post_landing_pad);
4956d07c
MS
515}
516
52a11cbf
RH
517void
518mark_eh_status (eh)
519 struct eh_status *eh;
4956d07c 520{
52a11cbf
RH
521 int i;
522
523 if (eh == 0)
524 return;
525
526 /* If we've called collect_eh_region_array, use it. Otherwise walk
527 the tree non-recursively. */
528 if (eh->region_array)
529 {
530 for (i = eh->last_region_number; i > 0; --i)
531 {
532 struct eh_region *r = eh->region_array[i];
533 if (r && r->region_number == i)
534 mark_eh_region (r);
535 }
536 }
537 else if (eh->region_tree)
538 {
539 struct eh_region *r = eh->region_tree;
540 while (1)
541 {
542 mark_eh_region (r);
543 if (r->inner)
544 r = r->inner;
545 else if (r->next_peer)
546 r = r->next_peer;
547 else
548 {
549 do {
550 r = r->outer;
551 if (r == NULL)
552 goto tree_done;
553 } while (r->next_peer == NULL);
554 r = r->next_peer;
555 }
556 }
557 tree_done:;
558 }
4956d07c 559
52a11cbf
RH
560 ggc_mark_tree (eh->protect_list);
561 ggc_mark_rtx (eh->filter);
562 ggc_mark_rtx (eh->exc_ptr);
563 ggc_mark_tree_varray (eh->ttype_data);
4956d07c 564
52a11cbf
RH
565 if (eh->call_site_data)
566 {
567 for (i = eh->call_site_data_used - 1; i >= 0; --i)
568 ggc_mark_rtx (eh->call_site_data[i].landing_pad);
569 }
4956d07c 570
52a11cbf
RH
571 ggc_mark_rtx (eh->ehr_stackadj);
572 ggc_mark_rtx (eh->ehr_handler);
573 ggc_mark_rtx (eh->ehr_label);
4956d07c 574
52a11cbf
RH
575 ggc_mark_rtx (eh->sjlj_fc);
576 ggc_mark_rtx (eh->sjlj_exit_after);
4956d07c 577}
9a0d1e1b 578
52a11cbf
RH
579void
580free_eh_status (f)
581 struct function *f;
9a0d1e1b 582{
52a11cbf 583 struct eh_status *eh = f->eh;
250d07b6 584
52a11cbf 585 if (eh->region_array)
250d07b6 586 {
52a11cbf
RH
587 int i;
588 for (i = eh->last_region_number; i > 0; --i)
589 {
590 struct eh_region *r = eh->region_array[i];
591 /* Mind we don't free a region struct more than once. */
592 if (r && r->region_number == i)
593 free (r);
594 }
595 free (eh->region_array);
250d07b6 596 }
52a11cbf 597 else if (eh->region_tree)
250d07b6 598 {
52a11cbf
RH
599 struct eh_region *next, *r = eh->region_tree;
600 while (1)
601 {
602 if (r->inner)
603 r = r->inner;
604 else if (r->next_peer)
605 {
606 next = r->next_peer;
607 free (r);
608 r = next;
609 }
610 else
611 {
612 do {
613 next = r->outer;
614 free (r);
615 r = next;
616 if (r == NULL)
617 goto tree_done;
618 } while (r->next_peer == NULL);
619 next = r->next_peer;
620 free (r);
621 r = next;
622 }
623 }
624 tree_done:;
250d07b6
RH
625 }
626
52a11cbf
RH
627 VARRAY_FREE (eh->ttype_data);
628 VARRAY_FREE (eh->ehspec_data);
629 VARRAY_FREE (eh->action_record_data);
630 if (eh->call_site_data)
631 free (eh->call_site_data);
632
633 free (eh);
634 f->eh = NULL;
9a0d1e1b
AM
635}
636
52a11cbf
RH
637\f
638/* Start an exception handling region. All instructions emitted
639 after this point are considered to be part of the region until
640 expand_eh_region_end is invoked. */
9a0d1e1b 641
52a11cbf
RH
642void
643expand_eh_region_start ()
9a0d1e1b 644{
52a11cbf
RH
645 struct eh_region *new_region;
646 struct eh_region *cur_region;
647 rtx note;
9a0d1e1b 648
52a11cbf
RH
649 if (! doing_eh (0))
650 return;
9a0d1e1b 651
52a11cbf
RH
652 /* Insert a new blank region as a leaf in the tree. */
653 new_region = (struct eh_region *) xcalloc (1, sizeof (*new_region));
654 cur_region = cfun->eh->cur_region;
655 new_region->outer = cur_region;
656 if (cur_region)
9a0d1e1b 657 {
52a11cbf
RH
658 new_region->next_peer = cur_region->inner;
659 cur_region->inner = new_region;
660 }
e6cfb550 661 else
9a0d1e1b 662 {
52a11cbf
RH
663 new_region->next_peer = cfun->eh->region_tree;
664 cfun->eh->region_tree = new_region;
9a0d1e1b 665 }
52a11cbf
RH
666 cfun->eh->cur_region = new_region;
667
668 /* Create a note marking the start of this region. */
669 new_region->region_number = ++cfun->eh->last_region_number;
6496a589 670 note = emit_note (NULL, NOTE_INSN_EH_REGION_BEG);
52a11cbf 671 NOTE_EH_HANDLER (note) = new_region->region_number;
9a0d1e1b
AM
672}
673
52a11cbf 674/* Common code to end a region. Returns the region just ended. */
9f8e6243 675
52a11cbf
RH
676static struct eh_region *
677expand_eh_region_end ()
9f8e6243 678{
52a11cbf
RH
679 struct eh_region *cur_region = cfun->eh->cur_region;
680 rtx note;
681
a1f300c0 682 /* Create a note marking the end of this region. */
6496a589 683 note = emit_note (NULL, NOTE_INSN_EH_REGION_END);
52a11cbf
RH
684 NOTE_EH_HANDLER (note) = cur_region->region_number;
685
686 /* Pop. */
687 cfun->eh->cur_region = cur_region->outer;
688
52a11cbf 689 return cur_region;
9f8e6243
AM
690}
691
52a11cbf
RH
692/* End an exception handling region for a cleanup. HANDLER is an
693 expression to expand for the cleanup. */
9c606f69 694
52a11cbf
RH
695void
696expand_eh_region_end_cleanup (handler)
697 tree handler;
9c606f69 698{
52a11cbf 699 struct eh_region *region;
e6855a2d 700 tree protect_cleanup_actions;
52a11cbf 701 rtx around_label;
47c84870 702 rtx data_save[2];
52a11cbf
RH
703
704 if (! doing_eh (0))
705 return;
9c606f69 706
52a11cbf
RH
707 region = expand_eh_region_end ();
708 region->type = ERT_CLEANUP;
709 region->label = gen_label_rtx ();
710 region->u.cleanup.exp = handler;
9c606f69 711
52a11cbf
RH
712 around_label = gen_label_rtx ();
713 emit_jump (around_label);
9c606f69 714
52a11cbf 715 emit_label (region->label);
9c606f69 716
e6855a2d 717 /* Give the language a chance to specify an action to be taken if an
a1f300c0 718 exception is thrown that would propagate out of the HANDLER. */
3f2c5d1a
RS
719 protect_cleanup_actions
720 = (lang_protect_cleanup_actions
721 ? (*lang_protect_cleanup_actions) ()
e6855a2d
MM
722 : NULL_TREE);
723
52a11cbf
RH
724 if (protect_cleanup_actions)
725 expand_eh_region_start ();
9c606f69 726
47c84870
JM
727 /* In case this cleanup involves an inline destructor with a try block in
728 it, we need to save the EH return data registers around it. */
729 data_save[0] = gen_reg_rtx (Pmode);
86c99549 730 emit_move_insn (data_save[0], get_exception_pointer (cfun));
16842c15 731 data_save[1] = gen_reg_rtx (word_mode);
86c99549 732 emit_move_insn (data_save[1], get_exception_filter (cfun));
47c84870 733
52a11cbf 734 expand_expr (handler, const0_rtx, VOIDmode, 0);
9c606f69 735
47c84870
JM
736 emit_move_insn (cfun->eh->exc_ptr, data_save[0]);
737 emit_move_insn (cfun->eh->filter, data_save[1]);
738
52a11cbf
RH
739 if (protect_cleanup_actions)
740 expand_eh_region_end_must_not_throw (protect_cleanup_actions);
a9f0664a 741
c10f3adf
AH
742 /* We need any stack adjustment complete before the around_label. */
743 do_pending_stack_adjust ();
744
52a11cbf
RH
745 /* We delay the generation of the _Unwind_Resume until we generate
746 landing pads. We emit a marker here so as to get good control
747 flow data in the meantime. */
47c84870
JM
748 region->resume
749 = emit_jump_insn (gen_rtx_RESX (VOIDmode, region->region_number));
52a11cbf
RH
750 emit_barrier ();
751
52a11cbf 752 emit_label (around_label);
9c606f69
AM
753}
754
52a11cbf
RH
755/* End an exception handling region for a try block, and prepare
756 for subsequent calls to expand_start_catch. */
9a0d1e1b 757
52a11cbf
RH
758void
759expand_start_all_catch ()
9a0d1e1b 760{
52a11cbf 761 struct eh_region *region;
9a0d1e1b 762
52a11cbf
RH
763 if (! doing_eh (1))
764 return;
9a0d1e1b 765
52a11cbf
RH
766 region = expand_eh_region_end ();
767 region->type = ERT_TRY;
768 region->u.try.prev_try = cfun->eh->try_region;
769 region->u.try.continue_label = gen_label_rtx ();
9a0d1e1b 770
52a11cbf
RH
771 cfun->eh->try_region = region;
772
773 emit_jump (region->u.try.continue_label);
774}
9a0d1e1b 775
6d41a92f
OH
776/* Begin a catch clause. TYPE is the type caught, a list of such types, or
777 null if this is a catch-all clause. Providing a type list makes it
778 possible to associate the catch region with several exception types,
779 which is useful e.g. for Ada. */
9a0d1e1b 780
52a11cbf 781void
6d41a92f
OH
782expand_start_catch (type_or_list)
783 tree type_or_list;
9a0d1e1b 784{
52a11cbf 785 struct eh_region *t, *c, *l;
6d41a92f 786 tree type_list;
52a11cbf
RH
787
788 if (! doing_eh (0))
789 return;
790
6d41a92f
OH
791 type_list = type_or_list;
792
793 if (type_or_list)
794 {
795 /* Ensure we always end up with a type list to normalize further
796 processing, then register each type against the runtime types
797 map. */
798 tree type_node;
799
800 if (TREE_CODE (type_or_list) != TREE_LIST)
801 type_list = tree_cons (NULL_TREE, type_or_list, NULL_TREE);
802
803 type_node = type_list;
804 for (; type_node; type_node = TREE_CHAIN (type_node))
805 add_type_for_runtime (TREE_VALUE (type_node));
806 }
807
52a11cbf
RH
808 expand_eh_region_start ();
809
810 t = cfun->eh->try_region;
811 c = cfun->eh->cur_region;
812 c->type = ERT_CATCH;
6d41a92f 813 c->u.catch.type_list = type_list;
52a11cbf
RH
814 c->label = gen_label_rtx ();
815
816 l = t->u.try.last_catch;
817 c->u.catch.prev_catch = l;
818 if (l)
819 l->u.catch.next_catch = c;
820 else
821 t->u.try.catch = c;
822 t->u.try.last_catch = c;
9a0d1e1b 823
52a11cbf 824 emit_label (c->label);
9a0d1e1b
AM
825}
826
52a11cbf 827/* End a catch clause. Control will resume after the try/catch block. */
9a0d1e1b 828
52a11cbf
RH
829void
830expand_end_catch ()
9a0d1e1b 831{
52a11cbf
RH
832 struct eh_region *try_region, *catch_region;
833
834 if (! doing_eh (0))
835 return;
836
837 catch_region = expand_eh_region_end ();
838 try_region = cfun->eh->try_region;
839
840 emit_jump (try_region->u.try.continue_label);
9a0d1e1b
AM
841}
842
52a11cbf 843/* End a sequence of catch handlers for a try block. */
9a0d1e1b 844
52a11cbf
RH
845void
846expand_end_all_catch ()
9a0d1e1b 847{
52a11cbf
RH
848 struct eh_region *try_region;
849
850 if (! doing_eh (0))
851 return;
852
853 try_region = cfun->eh->try_region;
854 cfun->eh->try_region = try_region->u.try.prev_try;
855
856 emit_label (try_region->u.try.continue_label);
9a0d1e1b
AM
857}
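/* Taken together, a front end would expand a try/catch construct with a
   call sequence roughly like this (a sketch; TYPE stands for whatever
   type tree the language supplies, and the bodies are expanded by the
   front end between the calls):

     expand_eh_region_start ();
     ... expand the try body ...
     expand_start_all_catch ();
       expand_start_catch (TYPE);
       ... expand the handler body ...
       expand_end_catch ();
     expand_end_all_catch ();

   Control resumes after expand_end_all_catch for the normal path.  */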
858
52a11cbf
RH
859/* End an exception region for an exception type filter. ALLOWED is a
860 TREE_LIST of types to be matched by the runtime. FAILURE is an
ff7cc307 861 expression to invoke if a mismatch occurs.
b4e49397
JM
862
863 ??? We could use these semantics for calls to rethrow, too; if we can
864 see the surrounding catch clause, we know that the exception we're
865 rethrowing satisfies the "filter" of the catch type. */
9a0d1e1b 866
52a11cbf
RH
867void
868expand_eh_region_end_allowed (allowed, failure)
869 tree allowed, failure;
9a0d1e1b 870{
52a11cbf
RH
871 struct eh_region *region;
872 rtx around_label;
9a0d1e1b 873
52a11cbf
RH
874 if (! doing_eh (0))
875 return;
e6cfb550 876
52a11cbf
RH
877 region = expand_eh_region_end ();
878 region->type = ERT_ALLOWED_EXCEPTIONS;
879 region->u.allowed.type_list = allowed;
880 region->label = gen_label_rtx ();
9a0d1e1b 881
52a11cbf
RH
882 for (; allowed ; allowed = TREE_CHAIN (allowed))
883 add_type_for_runtime (TREE_VALUE (allowed));
9a0d1e1b 884
52a11cbf
RH
885 /* We must emit the call to FAILURE here, so that if this function
886 throws a different exception, it will be processed by the
887 correct region. */
9a0d1e1b 888
52a11cbf
RH
889 around_label = gen_label_rtx ();
890 emit_jump (around_label);
891
892 emit_label (region->label);
893 expand_expr (failure, const0_rtx, VOIDmode, EXPAND_NORMAL);
b912bca0
MM
894 /* We must adjust the stack before we reach the AROUND_LABEL because
895 the call to FAILURE does not occur on all paths to the
896 AROUND_LABEL. */
897 do_pending_stack_adjust ();
9a0d1e1b 898
52a11cbf 899 emit_label (around_label);
9a0d1e1b
AM
900}
901
52a11cbf
RH
902/* End an exception region for a must-not-throw filter. FAILURE is an
903 expression to invoke if an uncaught exception propagates this far.
e6cfb550 904
52a11cbf
RH
905 This is conceptually identical to expand_eh_region_end_allowed with
906 an empty allowed list (if you passed "std::terminate" instead of
907 "__cxa_call_unexpected"), but they are represented differently in
908 the C++ LSDA. */
6814a8a0 909
52a11cbf
RH
910void
911expand_eh_region_end_must_not_throw (failure)
912 tree failure;
e6cfb550 913{
52a11cbf
RH
914 struct eh_region *region;
915 rtx around_label;
e6cfb550 916
52a11cbf
RH
917 if (! doing_eh (0))
918 return;
6814a8a0 919
52a11cbf
RH
920 region = expand_eh_region_end ();
921 region->type = ERT_MUST_NOT_THROW;
922 region->label = gen_label_rtx ();
e6cfb550 923
52a11cbf
RH
924 /* We must emit the call to FAILURE here, so that if this function
925 throws a different exception, it will be processed by the
926 correct region. */
6814a8a0 927
52a11cbf
RH
928 around_label = gen_label_rtx ();
929 emit_jump (around_label);
6814a8a0 930
52a11cbf
RH
931 emit_label (region->label);
932 expand_expr (failure, const0_rtx, VOIDmode, EXPAND_NORMAL);
6814a8a0 933
52a11cbf 934 emit_label (around_label);
e6cfb550
AM
935}
936
52a11cbf
RH
937/* End an exception region for a throw. No handling goes on here,
938 but it's the easiest way for the front-end to indicate what type
939 is being thrown. */
6814a8a0 940
52a11cbf
RH
941void
942expand_eh_region_end_throw (type)
943 tree type;
e6cfb550 944{
52a11cbf
RH
945 struct eh_region *region;
946
947 if (! doing_eh (0))
948 return;
949
950 region = expand_eh_region_end ();
951 region->type = ERT_THROW;
952 region->u.throw.type = type;
e6cfb550
AM
953}
954
52a11cbf
RH
955/* End a fixup region. Within this region the cleanups for the immediately
956 enclosing region are _not_ run. This is used for goto cleanup to avoid
957 destroying an object twice.
12670d88 958
52a11cbf
RH
959 This would be an extraordinarily simple prospect, were it not for the
960 fact that we don't actually know what the immediately enclosing region
961 is. This is because expand_cleanups is currently
962 generating a sequence that it will insert somewhere else. We collect
963 the proper notion of "enclosing" in convert_from_eh_region_ranges. */
4956d07c 964
52a11cbf
RH
965void
966expand_eh_region_end_fixup (handler)
967 tree handler;
4956d07c 968{
52a11cbf
RH
969 struct eh_region *fixup;
970
971 if (! doing_eh (0))
972 return;
973
974 fixup = expand_eh_region_end ();
975 fixup->type = ERT_FIXUP;
976 fixup->u.fixup.cleanup_exp = handler;
4956d07c
MS
977}
978
47c84870 979/* Return an rtl expression for a pointer to the exception object
52a11cbf 980 within a handler. */
4956d07c
MS
981
982rtx
86c99549
RH
983get_exception_pointer (fun)
984 struct function *fun;
4956d07c 985{
86c99549
RH
986 rtx exc_ptr = fun->eh->exc_ptr;
987 if (fun == cfun && ! exc_ptr)
52a11cbf
RH
988 {
989 exc_ptr = gen_reg_rtx (Pmode);
86c99549 990 fun->eh->exc_ptr = exc_ptr;
52a11cbf
RH
991 }
992 return exc_ptr;
993}
4956d07c 994
47c84870
JM
995/* Return an rtl expression for the exception dispatch filter
996 within a handler. */
997
998static rtx
86c99549
RH
999get_exception_filter (fun)
1000 struct function *fun;
47c84870 1001{
86c99549
RH
1002 rtx filter = fun->eh->filter;
1003 if (fun == cfun && ! filter)
47c84870 1004 {
041c9d5a 1005 filter = gen_reg_rtx (word_mode);
86c99549 1006 fun->eh->filter = filter;
47c84870
JM
1007 }
1008 return filter;
1009}
52a11cbf
RH
1010\f
1011/* Begin a region that will contain entries created with
1012 add_partial_entry. */
4956d07c 1013
52a11cbf
RH
1014void
1015begin_protect_partials ()
1016{
1017 /* Push room for a new list. */
1018 cfun->eh->protect_list
1019 = tree_cons (NULL_TREE, NULL_TREE, cfun->eh->protect_list);
4956d07c
MS
1020}
1021
27a36778
MS
1022/* Start a new exception region for a region of code that has a
1023 cleanup action and push the HANDLER for the region onto
1024 protect_list. All of the regions created with add_partial_entry
009c3281
JM
1025 will be ended when end_protect_partials is invoked.
1026
1027 ??? The only difference between this purpose and that of
1028 expand_decl_cleanup is that in this case, we only want the cleanup to
1029 run if an exception is thrown. This should also be handled using
1030 binding levels. */
12670d88
RK
1031
1032void
1033add_partial_entry (handler)
1034 tree handler;
1035{
1036 expand_eh_region_start ();
1037
52a11cbf
RH
1038 /* ??? This comment was old before the most recent rewrite. We
1039 really ought to fix the callers at some point. */
76fc91c7
MM
1040 /* For backwards compatibility, we allow callers to omit calls to
1041 begin_protect_partials for the outermost region. So, we must
1042 explicitly do so here. */
52a11cbf 1043 if (!cfun->eh->protect_list)
76fc91c7
MM
1044 begin_protect_partials ();
1045
1046 /* Add this entry to the front of the list. */
3f2c5d1a 1047 TREE_VALUE (cfun->eh->protect_list)
52a11cbf 1048 = tree_cons (NULL_TREE, handler, TREE_VALUE (cfun->eh->protect_list));
12670d88
RK
1049}
1050
52a11cbf 1051/* End all the pending exception regions on protect_list. */
27a36778 1052
52a11cbf
RH
1053void
1054end_protect_partials ()
27a36778 1055{
52a11cbf 1056 tree t;
638e6ebc 1057
52a11cbf
RH
1058 /* ??? This comment was old before the most recent rewrite. We
1059 really ought to fix the callers at some point. */
1060 /* For backwards compatibility, we allow callers to omit the call to
1061 begin_protect_partials for the outermost region. So,
1062 PROTECT_LIST may be NULL. */
1063 if (!cfun->eh->protect_list)
1064 return;
bb727b5a 1065
52a11cbf
RH
1066 /* Pop the topmost entry. */
1067 t = TREE_VALUE (cfun->eh->protect_list);
1068 cfun->eh->protect_list = TREE_CHAIN (cfun->eh->protect_list);
bb727b5a 1069
52a11cbf
RH
1070 /* End all the exception regions. */
1071 for (; t; t = TREE_CHAIN (t))
1072 expand_eh_region_end_cleanup (TREE_VALUE (t));
154bba13
TT
1073}
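/* A sketch of how the three entry points above cooperate; the cleanup
   expressions are hypothetical placeholders supplied by the front end:

     begin_protect_partials ();
     add_partial_entry (cleanup_a);     opens a region cleaned up by A
     add_partial_entry (cleanup_b);     opens a nested region cleaned up by B
     end_protect_partials ();           ends both regions, B's first

   Each add_partial_entry opens a region with expand_eh_region_start,
   and end_protect_partials closes them with
   expand_eh_region_end_cleanup.  */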
1074
52a11cbf
RH
1075\f
1076/* This section is for the exception handling specific optimization pass. */
154bba13 1077
52a11cbf
RH
1078/* Provide random access to the exception region tree. It's just as simple to
1079 collect the regions this way as in expand_eh_region_start, but
1080 without having to realloc memory. */
154bba13 1081
52a11cbf
RH
1082static void
1083collect_eh_region_array ()
154bba13 1084{
52a11cbf 1085 struct eh_region **array, *i;
154bba13 1086
52a11cbf
RH
1087 i = cfun->eh->region_tree;
1088 if (! i)
1089 return;
154bba13 1090
52a11cbf
RH
1091 array = xcalloc (cfun->eh->last_region_number + 1, sizeof (*array));
1092 cfun->eh->region_array = array;
154bba13 1093
52a11cbf
RH
1094 while (1)
1095 {
1096 array[i->region_number] = i;
1097
1098 /* If there are sub-regions, process them. */
1099 if (i->inner)
1100 i = i->inner;
1101 /* If there are peers, process them. */
1102 else if (i->next_peer)
1103 i = i->next_peer;
1104 /* Otherwise, step back up the tree to the next peer. */
1105 else
1106 {
1107 do {
1108 i = i->outer;
1109 if (i == NULL)
1110 return;
1111 } while (i->next_peer == NULL);
1112 i = i->next_peer;
1113 }
1114 }
27a36778
MS
1115}
1116
52a11cbf
RH
1117static void
1118resolve_fixup_regions ()
27a36778 1119{
52a11cbf 1120 int i, j, n = cfun->eh->last_region_number;
27a36778 1121
52a11cbf
RH
1122 for (i = 1; i <= n; ++i)
1123 {
1124 struct eh_region *fixup = cfun->eh->region_array[i];
ea446801 1125 struct eh_region *cleanup = 0;
27a36778 1126
52a11cbf
RH
1127 if (! fixup || fixup->type != ERT_FIXUP)
1128 continue;
27a36778 1129
52a11cbf
RH
1130 for (j = 1; j <= n; ++j)
1131 {
1132 cleanup = cfun->eh->region_array[j];
1133 if (cleanup->type == ERT_CLEANUP
1134 && cleanup->u.cleanup.exp == fixup->u.fixup.cleanup_exp)
1135 break;
1136 }
1137 if (j > n)
1138 abort ();
27a36778 1139
52a11cbf
RH
1140 fixup->u.fixup.real_region = cleanup->outer;
1141 }
27a36778 1142}
27a36778 1143
52a11cbf
RH
1144/* Now that we've discovered what region actually encloses a fixup,
1145 we can shuffle pointers and remove them from the tree. */
27a36778
MS
1146
1147static void
52a11cbf 1148remove_fixup_regions ()
27a36778 1149{
52a11cbf 1150 int i;
45053eaf
RH
1151 rtx insn, note;
1152 struct eh_region *fixup;
27a36778 1153
45053eaf
RH
1154 /* Walk the insn chain and adjust the REG_EH_REGION numbers
1155 for instructions referencing fixup regions. This is only
1156 strictly necessary for fixup regions with no parent, but
1157 doesn't hurt to do it for all regions. */
1158 for (insn = get_insns(); insn ; insn = NEXT_INSN (insn))
1159 if (INSN_P (insn)
1160 && (note = find_reg_note (insn, REG_EH_REGION, NULL))
1161 && INTVAL (XEXP (note, 0)) > 0
1162 && (fixup = cfun->eh->region_array[INTVAL (XEXP (note, 0))])
1163 && fixup->type == ERT_FIXUP)
1164 {
1165 if (fixup->u.fixup.real_region)
2b1e2382 1166 XEXP (note, 0) = GEN_INT (fixup->u.fixup.real_region->region_number);
45053eaf
RH
1167 else
1168 remove_note (insn, note);
1169 }
1170
1171 /* Remove the fixup regions from the tree. */
52a11cbf
RH
1172 for (i = cfun->eh->last_region_number; i > 0; --i)
1173 {
45053eaf 1174 fixup = cfun->eh->region_array[i];
52a11cbf
RH
1175 if (! fixup)
1176 continue;
27a36778 1177
52a11cbf
RH
1178 /* Allow GC to maybe free some memory. */
1179 if (fixup->type == ERT_CLEANUP)
1180 fixup->u.cleanup.exp = NULL_TREE;
27a36778 1181
52a11cbf
RH
1182 if (fixup->type != ERT_FIXUP)
1183 continue;
27a36778 1184
52a11cbf
RH
1185 if (fixup->inner)
1186 {
1187 struct eh_region *parent, *p, **pp;
27a36778 1188
52a11cbf 1189 parent = fixup->u.fixup.real_region;
27a36778 1190
52a11cbf
RH
1191 /* Fix up the children's parent pointers; find the end of
1192 the list. */
1193 for (p = fixup->inner; ; p = p->next_peer)
1194 {
1195 p->outer = parent;
1196 if (! p->next_peer)
1197 break;
1198 }
27a36778 1199
52a11cbf
RH
1200 /* In the tree of cleanups, only outer-inner ordering matters.
1201 So link the children back in anywhere at the correct level. */
1202 if (parent)
1203 pp = &parent->inner;
1204 else
1205 pp = &cfun->eh->region_tree;
1206 p->next_peer = *pp;
1207 *pp = fixup->inner;
1208 fixup->inner = NULL;
1209 }
27a36778 1210
52a11cbf
RH
1211 remove_eh_handler (fixup);
1212 }
27a36778
MS
1213}
1214
52a11cbf
RH
1215/* Turn NOTE_INSN_EH_REGION notes into REG_EH_REGION notes for each
1216 can_throw instruction in the region. */
27a36778
MS
1217
1218static void
52a11cbf
RH
1219convert_from_eh_region_ranges_1 (pinsns, orig_sp, cur)
1220 rtx *pinsns;
1221 int *orig_sp;
1222 int cur;
27a36778 1223{
52a11cbf
RH
1224 int *sp = orig_sp;
1225 rtx insn, next;
27a36778 1226
52a11cbf
RH
1227 for (insn = *pinsns; insn ; insn = next)
1228 {
1229 next = NEXT_INSN (insn);
1230 if (GET_CODE (insn) == NOTE)
1231 {
1232 int kind = NOTE_LINE_NUMBER (insn);
1233 if (kind == NOTE_INSN_EH_REGION_BEG
1234 || kind == NOTE_INSN_EH_REGION_END)
1235 {
1236 if (kind == NOTE_INSN_EH_REGION_BEG)
1237 {
1238 struct eh_region *r;
27a36778 1239
52a11cbf
RH
1240 *sp++ = cur;
1241 cur = NOTE_EH_HANDLER (insn);
27a36778 1242
52a11cbf
RH
1243 r = cfun->eh->region_array[cur];
1244 if (r->type == ERT_FIXUP)
1245 {
1246 r = r->u.fixup.real_region;
1247 cur = r ? r->region_number : 0;
1248 }
1249 else if (r->type == ERT_CATCH)
1250 {
1251 r = r->outer;
1252 cur = r ? r->region_number : 0;
1253 }
1254 }
1255 else
1256 cur = *--sp;
1257
1258 /* Removing the first insn of a CALL_PLACEHOLDER sequence
1259 requires extra care to adjust sequence start. */
1260 if (insn == *pinsns)
1261 *pinsns = next;
1262 remove_insn (insn);
1263 continue;
1264 }
1265 }
1266 else if (INSN_P (insn))
1267 {
1268 if (cur > 0
1269 && ! find_reg_note (insn, REG_EH_REGION, NULL_RTX)
1270 /* Calls can always potentially throw exceptions, unless
1271 they have a REG_EH_REGION note with a value of 0 or less.
1272 Which should be the only possible kind so far. */
1273 && (GET_CODE (insn) == CALL_INSN
1274 /* If we wanted exceptions for non-call insns, then
1275 any may_trap_p instruction could throw. */
1276 || (flag_non_call_exceptions
d7730f7a
RH
1277 && GET_CODE (PATTERN (insn)) != CLOBBER
1278 && GET_CODE (PATTERN (insn)) != USE
52a11cbf
RH
1279 && may_trap_p (PATTERN (insn)))))
1280 {
1281 REG_NOTES (insn) = alloc_EXPR_LIST (REG_EH_REGION, GEN_INT (cur),
1282 REG_NOTES (insn));
1283 }
27a36778 1284
52a11cbf
RH
1285 if (GET_CODE (insn) == CALL_INSN
1286 && GET_CODE (PATTERN (insn)) == CALL_PLACEHOLDER)
1287 {
1288 convert_from_eh_region_ranges_1 (&XEXP (PATTERN (insn), 0),
1289 sp, cur);
1290 convert_from_eh_region_ranges_1 (&XEXP (PATTERN (insn), 1),
1291 sp, cur);
1292 convert_from_eh_region_ranges_1 (&XEXP (PATTERN (insn), 2),
1293 sp, cur);
1294 }
1295 }
1296 }
27a36778 1297
52a11cbf
RH
1298 if (sp != orig_sp)
1299 abort ();
1300}
27a36778 1301
52a11cbf
RH
1302void
1303convert_from_eh_region_ranges ()
1304{
1305 int *stack;
1306 rtx insns;
27a36778 1307
52a11cbf
RH
1308 collect_eh_region_array ();
1309 resolve_fixup_regions ();
27a36778 1310
52a11cbf
RH
1311 stack = xmalloc (sizeof (int) * (cfun->eh->last_region_number + 1));
1312 insns = get_insns ();
1313 convert_from_eh_region_ranges_1 (&insns, stack, 0);
1314 free (stack);
27a36778 1315
52a11cbf 1316 remove_fixup_regions ();
27a36778
MS
1317}
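/* For example, a CALL_INSN lying between the NOTE_INSN_EH_REGION_BEG and
   NOTE_INSN_EH_REGION_END notes for region 3 (a purely illustrative
   number) receives a REG_EH_REGION note with value 3, and the bracketing
   region notes themselves are deleted.  */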
1318
52a11cbf
RH
1319void
1320find_exception_handler_labels ()
27a36778 1321{
52a11cbf
RH
1322 rtx list = NULL_RTX;
1323 int i;
27a36778 1324
52a11cbf 1325 free_EXPR_LIST_list (&exception_handler_labels);
27a36778 1326
52a11cbf
RH
1327 if (cfun->eh->region_tree == NULL)
1328 return;
27a36778 1329
52a11cbf
RH
1330 for (i = cfun->eh->last_region_number; i > 0; --i)
1331 {
1332 struct eh_region *region = cfun->eh->region_array[i];
1333 rtx lab;
27a36778 1334
52a11cbf
RH
1335 if (! region)
1336 continue;
1337 if (cfun->eh->built_landing_pads)
1338 lab = region->landing_pad;
1339 else
1340 lab = region->label;
27a36778 1341
52a11cbf
RH
1342 if (lab)
1343 list = alloc_EXPR_LIST (0, lab, list);
27a36778
MS
1344 }
1345
52a11cbf
RH
1346 /* For sjlj exceptions, need the return label to remain live until
1347 after landing pad generation. */
1348 if (USING_SJLJ_EXCEPTIONS && ! cfun->eh->built_landing_pads)
1349 list = alloc_EXPR_LIST (0, return_label, list);
27a36778 1350
52a11cbf 1351 exception_handler_labels = list;
27a36778
MS
1352}
1353
52a11cbf
RH
1354\f
1355static struct eh_region *
1356duplicate_eh_region_1 (o, map)
1357 struct eh_region *o;
1358 struct inline_remap *map;
4956d07c 1359{
52a11cbf
RH
1360 struct eh_region *n
1361 = (struct eh_region *) xcalloc (1, sizeof (struct eh_region));
4956d07c 1362
52a11cbf
RH
1363 n->region_number = o->region_number + cfun->eh->last_region_number;
1364 n->type = o->type;
4956d07c 1365
52a11cbf
RH
1366 switch (n->type)
1367 {
1368 case ERT_CLEANUP:
1369 case ERT_MUST_NOT_THROW:
1370 break;
27a36778 1371
52a11cbf
RH
1372 case ERT_TRY:
1373 if (o->u.try.continue_label)
1374 n->u.try.continue_label
1375 = get_label_from_map (map,
1376 CODE_LABEL_NUMBER (o->u.try.continue_label));
1377 break;
27a36778 1378
52a11cbf 1379 case ERT_CATCH:
6d41a92f 1380 n->u.catch.type_list = o->u.catch.type_list;
52a11cbf 1381 break;
27a36778 1382
52a11cbf
RH
1383 case ERT_ALLOWED_EXCEPTIONS:
1384 n->u.allowed.type_list = o->u.allowed.type_list;
1385 break;
1386
1387 case ERT_THROW:
1388 n->u.throw.type = o->u.throw.type;
3f2c5d1a 1389
52a11cbf
RH
1390 default:
1391 abort ();
1392 }
1393
1394 if (o->label)
1395 n->label = get_label_from_map (map, CODE_LABEL_NUMBER (o->label));
47c84870 1396 if (o->resume)
e7b9b18e 1397 {
47c84870
JM
1398 n->resume = map->insn_map[INSN_UID (o->resume)];
1399 if (n->resume == NULL)
52a11cbf 1400 abort ();
27a36778 1401 }
4956d07c 1402
52a11cbf 1403 return n;
4956d07c
MS
1404}
1405
52a11cbf
RH
1406static void
1407duplicate_eh_region_2 (o, n_array)
1408 struct eh_region *o;
1409 struct eh_region **n_array;
4c581243 1410{
52a11cbf 1411 struct eh_region *n = n_array[o->region_number];
4c581243 1412
52a11cbf
RH
1413 switch (n->type)
1414 {
1415 case ERT_TRY:
1416 n->u.try.catch = n_array[o->u.try.catch->region_number];
1417 n->u.try.last_catch = n_array[o->u.try.last_catch->region_number];
1418 break;
12670d88 1419
52a11cbf
RH
1420 case ERT_CATCH:
1421 if (o->u.catch.next_catch)
1422 n->u.catch.next_catch = n_array[o->u.catch.next_catch->region_number];
1423 if (o->u.catch.prev_catch)
1424 n->u.catch.prev_catch = n_array[o->u.catch.prev_catch->region_number];
1425 break;
12670d88 1426
52a11cbf
RH
1427 default:
1428 break;
1429 }
4956d07c 1430
52a11cbf
RH
1431 if (o->outer)
1432 n->outer = n_array[o->outer->region_number];
1433 if (o->inner)
1434 n->inner = n_array[o->inner->region_number];
1435 if (o->next_peer)
1436 n->next_peer = n_array[o->next_peer->region_number];
3f2c5d1a 1437}
52a11cbf
RH
1438
1439int
1440duplicate_eh_regions (ifun, map)
1441 struct function *ifun;
1442 struct inline_remap *map;
4956d07c 1443{
52a11cbf
RH
1444 int ifun_last_region_number = ifun->eh->last_region_number;
1445 struct eh_region **n_array, *root, *cur;
1446 int i;
4956d07c 1447
52a11cbf
RH
1448 if (ifun_last_region_number == 0)
1449 return 0;
4956d07c 1450
52a11cbf 1451 n_array = xcalloc (ifun_last_region_number + 1, sizeof (*n_array));
4956d07c 1452
52a11cbf 1453 for (i = 1; i <= ifun_last_region_number; ++i)
27a36778 1454 {
52a11cbf
RH
1455 cur = ifun->eh->region_array[i];
1456 if (!cur || cur->region_number != i)
1457 continue;
1458 n_array[i] = duplicate_eh_region_1 (cur, map);
27a36778 1459 }
52a11cbf 1460 for (i = 1; i <= ifun_last_region_number; ++i)
27a36778 1461 {
52a11cbf
RH
1462 cur = ifun->eh->region_array[i];
1463 if (!cur || cur->region_number != i)
1464 continue;
1465 duplicate_eh_region_2 (cur, n_array);
1466 }
27a36778 1467
52a11cbf
RH
1468 root = n_array[ifun->eh->region_tree->region_number];
1469 cur = cfun->eh->cur_region;
1470 if (cur)
1471 {
1472 struct eh_region *p = cur->inner;
1473 if (p)
1474 {
1475 while (p->next_peer)
1476 p = p->next_peer;
1477 p->next_peer = root;
1478 }
1479 else
1480 cur->inner = root;
27a36778 1481
52a11cbf 1482 for (i = 1; i <= ifun_last_region_number; ++i)
b24a9e88 1483 if (n_array[i] && n_array[i]->outer == NULL)
52a11cbf
RH
1484 n_array[i]->outer = cur;
1485 }
1486 else
1487 {
1488 struct eh_region *p = cfun->eh->region_tree;
1489 if (p)
1490 {
1491 while (p->next_peer)
1492 p = p->next_peer;
1493 p->next_peer = root;
1494 }
1495 else
1496 cfun->eh->region_tree = root;
27a36778 1497 }
1e4ceb6f 1498
52a11cbf 1499 free (n_array);
1e4ceb6f 1500
52a11cbf
RH
1501 i = cfun->eh->last_region_number;
1502 cfun->eh->last_region_number = i + ifun_last_region_number;
1503 return i;
4956d07c
MS
1504}
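/* As a worked example (numbers purely illustrative): if the current
   function already has 5 regions and IFUN contributes 3 more, the copies
   become regions 6 through 8, last_region_number becomes 8, and the
   returned offset is 5, which the caller can add to region numbers
   referenced from IFUN's copied insns.  */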
1505
52a11cbf 1506\f
52a11cbf
RH
1507static int
1508t2r_eq (pentry, pdata)
1509 const PTR pentry;
1510 const PTR pdata;
9762d48d 1511{
52a11cbf
RH
1512 tree entry = (tree) pentry;
1513 tree data = (tree) pdata;
9762d48d 1514
52a11cbf 1515 return TREE_PURPOSE (entry) == data;
9762d48d
JM
1516}
1517
52a11cbf
RH
1518static hashval_t
1519t2r_hash (pentry)
1520 const PTR pentry;
1521{
1522 tree entry = (tree) pentry;
1523 return TYPE_HASH (TREE_PURPOSE (entry));
1524}
9762d48d 1525
52a11cbf
RH
1526static int
1527t2r_mark_1 (slot, data)
1528 PTR *slot;
1529 PTR data ATTRIBUTE_UNUSED;
9762d48d 1530{
52a11cbf
RH
1531 tree contents = (tree) *slot;
1532 ggc_mark_tree (contents);
1533 return 1;
1534}
9762d48d 1535
52a11cbf
RH
1536static void
1537t2r_mark (addr)
1538 PTR addr;
1539{
1540 htab_traverse (*(htab_t *)addr, t2r_mark_1, NULL);
1541}
9762d48d 1542
52a11cbf
RH
1543static void
1544add_type_for_runtime (type)
1545 tree type;
1546{
1547 tree *slot;
9762d48d 1548
52a11cbf
RH
1549 slot = (tree *) htab_find_slot_with_hash (type_to_runtime_map, type,
1550 TYPE_HASH (type), INSERT);
1551 if (*slot == NULL)
1552 {
1553 tree runtime = (*lang_eh_runtime_type) (type);
1554 *slot = tree_cons (type, runtime, NULL_TREE);
1555 }
1556}
3f2c5d1a 1557
52a11cbf
RH
1558static tree
1559lookup_type_for_runtime (type)
1560 tree type;
1561{
1562 tree *slot;
b37f006b 1563
52a11cbf
RH
1564 slot = (tree *) htab_find_slot_with_hash (type_to_runtime_map, type,
1565 TYPE_HASH (type), NO_INSERT);
b37f006b 1566
a1f300c0 1567 /* We should have always inserted the data earlier. */
52a11cbf
RH
1568 return TREE_VALUE (*slot);
1569}
9762d48d 1570
52a11cbf
RH
1571\f
1572/* Represent an entry in @TTypes for either catch actions
1573 or exception filter actions. */
1574struct ttypes_filter
1575{
1576 tree t;
1577 int filter;
1578};
b37f006b 1579
52a11cbf
RH
1580/* Compare ENTRY (a ttypes_filter entry in the hash table) with DATA
1581 (a tree) for a @TTypes type node we are thinking about adding. */
b37f006b 1582
52a11cbf
RH
1583static int
1584ttypes_filter_eq (pentry, pdata)
1585 const PTR pentry;
1586 const PTR pdata;
1587{
1588 const struct ttypes_filter *entry = (const struct ttypes_filter *) pentry;
1589 tree data = (tree) pdata;
b37f006b 1590
52a11cbf 1591 return entry->t == data;
9762d48d
JM
1592}
1593
52a11cbf
RH
1594static hashval_t
1595ttypes_filter_hash (pentry)
1596 const PTR pentry;
1597{
1598 const struct ttypes_filter *entry = (const struct ttypes_filter *) pentry;
1599 return TYPE_HASH (entry->t);
1600}
4956d07c 1601
52a11cbf
RH
1602/* Compare ENTRY with DATA (both struct ttypes_filter) for a @TTypes
1603 exception specification list we are thinking about adding. */
1604/* ??? Currently we use the type lists in the order given. Someone
1605 should put these in some canonical order. */
1606
1607static int
1608ehspec_filter_eq (pentry, pdata)
1609 const PTR pentry;
1610 const PTR pdata;
4956d07c 1611{
52a11cbf
RH
1612 const struct ttypes_filter *entry = (const struct ttypes_filter *) pentry;
1613 const struct ttypes_filter *data = (const struct ttypes_filter *) pdata;
1614
1615 return type_list_equal (entry->t, data->t);
4956d07c
MS
1616}
1617
52a11cbf 1618/* Hash function for exception specification lists. */
4956d07c 1619
52a11cbf
RH
1620static hashval_t
1621ehspec_filter_hash (pentry)
1622 const PTR pentry;
4956d07c 1623{
52a11cbf
RH
1624 const struct ttypes_filter *entry = (const struct ttypes_filter *) pentry;
1625 hashval_t h = 0;
1626 tree list;
1627
1628 for (list = entry->t; list ; list = TREE_CHAIN (list))
1629 h = (h << 5) + (h >> 27) + TYPE_HASH (TREE_VALUE (list));
1630 return h;
4956d07c
MS
1631}
1632
52a11cbf
RH
1633/* Add TYPE to cfun->eh->ttype_data, using TYPES_HASH to speed
1634 up the search. Return the filter value to be used. */
4956d07c 1635
52a11cbf
RH
1636static int
1637add_ttypes_entry (ttypes_hash, type)
1638 htab_t ttypes_hash;
1639 tree type;
4956d07c 1640{
52a11cbf 1641 struct ttypes_filter **slot, *n;
4956d07c 1642
52a11cbf
RH
1643 slot = (struct ttypes_filter **)
1644 htab_find_slot_with_hash (ttypes_hash, type, TYPE_HASH (type), INSERT);
1645
1646 if ((n = *slot) == NULL)
4956d07c 1647 {
52a11cbf 1648 /* Filter value is a 1 based table index. */
12670d88 1649
52a11cbf
RH
1650 n = (struct ttypes_filter *) xmalloc (sizeof (*n));
1651 n->t = type;
1652 n->filter = VARRAY_ACTIVE_SIZE (cfun->eh->ttype_data) + 1;
1653 *slot = n;
1654
1655 VARRAY_PUSH_TREE (cfun->eh->ttype_data, type);
4956d07c 1656 }
52a11cbf
RH
1657
1658 return n->filter;
4956d07c
MS
1659}
1660
52a11cbf
RH
1661/* Add LIST to cfun->eh->ehspec_data, using EHSPEC_HASH and TYPES_HASH
1662 to speed up the search. Return the filter value to be used. */
1663
1664static int
1665add_ehspec_entry (ehspec_hash, ttypes_hash, list)
1666 htab_t ehspec_hash;
1667 htab_t ttypes_hash;
1668 tree list;
12670d88 1669{
52a11cbf
RH
1670 struct ttypes_filter **slot, *n;
1671 struct ttypes_filter dummy;
12670d88 1672
52a11cbf
RH
1673 dummy.t = list;
1674 slot = (struct ttypes_filter **)
1675 htab_find_slot (ehspec_hash, &dummy, INSERT);
1676
1677 if ((n = *slot) == NULL)
1678 {
1679 /* Filter value is a -1 based byte index into a uleb128 buffer. */
1680
1681 n = (struct ttypes_filter *) xmalloc (sizeof (*n));
1682 n->t = list;
1683 n->filter = -(VARRAY_ACTIVE_SIZE (cfun->eh->ehspec_data) + 1);
1684 *slot = n;
1685
1686 /* Look up each type in the list and encode its filter
1687 value as a uleb128. Terminate the list with 0. */
1688 for (; list ; list = TREE_CHAIN (list))
3f2c5d1a 1689 push_uleb128 (&cfun->eh->ehspec_data,
52a11cbf
RH
1690 add_ttypes_entry (ttypes_hash, TREE_VALUE (list)));
1691 VARRAY_PUSH_UCHAR (cfun->eh->ehspec_data, 0);
1692 }
1693
1694 return n->filter;
12670d88
RK
1695}
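/* A small worked example of the two encodings above (values purely
   illustrative): if types A and B are assigned ttype filters 1 and 2,
   then the exception specification list (A, B) is appended to
   ehspec_data as the uleb128 bytes 1, 2, 0; if it starts at offset 0 of
   that buffer, its filter value is -1.  */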
1696
52a11cbf
RH
1697/* Generate the action filter values to be used for CATCH and
1698 ALLOWED_EXCEPTIONS regions. When using dwarf2 exception regions,
1699 we use lots of landing pads, and so every type or list can share
1700 the same filter value, which saves table space. */
1701
1702static void
1703assign_filter_values ()
9a0d1e1b 1704{
52a11cbf
RH
1705 int i;
1706 htab_t ttypes, ehspec;
9a9deafc 1707
52a11cbf
RH
1708 VARRAY_TREE_INIT (cfun->eh->ttype_data, 16, "ttype_data");
1709 VARRAY_UCHAR_INIT (cfun->eh->ehspec_data, 64, "ehspec_data");
9a9deafc 1710
52a11cbf
RH
1711 ttypes = htab_create (31, ttypes_filter_hash, ttypes_filter_eq, free);
1712 ehspec = htab_create (31, ehspec_filter_hash, ehspec_filter_eq, free);
9a0d1e1b 1713
52a11cbf
RH
1714 for (i = cfun->eh->last_region_number; i > 0; --i)
1715 {
1716 struct eh_region *r = cfun->eh->region_array[i];
9a0d1e1b 1717
52a11cbf
RH
1718 /* Mind we don't process a region more than once. */
1719 if (!r || r->region_number != i)
1720 continue;
9a0d1e1b 1721
52a11cbf
RH
1722 switch (r->type)
1723 {
1724 case ERT_CATCH:
6d41a92f
OH
1725 /* Whatever type_list is (NULL or true list), we build a list
1726 of filters for the region. */
1727 r->u.catch.filter_list = NULL_TREE;
1728
1729 if (r->u.catch.type_list != NULL)
1730 {
1731 /* Get a filter value for each of the types caught and store
1732 them in the region's dedicated list. */
1733 tree tp_node = r->u.catch.type_list;
1734
1735 for (;tp_node; tp_node = TREE_CHAIN (tp_node))
1736 {
1737 int flt = add_ttypes_entry (ttypes, TREE_VALUE (tp_node));
1738 tree flt_node = build_int_2 (flt, 0);
3f2c5d1a
RS
1739
1740 r->u.catch.filter_list
6d41a92f
OH
1741 = tree_cons (NULL_TREE, flt_node, r->u.catch.filter_list);
1742 }
1743 }
1744 else
1745 {
1746 /* Get a filter value for the NULL list also since it will need
1747 an action record anyway. */
1748 int flt = add_ttypes_entry (ttypes, NULL);
1749 tree flt_node = build_int_2 (flt, 0);
3f2c5d1a
RS
1750
1751 r->u.catch.filter_list
6d41a92f
OH
1752 = tree_cons (NULL_TREE, flt_node, r->u.catch.filter_list);
1753 }
3f2c5d1a 1754
52a11cbf 1755 break;
bf71cd2e 1756
52a11cbf
RH
1757 case ERT_ALLOWED_EXCEPTIONS:
1758 r->u.allowed.filter
1759 = add_ehspec_entry (ehspec, ttypes, r->u.allowed.type_list);
1760 break;
bf71cd2e 1761
52a11cbf
RH
1762 default:
1763 break;
1764 }
1765 }
1766
1767 htab_delete (ttypes);
1768 htab_delete (ehspec);
1769}
1770
1771static void
1772build_post_landing_pads ()
1773{
1774 int i;
bf71cd2e 1775
52a11cbf 1776 for (i = cfun->eh->last_region_number; i > 0; --i)
bf71cd2e 1777 {
52a11cbf
RH
1778 struct eh_region *region = cfun->eh->region_array[i];
1779 rtx seq;
bf71cd2e 1780
52a11cbf
RH
1781 /* Mind we don't process a region more than once. */
1782 if (!region || region->region_number != i)
1783 continue;
1784
1785 switch (region->type)
987009bf 1786 {
52a11cbf
RH
1787 case ERT_TRY:
1788 /* ??? Collect the set of all non-overlapping catch handlers
1789 all the way up the chain until blocked by a cleanup. */
1790 /* ??? Outer try regions can share landing pads with inner
1791 try regions if the types are completely non-overlapping,
a1f300c0 1792 and there are no intervening cleanups. */
bf71cd2e 1793
52a11cbf 1794 region->post_landing_pad = gen_label_rtx ();
bf71cd2e 1795
52a11cbf 1796 start_sequence ();
bf71cd2e 1797
52a11cbf 1798 emit_label (region->post_landing_pad);
bf71cd2e 1799
52a11cbf
RH
1800 /* ??? It is mighty inconvenient to call back into the
1801 switch statement generation code in expand_end_case.
1802 Rapid prototyping sez a sequence of ifs. */
1803 {
1804 struct eh_region *c;
1805 for (c = region->u.try.catch; c ; c = c->u.catch.next_catch)
1806 {
1807 /* ??? _Unwind_ForcedUnwind wants no match here. */
6d41a92f 1808 if (c->u.catch.type_list == NULL)
52a11cbf
RH
1809 emit_jump (c->label);
1810 else
6d41a92f
OH
1811 {
 1812		    /* We need one cmp/jump per type caught. Each type
1813 list entry has a matching entry in the filter list
1814 (see assign_filter_values). */
1815 tree tp_node = c->u.catch.type_list;
1816 tree flt_node = c->u.catch.filter_list;
1817
1818 for (; tp_node; )
1819 {
1820 emit_cmp_and_jump_insns
1821 (cfun->eh->filter,
1822 GEN_INT (tree_low_cst (TREE_VALUE (flt_node), 0)),
1823 EQ, NULL_RTX, word_mode, 0, c->label);
1824
1825 tp_node = TREE_CHAIN (tp_node);
1826 flt_node = TREE_CHAIN (flt_node);
1827 }
1828 }
52a11cbf
RH
1829 }
1830 }
bf71cd2e 1831
47c84870
JM
1832 /* We delay the generation of the _Unwind_Resume until we generate
1833 landing pads. We emit a marker here so as to get good control
1834 flow data in the meantime. */
1835 region->resume
1836 = emit_jump_insn (gen_rtx_RESX (VOIDmode, region->region_number));
1837 emit_barrier ();
1838
52a11cbf
RH
1839 seq = get_insns ();
1840 end_sequence ();
e6cfb550 1841
47c84870 1842 emit_insns_before (seq, region->u.try.catch->label);
52a11cbf 1843 break;
bf71cd2e 1844
52a11cbf
RH
1845 case ERT_ALLOWED_EXCEPTIONS:
1846 region->post_landing_pad = gen_label_rtx ();
9a0d1e1b 1847
52a11cbf 1848 start_sequence ();
f54a7f6f 1849
52a11cbf 1850 emit_label (region->post_landing_pad);
f54a7f6f 1851
52a11cbf
RH
1852 emit_cmp_and_jump_insns (cfun->eh->filter,
1853 GEN_INT (region->u.allowed.filter),
a06ef755 1854 EQ, NULL_RTX, word_mode, 0, region->label);
f54a7f6f 1855
47c84870
JM
1856 /* We delay the generation of the _Unwind_Resume until we generate
1857 landing pads. We emit a marker here so as to get good control
1858 flow data in the meantime. */
1859 region->resume
1860 = emit_jump_insn (gen_rtx_RESX (VOIDmode, region->region_number));
1861 emit_barrier ();
1862
52a11cbf
RH
1863 seq = get_insns ();
1864 end_sequence ();
1865
47c84870 1866 emit_insns_before (seq, region->label);
52a11cbf 1867 break;
f54a7f6f 1868
52a11cbf
RH
1869 case ERT_CLEANUP:
1870 case ERT_MUST_NOT_THROW:
1871 region->post_landing_pad = region->label;
1872 break;
1873
1874 case ERT_CATCH:
1875 case ERT_THROW:
1876 /* Nothing to do. */
1877 break;
1878
1879 default:
1880 abort ();
1881 }
1882 }
1883}
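
/* Illustrative aside (not part of except.c): at run time the "sequence of
   ifs" emitted above amounts to comparing the filter value delivered in
   EH_RETURN_DATA_REGNO (1) against each handler's filter list, with an
   empty list acting as a catch-all.  A standalone sketch under that
   assumption; the structure and function names are hypothetical.  */

#include <stddef.h>

struct handler_sketch
{
  const int *filters;		/* accepted filters, 0-terminated; NULL = catch-all */
  void (*body) (void);		/* hypothetical handler entry point */
};

/* Return the first handler matching FILTER, or NULL (the RESX case).  */
static struct handler_sketch *
dispatch_sketch (struct handler_sketch *handlers, size_t n, int filter)
{
  size_t i;

  for (i = 0; i < n; ++i)
    {
      const int *f = handlers[i].filters;

      if (f == NULL)
	return &handlers[i];	/* catch-all matches unconditionally */
      for (; *f; ++f)
	if (*f == filter)	/* one cmp/jump per type caught */
	  return &handlers[i];
    }
  return NULL;
}

static void handler_a (void) {}
static void handler_b (void) {}

int
main (void)
{
  static const int flt_a[] = { 1, 3, 0 };
  struct handler_sketch handlers[2]
    = { { flt_a, handler_a }, { NULL, handler_b } };

  /* Filter 3 selects the first handler; filter 7 falls to the catch-all.  */
  return (dispatch_sketch (handlers, 2, 3) == &handlers[0]
	  && dispatch_sketch (handlers, 2, 7) == &handlers[1]) ? 0 : 1;
}
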
1e4ceb6f 1884
47c84870
JM
1885/* Replace RESX patterns with jumps to the next handler if any, or calls to
1886 _Unwind_Resume otherwise. */
1887
1e4ceb6f 1888static void
52a11cbf 1889connect_post_landing_pads ()
1e4ceb6f 1890{
52a11cbf 1891 int i;
76fc91c7 1892
52a11cbf
RH
1893 for (i = cfun->eh->last_region_number; i > 0; --i)
1894 {
1895 struct eh_region *region = cfun->eh->region_array[i];
1896 struct eh_region *outer;
47c84870 1897 rtx seq;
1e4ceb6f 1898
52a11cbf
RH
1899 /* Mind we don't process a region more than once. */
1900 if (!region || region->region_number != i)
1901 continue;
1e4ceb6f 1902
47c84870
JM
1903 /* If there is no RESX, or it has been deleted by flow, there's
1904 nothing to fix up. */
1905 if (! region->resume || INSN_DELETED_P (region->resume))
52a11cbf 1906 continue;
76fc91c7 1907
52a11cbf
RH
1908 /* Search for another landing pad in this function. */
1909 for (outer = region->outer; outer ; outer = outer->outer)
1910 if (outer->post_landing_pad)
1911 break;
1e4ceb6f 1912
52a11cbf 1913 start_sequence ();
12670d88 1914
52a11cbf
RH
1915 if (outer)
1916 emit_jump (outer->post_landing_pad);
1917 else
9555a122 1918 emit_library_call (unwind_resume_libfunc, LCT_THROW,
52a11cbf 1919 VOIDmode, 1, cfun->eh->exc_ptr, Pmode);
4956d07c 1920
52a11cbf
RH
1921 seq = get_insns ();
1922 end_sequence ();
47c84870 1923 emit_insns_before (seq, region->resume);
53c17031 1924 delete_insn (region->resume);
52a11cbf
RH
1925 }
1926}
1927
1928\f
1929static void
1930dw2_build_landing_pads ()
4956d07c 1931{
ae0ed63a
JM
1932 int i;
1933 unsigned int j;
4956d07c 1934
52a11cbf
RH
1935 for (i = cfun->eh->last_region_number; i > 0; --i)
1936 {
1937 struct eh_region *region = cfun->eh->region_array[i];
1938 rtx seq;
5c701bb1 1939 bool clobbers_hard_regs = false;
4956d07c 1940
52a11cbf
RH
1941 /* Mind we don't process a region more than once. */
1942 if (!region || region->region_number != i)
1943 continue;
1418bb67 1944
52a11cbf
RH
1945 if (region->type != ERT_CLEANUP
1946 && region->type != ERT_TRY
1947 && region->type != ERT_ALLOWED_EXCEPTIONS)
1948 continue;
12670d88 1949
52a11cbf 1950 start_sequence ();
4956d07c 1951
52a11cbf
RH
1952 region->landing_pad = gen_label_rtx ();
1953 emit_label (region->landing_pad);
4956d07c 1954
52a11cbf
RH
1955#ifdef HAVE_exception_receiver
1956 if (HAVE_exception_receiver)
1957 emit_insn (gen_exception_receiver ());
1958 else
1959#endif
1960#ifdef HAVE_nonlocal_goto_receiver
1961 if (HAVE_nonlocal_goto_receiver)
1962 emit_insn (gen_nonlocal_goto_receiver ());
1963 else
1964#endif
1965 { /* Nothing */ }
4956d07c 1966
52a11cbf
RH
1967 /* If the eh_return data registers are call-saved, then we
1968 won't have considered them clobbered from the call that
1969 threw. Kill them now. */
1970 for (j = 0; ; ++j)
1971 {
1972 unsigned r = EH_RETURN_DATA_REGNO (j);
1973 if (r == INVALID_REGNUM)
1974 break;
1975 if (! call_used_regs[r])
5c701bb1
JS
1976 {
1977 emit_insn (gen_rtx_CLOBBER (VOIDmode, gen_rtx_REG (Pmode, r)));
1978 clobbers_hard_regs = true;
1979 }
1980 }
1981
1982 if (clobbers_hard_regs)
1983 {
1984 /* @@@ This is a kludge. Not all machine descriptions define a
1985 blockage insn, but we must not allow the code we just generated
 1986	     to be reordered by scheduling. So emit an ASM_INPUT to act as a
2ba84f36 1987 blockage insn. */
5c701bb1 1988 emit_insn (gen_rtx_ASM_INPUT (VOIDmode, ""));
52a11cbf 1989 }
e701eb4d 1990
52a11cbf
RH
1991 emit_move_insn (cfun->eh->exc_ptr,
1992 gen_rtx_REG (Pmode, EH_RETURN_DATA_REGNO (0)));
1993 emit_move_insn (cfun->eh->filter,
9e800206 1994 gen_rtx_REG (word_mode, EH_RETURN_DATA_REGNO (1)));
9a0d1e1b 1995
52a11cbf
RH
1996 seq = get_insns ();
1997 end_sequence ();
5816cb14 1998
52a11cbf
RH
1999 emit_insns_before (seq, region->post_landing_pad);
2000 }
4956d07c
MS
2001}
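
/* Illustrative aside (not part of except.c): the open-ended loop above
   stops when EH_RETURN_DATA_REGNO (j) yields INVALID_REGNUM.  A target
   typically defines the macro along the following lines; the register
   numbers used here are made up for the sketch.  */

#define INVALID_REGNUM_SKETCH		(~0U)
#define EH_RETURN_DATA_REGNO_SKETCH(N)	\
  ((N) < 2 ? 16 + (N) : INVALID_REGNUM_SKETCH)

/* Count how many EH data registers the (hypothetical) target provides.  */
unsigned int
count_eh_data_regs_sketch (void)
{
  unsigned int j, n = 0;

  for (j = 0; ; ++j)
    {
      unsigned int r = EH_RETURN_DATA_REGNO_SKETCH (j);
      if (r == INVALID_REGNUM_SKETCH)
	break;
      ++n;
    }
  return n;	/* 2 with the made-up definition above */
}
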
2002
52a11cbf
RH
2003\f
2004struct sjlj_lp_info
2005{
2006 int directly_reachable;
2007 int action_index;
2008 int dispatch_index;
2009 int call_site_index;
2010};
4956d07c 2011
52a11cbf
RH
2012static bool
2013sjlj_find_directly_reachable_regions (lp_info)
2014 struct sjlj_lp_info *lp_info;
4956d07c 2015{
52a11cbf
RH
2016 rtx insn;
2017 bool found_one = false;
4956d07c 2018
52a11cbf
RH
2019 for (insn = get_insns (); insn ; insn = NEXT_INSN (insn))
2020 {
2021 struct eh_region *region;
98ce21b3 2022 enum reachable_code rc;
52a11cbf
RH
2023 tree type_thrown;
2024 rtx note;
4956d07c 2025
52a11cbf
RH
2026 if (! INSN_P (insn))
2027 continue;
0d3453df 2028
52a11cbf
RH
2029 note = find_reg_note (insn, REG_EH_REGION, NULL_RTX);
2030 if (!note || INTVAL (XEXP (note, 0)) <= 0)
2031 continue;
5dfa7520 2032
52a11cbf 2033 region = cfun->eh->region_array[INTVAL (XEXP (note, 0))];
5dfa7520 2034
52a11cbf
RH
2035 type_thrown = NULL_TREE;
2036 if (region->type == ERT_THROW)
2037 {
2038 type_thrown = region->u.throw.type;
2039 region = region->outer;
2040 }
12670d88 2041
52a11cbf
RH
2042 /* Find the first containing region that might handle the exception.
2043 That's the landing pad to which we will transfer control. */
98ce21b3 2044 rc = RNL_NOT_CAUGHT;
52a11cbf 2045 for (; region; region = region->outer)
98ce21b3
RH
2046 {
2047 rc = reachable_next_level (region, type_thrown, 0);
2048 if (rc != RNL_NOT_CAUGHT)
2049 break;
2050 }
2051 if (rc == RNL_MAYBE_CAUGHT || rc == RNL_CAUGHT)
52a11cbf
RH
2052 {
2053 lp_info[region->region_number].directly_reachable = 1;
2054 found_one = true;
2055 }
2056 }
4956d07c 2057
52a11cbf
RH
2058 return found_one;
2059}
e701eb4d
JM
2060
2061static void
52a11cbf
RH
2062sjlj_assign_call_site_values (dispatch_label, lp_info)
2063 rtx dispatch_label;
2064 struct sjlj_lp_info *lp_info;
e701eb4d 2065{
52a11cbf
RH
2066 htab_t ar_hash;
2067 int i, index;
2068
2069 /* First task: build the action table. */
2070
2071 VARRAY_UCHAR_INIT (cfun->eh->action_record_data, 64, "action_record_data");
2072 ar_hash = htab_create (31, action_record_hash, action_record_eq, free);
2073
2074 for (i = cfun->eh->last_region_number; i > 0; --i)
2075 if (lp_info[i].directly_reachable)
e6cfb550 2076 {
52a11cbf
RH
2077 struct eh_region *r = cfun->eh->region_array[i];
2078 r->landing_pad = dispatch_label;
2079 lp_info[i].action_index = collect_one_action_chain (ar_hash, r);
2080 if (lp_info[i].action_index != -1)
2081 cfun->uses_eh_lsda = 1;
e6cfb550 2082 }
e701eb4d 2083
52a11cbf 2084 htab_delete (ar_hash);
76fc91c7 2085
52a11cbf
RH
2086 /* Next: assign dispatch values. In dwarf2 terms, this would be the
2087 landing pad label for the region. For sjlj though, there is one
2088 common landing pad from which we dispatch to the post-landing pads.
76fc91c7 2089
52a11cbf
RH
2090 A region receives a dispatch index if it is directly reachable
2091 and requires in-function processing. Regions that share post-landing
eaec9b3d 2092 pads may share dispatch indices. */
52a11cbf
RH
2093 /* ??? Post-landing pad sharing doesn't actually happen at the moment
2094 (see build_post_landing_pads) so we don't bother checking for it. */
4956d07c 2095
52a11cbf
RH
2096 index = 0;
2097 for (i = cfun->eh->last_region_number; i > 0; --i)
98ce21b3 2098 if (lp_info[i].directly_reachable)
52a11cbf 2099 lp_info[i].dispatch_index = index++;
76fc91c7 2100
52a11cbf
RH
 2101  /* Finally: assign call-site values.  In dwarf2 terms, this would be
2102 the region number assigned by convert_to_eh_region_ranges, but
2103 handles no-action and must-not-throw differently. */
76fc91c7 2104
52a11cbf
RH
2105 call_site_base = 1;
2106 for (i = cfun->eh->last_region_number; i > 0; --i)
2107 if (lp_info[i].directly_reachable)
2108 {
2109 int action = lp_info[i].action_index;
2110
2111 /* Map must-not-throw to otherwise unused call-site index 0. */
2112 if (action == -2)
2113 index = 0;
2114 /* Map no-action to otherwise unused call-site index -1. */
2115 else if (action == -1)
2116 index = -1;
2117 /* Otherwise, look it up in the table. */
2118 else
2119 index = add_call_site (GEN_INT (lp_info[i].dispatch_index), action);
2120
2121 lp_info[i].call_site_index = index;
2122 }
4956d07c 2123}
27a36778 2124
52a11cbf
RH
2125static void
2126sjlj_mark_call_sites (lp_info)
2127 struct sjlj_lp_info *lp_info;
27a36778 2128{
52a11cbf
RH
2129 int last_call_site = -2;
2130 rtx insn, mem;
2131
52a11cbf 2132 for (insn = get_insns (); insn ; insn = NEXT_INSN (insn))
27a36778 2133 {
52a11cbf
RH
2134 struct eh_region *region;
2135 int this_call_site;
2136 rtx note, before, p;
27a36778 2137
52a11cbf
RH
2138 /* Reset value tracking at extended basic block boundaries. */
2139 if (GET_CODE (insn) == CODE_LABEL)
2140 last_call_site = -2;
27a36778 2141
52a11cbf
RH
2142 if (! INSN_P (insn))
2143 continue;
27a36778 2144
52a11cbf
RH
2145 note = find_reg_note (insn, REG_EH_REGION, NULL_RTX);
2146 if (!note)
2147 {
2148 /* Calls (and trapping insns) without notes are outside any
2149 exception handling region in this function. Mark them as
2150 no action. */
2151 if (GET_CODE (insn) == CALL_INSN
2152 || (flag_non_call_exceptions
2153 && may_trap_p (PATTERN (insn))))
2154 this_call_site = -1;
2155 else
2156 continue;
2157 }
2158 else
2159 {
2160 /* Calls that are known to not throw need not be marked. */
2161 if (INTVAL (XEXP (note, 0)) <= 0)
2162 continue;
27a36778 2163
52a11cbf
RH
2164 region = cfun->eh->region_array[INTVAL (XEXP (note, 0))];
2165 this_call_site = lp_info[region->region_number].call_site_index;
2166 }
27a36778 2167
52a11cbf
RH
2168 if (this_call_site == last_call_site)
2169 continue;
2170
 2171      /* Don't separate a call from its argument loads. */
2172 before = insn;
2173 if (GET_CODE (insn) == CALL_INSN)
833366d6 2174 before = find_first_parameter_load (insn, NULL_RTX);
4956d07c 2175
52a11cbf 2176 start_sequence ();
fd2c57a9
AH
2177 mem = adjust_address (cfun->eh->sjlj_fc, TYPE_MODE (integer_type_node),
2178 sjlj_fc_call_site_ofs);
52a11cbf
RH
2179 emit_move_insn (mem, GEN_INT (this_call_site));
2180 p = get_insns ();
2181 end_sequence ();
12670d88 2182
52a11cbf
RH
2183 emit_insns_before (p, before);
2184 last_call_site = this_call_site;
2185 }
2186}
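
/* Illustrative aside (not part of except.c): the value tracking above in
   miniature.  A store of the call-site number is "emitted" only when it
   differs from the last one stored, and the remembered value is discarded
   at every label, since control may reach a label from elsewhere.  The
   data layout here is hypothetical.  */

#include <stdio.h>

struct insn_sketch
{
  int is_label;
  int call_site;
};

static void
mark_call_sites_sketch (const struct insn_sketch *insns, int n)
{
  int last_call_site = -2;	/* nothing stored yet */
  int i;

  for (i = 0; i < n; ++i)
    {
      if (insns[i].is_label)
	{
	  last_call_site = -2;	/* extended basic block boundary */
	  continue;
	}
      if (insns[i].call_site == last_call_site)
	continue;		/* already valid on every path to here */

      printf ("store call site %d before insn %d\n", insns[i].call_site, i);
      last_call_site = insns[i].call_site;
    }
}

int
main (void)
{
  static const struct insn_sketch insns[] =
    { { 0, 1 }, { 0, 1 }, { 1, 0 }, { 0, 1 } };

  /* Emits two stores: before insn 0, and again before insn 3 because the
     label at insn 2 invalidated the tracked value.  */
  mark_call_sites_sketch (insns, 4);
  return 0;
}
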
4956d07c 2187
52a11cbf
RH
2188/* Construct the SjLj_Function_Context. */
2189
2190static void
2191sjlj_emit_function_enter (dispatch_label)
2192 rtx dispatch_label;
4956d07c 2193{
52a11cbf 2194 rtx fn_begin, fc, mem, seq;
4956d07c 2195
52a11cbf 2196 fc = cfun->eh->sjlj_fc;
4956d07c 2197
52a11cbf 2198 start_sequence ();
8a4451aa 2199
8979edec
JL
2200 /* We're storing this libcall's address into memory instead of
2201 calling it directly. Thus, we must call assemble_external_libcall
 2202     here, as we cannot depend on emit_library_call to do it for us. */
2203 assemble_external_libcall (eh_personality_libfunc);
f4ef873c 2204 mem = adjust_address (fc, Pmode, sjlj_fc_personality_ofs);
52a11cbf
RH
2205 emit_move_insn (mem, eh_personality_libfunc);
2206
f4ef873c 2207 mem = adjust_address (fc, Pmode, sjlj_fc_lsda_ofs);
52a11cbf
RH
2208 if (cfun->uses_eh_lsda)
2209 {
2210 char buf[20];
2211 ASM_GENERATE_INTERNAL_LABEL (buf, "LLSDA", sjlj_funcdef_number);
2212 emit_move_insn (mem, gen_rtx_SYMBOL_REF (Pmode, ggc_strdup (buf)));
8a4451aa 2213 }
52a11cbf
RH
2214 else
2215 emit_move_insn (mem, const0_rtx);
3f2c5d1a 2216
52a11cbf
RH
2217#ifdef DONT_USE_BUILTIN_SETJMP
2218 {
2219 rtx x, note;
9defc9b7 2220 x = emit_library_call_value (setjmp_libfunc, NULL_RTX, LCT_RETURNS_TWICE,
52a11cbf
RH
2221 TYPE_MODE (integer_type_node), 1,
2222 plus_constant (XEXP (fc, 0),
2223 sjlj_fc_jbuf_ofs), Pmode);
2224
2225 note = emit_note (NULL, NOTE_INSN_EXPECTED_VALUE);
2226 NOTE_EXPECTED_VALUE (note) = gen_rtx_EQ (VOIDmode, x, const0_rtx);
2227
2228 emit_cmp_and_jump_insns (x, const0_rtx, NE, 0,
a06ef755 2229 TYPE_MODE (integer_type_node), 0, dispatch_label);
52a11cbf
RH
2230 }
2231#else
2232 expand_builtin_setjmp_setup (plus_constant (XEXP (fc, 0), sjlj_fc_jbuf_ofs),
2233 dispatch_label);
4956d07c 2234#endif
4956d07c 2235
52a11cbf
RH
2236 emit_library_call (unwind_sjlj_register_libfunc, LCT_NORMAL, VOIDmode,
2237 1, XEXP (fc, 0), Pmode);
12670d88 2238
52a11cbf
RH
2239 seq = get_insns ();
2240 end_sequence ();
4956d07c 2241
52a11cbf
RH
2242 /* ??? Instead of doing this at the beginning of the function,
2243 do this in a block that is at loop level 0 and dominates all
2244 can_throw_internal instructions. */
4956d07c 2245
52a11cbf
RH
2246 for (fn_begin = get_insns (); ; fn_begin = NEXT_INSN (fn_begin))
2247 if (GET_CODE (fn_begin) == NOTE
2248 && NOTE_LINE_NUMBER (fn_begin) == NOTE_INSN_FUNCTION_BEG)
2249 break;
2250 emit_insns_after (seq, fn_begin);
4956d07c
MS
2251}
2252
52a11cbf
RH
2253/* Call back from expand_function_end to know where we should put
2254 the call to unwind_sjlj_unregister_libfunc if needed. */
12670d88 2255
52a11cbf
RH
2256void
2257sjlj_emit_function_exit_after (after)
2258 rtx after;
2259{
2260 cfun->eh->sjlj_exit_after = after;
2261}
4956d07c
MS
2262
2263static void
52a11cbf
RH
2264sjlj_emit_function_exit ()
2265{
2266 rtx seq;
4956d07c 2267
52a11cbf 2268 start_sequence ();
ce152ef8 2269
52a11cbf
RH
2270 emit_library_call (unwind_sjlj_unregister_libfunc, LCT_NORMAL, VOIDmode,
2271 1, XEXP (cfun->eh->sjlj_fc, 0), Pmode);
e6cfb550 2272
52a11cbf
RH
2273 seq = get_insns ();
2274 end_sequence ();
4956d07c 2275
52a11cbf
RH
2276 /* ??? Really this can be done in any block at loop level 0 that
2277 post-dominates all can_throw_internal instructions. This is
2278 the last possible moment. */
9a0d1e1b 2279
52a11cbf 2280 emit_insns_after (seq, cfun->eh->sjlj_exit_after);
9a0d1e1b
AM
2281}
2282
52a11cbf
RH
2283static void
2284sjlj_emit_dispatch_table (dispatch_label, lp_info)
2285 rtx dispatch_label;
2286 struct sjlj_lp_info *lp_info;
ce152ef8 2287{
52a11cbf
RH
2288 int i, first_reachable;
2289 rtx mem, dispatch, seq, fc;
2290
2291 fc = cfun->eh->sjlj_fc;
2292
2293 start_sequence ();
2294
2295 emit_label (dispatch_label);
3f2c5d1a 2296
52a11cbf
RH
2297#ifndef DONT_USE_BUILTIN_SETJMP
2298 expand_builtin_setjmp_receiver (dispatch_label);
2299#endif
2300
2301 /* Load up dispatch index, exc_ptr and filter values from the
2302 function context. */
f4ef873c
RK
2303 mem = adjust_address (fc, TYPE_MODE (integer_type_node),
2304 sjlj_fc_call_site_ofs);
52a11cbf
RH
2305 dispatch = copy_to_reg (mem);
2306
f4ef873c 2307 mem = adjust_address (fc, word_mode, sjlj_fc_data_ofs);
52a11cbf
RH
2308 if (word_mode != Pmode)
2309 {
2310#ifdef POINTERS_EXTEND_UNSIGNED
2311 mem = convert_memory_address (Pmode, mem);
2312#else
2313 mem = convert_to_mode (Pmode, mem, 0);
2314#endif
2315 }
2316 emit_move_insn (cfun->eh->exc_ptr, mem);
2317
f4ef873c 2318 mem = adjust_address (fc, word_mode, sjlj_fc_data_ofs + UNITS_PER_WORD);
52a11cbf 2319 emit_move_insn (cfun->eh->filter, mem);
4956d07c 2320
52a11cbf
RH
2321 /* Jump to one of the directly reachable regions. */
2322 /* ??? This really ought to be using a switch statement. */
2323
2324 first_reachable = 0;
2325 for (i = cfun->eh->last_region_number; i > 0; --i)
a1622f83 2326 {
98ce21b3 2327 if (! lp_info[i].directly_reachable)
52a11cbf 2328 continue;
a1622f83 2329
52a11cbf
RH
2330 if (! first_reachable)
2331 {
2332 first_reachable = i;
2333 continue;
2334 }
e6cfb550 2335
a06ef755
RK
2336 emit_cmp_and_jump_insns (dispatch, GEN_INT (lp_info[i].dispatch_index),
2337 EQ, NULL_RTX, TYPE_MODE (integer_type_node), 0,
52a11cbf 2338 cfun->eh->region_array[i]->post_landing_pad);
a1622f83 2339 }
9a0d1e1b 2340
52a11cbf
RH
2341 seq = get_insns ();
2342 end_sequence ();
4956d07c 2343
52a11cbf
RH
2344 emit_insns_before (seq, (cfun->eh->region_array[first_reachable]
2345 ->post_landing_pad));
ce152ef8
AM
2346}
2347
52a11cbf
RH
2348static void
2349sjlj_build_landing_pads ()
ce152ef8 2350{
52a11cbf 2351 struct sjlj_lp_info *lp_info;
ce152ef8 2352
52a11cbf
RH
2353 lp_info = (struct sjlj_lp_info *) xcalloc (cfun->eh->last_region_number + 1,
2354 sizeof (struct sjlj_lp_info));
ce152ef8 2355
52a11cbf
RH
2356 if (sjlj_find_directly_reachable_regions (lp_info))
2357 {
2358 rtx dispatch_label = gen_label_rtx ();
ce152ef8 2359
52a11cbf
RH
2360 cfun->eh->sjlj_fc
2361 = assign_stack_local (TYPE_MODE (sjlj_fc_type_node),
2362 int_size_in_bytes (sjlj_fc_type_node),
2363 TYPE_ALIGN (sjlj_fc_type_node));
4956d07c 2364
52a11cbf
RH
2365 sjlj_assign_call_site_values (dispatch_label, lp_info);
2366 sjlj_mark_call_sites (lp_info);
a1622f83 2367
52a11cbf
RH
2368 sjlj_emit_function_enter (dispatch_label);
2369 sjlj_emit_dispatch_table (dispatch_label, lp_info);
2370 sjlj_emit_function_exit ();
2371 }
a1622f83 2372
52a11cbf 2373 free (lp_info);
4956d07c 2374}
ce152ef8 2375
ce152ef8 2376void
52a11cbf 2377finish_eh_generation ()
ce152ef8 2378{
52a11cbf
RH
2379 /* Nothing to do if no regions created. */
2380 if (cfun->eh->region_tree == NULL)
ce152ef8
AM
2381 return;
2382
52a11cbf
RH
2383 /* The object here is to provide find_basic_blocks with detailed
2384 information (via reachable_handlers) on how exception control
2385 flows within the function. In this first pass, we can include
2386 type information garnered from ERT_THROW and ERT_ALLOWED_EXCEPTIONS
2387 regions, and hope that it will be useful in deleting unreachable
2388 handlers. Subsequently, we will generate landing pads which will
2389 connect many of the handlers, and then type information will not
2390 be effective. Still, this is a win over previous implementations. */
2391
4793dca1 2392 rebuild_jump_labels (get_insns ());
52a11cbf 2393 find_basic_blocks (get_insns (), max_reg_num (), 0);
0068fd96 2394 cleanup_cfg (CLEANUP_PRE_LOOP);
52a11cbf
RH
2395
2396 /* These registers are used by the landing pads. Make sure they
2397 have been generated. */
86c99549
RH
2398 get_exception_pointer (cfun);
2399 get_exception_filter (cfun);
52a11cbf
RH
2400
2401 /* Construct the landing pads. */
2402
2403 assign_filter_values ();
2404 build_post_landing_pads ();
2405 connect_post_landing_pads ();
2406 if (USING_SJLJ_EXCEPTIONS)
2407 sjlj_build_landing_pads ();
2408 else
2409 dw2_build_landing_pads ();
ce152ef8 2410
52a11cbf 2411 cfun->eh->built_landing_pads = 1;
ce152ef8 2412
52a11cbf
RH
2413 /* We've totally changed the CFG. Start over. */
2414 find_exception_handler_labels ();
4793dca1 2415 rebuild_jump_labels (get_insns ());
52a11cbf 2416 find_basic_blocks (get_insns (), max_reg_num (), 0);
0068fd96 2417 cleanup_cfg (CLEANUP_PRE_LOOP);
ce152ef8 2418}
4956d07c 2419\f
52a11cbf 2420/* This section handles removing dead code for flow. */
154bba13 2421
52a11cbf 2422/* Remove LABEL from the exception_handler_labels list. */
154bba13 2423
52a11cbf
RH
2424static void
2425remove_exception_handler_label (label)
2426 rtx label;
154bba13 2427{
52a11cbf 2428 rtx *pl, l;
100d81d4 2429
52a11cbf
RH
2430 for (pl = &exception_handler_labels, l = *pl;
2431 XEXP (l, 0) != label;
2432 pl = &XEXP (l, 1), l = *pl)
2433 continue;
154bba13 2434
52a11cbf
RH
2435 *pl = XEXP (l, 1);
2436 free_EXPR_LIST_node (l);
154bba13
TT
2437}
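
/* Illustrative aside (not part of except.c): the pointer-to-pointer unlink
   idiom used above, shown on a plain singly linked list instead of an
   EXPR_LIST.  As in the loop above, the element is assumed to be present,
   so no end-of-list check is needed.  The node type is hypothetical.  */

#include <stdlib.h>

struct node_sketch
{
  int value;
  struct node_sketch *next;
};

void
unlink_node_sketch (struct node_sketch **head, int value)
{
  struct node_sketch **pl, *l;

  for (pl = head, l = *pl; l->value != value; pl = &l->next, l = *pl)
    continue;

  *pl = l->next;	/* splice out without tracking a "previous" node */
  free (l);
}
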
2438
52a11cbf 2439/* Splice REGION from the region tree etc. */
12670d88 2440
f19c9228 2441static void
52a11cbf
RH
2442remove_eh_handler (region)
2443 struct eh_region *region;
4956d07c 2444{
52a11cbf
RH
2445 struct eh_region **pp, *p;
2446 rtx lab;
2447 int i;
4956d07c 2448
52a11cbf
RH
2449 /* For the benefit of efficiently handling REG_EH_REGION notes,
2450 replace this region in the region array with its containing
2451 region. Note that previous region deletions may result in
2452 multiple copies of this region in the array, so we have to
2453 search the whole thing. */
2454 for (i = cfun->eh->last_region_number; i > 0; --i)
2455 if (cfun->eh->region_array[i] == region)
2456 cfun->eh->region_array[i] = region->outer;
2457
2458 if (cfun->eh->built_landing_pads)
2459 lab = region->landing_pad;
2460 else
2461 lab = region->label;
2462 if (lab)
2463 remove_exception_handler_label (lab);
2464
2465 if (region->outer)
2466 pp = &region->outer->inner;
2467 else
2468 pp = &cfun->eh->region_tree;
2469 for (p = *pp; p != region; pp = &p->next_peer, p = *pp)
2470 continue;
12670d88 2471
52a11cbf 2472 if (region->inner)
4956d07c 2473 {
52a11cbf
RH
2474 for (p = region->inner; p->next_peer ; p = p->next_peer)
2475 p->outer = region->outer;
2476 p->next_peer = region->next_peer;
2477 p->outer = region->outer;
2478 *pp = region->inner;
4956d07c 2479 }
52a11cbf
RH
2480 else
2481 *pp = region->next_peer;
f19c9228 2482
52a11cbf
RH
2483 if (region->type == ERT_CATCH)
2484 {
2485 struct eh_region *try, *next, *prev;
f19c9228 2486
52a11cbf
RH
2487 for (try = region->next_peer;
2488 try->type == ERT_CATCH;
2489 try = try->next_peer)
2490 continue;
2491 if (try->type != ERT_TRY)
2492 abort ();
f19c9228 2493
52a11cbf
RH
2494 next = region->u.catch.next_catch;
2495 prev = region->u.catch.prev_catch;
f19c9228 2496
52a11cbf
RH
2497 if (next)
2498 next->u.catch.prev_catch = prev;
2499 else
2500 try->u.try.last_catch = prev;
2501 if (prev)
2502 prev->u.catch.next_catch = next;
2503 else
2504 {
2505 try->u.try.catch = next;
2506 if (! next)
2507 remove_eh_handler (try);
2508 }
2509 }
988cea7d 2510
52a11cbf 2511 free (region);
4956d07c
MS
2512}
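
/* Illustrative aside (not part of except.c): the tree surgery above,
   reduced to a bare first-child/next-sibling tree.  The removed node's
   children are re-parented to its parent and spliced into its place in
   the sibling list, which is the inner/next_peer/outer manipulation
   performed above; the type and function names are hypothetical.  */

struct tree_sketch
{
  struct tree_sketch *outer;		/* parent */
  struct tree_sketch *inner;		/* first child */
  struct tree_sketch *next_peer;	/* next sibling */
};

void
splice_node_sketch (struct tree_sketch **root, struct tree_sketch *node)
{
  struct tree_sketch **pp, *p;

  /* Find the link that currently points at NODE.  */
  if (node->outer)
    pp = &node->outer->inner;
  else
    pp = root;
  for (p = *pp; p != node; pp = &p->next_peer, p = *pp)
    continue;

  if (node->inner)
    {
      /* Re-parent the children, then splice the child list in.  */
      for (p = node->inner; p->next_peer; p = p->next_peer)
	p->outer = node->outer;
      p->outer = node->outer;
      p->next_peer = node->next_peer;
      *pp = node->inner;
    }
  else
    *pp = node->next_peer;
}
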
2513
52a11cbf
RH
2514/* LABEL heads a basic block that is about to be deleted. If this
2515 label corresponds to an exception region, we may be able to
2516 delete the region. */
4956d07c
MS
2517
2518void
52a11cbf
RH
2519maybe_remove_eh_handler (label)
2520 rtx label;
4956d07c 2521{
52a11cbf 2522 int i;
4956d07c 2523
52a11cbf
RH
2524 /* ??? After generating landing pads, it's not so simple to determine
2525 if the region data is completely unused. One must examine the
2526 landing pad and the post landing pad, and whether an inner try block
2527 is referencing the catch handlers directly. */
2528 if (cfun->eh->built_landing_pads)
4956d07c
MS
2529 return;
2530
52a11cbf 2531 for (i = cfun->eh->last_region_number; i > 0; --i)
87ff9c8e 2532 {
52a11cbf
RH
2533 struct eh_region *region = cfun->eh->region_array[i];
2534 if (region && region->label == label)
87ff9c8e 2535 {
52a11cbf
RH
2536 /* Flow will want to remove MUST_NOT_THROW regions as unreachable
2537 because there is no path to the fallback call to terminate.
2538 But the region continues to affect call-site data until there
2539 are no more contained calls, which we don't see here. */
2540 if (region->type == ERT_MUST_NOT_THROW)
2541 {
2542 remove_exception_handler_label (region->label);
2543 region->label = NULL_RTX;
2544 }
2545 else
2546 remove_eh_handler (region);
2547 break;
87ff9c8e 2548 }
87ff9c8e
RH
2549 }
2550}
2551
52a11cbf
RH
2552\f
2553/* This section describes CFG exception edges for flow. */
87ff9c8e 2554
52a11cbf
RH
2555/* For communicating between calls to reachable_next_level. */
2556struct reachable_info
87ff9c8e 2557{
52a11cbf
RH
2558 tree types_caught;
2559 tree types_allowed;
2560 rtx handlers;
2561};
87ff9c8e 2562
52a11cbf
RH
2563/* A subroutine of reachable_next_level. Return true if TYPE, or a
2564 base class of TYPE, is in HANDLED. */
87ff9c8e 2565
52a11cbf
RH
2566static int
2567check_handled (handled, type)
2568 tree handled, type;
87ff9c8e 2569{
52a11cbf
RH
2570 tree t;
2571
2572 /* We can check for exact matches without front-end help. */
2573 if (! lang_eh_type_covers)
f54a7f6f 2574 {
52a11cbf
RH
2575 for (t = handled; t ; t = TREE_CHAIN (t))
2576 if (TREE_VALUE (t) == type)
2577 return 1;
2578 }
2579 else
2580 {
2581 for (t = handled; t ; t = TREE_CHAIN (t))
2582 if ((*lang_eh_type_covers) (TREE_VALUE (t), type))
2583 return 1;
f54a7f6f 2584 }
52a11cbf
RH
2585
2586 return 0;
87ff9c8e
RH
2587}
2588
52a11cbf
RH
2589/* A subroutine of reachable_next_level. If we are collecting a list
2590 of handlers, add one. After landing pad generation, reference
2591 it instead of the handlers themselves. Further, the handlers are
3f2c5d1a 2592 all wired together, so by referencing one, we've got them all.
52a11cbf
RH
2593 Before landing pad generation we reference each handler individually.
2594
2595 LP_REGION contains the landing pad; REGION is the handler. */
87ff9c8e
RH
2596
2597static void
52a11cbf
RH
2598add_reachable_handler (info, lp_region, region)
2599 struct reachable_info *info;
2600 struct eh_region *lp_region;
2601 struct eh_region *region;
87ff9c8e 2602{
52a11cbf
RH
2603 if (! info)
2604 return;
2605
2606 if (cfun->eh->built_landing_pads)
87ff9c8e 2607 {
52a11cbf
RH
2608 if (! info->handlers)
2609 info->handlers = alloc_INSN_LIST (lp_region->landing_pad, NULL_RTX);
87ff9c8e 2610 }
52a11cbf
RH
2611 else
2612 info->handlers = alloc_INSN_LIST (region->label, info->handlers);
87ff9c8e
RH
2613}
2614
3f2c5d1a 2615/* Process one level of exception regions for reachability.
52a11cbf
RH
2616 If TYPE_THROWN is non-null, then it is the *exact* type being
2617 propagated. If INFO is non-null, then collect handler labels
2618 and caught/allowed type information between invocations. */
87ff9c8e 2619
52a11cbf
RH
2620static enum reachable_code
2621reachable_next_level (region, type_thrown, info)
2622 struct eh_region *region;
2623 tree type_thrown;
2624 struct reachable_info *info;
87ff9c8e 2625{
52a11cbf
RH
2626 switch (region->type)
2627 {
2628 case ERT_CLEANUP:
2629 /* Before landing-pad generation, we model control flow
2630 directly to the individual handlers. In this way we can
2631 see that catch handler types may shadow one another. */
2632 add_reachable_handler (info, region, region);
2633 return RNL_MAYBE_CAUGHT;
2634
2635 case ERT_TRY:
2636 {
2637 struct eh_region *c;
2638 enum reachable_code ret = RNL_NOT_CAUGHT;
fa51b01b 2639
52a11cbf
RH
2640 for (c = region->u.try.catch; c ; c = c->u.catch.next_catch)
2641 {
2642 /* A catch-all handler ends the search. */
2643 /* ??? _Unwind_ForcedUnwind will want outer cleanups
2644 to be run as well. */
6d41a92f 2645 if (c->u.catch.type_list == NULL)
52a11cbf
RH
2646 {
2647 add_reachable_handler (info, region, c);
2648 return RNL_CAUGHT;
2649 }
2650
2651 if (type_thrown)
2652 {
6d41a92f
OH
 2653	      /* If we have at least one type match, end the search. */
2654 tree tp_node = c->u.catch.type_list;
3f2c5d1a 2655
6d41a92f 2656 for (; tp_node; tp_node = TREE_CHAIN (tp_node))
52a11cbf 2657 {
6d41a92f
OH
2658 tree type = TREE_VALUE (tp_node);
2659
2660 if (type == type_thrown
2661 || (lang_eh_type_covers
2662 && (*lang_eh_type_covers) (type, type_thrown)))
2663 {
2664 add_reachable_handler (info, region, c);
2665 return RNL_CAUGHT;
2666 }
52a11cbf
RH
2667 }
2668
2669 /* If we have definitive information of a match failure,
2670 the catch won't trigger. */
2671 if (lang_eh_type_covers)
2672 return RNL_NOT_CAUGHT;
2673 }
2674
6d41a92f
OH
2675 /* At this point, we either don't know what type is thrown or
 2676	     don't have front-end assistance to help decide whether it is
2677 covered by one of the types in the list for this region.
3f2c5d1a 2678
6d41a92f
OH
2679 We'd then like to add this region to the list of reachable
2680 handlers since it is indeed potentially reachable based on the
3f2c5d1a
RS
2681 information we have.
2682
6d41a92f
OH
2683 Actually, this handler is for sure not reachable if all the
2684 types it matches have already been caught. That is, it is only
2685 potentially reachable if at least one of the types it catches
2686 has not been previously caught. */
2687
52a11cbf
RH
2688 if (! info)
2689 ret = RNL_MAYBE_CAUGHT;
6d41a92f 2690 else
52a11cbf 2691 {
6d41a92f
OH
2692 tree tp_node = c->u.catch.type_list;
2693 bool maybe_reachable = false;
52a11cbf 2694
6d41a92f
OH
2695 /* Compute the potential reachability of this handler and
2696 update the list of types caught at the same time. */
2697 for (; tp_node; tp_node = TREE_CHAIN (tp_node))
2698 {
2699 tree type = TREE_VALUE (tp_node);
2700
2701 if (! check_handled (info->types_caught, type))
2702 {
2703 info->types_caught
2704 = tree_cons (NULL, type, info->types_caught);
3f2c5d1a 2705
6d41a92f
OH
2706 maybe_reachable = true;
2707 }
2708 }
3f2c5d1a 2709
6d41a92f
OH
2710 if (maybe_reachable)
2711 {
2712 add_reachable_handler (info, region, c);
3f2c5d1a 2713
6d41a92f
OH
2714 /* ??? If the catch type is a base class of every allowed
2715 type, then we know we can stop the search. */
2716 ret = RNL_MAYBE_CAUGHT;
2717 }
52a11cbf
RH
2718 }
2719 }
87ff9c8e 2720
52a11cbf
RH
2721 return ret;
2722 }
87ff9c8e 2723
52a11cbf
RH
2724 case ERT_ALLOWED_EXCEPTIONS:
2725 /* An empty list of types definitely ends the search. */
2726 if (region->u.allowed.type_list == NULL_TREE)
2727 {
2728 add_reachable_handler (info, region, region);
2729 return RNL_CAUGHT;
2730 }
87ff9c8e 2731
52a11cbf
RH
2732 /* Collect a list of lists of allowed types for use in detecting
2733 when a catch may be transformed into a catch-all. */
2734 if (info)
2735 info->types_allowed = tree_cons (NULL_TREE,
2736 region->u.allowed.type_list,
2737 info->types_allowed);
3f2c5d1a 2738
684d9f3b 2739 /* If we have definitive information about the type hierarchy,
52a11cbf
RH
2740 then we can tell if the thrown type will pass through the
2741 filter. */
2742 if (type_thrown && lang_eh_type_covers)
2743 {
2744 if (check_handled (region->u.allowed.type_list, type_thrown))
2745 return RNL_NOT_CAUGHT;
2746 else
2747 {
2748 add_reachable_handler (info, region, region);
2749 return RNL_CAUGHT;
2750 }
2751 }
21cd906e 2752
52a11cbf
RH
2753 add_reachable_handler (info, region, region);
2754 return RNL_MAYBE_CAUGHT;
21cd906e 2755
52a11cbf
RH
2756 case ERT_CATCH:
 2757      /* Catch regions are handled by their controlling try region. */
2758 return RNL_NOT_CAUGHT;
21cd906e 2759
52a11cbf
RH
2760 case ERT_MUST_NOT_THROW:
2761 /* Here we end our search, since no exceptions may propagate.
 2762	 If we've touched down at some landing pad previously, then the
2763 explicit function call we generated may be used. Otherwise
2764 the call is made by the runtime. */
2765 if (info && info->handlers)
21cd906e 2766 {
52a11cbf
RH
2767 add_reachable_handler (info, region, region);
2768 return RNL_CAUGHT;
21cd906e 2769 }
52a11cbf
RH
2770 else
2771 return RNL_BLOCKED;
21cd906e 2772
52a11cbf
RH
2773 case ERT_THROW:
2774 case ERT_FIXUP:
3f2c5d1a 2775 case ERT_UNKNOWN:
52a11cbf
RH
2776 /* Shouldn't see these here. */
2777 break;
21cd906e 2778 }
fa51b01b 2779
52a11cbf 2780 abort ();
fa51b01b 2781}
4956d07c 2782
52a11cbf
RH
2783/* Retrieve a list of labels of exception handlers which can be
2784 reached by a given insn. */
4956d07c 2785
52a11cbf
RH
2786rtx
2787reachable_handlers (insn)
4956d07c
MS
2788 rtx insn;
2789{
52a11cbf
RH
2790 struct reachable_info info;
2791 struct eh_region *region;
2792 tree type_thrown;
2793 int region_number;
fb13d4d0 2794
52a11cbf
RH
2795 if (GET_CODE (insn) == JUMP_INSN
2796 && GET_CODE (PATTERN (insn)) == RESX)
2797 region_number = XINT (PATTERN (insn), 0);
2798 else
1ef1bf06
AM
2799 {
2800 rtx note = find_reg_note (insn, REG_EH_REGION, NULL_RTX);
52a11cbf
RH
2801 if (!note || INTVAL (XEXP (note, 0)) <= 0)
2802 return NULL;
2803 region_number = INTVAL (XEXP (note, 0));
27a36778 2804 }
4956d07c 2805
52a11cbf 2806 memset (&info, 0, sizeof (info));
4956d07c 2807
52a11cbf 2808 region = cfun->eh->region_array[region_number];
fb13d4d0 2809
52a11cbf 2810 type_thrown = NULL_TREE;
7f206d8f
RH
2811 if (GET_CODE (insn) == JUMP_INSN
2812 && GET_CODE (PATTERN (insn)) == RESX)
2813 {
2814 /* A RESX leaves a region instead of entering it. Thus the
2815 region itself may have been deleted out from under us. */
2816 if (region == NULL)
2817 return NULL;
2818 region = region->outer;
2819 }
2820 else if (region->type == ERT_THROW)
52a11cbf
RH
2821 {
2822 type_thrown = region->u.throw.type;
2823 region = region->outer;
2824 }
fac62ecf 2825
52a11cbf
RH
2826 for (; region; region = region->outer)
2827 if (reachable_next_level (region, type_thrown, &info) >= RNL_CAUGHT)
2828 break;
fb13d4d0 2829
52a11cbf 2830 return info.handlers;
fb13d4d0
JM
2831}
2832
52a11cbf
RH
2833/* Determine if the given INSN can throw an exception that is caught
2834 within the function. */
4956d07c 2835
52a11cbf
RH
2836bool
2837can_throw_internal (insn)
4956d07c 2838 rtx insn;
4956d07c 2839{
52a11cbf
RH
2840 struct eh_region *region;
2841 tree type_thrown;
2842 rtx note;
e6cfb550 2843
52a11cbf
RH
2844 if (! INSN_P (insn))
2845 return false;
12670d88 2846
52a11cbf
RH
2847 if (GET_CODE (insn) == INSN
2848 && GET_CODE (PATTERN (insn)) == SEQUENCE)
2849 insn = XVECEXP (PATTERN (insn), 0, 0);
4956d07c 2850
52a11cbf
RH
2851 if (GET_CODE (insn) == CALL_INSN
2852 && GET_CODE (PATTERN (insn)) == CALL_PLACEHOLDER)
4956d07c 2853 {
52a11cbf
RH
2854 int i;
2855 for (i = 0; i < 3; ++i)
4956d07c 2856 {
52a11cbf
RH
2857 rtx sub = XEXP (PATTERN (insn), i);
2858 for (; sub ; sub = NEXT_INSN (sub))
2859 if (can_throw_internal (sub))
2860 return true;
4956d07c 2861 }
52a11cbf 2862 return false;
4956d07c
MS
2863 }
2864
52a11cbf
RH
2865 /* Every insn that might throw has an EH_REGION note. */
2866 note = find_reg_note (insn, REG_EH_REGION, NULL_RTX);
2867 if (!note || INTVAL (XEXP (note, 0)) <= 0)
2868 return false;
4956d07c 2869
52a11cbf 2870 region = cfun->eh->region_array[INTVAL (XEXP (note, 0))];
4956d07c 2871
52a11cbf
RH
2872 type_thrown = NULL_TREE;
2873 if (region->type == ERT_THROW)
2874 {
2875 type_thrown = region->u.throw.type;
2876 region = region->outer;
2877 }
4956d07c 2878
52a11cbf
RH
2879 /* If this exception is ignored by each and every containing region,
2880 then control passes straight out. The runtime may handle some
2881 regions, which also do not require processing internally. */
2882 for (; region; region = region->outer)
2883 {
2884 enum reachable_code how = reachable_next_level (region, type_thrown, 0);
2885 if (how == RNL_BLOCKED)
2886 return false;
2887 if (how != RNL_NOT_CAUGHT)
2888 return true;
4956d07c 2889 }
4956d07c 2890
52a11cbf
RH
2891 return false;
2892}
4956d07c 2893
52a11cbf
RH
2894/* Determine if the given INSN can throw an exception that is
2895 visible outside the function. */
4956d07c 2896
52a11cbf
RH
2897bool
2898can_throw_external (insn)
2899 rtx insn;
4956d07c 2900{
52a11cbf
RH
2901 struct eh_region *region;
2902 tree type_thrown;
2903 rtx note;
4956d07c 2904
52a11cbf
RH
2905 if (! INSN_P (insn))
2906 return false;
2907
2908 if (GET_CODE (insn) == INSN
2909 && GET_CODE (PATTERN (insn)) == SEQUENCE)
2910 insn = XVECEXP (PATTERN (insn), 0, 0);
2911
2912 if (GET_CODE (insn) == CALL_INSN
2913 && GET_CODE (PATTERN (insn)) == CALL_PLACEHOLDER)
4956d07c 2914 {
52a11cbf
RH
2915 int i;
2916 for (i = 0; i < 3; ++i)
4956d07c 2917 {
52a11cbf
RH
2918 rtx sub = XEXP (PATTERN (insn), i);
2919 for (; sub ; sub = NEXT_INSN (sub))
2920 if (can_throw_external (sub))
2921 return true;
4956d07c 2922 }
52a11cbf 2923 return false;
4956d07c 2924 }
52a11cbf
RH
2925
2926 note = find_reg_note (insn, REG_EH_REGION, NULL_RTX);
2927 if (!note)
2928 {
2929 /* Calls (and trapping insns) without notes are outside any
2930 exception handling region in this function. We have to
2931 assume it might throw. Given that the front end and middle
2932 ends mark known NOTHROW functions, this isn't so wildly
2933 inaccurate. */
2934 return (GET_CODE (insn) == CALL_INSN
2935 || (flag_non_call_exceptions
2936 && may_trap_p (PATTERN (insn))));
2937 }
2938 if (INTVAL (XEXP (note, 0)) <= 0)
2939 return false;
2940
2941 region = cfun->eh->region_array[INTVAL (XEXP (note, 0))];
2942
2943 type_thrown = NULL_TREE;
2944 if (region->type == ERT_THROW)
2945 {
2946 type_thrown = region->u.throw.type;
2947 region = region->outer;
2948 }
2949
2950 /* If the exception is caught or blocked by any containing region,
2951 then it is not seen by any calling function. */
2952 for (; region ; region = region->outer)
2953 if (reachable_next_level (region, type_thrown, NULL) >= RNL_CAUGHT)
2954 return false;
2955
2956 return true;
4956d07c 2957}
1ef1bf06 2958
52a11cbf 2959/* True if nothing in this function can throw outside this function. */
6814a8a0 2960
52a11cbf
RH
2961bool
2962nothrow_function_p ()
1ef1bf06
AM
2963{
2964 rtx insn;
1ef1bf06 2965
52a11cbf
RH
2966 if (! flag_exceptions)
2967 return true;
1ef1bf06 2968
1ef1bf06 2969 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
52a11cbf
RH
2970 if (can_throw_external (insn))
2971 return false;
2972 for (insn = current_function_epilogue_delay_list; insn;
2973 insn = XEXP (insn, 1))
2974 if (can_throw_external (insn))
2975 return false;
4da896b2 2976
52a11cbf 2977 return true;
1ef1bf06 2978}
52a11cbf 2979
ca55abae 2980\f
52a11cbf 2981/* Various hooks for unwind library. */
ca55abae
JM
2982
2983/* Do any necessary initialization to access arbitrary stack frames.
2984 On the SPARC, this means flushing the register windows. */
2985
2986void
2987expand_builtin_unwind_init ()
2988{
2989 /* Set this so all the registers get saved in our frame; we need to be
30f7a378 2990 able to copy the saved values for any registers from frames we unwind. */
ca55abae
JM
2991 current_function_has_nonlocal_label = 1;
2992
2993#ifdef SETUP_FRAME_ADDRESSES
2994 SETUP_FRAME_ADDRESSES ();
2995#endif
2996}
2997
52a11cbf
RH
2998rtx
2999expand_builtin_eh_return_data_regno (arglist)
3000 tree arglist;
3001{
3002 tree which = TREE_VALUE (arglist);
3003 unsigned HOST_WIDE_INT iwhich;
3004
3005 if (TREE_CODE (which) != INTEGER_CST)
3006 {
3007 error ("argument of `__builtin_eh_return_regno' must be constant");
3008 return constm1_rtx;
3009 }
3010
3011 iwhich = tree_low_cst (which, 1);
3012 iwhich = EH_RETURN_DATA_REGNO (iwhich);
3013 if (iwhich == INVALID_REGNUM)
3014 return constm1_rtx;
3015
3016#ifdef DWARF_FRAME_REGNUM
3017 iwhich = DWARF_FRAME_REGNUM (iwhich);
3018#else
3019 iwhich = DBX_REGISTER_NUMBER (iwhich);
3020#endif
3021
3f2c5d1a 3022 return GEN_INT (iwhich);
52a11cbf
RH
3023}
3024
ca55abae
JM
3025/* Given a value extracted from the return address register or stack slot,
3026 return the actual address encoded in that value. */
3027
3028rtx
3029expand_builtin_extract_return_addr (addr_tree)
3030 tree addr_tree;
3031{
3032 rtx addr = expand_expr (addr_tree, NULL_RTX, Pmode, 0);
52a11cbf
RH
3033
3034 /* First mask out any unwanted bits. */
3035#ifdef MASK_RETURN_ADDR
3036 expand_and (addr, MASK_RETURN_ADDR, addr);
3037#endif
3038
3039 /* Then adjust to find the real return address. */
3040#if defined (RETURN_ADDR_OFFSET)
3041 addr = plus_constant (addr, RETURN_ADDR_OFFSET);
3042#endif
3043
3044 return addr;
ca55abae
JM
3045}
3046
3047/* Given an actual address in addr_tree, do any necessary encoding
3048 and return the value to be stored in the return address register or
3049 stack slot so the epilogue will return to that address. */
3050
3051rtx
3052expand_builtin_frob_return_addr (addr_tree)
3053 tree addr_tree;
3054{
4b6c1672 3055 rtx addr = expand_expr (addr_tree, NULL_RTX, ptr_mode, 0);
52a11cbf 3056
be128cd9 3057#ifdef POINTERS_EXTEND_UNSIGNED
4b6c1672
RK
3058 if (GET_MODE (addr) != Pmode)
3059 addr = convert_memory_address (Pmode, addr);
be128cd9
RK
3060#endif
3061
ca55abae 3062#ifdef RETURN_ADDR_OFFSET
52a11cbf 3063 addr = force_reg (Pmode, addr);
ca55abae
JM
3064 addr = plus_constant (addr, -RETURN_ADDR_OFFSET);
3065#endif
52a11cbf 3066
ca55abae
JM
3067 return addr;
3068}
3069
52a11cbf
RH
3070/* Set up the epilogue with the magic bits we'll need to return to the
3071 exception handler. */
ca55abae 3072
52a11cbf
RH
3073void
3074expand_builtin_eh_return (stackadj_tree, handler_tree)
3075 tree stackadj_tree, handler_tree;
ca55abae 3076{
52a11cbf 3077 rtx stackadj, handler;
ca55abae 3078
52a11cbf
RH
3079 stackadj = expand_expr (stackadj_tree, cfun->eh->ehr_stackadj, VOIDmode, 0);
3080 handler = expand_expr (handler_tree, cfun->eh->ehr_handler, VOIDmode, 0);
ca55abae 3081
be128cd9 3082#ifdef POINTERS_EXTEND_UNSIGNED
4b6c1672
RK
3083 if (GET_MODE (stackadj) != Pmode)
3084 stackadj = convert_memory_address (Pmode, stackadj);
3085
3086 if (GET_MODE (handler) != Pmode)
3087 handler = convert_memory_address (Pmode, handler);
be128cd9
RK
3088#endif
3089
52a11cbf
RH
3090 if (! cfun->eh->ehr_label)
3091 {
3092 cfun->eh->ehr_stackadj = copy_to_reg (stackadj);
3093 cfun->eh->ehr_handler = copy_to_reg (handler);
3094 cfun->eh->ehr_label = gen_label_rtx ();
3095 }
ca55abae 3096 else
ca55abae 3097 {
52a11cbf
RH
3098 if (stackadj != cfun->eh->ehr_stackadj)
3099 emit_move_insn (cfun->eh->ehr_stackadj, stackadj);
3100 if (handler != cfun->eh->ehr_handler)
3101 emit_move_insn (cfun->eh->ehr_handler, handler);
ca55abae
JM
3102 }
3103
52a11cbf 3104 emit_jump (cfun->eh->ehr_label);
a1622f83
AM
3105}
3106
71038426
RH
3107void
3108expand_eh_return ()
ca55abae 3109{
52a11cbf 3110 rtx sa, ra, around_label;
ca55abae 3111
52a11cbf 3112 if (! cfun->eh->ehr_label)
71038426 3113 return;
ca55abae 3114
52a11cbf
RH
3115 sa = EH_RETURN_STACKADJ_RTX;
3116 if (! sa)
71038426 3117 {
52a11cbf 3118 error ("__builtin_eh_return not supported on this target");
71038426
RH
3119 return;
3120 }
ca55abae 3121
52a11cbf 3122 current_function_calls_eh_return = 1;
ca55abae 3123
52a11cbf
RH
3124 around_label = gen_label_rtx ();
3125 emit_move_insn (sa, const0_rtx);
3126 emit_jump (around_label);
ca55abae 3127
52a11cbf
RH
3128 emit_label (cfun->eh->ehr_label);
3129 clobber_return_register ();
ca55abae 3130
52a11cbf
RH
3131#ifdef HAVE_eh_return
3132 if (HAVE_eh_return)
3133 emit_insn (gen_eh_return (cfun->eh->ehr_stackadj, cfun->eh->ehr_handler));
3134 else
71038426 3135#endif
52a11cbf
RH
3136 {
3137 ra = EH_RETURN_HANDLER_RTX;
3138 if (! ra)
3139 {
3140 error ("__builtin_eh_return not supported on this target");
3141 ra = gen_reg_rtx (Pmode);
3142 }
71038426 3143
52a11cbf 3144 emit_move_insn (sa, cfun->eh->ehr_stackadj);
be128cd9 3145 emit_move_insn (ra, cfun->eh->ehr_handler);
52a11cbf 3146 }
71038426 3147
52a11cbf 3148 emit_label (around_label);
71038426 3149}
77d33a84 3150\f
949f197f 3151/* In the following functions, we represent entries in the action table
eaec9b3d 3152 as 1-based indices. Special cases are:
949f197f
RH
3153
3154 0: null action record, non-null landing pad; implies cleanups
3155 -1: null action record, null landing pad; implies no action
3156 -2: no call-site entry; implies must_not_throw
3157 -3: we have yet to process outer regions
3158
3159 Further, no special cases apply to the "next" field of the record.
3160 For next, 0 means end of list. */
3161
52a11cbf
RH
3162struct action_record
3163{
3164 int offset;
3165 int filter;
3166 int next;
3167};
77d33a84 3168
52a11cbf
RH
3169static int
3170action_record_eq (pentry, pdata)
3171 const PTR pentry;
3172 const PTR pdata;
3173{
3174 const struct action_record *entry = (const struct action_record *) pentry;
3175 const struct action_record *data = (const struct action_record *) pdata;
3176 return entry->filter == data->filter && entry->next == data->next;
3177}
77d33a84 3178
52a11cbf
RH
3179static hashval_t
3180action_record_hash (pentry)
3181 const PTR pentry;
3182{
3183 const struct action_record *entry = (const struct action_record *) pentry;
3184 return entry->next * 1009 + entry->filter;
3185}
77d33a84 3186
52a11cbf
RH
3187static int
3188add_action_record (ar_hash, filter, next)
3189 htab_t ar_hash;
3190 int filter, next;
77d33a84 3191{
52a11cbf
RH
3192 struct action_record **slot, *new, tmp;
3193
3194 tmp.filter = filter;
3195 tmp.next = next;
3196 slot = (struct action_record **) htab_find_slot (ar_hash, &tmp, INSERT);
77d33a84 3197
52a11cbf 3198 if ((new = *slot) == NULL)
77d33a84 3199 {
52a11cbf
RH
3200 new = (struct action_record *) xmalloc (sizeof (*new));
3201 new->offset = VARRAY_ACTIVE_SIZE (cfun->eh->action_record_data) + 1;
3202 new->filter = filter;
3203 new->next = next;
3204 *slot = new;
3205
3206 /* The filter value goes in untouched. The link to the next
3207 record is a "self-relative" byte offset, or zero to indicate
 3208	 that there is no next record.  So convert the absolute 1-based
eaec9b3d 3209 indices we've been carrying around into a displacement. */
52a11cbf
RH
3210
3211 push_sleb128 (&cfun->eh->action_record_data, filter);
3212 if (next)
3213 next -= VARRAY_ACTIVE_SIZE (cfun->eh->action_record_data) + 1;
3214 push_sleb128 (&cfun->eh->action_record_data, next);
77d33a84 3215 }
77d33a84 3216
52a11cbf
RH
3217 return new->offset;
3218}
77d33a84 3219
52a11cbf
RH
3220static int
3221collect_one_action_chain (ar_hash, region)
3222 htab_t ar_hash;
3223 struct eh_region *region;
77d33a84 3224{
52a11cbf
RH
3225 struct eh_region *c;
3226 int next;
77d33a84 3227
52a11cbf
RH
3228 /* If we've reached the top of the region chain, then we have
3229 no actions, and require no landing pad. */
3230 if (region == NULL)
3231 return -1;
3232
3233 switch (region->type)
77d33a84 3234 {
52a11cbf
RH
3235 case ERT_CLEANUP:
3236 /* A cleanup adds a zero filter to the beginning of the chain, but
3237 there are special cases to look out for. If there are *only*
3238 cleanups along a path, then it compresses to a zero action.
3239 Further, if there are multiple cleanups along a path, we only
3240 need to represent one of them, as that is enough to trigger
3241 entry to the landing pad at runtime. */
3242 next = collect_one_action_chain (ar_hash, region->outer);
3243 if (next <= 0)
3244 return 0;
3245 for (c = region->outer; c ; c = c->outer)
3246 if (c->type == ERT_CLEANUP)
3247 return next;
3248 return add_action_record (ar_hash, 0, next);
3249
3250 case ERT_TRY:
3251 /* Process the associated catch regions in reverse order.
3252 If there's a catch-all handler, then we don't need to
3253 search outer regions. Use a magic -3 value to record
a1f300c0 3254 that we haven't done the outer search. */
52a11cbf
RH
3255 next = -3;
3256 for (c = region->u.try.last_catch; c ; c = c->u.catch.prev_catch)
3257 {
6d41a92f
OH
3258 if (c->u.catch.type_list == NULL)
3259 {
3260 /* Retrieve the filter from the head of the filter list
3261 where we have stored it (see assign_filter_values). */
3f2c5d1a 3262 int filter
6d41a92f
OH
3263 = TREE_INT_CST_LOW (TREE_VALUE (c->u.catch.filter_list));
3264
3265 next = add_action_record (ar_hash, filter, 0);
3266 }
52a11cbf
RH
3267 else
3268 {
6d41a92f
OH
3269 /* Once the outer search is done, trigger an action record for
3270 each filter we have. */
3271 tree flt_node;
3272
52a11cbf
RH
3273 if (next == -3)
3274 {
3275 next = collect_one_action_chain (ar_hash, region->outer);
949f197f
RH
3276
3277 /* If there is no next action, terminate the chain. */
3278 if (next == -1)
52a11cbf 3279 next = 0;
949f197f
RH
3280 /* If all outer actions are cleanups or must_not_throw,
3281 we'll have no action record for it, since we had wanted
3282 to encode these states in the call-site record directly.
3283 Add a cleanup action to the chain to catch these. */
3284 else if (next <= 0)
3285 next = add_action_record (ar_hash, 0, 0);
52a11cbf 3286 }
3f2c5d1a 3287
6d41a92f
OH
3288 flt_node = c->u.catch.filter_list;
3289 for (; flt_node; flt_node = TREE_CHAIN (flt_node))
3290 {
3291 int filter = TREE_INT_CST_LOW (TREE_VALUE (flt_node));
3292 next = add_action_record (ar_hash, filter, next);
3293 }
52a11cbf
RH
3294 }
3295 }
3296 return next;
3297
3298 case ERT_ALLOWED_EXCEPTIONS:
3299 /* An exception specification adds its filter to the
3300 beginning of the chain. */
3301 next = collect_one_action_chain (ar_hash, region->outer);
3302 return add_action_record (ar_hash, region->u.allowed.filter,
3303 next < 0 ? 0 : next);
3304
3305 case ERT_MUST_NOT_THROW:
3306 /* A must-not-throw region with no inner handlers or cleanups
3307 requires no call-site entry. Note that this differs from
3308 the no handler or cleanup case in that we do require an lsda
3309 to be generated. Return a magic -2 value to record this. */
3310 return -2;
3311
3312 case ERT_CATCH:
3313 case ERT_THROW:
3314 /* CATCH regions are handled in TRY above. THROW regions are
3315 for optimization information only and produce no output. */
3316 return collect_one_action_chain (ar_hash, region->outer);
3317
3318 default:
3319 abort ();
77d33a84
AM
3320 }
3321}
3322
52a11cbf
RH
3323static int
3324add_call_site (landing_pad, action)
3325 rtx landing_pad;
3326 int action;
77d33a84 3327{
52a11cbf
RH
3328 struct call_site_record *data = cfun->eh->call_site_data;
3329 int used = cfun->eh->call_site_data_used;
3330 int size = cfun->eh->call_site_data_size;
77d33a84 3331
52a11cbf
RH
3332 if (used >= size)
3333 {
3334 size = (size ? size * 2 : 64);
3335 data = (struct call_site_record *)
3336 xrealloc (data, sizeof (*data) * size);
3337 cfun->eh->call_site_data = data;
3338 cfun->eh->call_site_data_size = size;
3339 }
77d33a84 3340
52a11cbf
RH
3341 data[used].landing_pad = landing_pad;
3342 data[used].action = action;
77d33a84 3343
52a11cbf 3344 cfun->eh->call_site_data_used = used + 1;
77d33a84 3345
52a11cbf 3346 return used + call_site_base;
77d33a84
AM
3347}
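
/* Illustrative aside (not part of except.c): the growth policy above is
   the usual size-doubling reallocation, starting at 64 entries.  A plain
   malloc-based sketch; the element type and names are hypothetical.  */

#include <stdlib.h>

struct entry_sketch
{
  int landing_pad;
  int action;
};

static struct entry_sketch *table_sketch;
static int table_used_sketch, table_size_sketch;

/* Append one entry, growing the table as needed; return its index.  */
int
append_entry_sketch (int landing_pad, int action)
{
  if (table_used_sketch >= table_size_sketch)
    {
      table_size_sketch = table_size_sketch ? table_size_sketch * 2 : 64;
      table_sketch = (struct entry_sketch *)
	realloc (table_sketch, sizeof (*table_sketch) * table_size_sketch);
      if (table_sketch == NULL)
	abort ();
    }

  table_sketch[table_used_sketch].landing_pad = landing_pad;
  table_sketch[table_used_sketch].action = action;
  return table_used_sketch++;
}
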
3348
52a11cbf
RH
3349/* Turn REG_EH_REGION notes back into NOTE_INSN_EH_REGION notes.
3350 The new note numbers will not refer to region numbers, but
3351 instead to call site entries. */
77d33a84 3352
52a11cbf
RH
3353void
3354convert_to_eh_region_ranges ()
77d33a84 3355{
52a11cbf
RH
3356 rtx insn, iter, note;
3357 htab_t ar_hash;
3358 int last_action = -3;
3359 rtx last_action_insn = NULL_RTX;
3360 rtx last_landing_pad = NULL_RTX;
3361 rtx first_no_action_insn = NULL_RTX;
ae0ed63a 3362 int call_site = 0;
77d33a84 3363
52a11cbf
RH
3364 if (USING_SJLJ_EXCEPTIONS || cfun->eh->region_tree == NULL)
3365 return;
77d33a84 3366
52a11cbf 3367 VARRAY_UCHAR_INIT (cfun->eh->action_record_data, 64, "action_record_data");
77d33a84 3368
52a11cbf 3369 ar_hash = htab_create (31, action_record_hash, action_record_eq, free);
77d33a84 3370
52a11cbf
RH
3371 for (iter = get_insns (); iter ; iter = NEXT_INSN (iter))
3372 if (INSN_P (iter))
3373 {
3374 struct eh_region *region;
3375 int this_action;
3376 rtx this_landing_pad;
77d33a84 3377
52a11cbf
RH
3378 insn = iter;
3379 if (GET_CODE (insn) == INSN
3380 && GET_CODE (PATTERN (insn)) == SEQUENCE)
3381 insn = XVECEXP (PATTERN (insn), 0, 0);
1ef1bf06 3382
52a11cbf
RH
3383 note = find_reg_note (insn, REG_EH_REGION, NULL_RTX);
3384 if (!note)
3385 {
3386 if (! (GET_CODE (insn) == CALL_INSN
3387 || (flag_non_call_exceptions
3388 && may_trap_p (PATTERN (insn)))))
3389 continue;
3390 this_action = -1;
3391 region = NULL;
3392 }
3393 else
3394 {
3395 if (INTVAL (XEXP (note, 0)) <= 0)
3396 continue;
3397 region = cfun->eh->region_array[INTVAL (XEXP (note, 0))];
3398 this_action = collect_one_action_chain (ar_hash, region);
3399 }
3400
3401 /* Existence of catch handlers, or must-not-throw regions
3402 implies that an lsda is needed (even if empty). */
3403 if (this_action != -1)
3404 cfun->uses_eh_lsda = 1;
3405
3406 /* Delay creation of region notes for no-action regions
3407 until we're sure that an lsda will be required. */
3408 else if (last_action == -3)
3409 {
3410 first_no_action_insn = iter;
3411 last_action = -1;
3412 }
1ef1bf06 3413
52a11cbf
RH
3414 /* Cleanups and handlers may share action chains but not
3415 landing pads. Collect the landing pad for this region. */
3416 if (this_action >= 0)
3417 {
3418 struct eh_region *o;
3419 for (o = region; ! o->landing_pad ; o = o->outer)
3420 continue;
3421 this_landing_pad = o->landing_pad;
3422 }
3423 else
3424 this_landing_pad = NULL_RTX;
1ef1bf06 3425
52a11cbf
RH
3426 /* Differing actions or landing pads implies a change in call-site
3427 info, which implies some EH_REGION note should be emitted. */
3428 if (last_action != this_action
3429 || last_landing_pad != this_landing_pad)
3430 {
3431 /* If we'd not seen a previous action (-3) or the previous
3432 action was must-not-throw (-2), then we do not need an
3433 end note. */
3434 if (last_action >= -1)
3435 {
3436 /* If we delayed the creation of the begin, do it now. */
3437 if (first_no_action_insn)
3438 {
3439 call_site = add_call_site (NULL_RTX, 0);
3440 note = emit_note_before (NOTE_INSN_EH_REGION_BEG,
3441 first_no_action_insn);
3442 NOTE_EH_HANDLER (note) = call_site;
3443 first_no_action_insn = NULL_RTX;
3444 }
3445
3446 note = emit_note_after (NOTE_INSN_EH_REGION_END,
3447 last_action_insn);
3448 NOTE_EH_HANDLER (note) = call_site;
3449 }
3450
3451 /* If the new action is must-not-throw, then no region notes
3452 are created. */
3453 if (this_action >= -1)
3454 {
3f2c5d1a 3455 call_site = add_call_site (this_landing_pad,
52a11cbf
RH
3456 this_action < 0 ? 0 : this_action);
3457 note = emit_note_before (NOTE_INSN_EH_REGION_BEG, iter);
3458 NOTE_EH_HANDLER (note) = call_site;
3459 }
3460
3461 last_action = this_action;
3462 last_landing_pad = this_landing_pad;
3463 }
3464 last_action_insn = iter;
3465 }
1ef1bf06 3466
52a11cbf 3467 if (last_action >= -1 && ! first_no_action_insn)
1ef1bf06 3468 {
52a11cbf
RH
3469 note = emit_note_after (NOTE_INSN_EH_REGION_END, last_action_insn);
3470 NOTE_EH_HANDLER (note) = call_site;
1ef1bf06
AM
3471 }
3472
52a11cbf
RH
3473 htab_delete (ar_hash);
3474}
1ef1bf06 3475
52a11cbf
RH
3476\f
3477static void
3478push_uleb128 (data_area, value)
3479 varray_type *data_area;
3480 unsigned int value;
3481{
3482 do
3483 {
3484 unsigned char byte = value & 0x7f;
3485 value >>= 7;
3486 if (value)
3487 byte |= 0x80;
3488 VARRAY_PUSH_UCHAR (*data_area, byte);
3489 }
3490 while (value);
3491}
1ef1bf06 3492
52a11cbf
RH
3493static void
3494push_sleb128 (data_area, value)
3495 varray_type *data_area;
3496 int value;
3497{
3498 unsigned char byte;
3499 int more;
1ef1bf06 3500
52a11cbf 3501 do
1ef1bf06 3502 {
52a11cbf
RH
3503 byte = value & 0x7f;
3504 value >>= 7;
3505 more = ! ((value == 0 && (byte & 0x40) == 0)
3506 || (value == -1 && (byte & 0x40) != 0));
3507 if (more)
3508 byte |= 0x80;
3509 VARRAY_PUSH_UCHAR (*data_area, byte);
1ef1bf06 3510 }
52a11cbf
RH
3511 while (more);
3512}
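
/* Illustrative aside (not part of except.c): decoders matching the two
   encoders above, plus a tiny self-test.  They read from a plain byte
   buffer rather than a varray; everything below is a standalone sketch.  */

#include <assert.h>
#include <stddef.h>

static unsigned int
read_uleb128_sketch (const unsigned char *p, size_t *len)
{
  unsigned int result = 0;
  int shift = 0;
  size_t i = 0;
  unsigned char byte;

  do
    {
      byte = p[i++];
      result |= (unsigned int) (byte & 0x7f) << shift;
      shift += 7;
    }
  while (byte & 0x80);

  *len = i;
  return result;
}

static int
read_sleb128_sketch (const unsigned char *p, size_t *len)
{
  int result = 0;
  int shift = 0;
  size_t i = 0;
  unsigned char byte;

  do
    {
      byte = p[i++];
      result |= (int) (byte & 0x7f) << shift;
      shift += 7;
    }
  while (byte & 0x80);

  /* Sign-extend when the final byte carried the sign bit.  */
  if (shift < (int) (8 * sizeof (int)) && (byte & 0x40))
    result |= -(1 << shift);

  *len = i;
  return result;
}

int
main (void)
{
  /* 624485 encodes as e5 8e 26; -2 encodes as the single byte 7e.  */
  static const unsigned char u[] = { 0xe5, 0x8e, 0x26 };
  static const unsigned char s[] = { 0x7e };
  size_t len;

  assert (read_uleb128_sketch (u, &len) == 624485 && len == 3);
  assert (read_sleb128_sketch (s, &len) == -2 && len == 1);
  return 0;
}
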
1ef1bf06 3513
52a11cbf 3514\f
52a11cbf
RH
3515#ifndef HAVE_AS_LEB128
3516static int
3517dw2_size_of_call_site_table ()
1ef1bf06 3518{
52a11cbf
RH
3519 int n = cfun->eh->call_site_data_used;
3520 int size = n * (4 + 4 + 4);
3521 int i;
1ef1bf06 3522
52a11cbf
RH
3523 for (i = 0; i < n; ++i)
3524 {
3525 struct call_site_record *cs = &cfun->eh->call_site_data[i];
3526 size += size_of_uleb128 (cs->action);
3527 }
fac62ecf 3528
52a11cbf
RH
3529 return size;
3530}
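/* Each Dwarf-2 call-site record emitted below (when the assembler lacks
   .uleb128 support) is three 4-byte fields -- region start, region
   length, landing pad -- plus a uleb128 action, which is where the
   n * (4 + 4 + 4) term and the size_of_uleb128 calls above come from.
   A standalone equivalent of size_of_uleb128 (the real one is provided
   by the DWARF output code), for illustration:  */

static int
uleb128_size (value)
     unsigned int value;
{
  int size = 0;

  do
    {
      value >>= 7;
      size += 1;
    }
  while (value);
  return size;
}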
3531
3532static int
3533sjlj_size_of_call_site_table ()
3534{
3535 int n = cfun->eh->call_site_data_used;
3536 int size = 0;
3537 int i;
77d33a84 3538
52a11cbf 3539 for (i = 0; i < n; ++i)
1ef1bf06 3540 {
52a11cbf
RH
3541 struct call_site_record *cs = &cfun->eh->call_site_data[i];
3542 size += size_of_uleb128 (INTVAL (cs->landing_pad));
3543 size += size_of_uleb128 (cs->action);
1ef1bf06 3544 }
52a11cbf
RH
3545
3546 return size;
3547}
3548#endif
3549
3550static void
3551dw2_output_call_site_table ()
3552{
83182544 3553 const char *const function_start_lab
52a11cbf
RH
3554 = IDENTIFIER_POINTER (current_function_func_begin_label);
3555 int n = cfun->eh->call_site_data_used;
3556 int i;
3557
3558 for (i = 0; i < n; ++i)
1ef1bf06 3559 {
52a11cbf
RH
3560 struct call_site_record *cs = &cfun->eh->call_site_data[i];
3561 char reg_start_lab[32];
3562 char reg_end_lab[32];
3563 char landing_pad_lab[32];
3564
3565 ASM_GENERATE_INTERNAL_LABEL (reg_start_lab, "LEHB", call_site_base + i);
3566 ASM_GENERATE_INTERNAL_LABEL (reg_end_lab, "LEHE", call_site_base + i);
3567
3568 if (cs->landing_pad)
3569 ASM_GENERATE_INTERNAL_LABEL (landing_pad_lab, "L",
3570 CODE_LABEL_NUMBER (cs->landing_pad));
3571
3572 /* ??? Perhaps use insn length scaling if the assembler supports
3573 generic arithmetic. */
3574 /* ??? Perhaps use attr_length to choose data1 or data2 instead of
3575 data4 if the function is small enough. */
3576#ifdef HAVE_AS_LEB128
3577 dw2_asm_output_delta_uleb128 (reg_start_lab, function_start_lab,
3578 "region %d start", i);
3579 dw2_asm_output_delta_uleb128 (reg_end_lab, reg_start_lab,
3580 "length");
3581 if (cs->landing_pad)
3582 dw2_asm_output_delta_uleb128 (landing_pad_lab, function_start_lab,
3583 "landing pad");
3584 else
3585 dw2_asm_output_data_uleb128 (0, "landing pad");
3586#else
3587 dw2_asm_output_delta (4, reg_start_lab, function_start_lab,
3588 "region %d start", i);
3589 dw2_asm_output_delta (4, reg_end_lab, reg_start_lab, "length");
3590 if (cs->landing_pad)
3591 dw2_asm_output_delta (4, landing_pad_lab, function_start_lab,
3592 "landing pad");
3593 else
3594 dw2_asm_output_data (4, 0, "landing pad");
3595#endif
3596 dw2_asm_output_data_uleb128 (cs->action, "action");
1ef1bf06
AM
3597 }
3598
52a11cbf
RH
3599 call_site_base += n;
3600}
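/* What the records just emitted mean to the runtime: the personality
   routine turns the faulting PC into an offset from the function start
   (@Start, since @LPStart is omitted here) and scans the table in order
   for the first region covering it; a zero landing pad means "unwind
   straight through", a zero action means "cleanups only".  A sketch of
   that lookup with illustrative types and names -- the real readers
   parse the raw byte stream directly:  */

struct cs_entry
{
  unsigned long start;		/* offset of region start from @Start */
  unsigned long len;		/* region length in bytes */
  unsigned long landing_pad;	/* landing pad offset, or 0 for none */
  unsigned long action;		/* 0 = cleanups only, else action chain */
};

static const struct cs_entry *
find_call_site (tab, n, pc_offset)
     const struct cs_entry *tab;
     int n;
     unsigned long pc_offset;
{
  int i;

  for (i = 0; i < n; ++i)
    if (pc_offset >= tab[i].start
	&& pc_offset < tab[i].start + tab[i].len)
      return &tab[i];
  return 0;	/* no EH region covers this PC */
}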
3601
3602static void
3603sjlj_output_call_site_table ()
3604{
3605 int n = cfun->eh->call_site_data_used;
3606 int i;
1ef1bf06 3607
52a11cbf 3608 for (i = 0; i < n; ++i)
1ef1bf06 3609 {
52a11cbf 3610 struct call_site_record *cs = &cfun->eh->call_site_data[i];
4da896b2 3611
52a11cbf
RH
3612 dw2_asm_output_data_uleb128 (INTVAL (cs->landing_pad),
3613 "region %d landing pad", i);
3614 dw2_asm_output_data_uleb128 (cs->action, "action");
3615 }
4da896b2 3616
52a11cbf 3617 call_site_base += n;
1ef1bf06
AM
3618}
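/* The sjlj flavour carries no code addresses at all: the value emitted
   as the "landing pad" above is the integer constant recorded for the
   region (used by the setjmp/longjmp dispatcher to select a handler),
   and a throwing call is matched to its record by the call-site number
   the function stored in its context before the call, not by PC range.
   A lookup sketch with illustrative names; it assumes the records are
   simply indexed by a zero-based call-site number:  */

struct sjlj_cs_entry
{
  unsigned long dispatch;	/* constant emitted as "landing pad" */
  unsigned long action;		/* 0 = cleanups only, else action chain */
};

static const struct sjlj_cs_entry *
find_sjlj_call_site (tab, n, call_site)
     const struct sjlj_cs_entry *tab;
     int n;
     int call_site;
{
  return (call_site >= 0 && call_site < n) ? &tab[call_site] : 0;
}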
3619
52a11cbf
RH
3620void
3621output_function_exception_table ()
3622{
2a1ee410 3623 int tt_format, cs_format, lp_format, i, n;
52a11cbf
RH
3624#ifdef HAVE_AS_LEB128
3625 char ttype_label[32];
3626 char cs_after_size_label[32];
3627 char cs_end_label[32];
3628#else
3629 int call_site_len;
3630#endif
3631 int have_tt_data;
3632 int funcdef_number;
ae0ed63a 3633 int tt_format_size = 0;
1ef1bf06 3634
52a11cbf
RH
3635	  /* Functions that make no use of an lsda need no table at all. */
3636 if (! cfun->uses_eh_lsda)
3637 return;
fac62ecf 3638
52a11cbf
RH
3639 funcdef_number = (USING_SJLJ_EXCEPTIONS
3640 ? sjlj_funcdef_number
3641 : current_funcdef_number);
1ef1bf06 3642
2a1ee410
RH
3643#ifdef IA64_UNWIND_INFO
3644 fputs ("\t.personality\t", asm_out_file);
3645 output_addr_const (asm_out_file, eh_personality_libfunc);
3646 fputs ("\n\t.handlerdata\n", asm_out_file);
3647 /* Note that varasm still thinks we're in the function's code section.
3648 The ".endp" directive that will immediately follow will take us back. */
3649#else
07c9d2eb 3650 (*targetm.asm_out.exception_section) ();
2a1ee410 3651#endif
52a11cbf
RH
3652
3653 have_tt_data = (VARRAY_ACTIVE_SIZE (cfun->eh->ttype_data) > 0
3654 || VARRAY_ACTIVE_SIZE (cfun->eh->ehspec_data) > 0);
3655
b627d6fe
RH
3656 /* Indicate the format of the @TType entries. */
3657 if (! have_tt_data)
3658 tt_format = DW_EH_PE_omit;
3659 else
3660 {
3661 tt_format = ASM_PREFERRED_EH_DATA_FORMAT (/*code=*/0, /*global=*/1);
3662#ifdef HAVE_AS_LEB128
3663 ASM_GENERATE_INTERNAL_LABEL (ttype_label, "LLSDATT", funcdef_number);
3664#endif
3665 tt_format_size = size_of_encoded_value (tt_format);
3666
7a900ebc 3667 assemble_align (tt_format_size * BITS_PER_UNIT);
b627d6fe 3668 }
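/* The encoding chosen by ASM_PREFERRED_EH_DATA_FORMAT determines how
   many bytes each @TType entry occupies, which in turn drives the
   alignment request just above.  Roughly what size_of_encoded_value
   works out, shown as an illustrative reimplementation (uleb128-encoded
   values have no fixed size, hence the abort):  */

static int
encoded_value_size (encoding)
     int encoding;
{
  if (encoding == DW_EH_PE_omit)
    return 0;
  switch (encoding & 0x07)
    {
    case DW_EH_PE_absptr:
      return POINTER_SIZE / BITS_PER_UNIT;
    case DW_EH_PE_udata2:
      return 2;
    case DW_EH_PE_udata4:
      return 4;
    case DW_EH_PE_udata8:
      return 8;
    }
  abort ();
}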
52a11cbf
RH
3669
3670 ASM_OUTPUT_INTERNAL_LABEL (asm_out_file, "LLSDA", funcdef_number);
3671
3672 /* The LSDA header. */
3673
3674 /* Indicate the format of the landing pad start pointer. An omitted
3675 field implies @LPStart == @Start. */
3676 /* Currently we always put @LPStart == @Start. This field would
3677 be most useful in moving the landing pads completely out of
3678 line to another section, but it could also be used to minimize
3679 the size of uleb128 landing pad offsets. */
2a1ee410
RH
3680 lp_format = DW_EH_PE_omit;
3681 dw2_asm_output_data (1, lp_format, "@LPStart format (%s)",
3682 eh_data_format_name (lp_format));
52a11cbf
RH
3683
3684 /* @LPStart pointer would go here. */
3685
2a1ee410
RH
3686 dw2_asm_output_data (1, tt_format, "@TType format (%s)",
3687 eh_data_format_name (tt_format));
52a11cbf
RH
3688
3689#ifndef HAVE_AS_LEB128
3690 if (USING_SJLJ_EXCEPTIONS)
3691 call_site_len = sjlj_size_of_call_site_table ();
3692 else
3693 call_site_len = dw2_size_of_call_site_table ();
3694#endif
3695
3696	  /* The @TType base offset: a uleb128 displacement to the end of the @TType data. */
3697 if (have_tt_data)
3698 {
3699#ifdef HAVE_AS_LEB128
3700 char ttype_after_disp_label[32];
3f2c5d1a 3701 ASM_GENERATE_INTERNAL_LABEL (ttype_after_disp_label, "LLSDATTD",
52a11cbf
RH
3702 funcdef_number);
3703 dw2_asm_output_delta_uleb128 (ttype_label, ttype_after_disp_label,
3704 "@TType base offset");
3705 ASM_OUTPUT_LABEL (asm_out_file, ttype_after_disp_label);
3706#else
3707	      /* Ugh.  Alignment complicates things. */
b627d6fe 3708 unsigned int before_disp, after_disp, last_disp, disp;
52a11cbf 3709
52a11cbf
RH
3710	      before_disp = 1 + 1;	/* @LPStart and @TType format bytes.  */
3711 after_disp = (1 + size_of_uleb128 (call_site_len)
3712 + call_site_len
3713 + VARRAY_ACTIVE_SIZE (cfun->eh->action_record_data)
b627d6fe
RH
3714 + (VARRAY_ACTIVE_SIZE (cfun->eh->ttype_data)
3715 * tt_format_size));
52a11cbf
RH
3716
3717 disp = after_disp;
3718 do
1ef1bf06 3719 {
52a11cbf
RH
3720 unsigned int disp_size, pad;
3721
3722 last_disp = disp;
3723 disp_size = size_of_uleb128 (disp);
3724 pad = before_disp + disp_size + after_disp;
b627d6fe
RH
3725 if (pad % tt_format_size)
3726 pad = tt_format_size - (pad % tt_format_size);
52a11cbf
RH
3727 else
3728 pad = 0;
3729 disp = after_disp + pad;
1ef1bf06 3730 }
52a11cbf
RH
3731 while (disp != last_disp);
3732
3733 dw2_asm_output_data_uleb128 (disp, "@TType base offset");
3734#endif
1ef1bf06 3735 }
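/* The "#else" arm above solves a small fixed-point problem: the @TType
   base offset is itself a uleb128, so its encoded length changes the
   header size, which changes the padding needed to keep the type table
   aligned, which changes the offset being encoded.  Iterating until the
   value stops moving settles it.  The same computation in isolation;
   uleb128_size is the sketch shown earlier and the names here are
   illustrative:  */

static unsigned int
compute_ttype_disp (before_disp, after_disp, align)
     unsigned int before_disp, after_disp, align;
{
  unsigned int disp = after_disp;
  unsigned int last_disp;

  do
    {
      unsigned int disp_size, pad;

      last_disp = disp;
      disp_size = uleb128_size (disp);
      pad = before_disp + disp_size + after_disp;
      pad = (pad % align) ? align - pad % align : 0;
      disp = after_disp + pad;
    }
  while (disp != last_disp);

  return disp;
}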
1ef1bf06 3736
52a11cbf
RH
3737 /* Indicate the format of the call-site offsets. */
3738#ifdef HAVE_AS_LEB128
2a1ee410 3739 cs_format = DW_EH_PE_uleb128;
52a11cbf 3740#else
2a1ee410 3741 cs_format = DW_EH_PE_udata4;
52a11cbf 3742#endif
2a1ee410
RH
3743 dw2_asm_output_data (1, cs_format, "call-site format (%s)",
3744 eh_data_format_name (cs_format));
52a11cbf
RH
3745
3746#ifdef HAVE_AS_LEB128
3747 ASM_GENERATE_INTERNAL_LABEL (cs_after_size_label, "LLSDACSB",
3748 funcdef_number);
3749 ASM_GENERATE_INTERNAL_LABEL (cs_end_label, "LLSDACSE",
3750 funcdef_number);
3751 dw2_asm_output_delta_uleb128 (cs_end_label, cs_after_size_label,
3752 "Call-site table length");
3753 ASM_OUTPUT_LABEL (asm_out_file, cs_after_size_label);
3754 if (USING_SJLJ_EXCEPTIONS)
3755 sjlj_output_call_site_table ();
3756 else
3757 dw2_output_call_site_table ();
3758 ASM_OUTPUT_LABEL (asm_out_file, cs_end_label);
3759#else
3760	  dw2_asm_output_data_uleb128 (call_site_len, "Call-site table length");
3761 if (USING_SJLJ_EXCEPTIONS)
3762 sjlj_output_call_site_table ();
3763 else
3764 dw2_output_call_site_table ();
3765#endif
3766
3767 /* ??? Decode and interpret the data for flag_debug_asm. */
3768 n = VARRAY_ACTIVE_SIZE (cfun->eh->action_record_data);
3769 for (i = 0; i < n; ++i)
3770 dw2_asm_output_data (1, VARRAY_UCHAR (cfun->eh->action_record_data, i),
3771 (i ? NULL : "Action record table"));
1ef1bf06 3772
52a11cbf 3773 if (have_tt_data)
7a900ebc 3774 assemble_align (tt_format_size * BITS_PER_UNIT);
1ef1bf06 3775
52a11cbf
RH
3776 i = VARRAY_ACTIVE_SIZE (cfun->eh->ttype_data);
3777 while (i-- > 0)
1ef1bf06 3778 {
52a11cbf 3779 tree type = VARRAY_TREE (cfun->eh->ttype_data, i);
225b9cb9 3780 rtx value;
52a11cbf
RH
3781
3782 if (type == NULL_TREE)
3783 type = integer_zero_node;
3784 else
3785 type = lookup_type_for_runtime (type);
3786
225b9cb9
RH
3787 value = expand_expr (type, NULL_RTX, VOIDmode, EXPAND_INITIALIZER);
3788 if (tt_format == DW_EH_PE_absptr || tt_format == DW_EH_PE_aligned)
3789 assemble_integer (value, tt_format_size,
3790 tt_format_size * BITS_PER_UNIT, 1);
3791 else
3792 dw2_asm_output_encoded_addr_rtx (tt_format, value, NULL);
1ef1bf06 3793 }
52a11cbf
RH
3794
3795#ifdef HAVE_AS_LEB128
3796 if (have_tt_data)
3797 ASM_OUTPUT_LABEL (asm_out_file, ttype_label);
3798#endif
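/* The type table is written back to front (the "while (i-- > 0)" loop
   above) because positive type filters in the action records index it
   backwards from the @TType base: filter 1 names the entry nearest the
   base, filter 2 the one before it, and so on, while filter 0 means
   "cleanup only" and negative filters select exception specification
   lists in ehspec_data instead.  A sketch of that lookup, assuming
   absptr-sized entries and illustrative names:  */

static void *
get_ttype_entry (ttype_base, filter, entry_size)
     const char *ttype_base;
     int filter;
     int entry_size;
{
  /* FILTER is at least 1 here; 0 and negative values are handled
     before the type table is ever consulted.  */
  return *(void **) (ttype_base - filter * entry_size);
}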
3799
3800 /* ??? Decode and interpret the data for flag_debug_asm. */
3801 n = VARRAY_ACTIVE_SIZE (cfun->eh->ehspec_data);
3802 for (i = 0; i < n; ++i)
3803 dw2_asm_output_data (1, VARRAY_UCHAR (cfun->eh->ehspec_data, i),
3804 (i ? NULL : "Exception specification table"));
3805
3806 function_section (current_function_decl);
3807
3808 if (USING_SJLJ_EXCEPTIONS)
3809 sjlj_funcdef_number += 1;
1ef1bf06 3810}
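/* Taken together, the byte stream this function emits has the
   following shape; bracketed fields are present only when the
   corresponding format byte is not DW_EH_PE_omit or the data exists:

	@LPStart format		1 byte	(always DW_EH_PE_omit here)
	[@LPStart pointer]		(omitted)
	@TType format		1 byte
	[@TType base offset]	uleb128, only with type data
	call-site format	1 byte
	call-site table length	uleb128
	call-site table		dw2_ or sjlj_output_call_site_table
	action records		bytes of cfun->eh->action_record_data
	[padding]		to align the type table entries
	[@TType table]		one encoded value per recorded type
	exception spec table	bytes of cfun->eh->ehspec_data  */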