/* Expands front end tree to back end RTL for GNU C-Compiler
   Copyright (C) 1987, 88, 89, 92-98, 1999 Free Software Foundation, Inc.

This file is part of GNU CC.

GNU CC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2, or (at your option)
any later version.

GNU CC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GNU CC; see the file COPYING.  If not, write to
the Free Software Foundation, 59 Temple Place - Suite 330,
Boston, MA 02111-1307, USA.  */


/* This file handles the generation of rtl code from tree structure
   above the level of expressions, using subroutines in exp*.c and emit-rtl.c.
   It also creates the rtl expressions for parameters and auto variables
   and has full responsibility for allocating stack slots.

   The functions whose names start with `expand_' are called by the
   parser to generate RTL instructions for various kinds of constructs.

   Some control and binding constructs require calling several such
   functions at different times.  For example, a simple if-then
   is expanded by calling `expand_start_cond' (with the condition-expression
   as argument) before parsing the then-clause and calling `expand_end_cond'
   after parsing the then-clause.  */
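
/* As an illustrative sketch (not a normative interface spec), a front
   end expanding the C statement `if (cond) stmt; else stmt2;' might
   drive these entry points roughly as follows, assuming the condition
   and body trees have already been built:

       expand_start_cond (cond, 0);    .. emit the test
       expand_expr_stmt (stmt);        .. then-clause
       expand_start_else ();           .. jump over the else-clause
       expand_expr_stmt (stmt2);       .. else-clause
       expand_end_cond ();             .. emit the join point

   See the definitions of these functions below for the precise
   contracts; the call sequence above is only a sketch.  */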

#include "config.h"
#include "system.h"

#include "rtl.h"
#include "tree.h"
#include "flags.h"
#include "except.h"
#include "function.h"
#include "insn-flags.h"
#include "insn-config.h"
#include "insn-codes.h"
#include "expr.h"
#include "hard-reg-set.h"
#include "obstack.h"
#include "loop.h"
#include "recog.h"
#include "machmode.h"
#include "toplev.h"
#include "output.h"

#define obstack_chunk_alloc xmalloc
#define obstack_chunk_free free
struct obstack stmt_obstack;

/* Assume that case vectors are not pc-relative.  */
#ifndef CASE_VECTOR_PC_RELATIVE
#define CASE_VECTOR_PC_RELATIVE 0
#endif

/* Filename and line number of last line-number note,
   whether we actually emitted it or not.  */
char *emit_filename;
int emit_lineno;

/* Nonzero if within a ({...}) grouping, in which case we must
   always compute a value for each expr-stmt in case it is the last one.  */

int expr_stmts_for_value;
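
/* For example, with the GNU C statement-expression extension, the
   grouping `({ tmp = f (); tmp + 1; })' takes its value from its last
   expr-stmt `tmp + 1', so while this flag is nonzero every expr-stmt
   must compute a value in case it turns out to be the last one.  */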

/* Each time we expand an expression-statement,
   record the expr's type and its RTL value here.  */

static tree last_expr_type;
static rtx last_expr_value;

/* Each time we expand the end of a binding contour (in `expand_end_bindings')
   and we emit a new NOTE_INSN_BLOCK_END note, we save a pointer to it here.
   This is used by the `remember_end_note' function to record the endpoint
   of each generated block in its associated BLOCK node.  */

static rtx last_block_end_note;

/* Number of binding contours started so far in this function.  */

int block_start_count;

/* Nonzero if function being compiled needs to
   return the address of where it has put a structure value.  */

extern int current_function_returns_pcc_struct;

/* Label that will go on parm cleanup code, if any.
   Jumping to this label runs cleanup code for parameters, if
   such code must be run.  Following this code is the logical return label.  */

extern rtx cleanup_label;

/* Label that will go on function epilogue.
   Jumping to this label serves as a "return" instruction
   on machines which require execution of the epilogue on all returns.  */

extern rtx return_label;

/* Offset to end of allocated area of stack frame.
   If stack grows down, this is the address of the last stack slot allocated.
   If stack grows up, this is the address for the next slot.  */
extern int frame_offset;

/* Label to jump back to for tail recursion, or 0 if we have
   not yet needed one for this function.  */
extern rtx tail_recursion_label;

/* Place after which to insert the tail_recursion_label if we need one.  */
extern rtx tail_recursion_reentry;
/* Location at which to save the argument pointer if it will need to be
   referenced.  There are two cases where this is done: if nonlocal gotos
   exist, or if vars whose address is an offset from the argument pointer
   will be needed by inner routines.  */

extern rtx arg_pointer_save_area;

/* Chain of all RTL_EXPRs that have insns in them.  */
extern tree rtl_expr_chain;
\f
/* Functions and data structures for expanding case statements.  */

/* Case label structure, used to hold info on labels within case
   statements.  We handle "range" labels; for a single-value label
   as in C, the high and low limits are the same.

   An AVL tree of case nodes is initially created, and later transformed
   to a list linked via the RIGHT fields in the nodes.  Nodes with
   higher case values are later in the list.

   Switch statements can be output in one of two forms.  A branch table
   is used if there are more than a few labels and the labels are dense
   within the range between the smallest and largest case value.  If a
   branch table is used, no further manipulations are done with the case
   node chain.

   The alternative to the use of a branch table is to generate a series
   of compare and jump insns.  When that is done, we use the LEFT, RIGHT,
   and PARENT fields to hold a binary tree.  Initially the tree is
   totally unbalanced, with everything on the right.  We balance the tree
   with nodes on the left having lower case values than the parent
   and nodes on the right having higher values.  We then output the tree
   in order.  */

struct case_node
{
  struct case_node *left;   /* Left son in binary tree */
  struct case_node *right;  /* Right son in binary tree; also node chain */
  struct case_node *parent; /* Parent of node in binary tree */
  tree low;                 /* Lowest index value for this label */
  tree high;                /* Highest index value for this label */
  tree code_label;          /* Label to jump to when node matches */
  int balance;
};

typedef struct case_node case_node;
typedef struct case_node *case_node_ptr;
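
/* A hedged illustration of the compare-and-jump form: for case values
   as sparse as 1, 5 and 1000, no branch table is worthwhile, so after
   balancing, the node for 5 might be the root with 1 as its LEFT child
   and 1000 as its RIGHT child; emit_case_nodes (below) then emits a
   compare-and-branch at each node and recurses into the subtrees.  The
   exact shape is decided by balance_case_nodes.  */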

/* These are used by estimate_case_costs and balance_case_nodes.  */

/* This must be a signed type, and non-ANSI compilers lack signed char.  */
static short *cost_table;
static int use_cost_table;
\f
/* Stack of control and binding constructs we are currently inside.

   These constructs begin when you call `expand_start_WHATEVER'
   and end when you call `expand_end_WHATEVER'.  This stack records
   info about how the construct began that tells the end-function
   what to do.  It also may provide information about the construct
   to alter the behavior of other constructs within the body.
   For example, they may affect the behavior of C `break' and `continue'.

   Each construct gets one `struct nesting' object.
   All of these objects are chained through the `all' field.
   `nesting_stack' points to the first object (innermost construct).
   The position of an entry on `nesting_stack' is in its `depth' field.

   Each type of construct has its own individual stack.
   For example, loops have `loop_stack'.  Each object points to the
   next object of the same type through the `next' field.

   Some constructs are visible to `break' exit-statements and others
   are not.  Which constructs are visible depends on the language.
   Therefore, the data structure allows each construct to be visible
   or not, according to the args given when the construct is started.
   The construct is visible if the `exit_label' field is non-null.
   In that case, the value should be a CODE_LABEL rtx.  */

struct nesting
{
  struct nesting *all;
  struct nesting *next;
  int depth;
  rtx exit_label;
  union
    {
      /* For conds (if-then and if-then-else statements).  */
      struct
        {
          /* Label for the end of the if construct.
             There is none if EXITFLAG was not set
             and no `else' has been seen yet.  */
          rtx endif_label;
          /* Label for the end of this alternative.
             This may be the end of the if or the next else/elseif.  */
          rtx next_label;
        } cond;
      /* For loops.  */
      struct
        {
          /* Label at the top of the loop; place to loop back to.  */
          rtx start_label;
          /* Label at the end of the whole construct.  */
          rtx end_label;
          /* Label before a jump that branches to the end of the whole
             construct.  This is where destructors go if any.  */
          rtx alt_end_label;
          /* Label for `continue' statement to jump to;
             this is in front of the stepper of the loop.  */
          rtx continue_label;
        } loop;
      /* For variable binding contours.  */
      struct
        {
          /* Sequence number of this binding contour within the function,
             in order of entry.  */
          int block_start_count;
          /* Nonzero => value to restore stack to on exit.  */
          rtx stack_level;
          /* The NOTE that starts this contour.
             Used by expand_goto to check whether the destination
             is within each contour or not.  */
          rtx first_insn;
          /* Innermost containing binding contour that has a stack level.  */
          struct nesting *innermost_stack_block;
          /* List of cleanups to be run on exit from this contour.
             This is a list of expressions to be evaluated.
             The TREE_PURPOSE of each link is the ..._DECL node
             which the cleanup pertains to.  */
          tree cleanups;
          /* List of cleanup-lists of blocks containing this block,
             as they were at the locus where this block appears.
             There is an element for each containing block,
             ordered innermost containing block first.
             The tail of this list can be 0,
             if all remaining elements would be empty lists.
             The element's TREE_VALUE is the cleanup-list of that block,
             which may be null.  */
          tree outer_cleanups;
          /* Chain of labels defined inside this binding contour.
             For contours that have stack levels or cleanups.  */
          struct label_chain *label_chain;
          /* Number of function calls seen, as of start of this block.  */
          int function_call_count;
          /* Nonzero if this is associated with an EH region.  */
          int exception_region;
          /* The saved target_temp_slot_level from our outer block.
             We may reset target_temp_slot_level to be the level of
             this block, if that is done, target_temp_slot_level
             reverts to the saved target_temp_slot_level at the very
             end of the block.  */
          int target_temp_slot_level;
          /* True if we are currently emitting insns in an area of
             output code that is controlled by a conditional
             expression.  This is used by the cleanup handling code to
             generate conditional cleanup actions.  */
          int conditional_code;
          /* A place to move the start of the exception region for any
             of the conditional cleanups, must be at the end or after
             the start of the last unconditional cleanup, and before any
             conditional branch points.  */
          rtx last_unconditional_cleanup;
          /* When in a conditional context, this is the specific
             cleanup list associated with last_unconditional_cleanup,
             where we place the conditionalized cleanups.  */
          tree *cleanup_ptr;
        } block;
      /* For switch (C) or case (Pascal) statements,
         and also for dummies (see `expand_start_case_dummy').  */
      struct
        {
          /* The insn after which the case dispatch should finally
             be emitted.  Zero for a dummy.  */
          rtx start;
          /* A list of case labels; it is first built as an AVL tree.
             During expand_end_case, this is converted to a list, and may be
             rearranged into a nearly balanced binary tree.  */
          struct case_node *case_list;
          /* Label to jump to if no case matches.  */
          tree default_label;
          /* The expression to be dispatched on.  */
          tree index_expr;
          /* Type that INDEX_EXPR should be converted to.  */
          tree nominal_type;
          /* Number of range exprs in case statement.  */
          int num_ranges;
          /* Name of this kind of statement, for warnings.  */
          char *printname;
          /* Used to save no_line_numbers till we see the first case label.
             We set this to -1 when we see the first case label in this
             case statement.  */
          int line_number_status;
        } case_stmt;
    } data;
};

/* Chain of all pending binding contours.  */
struct nesting *block_stack;

/* If any new stacks are added here, add them to POPSTACKS too.  */

/* Chain of all pending binding contours that restore stack levels
   or have cleanups.  */
struct nesting *stack_block_stack;

/* Chain of all pending conditional statements.  */
struct nesting *cond_stack;

/* Chain of all pending loops.  */
struct nesting *loop_stack;

/* Chain of all pending case or switch statements.  */
struct nesting *case_stack;

/* Separate chain including all of the above,
   chained through the `all' field.  */
struct nesting *nesting_stack;

/* Number of entries on nesting_stack now.  */
int nesting_depth;

/* Allocate and return a new `struct nesting'.  */

#define ALLOC_NESTING() \
  (struct nesting *) obstack_alloc (&stmt_obstack, sizeof (struct nesting))

/* Pop the nesting stack element by element until we pop off
   the element which is at the top of STACK.
   Update all the other stacks, popping off elements from them
   as we pop them from nesting_stack.  */

#define POPSTACK(STACK)                                 \
do { struct nesting *target = STACK;                    \
     struct nesting *this;                              \
     do { this = nesting_stack;                         \
          if (loop_stack == this)                       \
            loop_stack = loop_stack->next;              \
          if (cond_stack == this)                       \
            cond_stack = cond_stack->next;              \
          if (block_stack == this)                      \
            block_stack = block_stack->next;            \
          if (stack_block_stack == this)                \
            stack_block_stack = stack_block_stack->next; \
          if (case_stack == this)                       \
            case_stack = case_stack->next;              \
          nesting_depth = nesting_stack->depth - 1;     \
          nesting_stack = this->all;                    \
          obstack_free (&stmt_obstack, this); }         \
     while (this != target); } while (0)
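
/* For example, `expand_end_cond' below finishes an if-construct with
   POPSTACK (cond_stack);, which unwinds nesting_stack (freeing each
   `struct nesting' back to stmt_obstack) down through the innermost
   pending conditional, keeping all the per-construct stacks in step.  */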
\f
/* In some cases it is impossible to generate code for a forward goto
   until the label definition is seen.  This happens when it may be necessary
   for the goto to reset the stack pointer: we don't yet know how to do that.
   So expand_goto puts an entry on this fixup list.
   Each time a binding contour that resets the stack is exited,
   we check each fixup.
   If the target label has now been defined, we can insert the proper code.  */
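
/* As a concrete sketch of what this handles, consider a forward goto
   out of a contour whose stack level varies (e.g. with the
   variable-length array extension):

       {
         int v[n];
         ...
         goto done;      .. expanded before `done' is defined
         ...
       }
      done: ;

   When the goto is expanded, the stack level to restore at `done' is
   not yet known, so a fixup is queued and patched in later, once the
   label has been defined and the contour is exited.  */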

struct goto_fixup
{
  /* Points to following fixup.  */
  struct goto_fixup *next;
  /* Points to the insn before the jump insn.
     If more code must be inserted, it goes after this insn.  */
  rtx before_jump;
  /* The LABEL_DECL that this jump is jumping to, or 0
     for break, continue or return.  */
  tree target;
  /* The BLOCK for the place where this goto was found.  */
  tree context;
  /* The CODE_LABEL rtx that this is jumping to.  */
  rtx target_rtl;
  /* Number of binding contours started in current function
     before the label reference.  */
  int block_start_count;
  /* The outermost stack level that should be restored for this jump.
     Each time a binding contour that resets the stack is exited,
     if the target label is *not* yet defined, this slot is updated.  */
  rtx stack_level;
  /* List of lists of cleanup expressions to be run by this goto.
     There is one element for each block that this goto is within.
     The tail of this list can be 0,
     if all remaining elements would be empty.
     The TREE_VALUE contains the cleanup list of that block as of the
     time this goto was seen.
     The TREE_ADDRESSABLE flag is 1 for a block that has been exited.  */
  tree cleanup_list_list;
};

static struct goto_fixup *goto_fixup_chain;

/* Within any binding contour that must restore a stack level,
   all labels are recorded with a chain of these structures.  */

struct label_chain
{
  /* Points to following fixup.  */
  struct label_chain *next;
  tree label;
};


/* Nonzero if we are using EH to handle cleanups.  */
static int using_eh_for_cleanups_p = 0;


static int n_occurrences                PROTO((int, char *));
static void expand_goto_internal        PROTO((tree, rtx, rtx));
static int expand_fixup                 PROTO((tree, rtx, rtx));
static void expand_nl_handler_label     PROTO((rtx, rtx));
static void expand_nl_goto_receiver     PROTO((void));
static void expand_nl_goto_receivers    PROTO((struct nesting *));
static void fixup_gotos                 PROTO((struct nesting *, rtx, tree,
                                               rtx, int));
static void expand_null_return_1        PROTO((rtx, int));
static void expand_value_return         PROTO((rtx));
static int tail_recursion_args          PROTO((tree, tree));
static void expand_cleanups             PROTO((tree, tree, int, int));
static void check_seenlabel             PROTO((void));
static void do_jump_if_equal            PROTO((rtx, rtx, rtx, int));
static int estimate_case_costs          PROTO((case_node_ptr));
static void group_case_nodes            PROTO((case_node_ptr));
static void balance_case_nodes          PROTO((case_node_ptr *,
                                               case_node_ptr));
static int node_has_low_bound           PROTO((case_node_ptr, tree));
static int node_has_high_bound          PROTO((case_node_ptr, tree));
static int node_is_bounded              PROTO((case_node_ptr, tree));
static void emit_jump_if_reachable      PROTO((rtx));
static void emit_case_nodes             PROTO((rtx, case_node_ptr, rtx, tree));
static int add_case_node                PROTO((tree, tree, tree, tree *));
static struct case_node *case_tree2list PROTO((case_node *, case_node *));
\f
void
using_eh_for_cleanups ()
{
  using_eh_for_cleanups_p = 1;
}

void
init_stmt ()
{
  gcc_obstack_init (&stmt_obstack);
  init_eh ();
}

void
init_stmt_for_function ()
{
  /* We are not currently within any block, conditional, loop or case.  */
  block_stack = 0;
  stack_block_stack = 0;
  loop_stack = 0;
  case_stack = 0;
  cond_stack = 0;
  nesting_stack = 0;
  nesting_depth = 0;

  block_start_count = 0;

  /* No gotos have been expanded yet.  */
  goto_fixup_chain = 0;

  /* We are not processing a ({...}) grouping.  */
  expr_stmts_for_value = 0;
  last_expr_type = 0;

  init_eh_for_function ();
}

void
save_stmt_status (p)
     struct function *p;
{
  p->block_stack = block_stack;
  p->stack_block_stack = stack_block_stack;
  p->cond_stack = cond_stack;
  p->loop_stack = loop_stack;
  p->case_stack = case_stack;
  p->nesting_stack = nesting_stack;
  p->nesting_depth = nesting_depth;
  p->block_start_count = block_start_count;
  p->last_expr_type = last_expr_type;
  p->last_expr_value = last_expr_value;
  p->expr_stmts_for_value = expr_stmts_for_value;
  p->emit_filename = emit_filename;
  p->emit_lineno = emit_lineno;
  p->goto_fixup_chain = goto_fixup_chain;
  save_eh_status (p);
}

void
restore_stmt_status (p)
     struct function *p;
{
  block_stack = p->block_stack;
  stack_block_stack = p->stack_block_stack;
  cond_stack = p->cond_stack;
  loop_stack = p->loop_stack;
  case_stack = p->case_stack;
  nesting_stack = p->nesting_stack;
  nesting_depth = p->nesting_depth;
  block_start_count = p->block_start_count;
  last_expr_type = p->last_expr_type;
  last_expr_value = p->last_expr_value;
  expr_stmts_for_value = p->expr_stmts_for_value;
  emit_filename = p->emit_filename;
  emit_lineno = p->emit_lineno;
  goto_fixup_chain = p->goto_fixup_chain;
  restore_eh_status (p);
}
\f
/* Emit a no-op instruction.  */

void
emit_nop ()
{
  rtx last_insn;

  last_insn = get_last_insn ();
  if (!optimize
      && (GET_CODE (last_insn) == CODE_LABEL
          || (GET_CODE (last_insn) == NOTE
              && prev_real_insn (last_insn) == 0)))
    emit_insn (gen_nop ());
}
\f
/* Return the rtx-label that corresponds to a LABEL_DECL,
   creating it if necessary.  */

rtx
label_rtx (label)
     tree label;
{
  if (TREE_CODE (label) != LABEL_DECL)
    abort ();

  if (DECL_RTL (label))
    return DECL_RTL (label);

  return DECL_RTL (label) = gen_label_rtx ();
}

/* Add an unconditional jump to LABEL as the next sequential instruction.  */

void
emit_jump (label)
     rtx label;
{
  do_pending_stack_adjust ();
  emit_jump_insn (gen_jump (label));
  emit_barrier ();
}

/* Emit code to jump to the address
   specified by the pointer expression EXP.  */
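
/* This is the GNU C computed-goto extension: given
   `void *ptr = &&lab; ... goto *ptr;' the front end hands the pointer
   expression (`ptr' here) to this function as EXP.  */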

void
expand_computed_goto (exp)
     tree exp;
{
  rtx x = expand_expr (exp, NULL_RTX, VOIDmode, 0);

#ifdef POINTERS_EXTEND_UNSIGNED
  x = convert_memory_address (Pmode, x);
#endif

  emit_queue ();
  /* Be sure the function is executable.  */
  if (current_function_check_memory_usage)
    emit_library_call (chkr_check_exec_libfunc, 1,
                       VOIDmode, 1, x, ptr_mode);

  do_pending_stack_adjust ();
  emit_indirect_jump (x);

  current_function_has_computed_jump = 1;
}
\f
/* Handle goto statements and the labels that they can go to.  */

/* Specify the location in the RTL code of a label LABEL,
   which is a LABEL_DECL tree node.

   This is used for the kind of label that the user can jump to with a
   goto statement, and for alternatives of a switch or case statement.
   RTL labels generated for loops and conditionals don't go through here;
   they are generated directly at the RTL level, by other functions below.

   Note that this has nothing to do with defining label *names*.
   Languages vary in how they do that and what that even means.  */

void
expand_label (label)
     tree label;
{
  struct label_chain *p;

  do_pending_stack_adjust ();
  emit_label (label_rtx (label));
  if (DECL_NAME (label))
    LABEL_NAME (DECL_RTL (label)) = IDENTIFIER_POINTER (DECL_NAME (label));

  if (stack_block_stack != 0)
    {
      p = (struct label_chain *) oballoc (sizeof (struct label_chain));
      p->next = stack_block_stack->data.block.label_chain;
      stack_block_stack->data.block.label_chain = p;
      p->label = label;
    }
}

/* Declare that LABEL (a LABEL_DECL) may be used for nonlocal gotos
   from nested functions.  */
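
/* In GNU C source terms this supports a nested function jumping to a
   label of its containing function, e.g.

       void f (void)
       {
         __label__ out;
         void g (void) { goto out; }   .. a nonlocal goto
         ...
        out: ;
       }

   Each label so declared gets a handler slot in the containing
   function's frame, saved in nonlocal_goto_handler_slots below.  */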

void
declare_nonlocal_label (label)
     tree label;
{
  rtx slot = assign_stack_local (Pmode, GET_MODE_SIZE (Pmode), 0);

  nonlocal_labels = tree_cons (NULL_TREE, label, nonlocal_labels);
  LABEL_PRESERVE_P (label_rtx (label)) = 1;
  if (nonlocal_goto_handler_slots == 0)
    {
      emit_stack_save (SAVE_NONLOCAL,
                       &nonlocal_goto_stack_level,
                       PREV_INSN (tail_recursion_reentry));
    }
  nonlocal_goto_handler_slots
    = gen_rtx_EXPR_LIST (VOIDmode, slot, nonlocal_goto_handler_slots);
}

/* Generate RTL code for a `goto' statement with target label LABEL.
   LABEL should be a LABEL_DECL tree node that was or will later be
   defined with `expand_label'.  */

void
expand_goto (label)
     tree label;
{
  tree context;

  /* Check for a nonlocal goto to a containing function.  */
  context = decl_function_context (label);
  if (context != 0 && context != current_function_decl)
    {
      struct function *p = find_function_data (context);
      rtx label_ref = gen_rtx_LABEL_REF (Pmode, label_rtx (label));
      rtx temp, handler_slot;
      tree link;

      /* Find the corresponding handler slot for this label.  */
      handler_slot = p->nonlocal_goto_handler_slots;
      for (link = p->nonlocal_labels; TREE_VALUE (link) != label;
           link = TREE_CHAIN (link))
        handler_slot = XEXP (handler_slot, 1);
      handler_slot = XEXP (handler_slot, 0);

      p->has_nonlocal_label = 1;
      current_function_has_nonlocal_goto = 1;
      LABEL_REF_NONLOCAL_P (label_ref) = 1;

      /* Copy the rtl for the slots so that they won't be shared in
         case the virtual stack vars register gets instantiated differently
         in the parent than in the child.  */

#if HAVE_nonlocal_goto
      if (HAVE_nonlocal_goto)
        emit_insn (gen_nonlocal_goto (lookup_static_chain (label),
                                      copy_rtx (handler_slot),
                                      copy_rtx (p->nonlocal_goto_stack_level),
                                      label_ref));
      else
#endif
        {
          rtx addr;

          /* Restore frame pointer for containing function.
             This sets the actual hard register used for the frame pointer
             to the location of the function's incoming static chain info.
             The non-local goto handler will then adjust it to contain the
             proper value and reload the argument pointer, if needed.  */
          emit_move_insn (hard_frame_pointer_rtx, lookup_static_chain (label));

          /* We have now loaded the frame pointer hardware register with
             the address that corresponds to the start of the virtual
             stack vars.  So replace virtual_stack_vars_rtx in all
             addresses we use with hard_frame_pointer_rtx.  */

          /* Get addr of containing function's current nonlocal goto handler,
             which will do any cleanups and then jump to the label.  */
          addr = copy_rtx (handler_slot);
          temp = copy_to_reg (replace_rtx (addr, virtual_stack_vars_rtx,
                                           hard_frame_pointer_rtx));

          /* Restore the stack pointer.  Note this uses fp just restored.  */
          addr = p->nonlocal_goto_stack_level;
          if (addr)
            addr = replace_rtx (copy_rtx (addr),
                                virtual_stack_vars_rtx,
                                hard_frame_pointer_rtx);

          emit_stack_restore (SAVE_NONLOCAL, addr, NULL_RTX);

          /* USE of hard_frame_pointer_rtx added for consistency; not clear if
             really needed.  */
          emit_insn (gen_rtx_USE (VOIDmode, hard_frame_pointer_rtx));
          emit_insn (gen_rtx_USE (VOIDmode, stack_pointer_rtx));
          emit_indirect_jump (temp);
        }
    }
  else
    expand_goto_internal (label, label_rtx (label), NULL_RTX);
}

/* Generate RTL code for a `goto' statement with target label BODY.
   LABEL should be a LABEL_REF.
   LAST_INSN, if non-0, is the rtx we should consider as the last
   insn emitted (for the purposes of cleaning up a return).  */

static void
expand_goto_internal (body, label, last_insn)
     tree body;
     rtx label;
     rtx last_insn;
{
  struct nesting *block;
  rtx stack_level = 0;

  if (GET_CODE (label) != CODE_LABEL)
    abort ();

  /* If label has already been defined, we can tell now
     whether and how we must alter the stack level.  */

  if (PREV_INSN (label) != 0)
    {
      /* Find the innermost pending block that contains the label.
         (Check containment by comparing insn-uids.)
         Then restore the outermost stack level within that block,
         and do cleanups of all blocks contained in it.  */
      for (block = block_stack; block; block = block->next)
        {
          if (INSN_UID (block->data.block.first_insn) < INSN_UID (label))
            break;
          if (block->data.block.stack_level != 0)
            stack_level = block->data.block.stack_level;
          /* Execute the cleanups for blocks we are exiting.  */
          if (block->data.block.cleanups != 0)
            {
              expand_cleanups (block->data.block.cleanups, NULL_TREE, 1, 1);
              do_pending_stack_adjust ();
            }
        }

      if (stack_level)
        {
          /* Ensure stack adjust isn't done by emit_jump, as this
             would clobber the stack pointer.  This one should be
             deleted as dead by flow.  */
          clear_pending_stack_adjust ();
          do_pending_stack_adjust ();
          emit_stack_restore (SAVE_BLOCK, stack_level, NULL_RTX);
        }

      if (body != 0 && DECL_TOO_LATE (body))
        error ("jump to `%s' invalidly jumps into binding contour",
               IDENTIFIER_POINTER (DECL_NAME (body)));
    }
  /* Label not yet defined: may need to put this goto
     on the fixup list.  */
  else if (! expand_fixup (body, label, last_insn))
    {
      /* No fixup needed.  Record that the label is the target
         of at least one goto that has no fixup.  */
      if (body != 0)
        TREE_ADDRESSABLE (body) = 1;
    }

  emit_jump (label);
}
\f
/* Generate if necessary a fixup for a goto
   whose target label in tree structure (if any) is TREE_LABEL
   and whose target in rtl is RTL_LABEL.

   If LAST_INSN is nonzero, we pretend that the jump appears
   after insn LAST_INSN instead of at the current point in the insn stream.

   The fixup will be used later to insert insns just before the goto.
   Those insns will restore the stack level as appropriate for the
   target label, and will (in the case of C++) also invoke any object
   destructors which have to be invoked when we exit the scopes which
   are exited by the goto.

   Value is nonzero if a fixup is made.  */

static int
expand_fixup (tree_label, rtl_label, last_insn)
     tree tree_label;
     rtx rtl_label;
     rtx last_insn;
{
  struct nesting *block, *end_block;

  /* See if we can recognize which block the label will be output in.
     This is possible in some very common cases.
     If we succeed, set END_BLOCK to that block.
     Otherwise, set it to 0.  */

  if (cond_stack
      && (rtl_label == cond_stack->data.cond.endif_label
          || rtl_label == cond_stack->data.cond.next_label))
    end_block = cond_stack;
  /* If we are in a loop, recognize certain labels which
     are likely targets.  This reduces the number of fixups
     we need to create.  */
  else if (loop_stack
           && (rtl_label == loop_stack->data.loop.start_label
               || rtl_label == loop_stack->data.loop.end_label
               || rtl_label == loop_stack->data.loop.continue_label))
    end_block = loop_stack;
  else
    end_block = 0;

  /* Now set END_BLOCK to the binding level to which we will return.  */

  if (end_block)
    {
      struct nesting *next_block = end_block->all;
      block = block_stack;

      /* First see if the END_BLOCK is inside the innermost binding level.
         If so, then no cleanups or stack levels are relevant.  */
      while (next_block && next_block != block)
        next_block = next_block->all;

      if (next_block)
        return 0;

      /* Otherwise, set END_BLOCK to the innermost binding level
         which is outside the relevant control-structure nesting.  */
      next_block = block_stack->next;
      for (block = block_stack; block != end_block; block = block->all)
        if (block == next_block)
          next_block = next_block->next;
      end_block = next_block;
    }

  /* Does any containing block have a stack level or cleanups?
     If not, no fixup is needed, and that is the normal case
     (the only case, for standard C).  */
  for (block = block_stack; block != end_block; block = block->next)
    if (block->data.block.stack_level != 0
        || block->data.block.cleanups != 0)
      break;

  if (block != end_block)
    {
      /* Ok, a fixup is needed.  Add a fixup to the list of such.  */
      struct goto_fixup *fixup
        = (struct goto_fixup *) oballoc (sizeof (struct goto_fixup));
      /* In case an old stack level is restored, make sure that comes
         after any pending stack adjust.  */
      /* ?? If the fixup isn't to come at the present position,
         doing the stack adjust here isn't useful.  Doing it with our
         settings at that location isn't useful either.  Let's hope
         someone does it!  */
      if (last_insn == 0)
        do_pending_stack_adjust ();
      fixup->target = tree_label;
      fixup->target_rtl = rtl_label;

      /* Create a BLOCK node and a corresponding matched set of
         NOTE_INSN_BEGIN_BLOCK and NOTE_INSN_END_BLOCK notes at
         this point.  The notes will encapsulate any and all fixup
         code which we might later insert at this point in the insn
         stream.  Also, the BLOCK node will be the parent (i.e. the
         `SUPERBLOCK') of any other BLOCK nodes which we might create
         later on when we are expanding the fixup code.

         Note that optimization passes (including expand_end_loop)
         might move the *_BLOCK notes away, so we use a NOTE_INSN_DELETED
         as a placeholder.  */

      {
        register rtx original_before_jump
          = last_insn ? last_insn : get_last_insn ();
        rtx start;

        start_sequence ();
        pushlevel (0);
        start = emit_note (NULL_PTR, NOTE_INSN_BLOCK_BEG);
        fixup->before_jump = emit_note (NULL_PTR, NOTE_INSN_DELETED);
        last_block_end_note = emit_note (NULL_PTR, NOTE_INSN_BLOCK_END);
        fixup->context = poplevel (1, 0, 0);  /* Create the BLOCK node now! */
        end_sequence ();
        emit_insns_after (start, original_before_jump);
      }

      fixup->block_start_count = block_start_count;
      fixup->stack_level = 0;
      fixup->cleanup_list_list
        = ((block->data.block.outer_cleanups
            || block->data.block.cleanups)
           ? tree_cons (NULL_TREE, block->data.block.cleanups,
                        block->data.block.outer_cleanups)
           : 0);
      fixup->next = goto_fixup_chain;
      goto_fixup_chain = fixup;
    }

  return block != 0;
}


\f
/* Expand any needed fixups in the outermost binding level of the
   function.  FIRST_INSN is the first insn in the function.  */

void
expand_fixups (first_insn)
     rtx first_insn;
{
  fixup_gotos (NULL_PTR, NULL_RTX, NULL_TREE, first_insn, 0);
}

/* When exiting a binding contour, process all pending gotos requiring fixups.
   THISBLOCK is the structure that describes the block being exited.
   STACK_LEVEL is the rtx for the stack level to restore exiting this contour.
   CLEANUP_LIST is a list of expressions to evaluate on exiting this contour.
   FIRST_INSN is the insn that began this contour.

   Gotos that jump out of this contour must restore the
   stack level and do the cleanups before actually jumping.
   DONT_JUMP_IN nonzero means report an error if there is a jump into this
   contour from before the beginning of the contour.
   This is also done if STACK_LEVEL is nonzero.  */

static void
fixup_gotos (thisblock, stack_level, cleanup_list, first_insn, dont_jump_in)
     struct nesting *thisblock;
     rtx stack_level;
     tree cleanup_list;
     rtx first_insn;
     int dont_jump_in;
{
  register struct goto_fixup *f, *prev;

  /* F is the fixup we are considering; PREV is the previous one.  */
  /* We run this loop in two passes so that cleanups of exited blocks
     are run first, and blocks that are exited are marked so
     afterwards.  */

  for (prev = 0, f = goto_fixup_chain; f; prev = f, f = f->next)
    {
      /* Test for a fixup that is inactive because it is already handled.  */
      if (f->before_jump == 0)
        {
          /* Delete inactive fixup from the chain, if that is easy to do.  */
          if (prev != 0)
            prev->next = f->next;
        }
      /* Has this fixup's target label been defined?
         If so, we can finalize it.  */
      else if (PREV_INSN (f->target_rtl) != 0)
        {
          register rtx cleanup_insns;

          /* Get the first non-label after the label
             this goto jumps to.  If that's before this scope begins,
             we don't have a jump into the scope.  */
          rtx after_label = f->target_rtl;
          while (after_label != 0 && GET_CODE (after_label) == CODE_LABEL)
            after_label = NEXT_INSN (after_label);

          /* If this fixup jumped into this contour from before the beginning
             of this contour, report an error.  */
          /* ??? Bug: this does not detect jumping in through intermediate
             blocks that have stack levels or cleanups.
             It detects only a problem with the innermost block
             around the label.  */
          if (f->target != 0
              && (dont_jump_in || stack_level || cleanup_list)
              /* If AFTER_LABEL is 0, it means the jump goes to the end
                 of the rtl, which means it jumps into this scope.  */
              && (after_label == 0
                  || INSN_UID (first_insn) < INSN_UID (after_label))
              && INSN_UID (first_insn) > INSN_UID (f->before_jump)
              && ! DECL_ERROR_ISSUED (f->target))
            {
              error_with_decl (f->target,
                               "label `%s' used before containing binding contour");
              /* Prevent multiple errors for one label.  */
              DECL_ERROR_ISSUED (f->target) = 1;
            }

          /* We will expand the cleanups into a sequence of their own and
             then later on we will attach this new sequence to the insn
             stream just ahead of the actual jump insn.  */

          start_sequence ();

          /* Temporarily restore the lexical context where we will
             logically be inserting the fixup code.  We do this for the
             sake of getting the debugging information right.  */

          pushlevel (0);
          set_block (f->context);

          /* Expand the cleanups for blocks this jump exits.  */
          if (f->cleanup_list_list)
            {
              tree lists;
              for (lists = f->cleanup_list_list; lists; lists = TREE_CHAIN (lists))
                /* Marked elements correspond to blocks that have been closed.
                   Do their cleanups.  */
                if (TREE_ADDRESSABLE (lists)
                    && TREE_VALUE (lists) != 0)
                  {
                    expand_cleanups (TREE_VALUE (lists), NULL_TREE, 1, 1);
                    /* Pop any pushes done in the cleanups,
                       in case function is about to return.  */
                    do_pending_stack_adjust ();
                  }
            }

          /* Restore stack level for the biggest contour that this
             jump jumps out of.  */
          if (f->stack_level)
            emit_stack_restore (SAVE_BLOCK, f->stack_level, f->before_jump);

          /* Finish up the sequence containing the insns which implement the
             necessary cleanups, and then attach that whole sequence to the
             insn stream just ahead of the actual jump insn.  Attaching it
             at that point insures that any cleanups which are in fact
             implicit C++ object destructions (which must be executed upon
             leaving the block) appear (to the debugger) to be taking place
             in an area of the generated code where the object(s) being
             destructed are still "in scope".  */

          cleanup_insns = get_insns ();
          poplevel (1, 0, 0);

          end_sequence ();
          emit_insns_after (cleanup_insns, f->before_jump);

          f->before_jump = 0;
        }
    }

  /* For any still-undefined labels, do the cleanups for this block now.
     We must do this now since items in the cleanup list may go out
     of scope when the block ends.  */
  for (prev = 0, f = goto_fixup_chain; f; prev = f, f = f->next)
    if (f->before_jump != 0
        && PREV_INSN (f->target_rtl) == 0
        /* Label has still not appeared.  If we are exiting a block with
           a stack level to restore, that started before the fixup,
           mark this stack level as needing restoration
           when the fixup is later finalized.  */
        && thisblock != 0
        /* Note: if THISBLOCK == 0 and we have a label that hasn't appeared, it
           means the label is undefined.  That's erroneous, but possible.  */
        && (thisblock->data.block.block_start_count
            <= f->block_start_count))
      {
        tree lists = f->cleanup_list_list;
        rtx cleanup_insns;

        for (; lists; lists = TREE_CHAIN (lists))
          /* If the following elt. corresponds to our containing block
             then the elt. must be for this block.  */
          if (TREE_CHAIN (lists) == thisblock->data.block.outer_cleanups)
            {
              start_sequence ();
              pushlevel (0);
              set_block (f->context);
              expand_cleanups (TREE_VALUE (lists), NULL_TREE, 1, 1);
              do_pending_stack_adjust ();
              cleanup_insns = get_insns ();
              poplevel (1, 0, 0);
              end_sequence ();
              if (cleanup_insns != 0)
                f->before_jump
                  = emit_insns_after (cleanup_insns, f->before_jump);

              f->cleanup_list_list = TREE_CHAIN (lists);
            }

        if (stack_level)
          f->stack_level = stack_level;
      }
}
\f
/* Return the number of times character C occurs in string S.  */
static int
n_occurrences (c, s)
     int c;
     char *s;
{
  int n = 0;
  while (*s)
    n += (*s++ == c);
  return n;
}
\f
/* Generate RTL for an asm statement (explicit assembler code).
   BODY is a STRING_CST node containing the assembler code text,
   or an ADDR_EXPR containing a STRING_CST.  */

void
expand_asm (body)
     tree body;
{
  if (current_function_check_memory_usage)
    {
      error ("`asm' cannot be used with `-fcheck-memory-usage'");
      return;
    }

  if (TREE_CODE (body) == ADDR_EXPR)
    body = TREE_OPERAND (body, 0);

  emit_insn (gen_rtx_ASM_INPUT (VOIDmode,
                                TREE_STRING_POINTER (body)));
  last_expr_type = 0;
}

/* Generate RTL for an asm statement with arguments.
   STRING is the instruction template.
   OUTPUTS is a list of output arguments (lvalues); INPUTS a list of inputs.
   Each output or input has an expression in the TREE_VALUE and
   a constraint-string in the TREE_PURPOSE.
   CLOBBERS is a list of STRING_CST nodes each naming a hard register
   that is clobbered by this insn.

   Not all kinds of lvalue that may appear in OUTPUTS can be stored directly.
   Some elements of OUTPUTS may be replaced with trees representing temporary
   values.  The caller should copy those temporary values to the originally
   specified lvalues.

   VOL nonzero means the insn is volatile; don't optimize it.  */
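
/* For orientation, a GNU C statement that reaches this function looks
   like the following (i386-style constraints, purely illustrative):

       asm volatile ("rep; movsb"
                     : "=D" (dst), "=S" (src), "=c" (n)   .. OUTPUTS
                     : "0" (dst), "1" (src), "2" (n)      .. INPUTS
                     : "memory");                         .. CLOBBERS

   Each "0"/"1"/"2" input is a matching constraint tying that input to
   the same-numbered output, and "memory" is recognized by the clobber
   handling at the bottom of this function.  */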

void
expand_asm_operands (string, outputs, inputs, clobbers, vol, filename, line)
     tree string, outputs, inputs, clobbers;
     int vol;
     char *filename;
     int line;
{
  rtvec argvec, constraints;
  rtx body;
  int ninputs = list_length (inputs);
  int noutputs = list_length (outputs);
  int ninout = 0;
  int nclobbers;
  tree tail;
  register int i;
  /* Vector of RTX's of evaluated output operands.  */
  rtx *output_rtx = (rtx *) alloca (noutputs * sizeof (rtx));
  int *inout_opnum = (int *) alloca (noutputs * sizeof (int));
  enum machine_mode *inout_mode
    = (enum machine_mode *) alloca (noutputs * sizeof (enum machine_mode));
  /* The insn we have emitted.  */
  rtx insn;

  /* An ASM with no outputs needs to be treated as volatile, for now.  */
  if (noutputs == 0)
    vol = 1;

  if (current_function_check_memory_usage)
    {
      error ("`asm' cannot be used with `-fcheck-memory-usage'");
      return;
    }

  /* Count the number of meaningful clobbered registers, ignoring what
     we would ignore later.  */
  nclobbers = 0;
  for (tail = clobbers; tail; tail = TREE_CHAIN (tail))
    {
      char *regname = TREE_STRING_POINTER (TREE_VALUE (tail));
      i = decode_reg_name (regname);
      if (i >= 0 || i == -4)
        ++nclobbers;
      else if (i == -2)
        error ("unknown register name `%s' in `asm'", regname);
    }

  last_expr_type = 0;

  /* Check that the number of alternatives is constant across all
     operands.  */
  if (outputs || inputs)
    {
      tree tmp = TREE_PURPOSE (outputs ? outputs : inputs);
      int nalternatives = n_occurrences (',', TREE_STRING_POINTER (tmp));
      tree next = inputs;

      if (nalternatives + 1 > MAX_RECOG_ALTERNATIVES)
        {
          error ("too many alternatives in `asm'");
          return;
        }

      tmp = outputs;
      while (tmp)
        {
          char *constraint = TREE_STRING_POINTER (TREE_PURPOSE (tmp));
          if (n_occurrences (',', constraint) != nalternatives)
            {
              error ("operand constraints for `asm' differ in number of alternatives");
              return;
            }
          if (TREE_CHAIN (tmp))
            tmp = TREE_CHAIN (tmp);
          else
            tmp = next, next = 0;
        }
    }

  for (i = 0, tail = outputs; tail; tail = TREE_CHAIN (tail), i++)
    {
      tree val = TREE_VALUE (tail);
      tree type = TREE_TYPE (val);
      char *constraint;
      char *p;
      int c_len;
      int j;
      int is_inout = 0;
      int allows_reg = 0;
      int allows_mem = 0;

      /* If there's an erroneous arg, emit no insn.  */
      if (TREE_TYPE (val) == error_mark_node)
        return;

      /* Make sure constraint has `=' and does not have `+'.  Also, see
         if it allows any register.  Be liberal on the latter test, since
         the worst that happens if we get it wrong is we issue an error
         message.  */

      c_len = TREE_STRING_LENGTH (TREE_PURPOSE (tail)) - 1;
      constraint = TREE_STRING_POINTER (TREE_PURPOSE (tail));

      /* Allow the `=' or `+' to not be at the beginning of the string,
         since it wasn't explicitly documented that way, and there is a
         large body of code that puts it last.  Swap the character to
         the front, so as not to uglify any place else.  */
      switch (c_len)
        {
        default:
          if ((p = strchr (constraint, '=')) != NULL)
            break;
          if ((p = strchr (constraint, '+')) != NULL)
            break;
        case 0:
          error ("output operand constraint lacks `='");
          return;
        }

      if (p != constraint)
        {
          j = *p;
          bcopy (constraint, constraint+1, p-constraint);
          *constraint = j;

          warning ("output constraint `%c' for operand %d is not at the beginning", j, i);
        }

      is_inout = constraint[0] == '+';
      /* Replace '+' with '='.  */
      constraint[0] = '=';
      /* Make sure we can specify the matching operand.  */
      if (is_inout && i > 9)
        {
          error ("output operand constraint %d contains `+'", i);
          return;
        }

      for (j = 1; j < c_len; j++)
        switch (constraint[j])
          {
          case '+':
          case '=':
            error ("operand constraint contains '+' or '=' at illegal position.");
            return;

          case '%':
            if (i + 1 == ninputs + noutputs)
              {
                error ("`%%' constraint used with last operand");
                return;
              }
            break;

          case '?':  case '!':  case '*':  case '&':
          case 'E':  case 'F':  case 'G':  case 'H':
          case 's':  case 'i':  case 'n':
          case 'I':  case 'J':  case 'K':  case 'L':  case 'M':
          case 'N':  case 'O':  case 'P':  case ',':
#ifdef EXTRA_CONSTRAINT
          case 'Q':  case 'R':  case 'S':  case 'T':  case 'U':
#endif
            break;

          case '0':  case '1':  case '2':  case '3':  case '4':
          case '5':  case '6':  case '7':  case '8':  case '9':
            error ("matching constraint not valid in output operand");
            break;

          case 'V':  case 'm':  case 'o':
            allows_mem = 1;
            break;

          case '<':  case '>':
            /* ??? Before flow, auto inc/dec insns are not supposed to exist,
               excepting those that expand_call created.  So match memory
               and hope.  */
            allows_mem = 1;
            break;

          case 'g':  case 'X':
            allows_reg = 1;
            allows_mem = 1;
            break;

          case 'p':  case 'r':
          default:
            allows_reg = 1;
            break;
          }

      /* If an output operand is not a decl or indirect ref and our constraint
         allows a register, make a temporary to act as an intermediate.
         Make the asm insn write into that, then our caller will copy it to
         the real output operand.  Likewise for promoted variables.  */

      if ((TREE_CODE (val) == INDIRECT_REF
           && allows_mem)
          || (TREE_CODE_CLASS (TREE_CODE (val)) == 'd'
              && (allows_mem || GET_CODE (DECL_RTL (val)) == REG)
              && ! (GET_CODE (DECL_RTL (val)) == REG
                    && GET_MODE (DECL_RTL (val)) != TYPE_MODE (type)))
          || ! allows_reg
          || is_inout)
        {
          if (! allows_reg)
            mark_addressable (TREE_VALUE (tail));

          output_rtx[i]
            = expand_expr (TREE_VALUE (tail), NULL_RTX, VOIDmode,
                           EXPAND_MEMORY_USE_WO);

          if (! allows_reg && GET_CODE (output_rtx[i]) != MEM)
            error ("output number %d not directly addressable", i);
          if (! allows_mem && GET_CODE (output_rtx[i]) == MEM)
            error ("output number %d not restored to memory", i);
        }
      else
        {
          output_rtx[i] = assign_temp (type, 0, 0, 0);
          TREE_VALUE (tail) = make_tree (type, output_rtx[i]);
        }

      if (is_inout)
        {
          inout_mode[ninout] = TYPE_MODE (TREE_TYPE (TREE_VALUE (tail)));
          inout_opnum[ninout++] = i;
        }
    }

  ninputs += ninout;
  if (ninputs + noutputs > MAX_RECOG_OPERANDS)
    {
      error ("more than %d operands in `asm'", MAX_RECOG_OPERANDS);
      return;
    }

  /* Make vectors for the expression-rtx and constraint strings.  */

  argvec = rtvec_alloc (ninputs);
  constraints = rtvec_alloc (ninputs);

  body = gen_rtx_ASM_OPERANDS (VOIDmode,
                               TREE_STRING_POINTER (string), "", 0, argvec,
                               constraints, filename, line);

  MEM_VOLATILE_P (body) = vol;

  /* Eval the inputs and put them into ARGVEC.
     Put their constraints into ASM_INPUTs and store in CONSTRAINTS.  */

  i = 0;
  for (tail = inputs; tail; tail = TREE_CHAIN (tail))
    {
      int j;
      int allows_reg = 0, allows_mem = 0;
      char *constraint, *orig_constraint;
      int c_len;
      rtx op;

      /* If there's an erroneous arg, emit no insn,
         because the ASM_INPUT would get VOIDmode
         and that could cause a crash in reload.  */
      if (TREE_TYPE (TREE_VALUE (tail)) == error_mark_node)
        return;

      /* ??? Can this happen, and does the error message make any sense? */
      if (TREE_PURPOSE (tail) == NULL_TREE)
        {
          error ("hard register `%s' listed as input operand to `asm'",
                 TREE_STRING_POINTER (TREE_VALUE (tail)) );
          return;
        }

      c_len = TREE_STRING_LENGTH (TREE_PURPOSE (tail)) - 1;
      constraint = TREE_STRING_POINTER (TREE_PURPOSE (tail));
      orig_constraint = constraint;

      /* Make sure constraint has neither `=', `+', nor '&'.  */

      for (j = 0; j < c_len; j++)
        switch (constraint[j])
          {
          case '+':  case '=':  case '&':
            if (constraint == orig_constraint)
              {
                error ("input operand constraint contains `%c'", constraint[j]);
                return;
              }
            break;

          case '%':
            if (constraint == orig_constraint
                && i + 1 == ninputs - ninout)
              {
                error ("`%%' constraint used with last operand");
                return;
              }
            break;

          case 'V':  case 'm':  case 'o':
            allows_mem = 1;
            break;

          case '<':  case '>':
          case '?':  case '!':  case '*':
          case 'E':  case 'F':  case 'G':  case 'H':  case 'X':
          case 's':  case 'i':  case 'n':
          case 'I':  case 'J':  case 'K':  case 'L':  case 'M':
          case 'N':  case 'O':  case 'P':  case ',':
#ifdef EXTRA_CONSTRAINT
          case 'Q':  case 'R':  case 'S':  case 'T':  case 'U':
#endif
            break;

            /* Whether or not a numeric constraint allows a register is
               decided by the matching constraint, and so there is no need
               to do anything special with them.  We must handle them in
               the default case, so that we don't unnecessarily force
               operands to memory.  */
          case '0':  case '1':  case '2':  case '3':  case '4':
          case '5':  case '6':  case '7':  case '8':  case '9':
            if (constraint[j] >= '0' + noutputs)
              {
                error
                  ("matching constraint references invalid operand number");
                return;
              }

            /* Try and find the real constraint for this dup.  */
            if ((j == 0 && c_len == 1)
                || (j == 1 && c_len == 2 && constraint[0] == '%'))
              {
                tree o = outputs;
                for (j = constraint[j] - '0'; j > 0; --j)
                  o = TREE_CHAIN (o);

                c_len = TREE_STRING_LENGTH (TREE_PURPOSE (o)) - 1;
                constraint = TREE_STRING_POINTER (TREE_PURPOSE (o));
                j = 0;
                break;
              }

            /* ... fall through ... */

          case 'p':  case 'r':
          default:
            allows_reg = 1;
            break;

          case 'g':
            allows_reg = 1;
            allows_mem = 1;
            break;
          }

      if (! allows_reg && allows_mem)
        mark_addressable (TREE_VALUE (tail));

      op = expand_expr (TREE_VALUE (tail), NULL_RTX, VOIDmode, 0);

      if (asm_operand_ok (op, constraint) <= 0)
        {
          if (allows_reg)
            op = force_reg (TYPE_MODE (TREE_TYPE (TREE_VALUE (tail))), op);
          else if (!allows_mem)
            warning ("asm operand %d probably doesn't match constraints", i);
          else if (CONSTANT_P (op))
            op = force_const_mem (TYPE_MODE (TREE_TYPE (TREE_VALUE (tail))),
                                  op);
          else if (GET_CODE (op) == REG
                   || GET_CODE (op) == SUBREG
                   || GET_CODE (op) == CONCAT)
            {
              tree type = TREE_TYPE (TREE_VALUE (tail));
              rtx memloc = assign_temp (type, 1, 1, 1);

              emit_move_insn (memloc, op);
              op = memloc;
            }
          else if (GET_CODE (op) == MEM && MEM_VOLATILE_P (op))
            /* We won't recognize volatile memory as available as a
               memory_operand at this point.  Ignore it.  */
1550 ;
1551 else if (queued_subexp_p (op))
1552 ;
1553 else
1554 /* ??? Leave this only until we have experience with what
1555 happens in combine and elsewhere when constraints are
1556 not satisfied. */
1557 warning ("asm operand %d probably doesn't match constraints", i);
65fed0cb 1558 }
1f06ee8d 1559 XVECEXP (body, 3, i) = op;
2a230e9d 1560
28d81abb 1561 XVECEXP (body, 4, i) /* constraints */
38a448ca 1562 = gen_rtx_ASM_INPUT (TYPE_MODE (TREE_TYPE (TREE_VALUE (tail))),
1f06ee8d 1563 orig_constraint);
28d81abb
RK
1564 i++;
1565 }
1566
1567 /* Protect all the operands from the queue,
1568 now that they have all been evaluated. */
1569
1570 for (i = 0; i < ninputs - ninout; i++)
1571 XVECEXP (body, 3, i) = protect_from_queue (XVECEXP (body, 3, i), 0);
1572
1573 for (i = 0; i < noutputs; i++)
1574 output_rtx[i] = protect_from_queue (output_rtx[i], 1);
1575
1576 /* For in-out operands, copy output rtx to input rtx. */
1577 for (i = 0; i < ninout; i++)
1578 {
1579 static char match[9+1][2]
1580 = {"0", "1", "2", "3", "4", "5", "6", "7", "8", "9"};
1581 int j = inout_opnum[i];
1582
1583 XVECEXP (body, 3, ninputs - ninout + i) /* argvec */
1584 = output_rtx[j];
1585 XVECEXP (body, 4, ninputs - ninout + i) /* constraints */
1586 = gen_rtx_ASM_INPUT (inout_mode[j], match[j]);
1587 }
1588
1589 /* Now, for each output, construct an rtx
1590 (set OUTPUT (asm_operands INSN OUTPUTNUMBER OUTPUTCONSTRAINT
1591 ARGVEC CONSTRAINTS))
1592 If there is more than one, put them inside a PARALLEL. */
1593
1594 if (noutputs == 1 && nclobbers == 0)
1595 {
1596 XSTR (body, 1) = TREE_STRING_POINTER (TREE_PURPOSE (outputs));
1597 insn = emit_insn (gen_rtx_SET (VOIDmode, output_rtx[0], body));
1598 }
1599 else if (noutputs == 0 && nclobbers == 0)
1600 {
1601 /* No output operands: put in a raw ASM_OPERANDS rtx. */
1602 insn = emit_insn (body);
1603 }
1604 else
1605 {
1606 rtx obody = body;
1607 int num = noutputs;
1608 if (num == 0) num = 1;
1609 body = gen_rtx_PARALLEL (VOIDmode, rtvec_alloc (num + nclobbers));
1610
1611 /* For each output operand, store a SET. */
1612
1613 for (i = 0, tail = outputs; tail; tail = TREE_CHAIN (tail), i++)
1614 {
1615 XVECEXP (body, 0, i)
1616 = gen_rtx_SET (VOIDmode,
1617 output_rtx[i],
1618 gen_rtx_ASM_OPERANDS (VOIDmode,
1619 TREE_STRING_POINTER (string),
1620 TREE_STRING_POINTER (TREE_PURPOSE (tail)),
1621 i, argvec, constraints,
1622 filename, line));
1623 MEM_VOLATILE_P (SET_SRC (XVECEXP (body, 0, i))) = vol;
1624 }
1625
1626 /* If there are no outputs (but there are some clobbers)
1627 store the bare ASM_OPERANDS into the PARALLEL. */
1628
1629 if (i == 0)
1630 XVECEXP (body, 0, i++) = obody;
1631
1632 /* Store (clobber REG) for each clobbered register specified. */
1633
1634 for (tail = clobbers; tail; tail = TREE_CHAIN (tail))
1635 {
1636 char *regname = TREE_STRING_POINTER (TREE_VALUE (tail));
1637 int j = decode_reg_name (regname);
1638
1639 if (j < 0)
1640 {
1641 if (j == -3) /* `cc', which is not a register */
1642 continue;
1643
1644 if (j == -4) /* `memory', don't cache memory across asm */
1645 {
1646 XVECEXP (body, 0, i++)
1647 = gen_rtx_CLOBBER (VOIDmode,
1648 gen_rtx_MEM (BLKmode,
1649 gen_rtx_SCRATCH (VOIDmode)));
1650 continue;
1651 }
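/* For example, the common compiler-barrier idiom

       asm volatile ("" : : : "memory");

   ends up here: its "memory" clobber becomes the
   (clobber (mem:BLK (scratch))) built above, which tells the optimizer
   not to carry memory values in registers across the asm.  */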
1652
1653 /* Ignore unknown register, error already signaled. */
1654 continue;
1655 }
1656
1657 /* Use QImode since that's guaranteed to clobber just one reg. */
1658 XVECEXP (body, 0, i++)
1659 = gen_rtx_CLOBBER (VOIDmode, gen_rtx_REG (QImode, j));
1660 }
1661
1662 insn = emit_insn (body);
1663 }
1664
1665 free_temp_slots ();
1666}
1667\f
1668/* Generate RTL to evaluate the expression EXP
1669 and remember it in case this is the VALUE in a ({... VALUE; }) construct. */
1670
1671void
1672expand_expr_stmt (exp)
1673 tree exp;
1674{
1675 /* If -W, warn about statements with no side effects,
1676 except for an explicit cast to void (e.g. for assert()), and
1677 except inside a ({...}) where they may be useful. */
1678 if (expr_stmts_for_value == 0 && exp != error_mark_node)
1679 {
1680 if (! TREE_SIDE_EFFECTS (exp) && (extra_warnings || warn_unused)
1681 && !(TREE_CODE (exp) == CONVERT_EXPR
1682 && TREE_TYPE (exp) == void_type_node))
1683 warning_with_file_and_line (emit_filename, emit_lineno,
1684 "statement with no effect");
1685 else if (warn_unused)
1686 warn_if_unused_value (exp);
1687 }
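/* For instance, a statement such as `x + 1;' draws the warning above,
   while `(void) (x + 1);' does not, nor do statements inside a
   `({...})' grouping, whose last value may be used.  */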
1688
1689 /* If EXP is of function type and we are expanding statements for
1690 value, convert it to pointer-to-function. */
1691 if (expr_stmts_for_value && TREE_CODE (TREE_TYPE (exp)) == FUNCTION_TYPE)
1692 exp = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (exp)), exp);
1693
1694 last_expr_type = TREE_TYPE (exp);
1695 if (flag_syntax_only && ! expr_stmts_for_value)
1696 last_expr_value = 0;
1697 else
1698 last_expr_value = expand_expr (exp,
1699 (expr_stmts_for_value
1700 ? NULL_RTX : const0_rtx),
1701 VOIDmode, 0);
1702
1703 /* If all we do is reference a volatile value in memory,
1704 copy it to a register to be sure it is actually touched. */
1705 if (last_expr_value != 0 && GET_CODE (last_expr_value) == MEM
1706 && TREE_THIS_VOLATILE (exp))
1707 {
1708 if (TYPE_MODE (TREE_TYPE (exp)) == VOIDmode)
1709 ;
1710 else if (TYPE_MODE (TREE_TYPE (exp)) != BLKmode)
1711 copy_to_reg (last_expr_value);
1712 else
1713 {
1714 rtx lab = gen_label_rtx ();
1715
1716 /* Compare the value with itself to reference it. */
1717 emit_cmp_and_jump_insns (last_expr_value, last_expr_value, EQ,
1718 expand_expr (TYPE_SIZE (last_expr_type),
1719 NULL_RTX, VOIDmode, 0),
1720 BLKmode, 0,
1721 TYPE_ALIGN (last_expr_type) / BITS_PER_UNIT,
1722 lab);
1723 emit_label (lab);
1724 }
1725 }
1726
1727 /* If this expression is part of a ({...}) and is in memory, we may have
1728 to preserve temporaries. */
1729 preserve_temp_slots (last_expr_value);
1730
1731 /* Free any temporaries used to evaluate this expression. Any temporary
1732 used as a result of this expression will already have been preserved
1733 above. */
1734 free_temp_slots ();
1735
1736 emit_queue ();
1737}
1738
1739/* Warn if EXP contains any computations whose results are not used.
1740 Return 1 if a warning is printed; 0 otherwise. */
1741
1742int
1743warn_if_unused_value (exp)
1744 tree exp;
1745{
1746 if (TREE_USED (exp))
1747 return 0;
1748
1749 switch (TREE_CODE (exp))
1750 {
1751 case PREINCREMENT_EXPR:
1752 case POSTINCREMENT_EXPR:
1753 case PREDECREMENT_EXPR:
1754 case POSTDECREMENT_EXPR:
1755 case MODIFY_EXPR:
1756 case INIT_EXPR:
1757 case TARGET_EXPR:
1758 case CALL_EXPR:
1759 case METHOD_CALL_EXPR:
1760 case RTL_EXPR:
1761 case TRY_CATCH_EXPR:
1762 case WITH_CLEANUP_EXPR:
1763 case EXIT_EXPR:
1764 /* We don't warn about COND_EXPR because it may be a useful
1765 construct if either arm contains a side effect. */
1766 case COND_EXPR:
1767 return 0;
1768
1769 case BIND_EXPR:
1770 /* For a binding, warn if no side effect within it. */
1771 return warn_if_unused_value (TREE_OPERAND (exp, 1));
1772
1773 case SAVE_EXPR:
1774 return warn_if_unused_value (TREE_OPERAND (exp, 1));
1775
1776 case TRUTH_ORIF_EXPR:
1777 case TRUTH_ANDIF_EXPR:
1778 /* In && or ||, warn if 2nd operand has no side effect. */
1779 return warn_if_unused_value (TREE_OPERAND (exp, 1));
1780
1781 case COMPOUND_EXPR:
1782 if (TREE_NO_UNUSED_WARNING (exp))
1783 return 0;
1784 if (warn_if_unused_value (TREE_OPERAND (exp, 0)))
1785 return 1;
1786 /* Let people do `(foo (), 0)' without a warning. */
1787 if (TREE_CONSTANT (TREE_OPERAND (exp, 1)))
1788 return 0;
1789 return warn_if_unused_value (TREE_OPERAND (exp, 1));
1790
1791 case NOP_EXPR:
1792 case CONVERT_EXPR:
1793 case NON_LVALUE_EXPR:
1794 /* Don't warn about values cast to void. */
1795 if (TREE_TYPE (exp) == void_type_node)
1796 return 0;
1797 /* Don't warn about conversions not explicit in the user's program. */
1798 if (TREE_NO_UNUSED_WARNING (exp))
1799 return 0;
1800 /* Assignment to a cast usually results in a cast of a modify.
1801 Don't complain about that. There can be an arbitrary number of
1802 casts before the modify, so we must loop until we find the first
1803 non-cast expression and then test to see if that is a modify. */
1804 {
1805 tree tem = TREE_OPERAND (exp, 0);
1806
1807 while (TREE_CODE (tem) == CONVERT_EXPR || TREE_CODE (tem) == NOP_EXPR)
1808 tem = TREE_OPERAND (tem, 0);
1809
1810 if (TREE_CODE (tem) == MODIFY_EXPR || TREE_CODE (tem) == INIT_EXPR
1811 || TREE_CODE (tem) == CALL_EXPR)
1812 return 0;
1813 }
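/* Hence a statement like `(char) (x = y);' does not warn: peeling the
   casts exposes the MODIFY_EXPR underneath.  */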
1814 goto warn;
1815
1816 case INDIRECT_REF:
1817 /* Don't warn about automatic dereferencing of references, since
1818 the user cannot control it. */
1819 if (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == REFERENCE_TYPE)
1820 return warn_if_unused_value (TREE_OPERAND (exp, 0));
1821 /* ... fall through ... */
1822
1823 default:
1824 /* Referencing a volatile value is a side effect, so don't warn. */
1825 if ((TREE_CODE_CLASS (TREE_CODE (exp)) == 'd'
1826 || TREE_CODE_CLASS (TREE_CODE (exp)) == 'r')
1827 && TREE_THIS_VOLATILE (exp))
1828 return 0;
1829 warn:
1830 warning_with_file_and_line (emit_filename, emit_lineno,
1831 "value computed is not used");
1832 return 1;
1833 }
1834}
1835
1836/* Clear out the memory of the last expression evaluated. */
1837
1838void
1839clear_last_expr ()
1840{
1841 last_expr_type = 0;
1842}
1843
1844/* Begin a statement which will return a value.
1845 Return the RTL_EXPR for this statement expr.
1846 The caller must save that value and pass it to expand_end_stmt_expr. */
1847
1848tree
1849expand_start_stmt_expr ()
1850{
1851 int momentary;
1852 tree t;
1853
1854 /* Make the RTL_EXPR node temporary, not momentary,
1855 so that rtl_expr_chain doesn't become garbage. */
1856 momentary = suspend_momentary ();
1857 t = make_node (RTL_EXPR);
1858 resume_momentary (momentary);
1859 do_pending_stack_adjust ();
1860 start_sequence_for_rtl_expr (t);
1861 NO_DEFER_POP;
1862 expr_stmts_for_value++;
1863 return t;
1864}
1865
1866/* Restore the previous state at the end of a statement that returns a value.
1867 Returns a tree node representing the statement's value and the
1868 insns to compute the value.
1869
1870 The nodes of that expression have been freed by now, so we cannot use them.
1871 But we don't want to do that anyway; the expression has already been
1872 evaluated and now we just want to use the value. So generate a RTL_EXPR
1873 with the proper type and RTL value.
1874
1875 If the last substatement was not an expression,
1876 return something with type `void'. */
1877
1878tree
1879expand_end_stmt_expr (t)
1880 tree t;
1881{
1882 OK_DEFER_POP;
1883
1884 if (last_expr_type == 0)
1885 {
1886 last_expr_type = void_type_node;
1887 last_expr_value = const0_rtx;
1888 }
1889 else if (last_expr_value == 0)
1890 /* There are some cases where this can happen, such as when the
1891 statement is of void type. */
1892 last_expr_value = const0_rtx;
1893 else if (GET_CODE (last_expr_value) != REG && ! CONSTANT_P (last_expr_value))
1894 /* Remove any possible QUEUED. */
1895 last_expr_value = protect_from_queue (last_expr_value, 0);
1896
1897 emit_queue ();
1898
1899 TREE_TYPE (t) = last_expr_type;
1900 RTL_EXPR_RTL (t) = last_expr_value;
1901 RTL_EXPR_SEQUENCE (t) = get_insns ();
1902
1903 rtl_expr_chain = tree_cons (NULL_TREE, t, rtl_expr_chain);
1904
1905 end_sequence ();
1906
1907 /* Don't consider deleting this expr or containing exprs at tree level. */
1908 TREE_SIDE_EFFECTS (t) = 1;
1909 /* Propagate volatility of the actual RTL expr. */
1910 TREE_THIS_VOLATILE (t) = volatile_refs_p (last_expr_value);
1911
1912 last_expr_type = 0;
1913 expr_stmts_for_value--;
1914
1915 return t;
1916}
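/* Sketch of the expected use of this pair (assumed usage, not from a
   particular front end): for the GNU C statement expression

       z = ({ int y = f (); y * 2; });

   the parser calls expand_start_stmt_expr before expanding the inner
   statements and expand_end_stmt_expr afterward; the RTL_EXPR returned
   carries both the generated insns and the value of `y * 2'.  */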
1917\f
1918/* Generate RTL for the start of an if-then. COND is the expression
1919 whose truth should be tested.
1920
1921 If EXITFLAG is nonzero, this conditional is visible to
1922 `exit_something'. */
1923
1924void
1925expand_start_cond (cond, exitflag)
1926 tree cond;
1927 int exitflag;
1928{
1929 struct nesting *thiscond = ALLOC_NESTING ();
1930
1931 /* Make an entry on cond_stack for the cond we are entering. */
1932
1933 thiscond->next = cond_stack;
1934 thiscond->all = nesting_stack;
1935 thiscond->depth = ++nesting_depth;
1936 thiscond->data.cond.next_label = gen_label_rtx ();
1937 /* Before we encounter an `else', we don't need a separate exit label
1938 unless there are supposed to be exit statements
1939 to exit this conditional. */
1940 thiscond->exit_label = exitflag ? gen_label_rtx () : 0;
1941 thiscond->data.cond.endif_label = thiscond->exit_label;
1942 cond_stack = thiscond;
1943 nesting_stack = thiscond;
1944
1945 do_jump (cond, thiscond->data.cond.next_label, NULL_RTX);
1946}
1947
1948/* Generate RTL between then-clause and the elseif-clause
1949 of an if-then-elseif-.... */
1950
1951void
1952expand_start_elseif (cond)
1953 tree cond;
1954{
1955 if (cond_stack->data.cond.endif_label == 0)
1956 cond_stack->data.cond.endif_label = gen_label_rtx ();
1957 emit_jump (cond_stack->data.cond.endif_label);
1958 emit_label (cond_stack->data.cond.next_label);
1959 cond_stack->data.cond.next_label = gen_label_rtx ();
1960 do_jump (cond, cond_stack->data.cond.next_label, NULL_RTX);
1961}
1962
1963/* Generate RTL between the then-clause and the else-clause
1964 of an if-then-else. */
1965
1966void
1967expand_start_else ()
1968{
1969 if (cond_stack->data.cond.endif_label == 0)
1970 cond_stack->data.cond.endif_label = gen_label_rtx ();
1971
1972 emit_jump (cond_stack->data.cond.endif_label);
1973 emit_label (cond_stack->data.cond.next_label);
1974 cond_stack->data.cond.next_label = 0; /* No more _else or _elseif calls. */
1975}
1976
1977/* After calling expand_start_else, turn this "else" into an "else if"
1978 by providing another condition. */
1979
1980void
1981expand_elseif (cond)
1982 tree cond;
1983{
1984 cond_stack->data.cond.next_label = gen_label_rtx ();
1985 do_jump (cond, cond_stack->data.cond.next_label, NULL_RTX);
1986}
1987
1988/* Generate RTL for the end of an if-then.
1989 Pop the record for it off of cond_stack. */
1990
1991void
1992expand_end_cond ()
1993{
1994 struct nesting *thiscond = cond_stack;
1995
1996 do_pending_stack_adjust ();
1997 if (thiscond->data.cond.next_label)
1998 emit_label (thiscond->data.cond.next_label);
1999 if (thiscond->data.cond.endif_label)
2000 emit_label (thiscond->data.cond.endif_label);
2001
2002 POPSTACK (cond_stack);
2003 last_expr_type = 0;
2004}
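/* Sketch of the expected calling sequence (assumed, for illustration):
   a chain such as `if (a) s1; else if (b) s2; else s3;' can be expanded
   with

       expand_start_cond (a, 0);    ... expand s1 ...
       expand_start_elseif (b);     ... expand s2 ...
       expand_start_else ();        ... expand s3 ...
       expand_end_cond ();

   each new condition reusing the shared endif label.  */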
2005
2006
2007\f
2008/* Generate RTL for the start of a loop. EXIT_FLAG is nonzero if this
2009 loop should be exited by `exit_something'. This is a loop for which
2010 `expand_continue' will jump to the top of the loop.
2011
2012 Make an entry on loop_stack to record the labels associated with
2013 this loop. */
2014
2015struct nesting *
2016expand_start_loop (exit_flag)
2017 int exit_flag;
2018{
2019 register struct nesting *thisloop = ALLOC_NESTING ();
2020
2021 /* Make an entry on loop_stack for the loop we are entering. */
2022
2023 thisloop->next = loop_stack;
2024 thisloop->all = nesting_stack;
2025 thisloop->depth = ++nesting_depth;
2026 thisloop->data.loop.start_label = gen_label_rtx ();
2027 thisloop->data.loop.end_label = gen_label_rtx ();
2028 thisloop->data.loop.alt_end_label = 0;
2029 thisloop->data.loop.continue_label = thisloop->data.loop.start_label;
2030 thisloop->exit_label = exit_flag ? thisloop->data.loop.end_label : 0;
2031 loop_stack = thisloop;
2032 nesting_stack = thisloop;
2033
2034 do_pending_stack_adjust ();
2035 emit_queue ();
2036 emit_note (NULL_PTR, NOTE_INSN_LOOP_BEG);
2037 emit_label (thisloop->data.loop.start_label);
2038
2039 return thisloop;
2040}
2041
2042/* Like expand_start_loop but for a loop where the continuation point
2043 (for expand_continue_loop) will be specified explicitly. */
2044
2045struct nesting *
2046expand_start_loop_continue_elsewhere (exit_flag)
2047 int exit_flag;
2048{
2049 struct nesting *thisloop = expand_start_loop (exit_flag);
2050 loop_stack->data.loop.continue_label = gen_label_rtx ();
2051 return thisloop;
2052}
2053
2054/* Specify the continuation point for a loop started with
2055 expand_start_loop_continue_elsewhere.
2056 Use this at the point in the code to which a continue statement
2057 should jump. */
2058
2059void
2060expand_loop_continue_here ()
2061{
2062 do_pending_stack_adjust ();
2063 emit_note (NULL_PTR, NOTE_INSN_LOOP_CONT);
2064 emit_label (loop_stack->data.loop.continue_label);
2065}
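/* Sketch of the expected use for a C `for' statement (assumed usage):

       ... expand INIT ...
       expand_start_loop_continue_elsewhere (1);
       ... expand the exit test and BODY ...
       expand_loop_continue_here ();
       ... expand INCR ...
       expand_end_loop ();

   so that `continue' statements jump to INCR rather than straight back
   to the top of the loop.  */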
2066
2067/* Finish a loop. Generate a jump back to the top and the loop-exit label.
2068 Pop the block off of loop_stack. */
2069
2070void
2071expand_end_loop ()
2072{
2073 rtx start_label = loop_stack->data.loop.start_label;
2074 rtx insn = get_last_insn ();
2075
2076 /* Mark the continue-point at the top of the loop if none elsewhere. */
2077 if (start_label == loop_stack->data.loop.continue_label)
2078 emit_note_before (NOTE_INSN_LOOP_CONT, start_label);
2079
2080 do_pending_stack_adjust ();
2081
2082 /* If optimizing, perhaps reorder the loop. If the loop starts with
2083 a loop exit, roll that to the end where it will optimize together
2084 with the jump back.
2085
2086 We look for the conditional branch to the exit, except that once
2087 we find such a branch, we don't look past 30 instructions.
2088
2089 In more detail, if the loop presently looks like this (in pseudo-C):
2090
2091 start_label:
2092 if (test) goto end_label;
2093 body;
2094 goto start_label;
2095 end_label:
2096
2097 transform it to look like:
2098
2099 goto start_label;
2100 newstart_label:
2101 body;
2102 start_label:
2103 if (test) goto end_label;
2104 goto newstart_label;
2105 end_label:
2106
2107 Here, the `test' may actually consist of some reasonably complex
2108 code, terminating in a test. */
2109
2110 if (optimize
2111 &&
2112 ! (GET_CODE (insn) == JUMP_INSN
2113 && GET_CODE (PATTERN (insn)) == SET
2114 && SET_DEST (PATTERN (insn)) == pc_rtx
2115 && GET_CODE (SET_SRC (PATTERN (insn))) == IF_THEN_ELSE))
2116 {
2117 int eh_regions = 0;
2118 int num_insns = 0;
2119 rtx last_test_insn = NULL_RTX;
2120
2121 /* Scan insns from the top of the loop looking for a qualified
2122 conditional exit. */
2123 for (insn = NEXT_INSN (loop_stack->data.loop.start_label); insn;
2124 insn = NEXT_INSN (insn))
2125 {
2126 if (GET_CODE (insn) == NOTE)
2127 {
2128 if (optimize < 2
2129 && (NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_BEG
2130 || NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_END))
2131 /* The code that actually moves the exit test will
2132 carefully leave BLOCK notes in their original
2133 location. That means, however, that we can't debug
2134 the exit test itself. So, we refuse to move code
2135 containing BLOCK notes at low optimization levels. */
2136 break;
2137
2138 if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_EH_REGION_BEG)
2139 ++eh_regions;
2140 else if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_EH_REGION_END)
2141 {
2142 --eh_regions;
2143 if (eh_regions < 0)
2144 /* We've come to the end of an EH region, but
2145 never saw the beginning of that region. That
2146 means that an EH region begins before the top
2147 of the loop, and ends in the middle of it. The
2148 existence of such a situation violates a basic
2149 assumption in this code, since that would imply
2150 that even when EH_REGIONS is zero, we might
2151 move code out of an exception region. */
2152 abort ();
2153 }
2154
2155 /* We already know this INSN is a NOTE, so there's no
2156 point in looking at it to see if it's a JUMP. */
2157 continue;
2158 }
2159
2160 if (GET_CODE (insn) == JUMP_INSN || GET_CODE (insn) == INSN)
2161 num_insns++;
2162
2163 if (last_test_insn && num_insns > 30)
2164 break;
2165
2166 if (eh_regions > 0)
2167 /* We don't want to move a partial EH region. Consider:
2168
2169 while ( ( { try {
2170 if (cond ()) 0;
2171 else {
2172 bar();
2173 1;
2174 }
2175 } catch (...) {
2176 1;
2177 } )) {
2178 body;
2179 }
2180
2181 This isn't legal C++, but here's what it's supposed to
2182 mean: if cond() is true, stop looping. Otherwise,
2183 call bar, and keep looping. In addition, if cond
2184 throws an exception, catch it and keep looping. Such
2185 constructs are certainly legal in LISP.
2186
2187 We should not move the `if (cond()) 0' test since then
2188 the EH-region for the try-block would be broken up.
2189 (In this case we would move the EH_BEG note for the `try'
2190 and `if cond()' but not the call to bar() or the
2191 EH_END note.)
2192
2193 So we don't look for tests within an EH region. */
2194 continue;
2195
2196 if (GET_CODE (insn) == JUMP_INSN
2197 && GET_CODE (PATTERN (insn)) == SET
2198 && SET_DEST (PATTERN (insn)) == pc_rtx)
2199 {
2200 /* This is indeed a jump. */
2201 rtx dest1 = NULL_RTX;
2202 rtx dest2 = NULL_RTX;
2203 rtx potential_last_test;
2204 if (GET_CODE (SET_SRC (PATTERN (insn))) == IF_THEN_ELSE)
2205 {
2206 /* A conditional jump. */
2207 dest1 = XEXP (SET_SRC (PATTERN (insn)), 1);
2208 dest2 = XEXP (SET_SRC (PATTERN (insn)), 2);
2209 potential_last_test = insn;
2210 }
2211 else
2212 {
2213 /* An unconditional jump. */
2214 dest1 = SET_SRC (PATTERN (insn));
2215 /* Include the BARRIER after the JUMP. */
2216 potential_last_test = NEXT_INSN (insn);
2217 }
2218
2219 do {
2220 if (dest1 && GET_CODE (dest1) == LABEL_REF
2221 && ((XEXP (dest1, 0)
2222 == loop_stack->data.loop.alt_end_label)
2223 || (XEXP (dest1, 0)
2224 == loop_stack->data.loop.end_label)))
2225 {
2226 last_test_insn = potential_last_test;
2227 break;
2228 }
2229
2230 /* If this was a conditional jump, there may be
2231 another label at which we should look. */
2232 dest1 = dest2;
2233 dest2 = NULL_RTX;
2234 } while (dest1);
2235 }
2236 }
2237
2238 if (last_test_insn != 0 && last_test_insn != get_last_insn ())
2239 {
2240 /* We found one. Move everything from there up
2241 to the end of the loop, and add a jump into the loop
2242 to jump to there. */
2243 register rtx newstart_label = gen_label_rtx ();
2244 register rtx start_move = start_label;
2245 rtx next_insn;
2246
2247 /* If the start label is preceded by a NOTE_INSN_LOOP_CONT note,
2248 then we want to move this note also. */
2249 if (GET_CODE (PREV_INSN (start_move)) == NOTE
2250 && (NOTE_LINE_NUMBER (PREV_INSN (start_move))
2251 == NOTE_INSN_LOOP_CONT))
2252 start_move = PREV_INSN (start_move);
2253
2254 emit_label_after (newstart_label, PREV_INSN (start_move));
2255
2256 /* Actually move the insns. Start at the beginning, and
2257 keep copying insns until we've copied the
2258 last_test_insn. */
2259 for (insn = start_move; insn; insn = next_insn)
2260 {
2261 /* Figure out which insn comes after this one. We have
2262 to do this before we move INSN. */
2263 if (insn == last_test_insn)
2264 /* We've moved all the insns. */
2265 next_insn = NULL_RTX;
2266 else
2267 next_insn = NEXT_INSN (insn);
2268
2269 if (GET_CODE (insn) == NOTE
2270 && (NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_BEG
2271 || NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_END))
2272 /* We don't want to move NOTE_INSN_BLOCK_BEGs or
2273 NOTE_INSN_BLOCK_ENDs because the correct generation
2274 of debugging information depends on these appearing
2275 in the same order in the RTL and in the tree
2276 structure, where they are represented as BLOCKs.
2277 So, we don't move block notes. Of course, moving
2278 the code inside the block is likely to make it
2279 impossible to debug the instructions in the exit
2280 test, but such is the price of optimization. */
2281 continue;
2282
2283 /* Move the INSN. */
2284 reorder_insns (insn, insn, get_last_insn ());
2285 }
2286
2287 emit_jump_insn_after (gen_jump (start_label),
2288 PREV_INSN (newstart_label));
2289 emit_barrier_after (PREV_INSN (newstart_label));
2290 start_label = newstart_label;
2291 }
2292 }
2293
2294 emit_jump (start_label);
2295 emit_note (NULL_PTR, NOTE_INSN_LOOP_END);
2296 emit_label (loop_stack->data.loop.end_label);
2297
2298 POPSTACK (loop_stack);
2299
2300 last_expr_type = 0;
2301}
2302
2303/* Generate a jump to the current loop's continue-point.
2304 This is usually the top of the loop, but may be specified
2305 explicitly elsewhere. If not currently inside a loop,
2306 return 0 and do nothing; caller will print an error message. */
2307
2308int
2309expand_continue_loop (whichloop)
2310 struct nesting *whichloop;
2311{
2312 last_expr_type = 0;
2313 if (whichloop == 0)
2314 whichloop = loop_stack;
2315 if (whichloop == 0)
2316 return 0;
2317 expand_goto_internal (NULL_TREE, whichloop->data.loop.continue_label,
2318 NULL_RTX);
2319 return 1;
2320}
2321
2322/* Generate a jump to exit the current loop. If not currently inside a loop,
2323 return 0 and do nothing; caller will print an error message. */
2324
2325int
2326expand_exit_loop (whichloop)
2327 struct nesting *whichloop;
2328{
2329 last_expr_type = 0;
2330 if (whichloop == 0)
2331 whichloop = loop_stack;
2332 if (whichloop == 0)
2333 return 0;
2334 expand_goto_internal (NULL_TREE, whichloop->data.loop.end_label, NULL_RTX);
2335 return 1;
2336}
2337
2338/* Generate a conditional jump to exit the current loop if COND
2339 evaluates to zero. If not currently inside a loop,
2340 return 0 and do nothing; caller will print an error message. */
2341
2342int
2343expand_exit_loop_if_false (whichloop, cond)
2344 struct nesting *whichloop;
2345 tree cond;
2346{
2347 rtx label = gen_label_rtx ();
2348 rtx last_insn;
2349 last_expr_type = 0;
2350
2351 if (whichloop == 0)
2352 whichloop = loop_stack;
2353 if (whichloop == 0)
2354 return 0;
2355 /* In order to handle fixups, we actually create a conditional jump
2356 around an unconditional branch to exit the loop. If fixups are
2357 necessary, they go before the unconditional branch. */
2358
2359
2360 do_jump (cond, NULL_RTX, label);
2361 last_insn = get_last_insn ();
2362 if (GET_CODE (last_insn) == CODE_LABEL)
2363 whichloop->data.loop.alt_end_label = last_insn;
2364 expand_goto_internal (NULL_TREE, whichloop->data.loop.end_label,
2365 NULL_RTX);
2366 emit_label (label);
2367
2368 return 1;
2369}
2370
2371/* Return nonzero if the loop nest is empty. Else return zero. */
2372
2373int
2374stmt_loop_nest_empty ()
2375{
2376 return (loop_stack == NULL);
2377}
2378
2379/* Return non-zero if we should preserve sub-expressions as separate
2380 pseudos. We never do so if we aren't optimizing. We always do so
2381 if -fexpensive-optimizations.
2382
2383 Otherwise, we only do so if we are in the "early" part of a loop. I.e.,
2384 the loop may still be a small one. */
2385
2386int
2387preserve_subexpressions_p ()
2388{
2389 rtx insn;
2390
2391 if (flag_expensive_optimizations)
2392 return 1;
2393
2394 if (optimize == 0 || loop_stack == 0)
2395 return 0;
2396
2397 insn = get_last_insn_anywhere ();
2398
2399 return (insn
2400 && (INSN_UID (insn) - INSN_UID (loop_stack->data.loop.start_label)
2401 < n_non_fixed_regs * 3));
2402
2403}
2404
2405/* Generate a jump to exit the current loop, conditional, binding contour
2406 or case statement. Not all such constructs are visible to this function,
2407 only those started with EXIT_FLAG nonzero. Individual languages use
2408 the EXIT_FLAG parameter to control which kinds of constructs you can
2409 exit this way.
2410
2411 If not currently inside anything that can be exited,
2412 return 0 and do nothing; caller will print an error message. */
2413
2414int
2415expand_exit_something ()
2416{
2417 struct nesting *n;
2418 last_expr_type = 0;
2419 for (n = nesting_stack; n; n = n->all)
2420 if (n->exit_label != 0)
2421 {
2422 expand_goto_internal (NULL_TREE, n->exit_label, NULL_RTX);
2423 return 1;
2424 }
2425
2426 return 0;
2427}
2428\f
2429/* Generate RTL to return from the current function, with no value.
2430 (That is, we do not do anything about returning any value.) */
2431
2432void
2433expand_null_return ()
2434{
2435 struct nesting *block = block_stack;
2436 rtx last_insn = 0;
2437
2438 /* Does any pending block have cleanups? */
2439
2440 while (block && block->data.block.cleanups == 0)
2441 block = block->next;
2442
2443 /* If yes, use a goto to return, since that runs cleanups. */
2444
2445 expand_null_return_1 (last_insn, block != 0);
2446}
2447
2448/* Generate RTL to return from the current function, with value VAL. */
2449
2450static void
2451expand_value_return (val)
2452 rtx val;
2453{
2454 struct nesting *block = block_stack;
2455 rtx last_insn = get_last_insn ();
2456 rtx return_reg = DECL_RTL (DECL_RESULT (current_function_decl));
2457
2458 /* Copy the value to the return location
2459 unless it's already there. */
2460
2461 if (return_reg != val)
2462 {
2463#ifdef PROMOTE_FUNCTION_RETURN
2464 tree type = TREE_TYPE (DECL_RESULT (current_function_decl));
2465 int unsignedp = TREE_UNSIGNED (type);
2466 enum machine_mode mode
2467 = promote_mode (type, DECL_MODE (DECL_RESULT (current_function_decl)),
2468 &unsignedp, 1);
2469
2470 if (GET_MODE (val) != VOIDmode && GET_MODE (val) != mode)
2471 convert_move (return_reg, val, unsignedp);
2472 else
2473#endif
2474 emit_move_insn (return_reg, val);
2475 }
2476 if (GET_CODE (return_reg) == REG
2477 && REGNO (return_reg) < FIRST_PSEUDO_REGISTER)
2478 emit_insn (gen_rtx_USE (VOIDmode, return_reg));
2479 /* Handle calls that return values in multiple non-contiguous locations.
2480 The Irix 6 ABI has examples of this. */
2481 else if (GET_CODE (return_reg) == PARALLEL)
2482 {
2483 int i;
2484
2485 for (i = 0; i < XVECLEN (return_reg, 0); i++)
2486 {
2487 rtx x = XEXP (XVECEXP (return_reg, 0, i), 0);
2488
2489 if (GET_CODE (x) == REG
2490 && REGNO (x) < FIRST_PSEUDO_REGISTER)
2491 emit_insn (gen_rtx_USE (VOIDmode, x));
2492 }
2493 }
2494
2495 /* Does any pending block have cleanups? */
2496
2497 while (block && block->data.block.cleanups == 0)
2498 block = block->next;
2499
2500 /* If yes, use a goto to return, since that runs cleanups.
2501 Use LAST_INSN to put cleanups *before* the move insn emitted above. */
2502
2503 expand_null_return_1 (last_insn, block != 0);
2504}
2505
2506/* Output a return with no value. If LAST_INSN is nonzero,
2507 pretend that the return takes place after LAST_INSN.
2508 If USE_GOTO is nonzero then don't use a return instruction;
2509 go to the return label instead. This causes any cleanups
2510 of pending blocks to be executed normally. */
2511
2512static void
2513expand_null_return_1 (last_insn, use_goto)
2514 rtx last_insn;
2515 int use_goto;
2516{
2517 rtx end_label = cleanup_label ? cleanup_label : return_label;
2518
2519 clear_pending_stack_adjust ();
2520 do_pending_stack_adjust ();
2521 last_expr_type = 0;
2522
2523 /* PCC-struct return always uses an epilogue. */
2524 if (current_function_returns_pcc_struct || use_goto)
2525 {
2526 if (end_label == 0)
2527 end_label = return_label = gen_label_rtx ();
2528 expand_goto_internal (NULL_TREE, end_label, last_insn);
2529 return;
2530 }
2531
2532 /* Otherwise output a simple return-insn if one is available,
2533 unless it won't do the job. */
2534#ifdef HAVE_return
2535 if (HAVE_return && use_goto == 0 && cleanup_label == 0)
2536 {
2537 emit_jump_insn (gen_return ());
2538 emit_barrier ();
2539 return;
2540 }
2541#endif
2542
2543 /* Otherwise jump to the epilogue. */
2544 expand_goto_internal (NULL_TREE, end_label, last_insn);
2545}
2546\f
2547/* Generate RTL to evaluate the expression RETVAL and return it
2548 from the current function. */
2549
2550void
2551expand_return (retval)
2552 tree retval;
2553{
2554 /* If there are any cleanups to be performed, then they will
2555 be inserted following LAST_INSN. It is desirable
2556 that the last_insn, for such purposes, should be the
2557 last insn before computing the return value. Otherwise, cleanups
2558 which call functions can clobber the return value. */
2559 /* ??? rms: I think that is erroneous, because in C++ it would
2560 run destructors on variables that might be used in the subsequent
2561 computation of the return value. */
2562 rtx last_insn = 0;
2563 register rtx val = 0;
2564 register rtx op0;
2565 tree retval_rhs;
2566 int cleanups;
2567
2568 /* If function wants no value, give it none. */
2569 if (TREE_CODE (TREE_TYPE (TREE_TYPE (current_function_decl))) == VOID_TYPE)
2570 {
2571 expand_expr (retval, NULL_RTX, VOIDmode, 0);
2572 emit_queue ();
2573 expand_null_return ();
2574 return;
2575 }
2576
2577 /* Are any cleanups needed? E.g. C++ destructors to be run? */
2578 /* This is not sufficient. We also need to watch for cleanups of the
2579 expression we are about to expand. Unfortunately, we cannot know
2580 if it has cleanups until we expand it, and we want to change how we
2581 expand it depending upon if we need cleanups. We can't win. */
2582#if 0
2583 cleanups = any_pending_cleanups (1);
2584#else
2585 cleanups = 1;
2586#endif
2587
2588 if (TREE_CODE (retval) == RESULT_DECL)
2589 retval_rhs = retval;
2590 else if ((TREE_CODE (retval) == MODIFY_EXPR || TREE_CODE (retval) == INIT_EXPR)
2591 && TREE_CODE (TREE_OPERAND (retval, 0)) == RESULT_DECL)
2592 retval_rhs = TREE_OPERAND (retval, 1);
2593 else if (TREE_TYPE (retval) == void_type_node)
2594 /* Recognize tail-recursive call to void function. */
2595 retval_rhs = retval;
2596 else
2597 retval_rhs = NULL_TREE;
2598
2599 /* Only use `last_insn' if there are cleanups which must be run. */
2600 if (cleanups || cleanup_label != 0)
2601 last_insn = get_last_insn ();
2602
2603 /* Distribute return down conditional expr if either of the sides
2604 may involve tail recursion (see test below). This enhances the number
2605 of tail recursions we see. Don't do this always since it can produce
2606 sub-optimal code in some cases and we distribute assignments into
2607 conditional expressions when it would help. */
2608
2609 if (optimize && retval_rhs != 0
2610 && frame_offset == 0
2611 && TREE_CODE (retval_rhs) == COND_EXPR
2612 && (TREE_CODE (TREE_OPERAND (retval_rhs, 1)) == CALL_EXPR
2613 || TREE_CODE (TREE_OPERAND (retval_rhs, 2)) == CALL_EXPR))
2614 {
2615 rtx label = gen_label_rtx ();
2616 tree expr;
2617
2618 do_jump (TREE_OPERAND (retval_rhs, 0), label, NULL_RTX);
2619 expr = build (MODIFY_EXPR, TREE_TYPE (TREE_TYPE (current_function_decl)),
2620 DECL_RESULT (current_function_decl),
2621 TREE_OPERAND (retval_rhs, 1));
2622 TREE_SIDE_EFFECTS (expr) = 1;
2623 expand_return (expr);
2624 emit_label (label);
2625
2626 expr = build (MODIFY_EXPR, TREE_TYPE (TREE_TYPE (current_function_decl)),
2627 DECL_RESULT (current_function_decl),
2628 TREE_OPERAND (retval_rhs, 2));
2629 TREE_SIDE_EFFECTS (expr) = 1;
2630 expand_return (expr);
2631 return;
2632 }
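/* So, for example, `return p ? f (x) : g (x);' has just been expanded as
   if it were `if (p) return f (x); else return g (x);', letting each arm
   be examined separately for the tail-recursion case below.  */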
2633
2634 /* Attempt to optimize the call if it is tail recursive. */
2635 if (optimize_tail_recursion (retval_rhs, last_insn))
2636 return;
2637
2638#ifdef HAVE_return
2639 /* This optimization is safe if there are local cleanups
2640 because expand_null_return takes care of them.
2641 ??? I think it should also be safe when there is a cleanup label,
2642 because expand_null_return takes care of them, too.
2643 Any reason why not? */
2644 if (HAVE_return && cleanup_label == 0
2645 && ! current_function_returns_pcc_struct
2646 && BRANCH_COST <= 1)
2647 {
2648 /* If this is return x == y; then generate
2649 if (x == y) return 1; else return 0;
2650 if we can do it with explicit return insns and branches are cheap,
2651 but not if we have the corresponding scc insn. */
2652 int has_scc = 0;
2653 if (retval_rhs)
2654 switch (TREE_CODE (retval_rhs))
2655 {
2656 case EQ_EXPR:
2657#ifdef HAVE_seq
2658 has_scc = HAVE_seq;
2659#endif
2660 case NE_EXPR:
2661#ifdef HAVE_sne
2662 has_scc = HAVE_sne;
2663#endif
2664 case GT_EXPR:
2665#ifdef HAVE_sgt
2666 has_scc = HAVE_sgt;
2667#endif
2668 case GE_EXPR:
2669#ifdef HAVE_sge
2670 has_scc = HAVE_sge;
2671#endif
2672 case LT_EXPR:
2673#ifdef HAVE_slt
2674 has_scc = HAVE_slt;
2675#endif
2676 case LE_EXPR:
2677#ifdef HAVE_sle
2678 has_scc = HAVE_sle;
2679#endif
2680 case TRUTH_ANDIF_EXPR:
2681 case TRUTH_ORIF_EXPR:
2682 case TRUTH_AND_EXPR:
2683 case TRUTH_OR_EXPR:
2684 case TRUTH_NOT_EXPR:
2685 case TRUTH_XOR_EXPR:
2686 if (! has_scc)
2687 {
2688 op0 = gen_label_rtx ();
2689 jumpifnot (retval_rhs, op0);
2690 expand_value_return (const1_rtx);
2691 emit_label (op0);
2692 expand_value_return (const0_rtx);
2693 return;
2694 }
2695 break;
2696
2697 default:
2698 break;
2699 }
2700 }
2701#endif /* HAVE_return */
2702
2703 /* If the result is an aggregate that is being returned in one (or more)
2704 registers, load the registers here. The compiler currently can't handle
2705 copying a BLKmode value into registers. We could put this code in a
2706 more general area (for use by everyone instead of just function
2707 call/return), but until this feature is generally usable it is kept here
2708 (and in expand_call). The value must go into a pseudo in case there
2709 are cleanups that will clobber the real return register. */
2710
2711 if (retval_rhs != 0
2712 && TYPE_MODE (TREE_TYPE (retval_rhs)) == BLKmode
2713 && GET_CODE (DECL_RTL (DECL_RESULT (current_function_decl))) == REG)
2714 {
2715 int i, bitpos, xbitpos;
2716 int big_endian_correction = 0;
2717 int bytes = int_size_in_bytes (TREE_TYPE (retval_rhs));
2718 int n_regs = (bytes + UNITS_PER_WORD - 1) / UNITS_PER_WORD;
2719 int bitsize = MIN (TYPE_ALIGN (TREE_TYPE (retval_rhs)),
2720 (unsigned int)BITS_PER_WORD);
2721 rtx *result_pseudos = (rtx *) alloca (sizeof (rtx) * n_regs);
2722 rtx result_reg, src = NULL_RTX, dst = NULL_RTX;
2723 rtx result_val = expand_expr (retval_rhs, NULL_RTX, VOIDmode, 0);
2724 enum machine_mode tmpmode, result_reg_mode;
2725
2726 /* Structures whose size is not a multiple of a word are aligned
2727 to the least significant byte (to the right). On a BYTES_BIG_ENDIAN
2728 machine, this means we must skip the empty high order bytes when
2729 calculating the bit offset. */
2730 if (BYTES_BIG_ENDIAN && bytes % UNITS_PER_WORD)
2731 big_endian_correction = (BITS_PER_WORD - ((bytes % UNITS_PER_WORD)
2732 * BITS_PER_UNIT));
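/* Worked example (illustrative): with 32-bit words, a 6-byte structure
   has bytes % UNITS_PER_WORD == 2, giving
   big_endian_correction = 32 - 2 * 8 = 16; the copy below then starts
   storing into the first destination word at bit 16, skipping the 16
   bits of padding that precede the data on a big-endian machine.  */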
2733
2734 /* Copy the structure BITSIZE bits at a time. */
2735 for (bitpos = 0, xbitpos = big_endian_correction;
2736 bitpos < bytes * BITS_PER_UNIT;
2737 bitpos += bitsize, xbitpos += bitsize)
2738 {
2739 /* We need a new destination pseudo each time xbitpos is
2740 on a word boundary and when xbitpos == big_endian_correction
2741 (the first time through). */
2742 if (xbitpos % BITS_PER_WORD == 0
2743 || xbitpos == big_endian_correction)
2744 {
2745 /* Generate an appropriate register. */
2746 dst = gen_reg_rtx (word_mode);
2747 result_pseudos[xbitpos / BITS_PER_WORD] = dst;
2748
2749 /* Clobber the destination before we move anything into it. */
2750 emit_insn (gen_rtx_CLOBBER (VOIDmode, dst));
2751 }
2752
2753 /* We need a new source operand each time bitpos is on a word
2754 boundary. */
2755 if (bitpos % BITS_PER_WORD == 0)
2756 src = operand_subword_force (result_val,
2757 bitpos / BITS_PER_WORD,
2758 BLKmode);
2759
2760 /* Use bitpos for the source extraction (left justified) and
2761 xbitpos for the destination store (right justified). */
2762 store_bit_field (dst, bitsize, xbitpos % BITS_PER_WORD, word_mode,
2763 extract_bit_field (src, bitsize,
2764 bitpos % BITS_PER_WORD, 1,
2765 NULL_RTX, word_mode,
2766 word_mode,
2767 bitsize / BITS_PER_UNIT,
2768 BITS_PER_WORD),
2769 bitsize / BITS_PER_UNIT, BITS_PER_WORD);
2770 }
2771
2772 /* Find the smallest integer mode large enough to hold the
2773 entire structure and use that mode instead of BLKmode
2774 on the USE insn for the return register. */
2775 bytes = int_size_in_bytes (TREE_TYPE (retval_rhs));
2776 for (tmpmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2777 tmpmode != MAX_MACHINE_MODE;
2778 tmpmode = GET_MODE_WIDER_MODE (tmpmode))
2779 {
2780 /* Have we found a large enough mode? */
2781 if (GET_MODE_SIZE (tmpmode) >= bytes)
2782 break;
2783 }
2784
2785 /* No suitable mode found. */
2786 if (tmpmode == MAX_MACHINE_MODE)
2787 abort ();
2788
2789 PUT_MODE (DECL_RTL (DECL_RESULT (current_function_decl)), tmpmode);
2790
2791 if (GET_MODE_SIZE (tmpmode) < GET_MODE_SIZE (word_mode))
2792 result_reg_mode = word_mode;
2793 else
2794 result_reg_mode = tmpmode;
2795 result_reg = gen_reg_rtx (result_reg_mode);
2796
2797 emit_queue ();
2798 for (i = 0; i < n_regs; i++)
2799 emit_move_insn (operand_subword (result_reg, i, 0, result_reg_mode),
2800 result_pseudos[i]);
2801
2802 if (tmpmode != result_reg_mode)
2803 result_reg = gen_lowpart (tmpmode, result_reg);
2804
2805 expand_value_return (result_reg);
2806 }
2807 else if (cleanups
2808 && retval_rhs != 0
2809 && TREE_TYPE (retval_rhs) != void_type_node
2810 && GET_CODE (DECL_RTL (DECL_RESULT (current_function_decl))) == REG)
2811 {
2812 /* Calculate the return value into a pseudo reg. */
2813 val = gen_reg_rtx (DECL_MODE (DECL_RESULT (current_function_decl)));
2814 val = expand_expr (retval_rhs, val, GET_MODE (val), 0);
2815 val = force_not_mem (val);
2816 emit_queue ();
2817 /* Return the calculated value, doing cleanups first. */
2818 expand_value_return (val);
2819 }
2820 else
2821 {
2822 /* No cleanups or no hard reg used;
2823 calculate value into hard return reg. */
2824 expand_expr (retval, const0_rtx, VOIDmode, 0);
2825 emit_queue ();
2826 expand_value_return (DECL_RTL (DECL_RESULT (current_function_decl)));
2827 }
2828}
2829
2830/* Return 1 if the end of the generated RTX is not a barrier.
2831 This means code already compiled can drop through. */
2832
2833int
2834drop_through_at_end_p ()
2835{
2836 rtx insn = get_last_insn ();
2837 while (insn && GET_CODE (insn) == NOTE)
2838 insn = PREV_INSN (insn);
2839 return insn && GET_CODE (insn) != BARRIER;
2840}
2841\f
2842/* Test CALL_EXPR to determine if it is a potential tail recursion call
2843 and emit code to optimize the tail recursion. LAST_INSN indicates where
2844 to place the jump to the tail recursion label. Return TRUE if the
2845 call was optimized into a goto.
2846
2847 This is only used by expand_return, but expand_call is expected to
2848 use it soon. */
2849
2850int
2851optimize_tail_recursion (call_expr, last_insn)
2852 tree call_expr;
2853 rtx last_insn;
2854{
2855 /* For tail-recursive call to current function,
2856 just jump back to the beginning.
2857 It's unsafe if any auto variable in this function
2858 has its address taken; for simplicity,
2859 require stack frame to be empty. */
2860 if (optimize && call_expr != 0
2861 && frame_offset == 0
2862 && TREE_CODE (call_expr) == CALL_EXPR
2863 && TREE_CODE (TREE_OPERAND (call_expr, 0)) == ADDR_EXPR
2864 && TREE_OPERAND (TREE_OPERAND (call_expr, 0), 0) == current_function_decl
2865 /* Finish checking validity, and if valid emit code
2866 to set the argument variables for the new call. */
2867 && tail_recursion_args (TREE_OPERAND (call_expr, 1),
2868 DECL_ARGUMENTS (current_function_decl)))
2869 {
2870 if (tail_recursion_label == 0)
2871 {
2872 tail_recursion_label = gen_label_rtx ();
2873 emit_label_after (tail_recursion_label,
2874 tail_recursion_reentry);
2875 }
2876 emit_queue ();
2877 expand_goto_internal (NULL_TREE, tail_recursion_label, last_insn);
2878 emit_barrier ();
2879 return 1;
2880 }
2881
2882 return 0;
2883}
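/* For example (illustrative only), given

       int sum (int n, int acc)
       {
         if (n == 0)
           return acc;
         return sum (n - 1, acc + n);
       }

   the recursive call reaches expand_return as a CALL_EXPR whose callee
   is current_function_decl; the code above then stores the new argument
   values into the formals and jumps back to tail_recursion_label,
   turning the recursion into a loop instead of emitting a real call.  */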
2884
2885/* Emit code to alter this function's formal parms for a tail-recursive call.
2886 ACTUALS is a list of actual parameter expressions (chain of TREE_LISTs).
2887 FORMALS is the chain of decls of formals.
2888 Return 1 if this can be done;
2889 otherwise return 0 and do not emit any code. */
2890
2891static int
2892tail_recursion_args (actuals, formals)
2893 tree actuals, formals;
2894{
2895 register tree a = actuals, f = formals;
2896 register int i;
2897 register rtx *argvec;
2898
2899 /* Check that number and types of actuals are compatible
2900 with the formals. This is not always true in valid C code.
2901 Also check that no formal needs to be addressable
2902 and that all formals are scalars. */
2903
2904 /* Also count the args. */
2905
2906 for (a = actuals, f = formals, i = 0; a && f; a = TREE_CHAIN (a), f = TREE_CHAIN (f), i++)
2907 {
2908 if (TYPE_MAIN_VARIANT (TREE_TYPE (TREE_VALUE (a)))
2909 != TYPE_MAIN_VARIANT (TREE_TYPE (f)))
2910 return 0;
2911 if (GET_CODE (DECL_RTL (f)) != REG || DECL_MODE (f) == BLKmode)
2912 return 0;
2913 }
2914 if (a != 0 || f != 0)
2915 return 0;
2916
2917 /* Compute all the actuals. */
2918
2919 argvec = (rtx *) alloca (i * sizeof (rtx));
2920
2921 for (a = actuals, i = 0; a; a = TREE_CHAIN (a), i++)
2922 argvec[i] = expand_expr (TREE_VALUE (a), NULL_RTX, VOIDmode, 0);
2923
2924 /* Find which actual values refer to current values of previous formals.
2925 Copy each of them now, before any formal is changed. */
2926
2927 for (a = actuals, i = 0; a; a = TREE_CHAIN (a), i++)
2928 {
2929 int copy = 0;
2930 register int j;
2931 for (f = formals, j = 0; j < i; f = TREE_CHAIN (f), j++)
2932 if (reg_mentioned_p (DECL_RTL (f), argvec[i]))
2933 { copy = 1; break; }
2934 if (copy)
2935 argvec[i] = copy_to_reg (argvec[i]);
2936 }
2937
2938 /* Store the values of the actuals into the formals. */
2939
2940 for (f = formals, a = actuals, i = 0; f;
2941 f = TREE_CHAIN (f), a = TREE_CHAIN (a), i++)
2942 {
2943 if (GET_MODE (DECL_RTL (f)) == GET_MODE (argvec[i]))
2944 emit_move_insn (DECL_RTL (f), argvec[i]);
2945 else
2946 convert_move (DECL_RTL (f), argvec[i],
2947 TREE_UNSIGNED (TREE_TYPE (TREE_VALUE (a))));
2948 }
2949
2950 free_temp_slots ();
2951 return 1;
2952}
2953\f
2954/* Generate the RTL code for entering a binding contour.
2955 The variables are declared one by one, by calls to `expand_decl'.
2956
2957 EXIT_FLAG is nonzero if this construct should be visible to
2958 `exit_something'. */
2959
2960void
2961expand_start_bindings (exit_flag)
2962 int exit_flag;
2963{
2964 struct nesting *thisblock = ALLOC_NESTING ();
2965 rtx note = emit_note (NULL_PTR, NOTE_INSN_BLOCK_BEG);
2966
2967 /* Make an entry on block_stack for the block we are entering. */
2968
2969 thisblock->next = block_stack;
2970 thisblock->all = nesting_stack;
2971 thisblock->depth = ++nesting_depth;
2972 thisblock->data.block.stack_level = 0;
2973 thisblock->data.block.cleanups = 0;
2974 thisblock->data.block.function_call_count = 0;
2975 thisblock->data.block.exception_region = 0;
2976 thisblock->data.block.target_temp_slot_level = target_temp_slot_level;
2977
2978 thisblock->data.block.conditional_code = 0;
2979 thisblock->data.block.last_unconditional_cleanup = note;
2980 thisblock->data.block.cleanup_ptr = &thisblock->data.block.cleanups;
2981
2982 if (block_stack
2983 && !(block_stack->data.block.cleanups == NULL_TREE
2984 && block_stack->data.block.outer_cleanups == NULL_TREE))
2985 thisblock->data.block.outer_cleanups
2986 = tree_cons (NULL_TREE, block_stack->data.block.cleanups,
2987 block_stack->data.block.outer_cleanups);
2988 else
2989 thisblock->data.block.outer_cleanups = 0;
2990 thisblock->data.block.label_chain = 0;
2991 thisblock->data.block.innermost_stack_block = stack_block_stack;
2992 thisblock->data.block.first_insn = note;
2993 thisblock->data.block.block_start_count = ++block_start_count;
2994 thisblock->exit_label = exit_flag ? gen_label_rtx () : 0;
2995 block_stack = thisblock;
2996 nesting_stack = thisblock;
2997
2998 /* Make a new level for allocating stack slots. */
2999 push_temp_slots ();
3000}
3001
3002/* Specify the scope of temporaries created by TARGET_EXPRs. Similar
3003 to CLEANUP_POINT_EXPR, but handles cases when a series of calls to
3004 expand_expr are made. After we end the region, we know that all
3005 space for all temporaries that were created by TARGET_EXPRs will be
3006 destroyed and their space freed for reuse. */
3007
3008void
3009expand_start_target_temps ()
3010{
3011 /* This is so that even if the result is preserved, the space
3012 allocated will be freed, as we know that it is no longer in use. */
3013 push_temp_slots ();
3014
3015 /* Start a new binding layer that will keep track of all cleanup
3016 actions to be performed. */
3017 expand_start_bindings (0);
3018
3019 target_temp_slot_level = temp_slot_level;
3020}
3021
3022void
3023expand_end_target_temps ()
3024{
3025 expand_end_bindings (NULL_TREE, 0, 0);
3026
3027 /* This is so that even if the result is preserved, the space
3028 allocated will be freed, as we know that it is no longer in use. */
3029 pop_temp_slots ();
3030}
3031
3032/* Mark top block of block_stack as an implicit binding for an
3033 exception region. This is used to prevent infinite recursion when
3034 ending a binding with expand_end_bindings. It is only ever called
3035 by expand_eh_region_start, as that is the only way to create a
3036 block stack for an exception region. */
3037
3038void
3039mark_block_as_eh_region ()
3040{
3041 block_stack->data.block.exception_region = 1;
3042 if (block_stack->next
3043 && block_stack->next->data.block.conditional_code)
3044 {
3045 block_stack->data.block.conditional_code
3046 = block_stack->next->data.block.conditional_code;
3047 block_stack->data.block.last_unconditional_cleanup
3048 = block_stack->next->data.block.last_unconditional_cleanup;
3049 block_stack->data.block.cleanup_ptr
3050 = block_stack->next->data.block.cleanup_ptr;
3051 }
3052}
3053
3054/* True if we are currently emitting insns in an area of output code
3055 that is controlled by a conditional expression. This is used by
3056 the cleanup handling code to generate conditional cleanup actions. */
3057
3058int
3059conditional_context ()
3060{
3061 return block_stack && block_stack->data.block.conditional_code;
3062}
3063
3064/* Mark top block of block_stack as not for an implicit binding for an
3065 exception region. This is only ever done by expand_eh_region_end
3066 to let expand_end_bindings know that it is being called explicitly
3067 to end the binding layer for just the binding layer associated with
3068 the exception region; otherwise expand_end_bindings would try to
3069 end all implicit binding layers for exception regions, and then
3070 one normal binding layer. */
3071
3072void
3073mark_block_as_not_eh_region ()
3074{
3075 block_stack->data.block.exception_region = 0;
3076}
3077
3078/* True if the top block of block_stack was marked as for an exception
3079 region by mark_block_as_eh_region. */
3080
3081int
3082is_eh_region ()
3083{
3084 return block_stack && block_stack->data.block.exception_region;
3085}
3086
3087/* Given a pointer to a BLOCK node, save a pointer to the most recently
3088 generated NOTE_INSN_BLOCK_END in the BLOCK_END_NOTE field of the given
3089 BLOCK node. */
3090
3091void
3092remember_end_note (block)
3093 register tree block;
3094{
3095 BLOCK_END_NOTE (block) = last_block_end_note;
3096 last_block_end_note = NULL_RTX;
3097}
3098
3099/* Emit a handler label for a nonlocal goto handler.
3100 Also emit code to store the handler label in SLOT before BEFORE_INSN. */
3101
3102static void
3103expand_nl_handler_label (slot, before_insn)
3104 rtx slot, before_insn;
3105{
3106 rtx insns;
3107 rtx handler_label = gen_label_rtx ();
3108
3109 /* Don't let jump_optimize delete the handler. */
3110 LABEL_PRESERVE_P (handler_label) = 1;
3111
3112 start_sequence ();
3113 emit_move_insn (slot, gen_rtx_LABEL_REF (Pmode, handler_label));
3114 insns = get_insns ();
3115 end_sequence ();
3116 emit_insns_before (insns, before_insn);
3117
3118 emit_label (handler_label);
3119}
3120
3121/* Emit code to restore vital registers at the beginning of a nonlocal goto
3122 handler. */
3123static void
3124expand_nl_goto_receiver ()
3125{
3126#ifdef HAVE_nonlocal_goto
3127 if (! HAVE_nonlocal_goto)
3128#endif
3129 /* First adjust our frame pointer to its actual value. It was
3130 previously set to the start of the virtual area corresponding to
3131 the stacked variables when we branched here and now needs to be
3132 adjusted to the actual hardware fp value.
3133
3134 Assignments to virtual registers are converted by
3135 instantiate_virtual_regs into the corresponding assignment
3136 to the underlying register (fp in this case) that makes
3137 the original assignment true.
3138 So the following insn will actually be
3139 decrementing fp by STARTING_FRAME_OFFSET. */
3140 emit_move_insn (virtual_stack_vars_rtx, hard_frame_pointer_rtx);
3141
3142#if ARG_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
3143 if (fixed_regs[ARG_POINTER_REGNUM])
3144 {
3145#ifdef ELIMINABLE_REGS
3146 /* If the argument pointer can be eliminated in favor of the
3147 frame pointer, we don't need to restore it. We assume here
3148 that if such an elimination is present, it can always be used.
3149 This is the case on all known machines; if we don't make this
3150 assumption, we do unnecessary saving on many machines. */
3151 static struct elims {int from, to;} elim_regs[] = ELIMINABLE_REGS;
3152 size_t i;
3153
3154 for (i = 0; i < sizeof elim_regs / sizeof elim_regs[0]; i++)
3155 if (elim_regs[i].from == ARG_POINTER_REGNUM
3156 && elim_regs[i].to == HARD_FRAME_POINTER_REGNUM)
3157 break;
3158
3159 if (i == sizeof elim_regs / sizeof elim_regs [0])
3160#endif
3161 {
3162 /* Now restore our arg pointer from the address at which it
3163 was saved in our stack frame.
3164 If there hasn't been space allocated for it yet, make
3165 some now. */
3166 if (arg_pointer_save_area == 0)
3167 arg_pointer_save_area
3168 = assign_stack_local (Pmode, GET_MODE_SIZE (Pmode), 0);
3169 emit_move_insn (virtual_incoming_args_rtx,
3170 /* We need a pseudo here, or else
3171 instantiate_virtual_regs_1 complains. */
3172 copy_to_reg (arg_pointer_save_area));
3173 }
3174 }
3175#endif
3176
3177#ifdef HAVE_nonlocal_goto_receiver
3178 if (HAVE_nonlocal_goto_receiver)
3179 emit_insn (gen_nonlocal_goto_receiver ());
3180#endif
3181}
3182
3183/* Make handlers for nonlocal gotos taking place in the function calls in
3184 block THISBLOCK. */
3185
3186static void
3187expand_nl_goto_receivers (thisblock)
3188 struct nesting *thisblock;
3189{
3190 tree link;
3191 rtx afterward = gen_label_rtx ();
3192 rtx insns, slot;
3193 int any_invalid;
3194
3195 /* Record the handler address in the stack slot for that purpose,
3196 during this block, saving and restoring the outer value. */
3197 if (thisblock->next != 0)
3198 for (slot = nonlocal_goto_handler_slots; slot; slot = XEXP (slot, 1))
3199 {
3200 rtx save_receiver = gen_reg_rtx (Pmode);
3201 emit_move_insn (XEXP (slot, 0), save_receiver);
3202
3203 start_sequence ();
3204 emit_move_insn (save_receiver, XEXP (slot, 0));
3205 insns = get_insns ();
3206 end_sequence ();
3207 emit_insns_before (insns, thisblock->data.block.first_insn);
3208 }
3209
3210 /* Jump around the handlers; they run only when specially invoked. */
3211 emit_jump (afterward);
3212
3213 /* Make a separate handler for each label. */
3214 link = nonlocal_labels;
3215 slot = nonlocal_goto_handler_slots;
3216 for (; link; link = TREE_CHAIN (link), slot = XEXP (slot, 1))
3217 /* Skip any labels we shouldn't be able to jump to from here;
3218 we generate one special handler for all of them below, which just calls
3219 abort. */
3220 if (! DECL_TOO_LATE (TREE_VALUE (link)))
3221 {
3222 expand_nl_handler_label (XEXP (slot, 0),
3223 thisblock->data.block.first_insn);
3224 expand_nl_goto_receiver ();
3225
3226 /* Jump to the "real" nonlocal label. */
3227 expand_goto (TREE_VALUE (link));
3228 }
3229
3230 /* A second pass over all nonlocal labels; this time we handle those
3231 we should not be able to jump to at this point. */
3232 link = nonlocal_labels;
3233 slot = nonlocal_goto_handler_slots;
3234 any_invalid = 0;
3235 for (; link; link = TREE_CHAIN (link), slot = XEXP (slot, 1))
3236 if (DECL_TOO_LATE (TREE_VALUE (link)))
3237 {
3238 expand_nl_handler_label (XEXP (slot, 0),
3239 thisblock->data.block.first_insn);
3240 any_invalid = 1;
3241 }
3242
3243 if (any_invalid)
3244 {
3245 expand_nl_goto_receiver ();
3246 emit_library_call (gen_rtx_SYMBOL_REF (Pmode, "abort"), 0,
3247 VOIDmode, 0);
3248 emit_barrier ();
3249 }
3250
3251 emit_label (afterward);
3252}
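
/* For reference, the construct that exercises the receivers built here
   is a nonlocal goto out of a GNU C nested function (or the equivalent
   construct in another front end); a minimal user-level sketch, not
   part of this file:

	void
	outer (void)
	{
	  __label__ failed;
	  void inner (int bad) { if (bad) goto failed; }
	  inner (1);
	  return;
	failed:
	  return;
	}

   The `goto failed' in `inner' leaves one stack frame for another, so
   the receiver emitted above must restore `outer's frame pointer (and
   possibly its arg pointer) before any of `outer's variables are
   touched.  */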
3253
28d81abb
RK
3254/* Generate RTL code to terminate a binding contour.
3255 VARS is the chain of VAR_DECL nodes
3256 for the variables bound in this contour.
3257 MARK_ENDS is nonzero if we should put a note at the beginning
3258 and end of this binding contour.
3259
3260 DONT_JUMP_IN is nonzero if it is not valid to jump into this contour.
3261 (That is true automatically if the contour has a saved stack level.) */
3262
3263void
3264expand_end_bindings (vars, mark_ends, dont_jump_in)
3265 tree vars;
3266 int mark_ends;
3267 int dont_jump_in;
3268{
e976b8b2 3269 register struct nesting *thisblock;
28d81abb
RK
3270 register tree decl;
3271
e976b8b2
MS
3272 while (block_stack->data.block.exception_region)
3273 {
3274 /* Because we don't need or want a new temporary level and
3275 because we didn't create one in expand_eh_region_start,
3276 create a fake one now to avoid removing one in
3277 expand_end_bindings. */
3278 push_temp_slots ();
3279
3280 block_stack->data.block.exception_region = 0;
3281
3282 expand_end_bindings (NULL_TREE, 0, 0);
3283 }
3284
e976b8b2
MS
3285 /* Since expand_eh_region_start does an expand_start_bindings, we
3286 have to first end all the bindings that were created by
3287 expand_eh_region_start. */
3288
3289 thisblock = block_stack;
3290
28d81abb
RK
3291 if (warn_unused)
3292 for (decl = vars; decl; decl = TREE_CHAIN (decl))
7e70e7c5 3293 if (! TREE_USED (decl) && TREE_CODE (decl) == VAR_DECL
e9a25f70
JL
3294 && ! DECL_IN_SYSTEM_HEADER (decl)
3295 && DECL_NAME (decl) && ! DECL_ARTIFICIAL (decl))
28d81abb
RK
3296 warning_with_decl (decl, "unused variable `%s'");
3297
28d81abb
RK
3298 if (thisblock->exit_label)
3299 {
3300 do_pending_stack_adjust ();
3301 emit_label (thisblock->exit_label);
3302 }
3303
ba716ac9 3304 /* If necessary, make handlers for nonlocal gotos taking
28d81abb
RK
3305 place in the function calls in this block. */
3306 if (function_call_count != thisblock->data.block.function_call_count
3307 && nonlocal_labels
3308 /* Make handler for outermost block
3309 if there were any nonlocal gotos to this function. */
3310 && (thisblock->next == 0 ? current_function_has_nonlocal_label
3311 /* Make handler for inner block if it has something
3312 special to do when you jump out of it. */
3313 : (thisblock->data.block.cleanups != 0
3314 || thisblock->data.block.stack_level != 0)))
ba716ac9 3315 expand_nl_goto_receivers (thisblock);
28d81abb 3316
72eb1038
BH
3317 /* Don't allow jumping into a block that has a stack level.
3318 Cleanups are allowed, though. */
28d81abb 3319 if (dont_jump_in
72eb1038 3320 || thisblock->data.block.stack_level != 0)
28d81abb
RK
3321 {
3322 struct label_chain *chain;
3323
3324 /* Any labels in this block are no longer valid to go to.
3325 Mark them to cause an error message. */
3326 for (chain = thisblock->data.block.label_chain; chain; chain = chain->next)
3327 {
3328 DECL_TOO_LATE (chain->label) = 1;
3329 /* If any goto without a fixup came to this label,
3330 that must be an error, because gotos without fixups
72eb1038 3331 come from outside all saved stack-levels. */
28d81abb
RK
3332 if (TREE_ADDRESSABLE (chain->label))
3333 error_with_decl (chain->label,
3334 "label `%s' used before containing binding contour");
3335 }
3336 }
3337
3338 /* Restore stack level in effect before the block
3339 (only if variable-size objects allocated). */
3340 /* Perform any cleanups associated with the block. */
3341
3342 if (thisblock->data.block.stack_level != 0
3343 || thisblock->data.block.cleanups != 0)
3344 {
413ec213 3345 /* Only clean up here if this point can actually be reached. */
50d1b7a1 3346 int reachable = GET_CODE (get_last_insn ()) != BARRIER;
28d81abb 3347
50d1b7a1
MS
3348 /* Don't let cleanups affect ({...}) constructs. */
3349 int old_expr_stmts_for_value = expr_stmts_for_value;
3350 rtx old_last_expr_value = last_expr_value;
3351 tree old_last_expr_type = last_expr_type;
3352 expr_stmts_for_value = 0;
28d81abb 3353
50d1b7a1
MS
3354 /* Do the cleanups. */
3355 expand_cleanups (thisblock->data.block.cleanups, NULL_TREE, 0, reachable);
3356 if (reachable)
3357 do_pending_stack_adjust ();
28d81abb 3358
50d1b7a1
MS
3359 expr_stmts_for_value = old_expr_stmts_for_value;
3360 last_expr_value = old_last_expr_value;
3361 last_expr_type = old_last_expr_type;
3362
3363 /* Restore the stack level. */
3364
3365 if (reachable && thisblock->data.block.stack_level != 0)
3366 {
3367 emit_stack_restore (thisblock->next ? SAVE_BLOCK : SAVE_FUNCTION,
3368 thisblock->data.block.stack_level, NULL_RTX);
ba716ac9 3369 if (nonlocal_goto_handler_slots != 0)
50d1b7a1
MS
3370 emit_stack_save (SAVE_NONLOCAL, &nonlocal_goto_stack_level,
3371 NULL_RTX);
28d81abb
RK
3372 }
3373
3374 /* Any gotos out of this block must also do these things.
59257ff7
RK
3375 Also report any gotos with fixups that came to labels in this
3376 level. */
28d81abb
RK
3377 fixup_gotos (thisblock,
3378 thisblock->data.block.stack_level,
3379 thisblock->data.block.cleanups,
3380 thisblock->data.block.first_insn,
3381 dont_jump_in);
3382 }
3383
c7d2d61d
RS
3384 /* Mark the beginning and end of the scope if requested.
3385 We do this now, after running cleanups on the variables
3386 just going out of scope, so they are in scope for their cleanups. */
3387
3388 if (mark_ends)
7629c936 3389 last_block_end_note = emit_note (NULL_PTR, NOTE_INSN_BLOCK_END);
c7d2d61d
RS
3390 else
3391 /* Get rid of the beginning-mark if we don't make an end-mark. */
3392 NOTE_LINE_NUMBER (thisblock->data.block.first_insn) = NOTE_INSN_DELETED;
3393
28d81abb
RK
3394 /* If doing stupid register allocation, make sure lives of all
3395 register variables declared here extend thru end of scope. */
3396
3397 if (obey_regdecls)
3398 for (decl = vars; decl; decl = TREE_CHAIN (decl))
3399 {
3400 rtx rtl = DECL_RTL (decl);
3401 if (TREE_CODE (decl) == VAR_DECL && rtl != 0)
3402 use_variable (rtl);
3403 }
3404
e976b8b2
MS
3405 /* Restore the temporary level of TARGET_EXPRs. */
3406 target_temp_slot_level = thisblock->data.block.target_temp_slot_level;
3407
28d81abb
RK
3408 /* Restore block_stack level for containing block. */
3409
3410 stack_block_stack = thisblock->data.block.innermost_stack_block;
3411 POPSTACK (block_stack);
3412
3413 /* Pop the stack slot nesting and free any slots at this level. */
3414 pop_temp_slots ();
3415}
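
/* For orientation: a front end brackets each binding contour with a
   matching pair of calls; roughly (a sketch only -- the exact calls
   and arguments vary by front end):

	expand_start_bindings (0);
	  ... expand_decl / expand_decl_init for each local,
	      expand_decl_cleanup for the ones with cleanups,
	      then the statements of the block ...
	expand_end_bindings (decls_of_the_block, 1, 0);

   so everything pushed for the scope -- cleanups, the temporary level,
   and a saved stack level if anything variable-sized was allocated --
   is popped in this one place.  */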
3416\f
3417/* Generate RTL for the automatic variable declaration DECL.
ec5cd386 3418 (Other kinds of declarations are simply ignored if seen here.) */
28d81abb
RK
3419
3420void
3421expand_decl (decl)
3422 register tree decl;
3423{
3424 struct nesting *thisblock = block_stack;
ca695ac9
JB
3425 tree type;
3426
ca695ac9 3427 type = TREE_TYPE (decl);
28d81abb
RK
3428
3429 /* Only automatic variables need any expansion done.
3430 Static and external variables, and external functions,
3431 will be handled by `assemble_variable' (called from finish_decl).
3432 TYPE_DECL and CONST_DECL require nothing.
3433 PARM_DECLs are handled in `assign_parms'. */
3434
3435 if (TREE_CODE (decl) != VAR_DECL)
3436 return;
44fe2e80 3437 if (TREE_STATIC (decl) || DECL_EXTERNAL (decl))
28d81abb
RK
3438 return;
3439
3440 /* Create the RTL representation for the variable. */
3441
3442 if (type == error_mark_node)
38a448ca 3443 DECL_RTL (decl) = gen_rtx_MEM (BLKmode, const0_rtx);
28d81abb
RK
3444 else if (DECL_SIZE (decl) == 0)
3445 /* Variable with incomplete type. */
3446 {
3447 if (DECL_INITIAL (decl) == 0)
3448 /* Error message was already done; now avoid a crash. */
3449 DECL_RTL (decl) = assign_stack_temp (DECL_MODE (decl), 0, 1);
3450 else
3451 /* An initializer is going to decide the size of this array.
3452 Until we know the size, represent its address with a reg. */
38a448ca 3453 DECL_RTL (decl) = gen_rtx_MEM (BLKmode, gen_reg_rtx (Pmode));
c6df88cb 3454 MEM_SET_IN_STRUCT_P (DECL_RTL (decl), AGGREGATE_TYPE_P (type));
28d81abb
RK
3455 }
3456 else if (DECL_MODE (decl) != BLKmode
3457 /* If -ffloat-store, don't put explicit float vars
3458 into regs. */
3459 && !(flag_float_store
3460 && TREE_CODE (type) == REAL_TYPE)
3461 && ! TREE_THIS_VOLATILE (decl)
3462 && ! TREE_ADDRESSABLE (decl)
e5e809f4
JL
3463 && (DECL_REGISTER (decl) || ! obey_regdecls)
3464 /* if -fcheck-memory-usage, check all variables. */
7d384cc0 3465 && ! current_function_check_memory_usage)
28d81abb
RK
3466 {
3467 /* Automatic variable that can go in a register. */
98f3b471 3468 int unsignedp = TREE_UNSIGNED (type);
28612f9e
RK
3469 enum machine_mode reg_mode
3470 = promote_mode (type, DECL_MODE (decl), &unsignedp, 0);
98f3b471 3471
7f070d5e
RK
3472 DECL_RTL (decl) = gen_reg_rtx (reg_mode);
3473 mark_user_reg (DECL_RTL (decl));
3474
e5e809f4 3475 if (POINTER_TYPE_P (type))
7f070d5e
RK
3476 mark_reg_pointer (DECL_RTL (decl),
3477 (TYPE_ALIGN (TREE_TYPE (TREE_TYPE (decl)))
3478 / BITS_PER_UNIT));
28d81abb 3479 }
0df15c2c 3480
5e4ef18a
RK
3481 else if (TREE_CODE (DECL_SIZE (decl)) == INTEGER_CST
3482 && ! (flag_stack_check && ! STACK_CHECK_BUILTIN
3483 && (TREE_INT_CST_HIGH (DECL_SIZE (decl)) != 0
3484 || (TREE_INT_CST_LOW (DECL_SIZE (decl))
3485 > STACK_CHECK_MAX_VAR_SIZE * BITS_PER_UNIT))))
28d81abb
RK
3486 {
3487 /* Variable of fixed size that goes on the stack. */
3488 rtx oldaddr = 0;
3489 rtx addr;
3490
3491 /* If we previously made RTL for this decl, it must be an array
3492 whose size was determined by the initializer.
3493 The old address was a register; set that register now
3494 to the proper address. */
3495 if (DECL_RTL (decl) != 0)
3496 {
3497 if (GET_CODE (DECL_RTL (decl)) != MEM
3498 || GET_CODE (XEXP (DECL_RTL (decl), 0)) != REG)
3499 abort ();
3500 oldaddr = XEXP (DECL_RTL (decl), 0);
3501 }
3502
d16790f2 3503 DECL_RTL (decl) = assign_temp (TREE_TYPE (decl), 1, 1, 1);
c6df88cb
MM
3504 MEM_SET_IN_STRUCT_P (DECL_RTL (decl),
3505 AGGREGATE_TYPE_P (TREE_TYPE (decl)));
28d81abb
RK
3506
3507 /* Set alignment we actually gave this decl. */
3508 DECL_ALIGN (decl) = (DECL_MODE (decl) == BLKmode ? BIGGEST_ALIGNMENT
3509 : GET_MODE_BITSIZE (DECL_MODE (decl)));
3510
3511 if (oldaddr)
3512 {
3513 addr = force_operand (XEXP (DECL_RTL (decl), 0), oldaddr);
3514 if (addr != oldaddr)
3515 emit_move_insn (oldaddr, addr);
3516 }
3517
3518 /* If this is a memory ref that contains aggregate components,
3519 mark it as such for cse and loop optimize. */
c6df88cb
MM
3520 MEM_SET_IN_STRUCT_P (DECL_RTL (decl),
3521 AGGREGATE_TYPE_P (TREE_TYPE (decl)));
28d81abb
RK
3522#if 0
3523 /* If this is in memory because of -ffloat-store,
3524 set the volatile bit, to prevent optimizations from
3525 undoing the effects. */
3526 if (flag_float_store && TREE_CODE (type) == REAL_TYPE)
3527 MEM_VOLATILE_P (DECL_RTL (decl)) = 1;
3528#endif
41472af8
MM
3529
3530 MEM_ALIAS_SET (DECL_RTL (decl)) = get_alias_set (decl);
28d81abb
RK
3531 }
3532 else
3533 /* Dynamic-size object: must push space on the stack. */
3534 {
3535 rtx address, size;
3536
3537 /* Record the stack pointer on entry to block, if have
3538 not already done so. */
3539 if (thisblock->data.block.stack_level == 0)
3540 {
3541 do_pending_stack_adjust ();
59257ff7
RK
3542 emit_stack_save (thisblock->next ? SAVE_BLOCK : SAVE_FUNCTION,
3543 &thisblock->data.block.stack_level,
3544 thisblock->data.block.first_insn);
28d81abb
RK
3545 stack_block_stack = thisblock;
3546 }
3547
3548 /* Compute the variable's size, in bytes. */
3549 size = expand_expr (size_binop (CEIL_DIV_EXPR,
3550 DECL_SIZE (decl),
3551 size_int (BITS_PER_UNIT)),
37366632 3552 NULL_RTX, VOIDmode, 0);
28d81abb
RK
3553 free_temp_slots ();
3554
ff91ad08
RK
3555 /* Allocate space on the stack for the variable. Note that
3556 DECL_ALIGN says how the variable is to be aligned and we
3557 cannot use it to conclude anything about the alignment of
3558 the size. */
37366632 3559 address = allocate_dynamic_stack_space (size, NULL_RTX,
ff91ad08 3560 TYPE_ALIGN (TREE_TYPE (decl)));
28d81abb 3561
28d81abb 3562 /* Reference the variable indirect through that rtx. */
38a448ca 3563 DECL_RTL (decl) = gen_rtx_MEM (DECL_MODE (decl), address);
28d81abb 3564
2207e295
RS
3565 /* If this is a memory ref that contains aggregate components,
3566 mark it as such for cse and loop optimize. */
c6df88cb
MM
3567 MEM_SET_IN_STRUCT_P (DECL_RTL (decl),
3568 AGGREGATE_TYPE_P (TREE_TYPE (decl)));
2207e295 3569
28d81abb
RK
3570 /* Indicate the alignment we actually gave this variable. */
3571#ifdef STACK_BOUNDARY
3572 DECL_ALIGN (decl) = STACK_BOUNDARY;
3573#else
3574 DECL_ALIGN (decl) = BIGGEST_ALIGNMENT;
3575#endif
3576 }
3577
3578 if (TREE_THIS_VOLATILE (decl))
3579 MEM_VOLATILE_P (DECL_RTL (decl)) = 1;
b4bf13a8
RS
3580#if 0 /* A variable is not necessarily unchanging
3581 just because it is const. RTX_UNCHANGING_P
3582 means no change in the function,
3583 not merely no change in the variable's scope.
3584 It is correct to set RTX_UNCHANGING_P if the variable's scope
3585 is the whole function. There's no convenient way to test that. */
28d81abb
RK
3586 if (TREE_READONLY (decl))
3587 RTX_UNCHANGING_P (DECL_RTL (decl)) = 1;
b4bf13a8 3588#endif
28d81abb
RK
3589
3590 /* If doing stupid register allocation, make sure life of any
3591 register variable starts here, at the start of its scope. */
3592
3593 if (obey_regdecls)
3594 use_variable (DECL_RTL (decl));
3595}
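
/* The three allocation strategies above correspond to declarations
   like these (illustrative only):

	void
	f (int n)
	{
	  int i;		(can live in a register: promoted pseudo)
	  struct big b;		(fixed size: stack slot via assign_temp)
	  char vla[n];		(dynamic size: stack level saved, then
				 allocate_dynamic_stack_space at run time)
	}
*/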
ca695ac9
JB
3596
3597
28d81abb
RK
3598\f
3599/* Emit code to perform the initialization of a declaration DECL. */
3600
3601void
3602expand_decl_init (decl)
3603 tree decl;
3604{
b4ac57ab
RS
3605 int was_used = TREE_USED (decl);
3606
3564e40e
RK
3607 /* If this is a CONST_DECL, we don't have to generate any code, but
3608 if DECL_INITIAL is a constant, call expand_expr to force TREE_CST_RTL
3609 to be set while in the obstack containing the constant. If we don't
3610 do this, we can lose if we have functions nested three deep and the middle
3611 function makes a CONST_DECL whose DECL_INITIAL is a STRING_CST while
3612 the innermost function is the first to expand that STRING_CST. */
3613 if (TREE_CODE (decl) == CONST_DECL)
3614 {
3615 if (DECL_INITIAL (decl) && TREE_CONSTANT (DECL_INITIAL (decl)))
3616 expand_expr (DECL_INITIAL (decl), NULL_RTX, VOIDmode,
3617 EXPAND_INITIALIZER);
3618 return;
3619 }
3620
28d81abb
RK
3621 if (TREE_STATIC (decl))
3622 return;
3623
3624 /* Compute and store the initial value now. */
3625
3626 if (DECL_INITIAL (decl) == error_mark_node)
3627 {
3628 enum tree_code code = TREE_CODE (TREE_TYPE (decl));
e5e809f4 3629
28d81abb 3630 if (code == INTEGER_TYPE || code == REAL_TYPE || code == ENUMERAL_TYPE
e5e809f4 3631 || code == POINTER_TYPE || code == REFERENCE_TYPE)
28d81abb
RK
3632 expand_assignment (decl, convert (TREE_TYPE (decl), integer_zero_node),
3633 0, 0);
3634 emit_queue ();
3635 }
3636 else if (DECL_INITIAL (decl) && TREE_CODE (DECL_INITIAL (decl)) != TREE_LIST)
3637 {
3638 emit_line_note (DECL_SOURCE_FILE (decl), DECL_SOURCE_LINE (decl));
3639 expand_assignment (decl, DECL_INITIAL (decl), 0, 0);
3640 emit_queue ();
3641 }
3642
b4ac57ab
RS
3643 /* Don't let the initialization count as "using" the variable. */
3644 TREE_USED (decl) = was_used;
3645
28d81abb 3646 /* Free any temporaries we made while initializing the decl. */
ae8c59c0 3647 preserve_temp_slots (NULL_RTX);
28d81abb
RK
3648 free_temp_slots ();
3649}
3650
3651/* CLEANUP is an expression to be executed at exit from this binding contour;
3652 for example, in C++, it might call the destructor for this variable.
3653
4847c938
MS
3654 We wrap CLEANUP in an UNSAVE_EXPR node, so that we can expand the
3655 CLEANUP multiple times, and have the correct semantics. This
e976b8b2
MS
3656   happens in exception handling and for gotos, returns, and breaks
3657   that leave the current scope.
28d81abb
RK
3658
3659 If CLEANUP is nonzero and DECL is zero, we record a cleanup
3660 that is not associated with any particular variable. */
3661
3662int
3663expand_decl_cleanup (decl, cleanup)
3664 tree decl, cleanup;
3665{
3666 struct nesting *thisblock = block_stack;
3667
3668 /* Error if we are not in any block. */
3669 if (thisblock == 0)
3670 return 0;
3671
3672 /* Record the cleanup if there is one. */
3673
3674 if (cleanup != 0)
3675 {
e976b8b2
MS
3676 tree t;
3677 rtx seq;
3678 tree *cleanups = &thisblock->data.block.cleanups;
3679 int cond_context = conditional_context ();
3680
3681 if (cond_context)
3682 {
3683 rtx flag = gen_reg_rtx (word_mode);
3684 rtx set_flag_0;
3685 tree cond;
3686
3687 start_sequence ();
3688 emit_move_insn (flag, const0_rtx);
3689 set_flag_0 = get_insns ();
3690 end_sequence ();
3691
3692 thisblock->data.block.last_unconditional_cleanup
3693 = emit_insns_after (set_flag_0,
3694 thisblock->data.block.last_unconditional_cleanup);
3695
3696 emit_move_insn (flag, const1_rtx);
3697
3698 /* All cleanups must be on the function_obstack. */
3699 push_obstacks_nochange ();
3700 resume_temporary_allocation ();
3701
3702 cond = build_decl (VAR_DECL, NULL_TREE, type_for_mode (word_mode, 1));
3703 DECL_RTL (cond) = flag;
3704
3705 /* Conditionalize the cleanup. */
3706 cleanup = build (COND_EXPR, void_type_node,
3707 truthvalue_conversion (cond),
3708 cleanup, integer_zero_node);
3709 cleanup = fold (cleanup);
3710
3711 pop_obstacks ();
3712
3713 cleanups = thisblock->data.block.cleanup_ptr;
3714 }
3715
3716 /* All cleanups must be on the function_obstack. */
3717 push_obstacks_nochange ();
3718 resume_temporary_allocation ();
4847c938 3719 cleanup = unsave_expr (cleanup);
e976b8b2
MS
3720 pop_obstacks ();
3721
3722 t = *cleanups = temp_tree_cons (decl, cleanup, *cleanups);
3723
3724 if (! cond_context)
3725 /* If this block has a cleanup, it belongs in stack_block_stack. */
3726 stack_block_stack = thisblock;
3727
3728 if (cond_context)
3729 {
3730 start_sequence ();
3731 }
4847c938 3732
e976b8b2
MS
3733 /* If this was optimized so that there is no exception region for the
3734 cleanup, then mark the TREE_LIST node, so that we can later tell
3735 if we need to call expand_eh_region_end. */
e9a25f70
JL
3736 if (! using_eh_for_cleanups_p
3737 || expand_eh_region_start_tree (decl, cleanup))
e976b8b2 3738 TREE_ADDRESSABLE (t) = 1;
716cc7f7
JM
3739 /* If that started a new EH region, we're in a new block. */
3740 thisblock = block_stack;
e976b8b2
MS
3741
3742 if (cond_context)
3743 {
3744 seq = get_insns ();
3745 end_sequence ();
7e82801f
MS
3746 if (seq)
3747 thisblock->data.block.last_unconditional_cleanup
3748 = emit_insns_after (seq,
3749 thisblock->data.block.last_unconditional_cleanup);
e976b8b2
MS
3750 }
3751 else
3752 {
3753 thisblock->data.block.last_unconditional_cleanup
3754 = get_last_insn ();
3755 thisblock->data.block.cleanup_ptr = &thisblock->data.block.cleanups;
3756 }
28d81abb
RK
3757 }
3758 return 1;
3759}
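
/* The classic client of this function is the C++ front end registering
   a destructor for a local object; schematically (a sketch, not the
   actual front end code):

	tree decl = ...;	(VAR_DECL for an object of class type T)
	tree cleanup = ...;	(CALL_EXPR invoking T's destructor on it)
	expand_decl (decl);
	expand_decl_init (decl);
	expand_decl_cleanup (decl, cleanup);

   Thanks to the UNSAVE_EXPR wrapping above, the destructor can then be
   expanded on every path that leaves the scope: fall-through, goto,
   return, break, or exception unwind.  */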
e976b8b2 3760
c7ae64f2
JM
3761/* Like expand_decl_cleanup, but suppress generating an exception handler
3762 to perform the cleanup. */
3763
3764int
3765expand_decl_cleanup_no_eh (decl, cleanup)
3766 tree decl, cleanup;
3767{
3768 int save_eh = using_eh_for_cleanups_p;
b472527b
JL
3769 int result;
3770
c7ae64f2 3771 using_eh_for_cleanups_p = 0;
b472527b 3772 result = expand_decl_cleanup (decl, cleanup);
c7ae64f2 3773 using_eh_for_cleanups_p = save_eh;
b472527b
JL
3774
3775 return result;
c7ae64f2
JM
3776}
3777
e976b8b2 3778/* Arrange for the top element of the dynamic cleanup chain to be
4c581243
MS
3779 popped if we exit the current binding contour. DECL is the
3780 associated declaration, if any, otherwise NULL_TREE. If the
3781 current contour is left via an exception, then __sjthrow will pop
3782 the top element off the dynamic cleanup chain. The code that
3783 avoids doing the action we push into the cleanup chain in the
3784 exceptional case is contained in expand_cleanups.
e976b8b2
MS
3785
3786 This routine is only used by expand_eh_region_start, and that is
3787 the only way in which an exception region should be started. This
3788 routine is only used when using the setjmp/longjmp codegen method
3789 for exception handling. */
3790
3791int
4c581243
MS
3792expand_dcc_cleanup (decl)
3793 tree decl;
e976b8b2
MS
3794{
3795 struct nesting *thisblock = block_stack;
3796 tree cleanup;
3797
3798 /* Error if we are not in any block. */
3799 if (thisblock == 0)
3800 return 0;
3801
3802 /* Record the cleanup for the dynamic handler chain. */
3803
3804 /* All cleanups must be on the function_obstack. */
3805 push_obstacks_nochange ();
3806 resume_temporary_allocation ();
3807 cleanup = make_node (POPDCC_EXPR);
3808 pop_obstacks ();
3809
3810 /* Add the cleanup in a manner similar to expand_decl_cleanup. */
3811 thisblock->data.block.cleanups
4c581243 3812 = temp_tree_cons (decl, cleanup, thisblock->data.block.cleanups);
e976b8b2
MS
3813
3814 /* If this block has a cleanup, it belongs in stack_block_stack. */
3815 stack_block_stack = thisblock;
3816 return 1;
3817}
3818
3819/* Arrange for the top element of the dynamic handler chain to be
4c581243 3820 popped if we exit the current binding contour. DECL is the
956d6950 3821 associated declaration, if any, otherwise NULL_TREE. If the current
4c581243
MS
3822 contour is left via an exception, then __sjthrow will pop the top
3823 element off the dynamic handler chain. The code that avoids doing
3824 the action we push into the handler chain in the exceptional case
3825 is contained in expand_cleanups.
e976b8b2
MS
3826
3827 This routine is only used by expand_eh_region_start, and that is
3828 the only way in which an exception region should be started. This
3829 routine is only used when using the setjmp/longjmp codegen method
3830 for exception handling. */
3831
3832int
4c581243
MS
3833expand_dhc_cleanup (decl)
3834 tree decl;
e976b8b2
MS
3835{
3836 struct nesting *thisblock = block_stack;
3837 tree cleanup;
3838
3839 /* Error if we are not in any block. */
3840 if (thisblock == 0)
3841 return 0;
3842
3843 /* Record the cleanup for the dynamic handler chain. */
3844
3845 /* All cleanups must be on the function_obstack. */
3846 push_obstacks_nochange ();
3847 resume_temporary_allocation ();
3848 cleanup = make_node (POPDHC_EXPR);
3849 pop_obstacks ();
3850
3851 /* Add the cleanup in a manner similar to expand_decl_cleanup. */
3852 thisblock->data.block.cleanups
4c581243 3853 = temp_tree_cons (decl, cleanup, thisblock->data.block.cleanups);
e976b8b2
MS
3854
3855 /* If this block has a cleanup, it belongs in stack_block_stack. */
3856 stack_block_stack = thisblock;
3857 return 1;
3858}
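
/* Conceptually, the dynamic cleanup and handler chains maintained here
   are runtime stacks of records along the lines of (an illustrative
   sketch; the real layout belongs to the runtime support, not to this
   file):

	struct sjlj_node { struct sjlj_node *next; jmp_buf where; };
	static struct sjlj_node *dynamic_handler_chain;

   Entering an EH region pushes a record; the POPDCC_EXPR/POPDHC_EXPR
   cleanups recorded above pop it on normal exit from the contour,
   while __sjthrow pops it when the contour is left by an exception.  */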
28d81abb
RK
3859\f
3860/* DECL is an anonymous union. CLEANUP is a cleanup for DECL.
3861 DECL_ELTS is the list of elements that belong to DECL's type.
3862 In each, the TREE_VALUE is a VAR_DECL, and the TREE_PURPOSE a cleanup. */
3863
3864void
3865expand_anon_union_decl (decl, cleanup, decl_elts)
3866 tree decl, cleanup, decl_elts;
3867{
3868 struct nesting *thisblock = block_stack;
3869 rtx x;
3870
ec5cd386
RK
3871 expand_decl (decl);
3872 expand_decl_cleanup (decl, cleanup);
28d81abb
RK
3873 x = DECL_RTL (decl);
3874
3875 while (decl_elts)
3876 {
3877 tree decl_elt = TREE_VALUE (decl_elts);
3878 tree cleanup_elt = TREE_PURPOSE (decl_elts);
3879 enum machine_mode mode = TYPE_MODE (TREE_TYPE (decl_elt));
3880
7b9032dd
JM
3881 /* Propagate the union's alignment to the elements. */
3882 DECL_ALIGN (decl_elt) = DECL_ALIGN (decl);
3883
3884 /* If the element has BLKmode and the union doesn't, the union is
3885 aligned such that the element doesn't need to have BLKmode, so
3886 change the element's mode to the appropriate one for its size. */
3887 if (mode == BLKmode && DECL_MODE (decl) != BLKmode)
3888 DECL_MODE (decl_elt) = mode
3889 = mode_for_size (TREE_INT_CST_LOW (DECL_SIZE (decl_elt)),
3890 MODE_INT, 1);
3891
28d81abb
RK
3892 /* (SUBREG (MEM ...)) at RTL generation time is invalid, so we
3893 instead create a new MEM rtx with the proper mode. */
3894 if (GET_CODE (x) == MEM)
3895 {
3896 if (mode == GET_MODE (x))
3897 DECL_RTL (decl_elt) = x;
3898 else
3899 {
38a448ca 3900 DECL_RTL (decl_elt) = gen_rtx_MEM (mode, copy_rtx (XEXP (x, 0)));
c6df88cb 3901 MEM_COPY_ATTRIBUTES (DECL_RTL (decl_elt), x);
28d81abb
RK
3902 RTX_UNCHANGING_P (DECL_RTL (decl_elt)) = RTX_UNCHANGING_P (x);
3903 }
3904 }
3905 else if (GET_CODE (x) == REG)
3906 {
3907 if (mode == GET_MODE (x))
3908 DECL_RTL (decl_elt) = x;
3909 else
38a448ca 3910 DECL_RTL (decl_elt) = gen_rtx_SUBREG (mode, x, 0);
28d81abb
RK
3911 }
3912 else
3913 abort ();
3914
3915 /* Record the cleanup if there is one. */
3916
3917 if (cleanup != 0)
3918 thisblock->data.block.cleanups
3919 = temp_tree_cons (decl_elt, cleanup_elt,
3920 thisblock->data.block.cleanups);
3921
3922 decl_elts = TREE_CHAIN (decl_elts);
3923 }
3924}
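
/* For reference, the construct handled here is the anonymous union of
   C++ (and similar front-end constructs), e.g.:

	void
	f (void)
	{
	  union { int i; float x; };	(unnamed: members share one slot)
	  i = 3;
	}

   Each member VAR_DECL in DECL_ELTS ends up reusing DECL's rtx,
   re-expressed above in the member's own machine mode.  */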
3925\f
3926/* Expand a list of cleanups LIST.
3927 Elements may be expressions or may be nested lists.
3928
3929 If DONT_DO is nonnull, then any list-element
3930 whose TREE_PURPOSE matches DONT_DO is omitted.
3931 This is sometimes used to avoid a cleanup associated with
4e44807b
MS
3932 a value that is being returned out of the scope.
3933
3934 If IN_FIXUP is non-zero, we are generating this cleanup for a fixup
50d1b7a1
MS
3935 goto and handle protection regions specially in that case.
3936
3937 If REACHABLE, we emit code, otherwise just inform the exception handling
3938 code about this finalization. */
28d81abb
RK
3939
3940static void
50d1b7a1 3941expand_cleanups (list, dont_do, in_fixup, reachable)
28d81abb
RK
3942 tree list;
3943 tree dont_do;
4e44807b 3944 int in_fixup;
50d1b7a1 3945 int reachable;
28d81abb
RK
3946{
3947 tree tail;
3948 for (tail = list; tail; tail = TREE_CHAIN (tail))
3949 if (dont_do == 0 || TREE_PURPOSE (tail) != dont_do)
3950 {
3951 if (TREE_CODE (TREE_VALUE (tail)) == TREE_LIST)
50d1b7a1 3952 expand_cleanups (TREE_VALUE (tail), dont_do, in_fixup, reachable);
28d81abb
RK
3953 else
3954 {
4e44807b 3955 if (! in_fixup)
e976b8b2
MS
3956 {
3957 tree cleanup = TREE_VALUE (tail);
3958
3959 /* See expand_d{h,c}c_cleanup for why we avoid this. */
3960 if (TREE_CODE (cleanup) != POPDHC_EXPR
3961 && TREE_CODE (cleanup) != POPDCC_EXPR
3962 /* See expand_eh_region_start_tree for this case. */
3963 && ! TREE_ADDRESSABLE (tail))
3964 {
3965 cleanup = protect_with_terminate (cleanup);
3966 expand_eh_region_end (cleanup);
3967 }
3968 }
61d6b1cc 3969
50d1b7a1
MS
3970 if (reachable)
3971 {
3972 /* Cleanups may be run multiple times. For example,
3973 when exiting a binding contour, we expand the
3974 cleanups associated with that contour. When a goto
3975 within that binding contour has a target outside that
3976 contour, it will expand all cleanups from its scope to
3977 the target. Though the cleanups are expanded multiple
3978 times, the control paths are non-overlapping so the
3979 cleanups will not be executed twice. */
9762d48d
JM
3980
3981 /* We may need to protect fixups with rethrow regions. */
3982 int protect = (in_fixup && ! TREE_ADDRESSABLE (tail));
e5e809f4 3983
9762d48d
JM
3984 if (protect)
3985 expand_fixup_region_start ();
e5e809f4 3986
50d1b7a1 3987 expand_expr (TREE_VALUE (tail), const0_rtx, VOIDmode, 0);
9762d48d
JM
3988 if (protect)
3989 expand_fixup_region_end (TREE_VALUE (tail));
50d1b7a1
MS
3990 free_temp_slots ();
3991 }
28d81abb
RK
3992 }
3993 }
3994}
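
/* As the comment above explains, one cleanup list may be expanded on
   several control paths; for example (a C++-flavored sketch):

	{
	  T t;			(cleanup: destructor for t)
	  if (p)
	    goto out;		(path 1: cleanups expanded at the goto)
	}			(path 2: cleanups expanded at scope end)
      out:;

   The two expansions lie on disjoint paths, so each execution still
   runs the cleanup exactly once.  */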
3995
e976b8b2
MS
3996/* Mark the context we are emitting RTL for as a conditional
3997 context, so that any cleanup actions we register with
3998 expand_decl_init will be properly conditionalized when those
3999 cleanup actions are later performed. Must be called before any
956d6950 4000 expression (tree) is expanded that is within a conditional context. */
e976b8b2
MS
4001
4002void
956d6950 4003start_cleanup_deferral ()
e976b8b2 4004{
e3eef942
JW
4005 /* block_stack can be NULL if we are inside the parameter list. It is
4006 OK to do nothing, because cleanups aren't possible here. */
4007 if (block_stack)
4008 ++block_stack->data.block.conditional_code;
e976b8b2
MS
4009}
4010
4011/* Mark the end of a conditional region of code. Because cleanup
956d6950 4012 deferrals may be nested, we may still be in a conditional region
e976b8b2
MS
4013   after we end the currently deferred cleanups; only after we end all
4014   deferred cleanups are we back in unconditional code.  */
4015
4016void
956d6950 4017end_cleanup_deferral ()
e976b8b2 4018{
e3eef942
JW
4019 /* block_stack can be NULL if we are inside the parameter list. It is
4020 OK to do nothing, because cleanups aren't possible here. */
4021 if (block_stack)
4022 --block_stack->data.block.conditional_code;
e976b8b2
MS
4023}
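
/* Deferral matters for cleanups that arise inside one arm of a
   conditional; schematically (a sketch):

	r = p ? f (T ()) : g ();

   The temporary T () is constructed only when P is true, so a cleanup
   registered while the counter above is nonzero is wrapped by
   expand_decl_cleanup in a run-time flag: cleared at the last
   unconditional point, set on the arm that creates the temporary, and
   tested when the cleanup is finally run.  */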
4024
28d81abb
RK
4025/* Move all cleanups from the current block_stack
4026 to the containing block_stack, where they are assumed to
4027 have been created. If anything can cause a temporary to
4028 be created, but not expanded for more than one level of
4029 block_stacks, then this code will have to change. */
4030
4031void
4032move_cleanups_up ()
4033{
4034 struct nesting *block = block_stack;
4035 struct nesting *outer = block->next;
4036
4037 outer->data.block.cleanups
4038 = chainon (block->data.block.cleanups,
4039 outer->data.block.cleanups);
4040 block->data.block.cleanups = 0;
4041}
4042
4043tree
4044last_cleanup_this_contour ()
4045{
4046 if (block_stack == 0)
4047 return 0;
4048
4049 return block_stack->data.block.cleanups;
4050}
4051
4052/* Return 1 if there are any pending cleanups at this point.
4053 If THIS_CONTOUR is nonzero, check the current contour as well.
4054 Otherwise, look only at the contours that enclose this one. */
4055
4056int
4057any_pending_cleanups (this_contour)
4058 int this_contour;
4059{
4060 struct nesting *block;
4061
4062 if (block_stack == 0)
4063 return 0;
4064
4065 if (this_contour && block_stack->data.block.cleanups != NULL)
4066 return 1;
4067 if (block_stack->data.block.cleanups == 0
e976b8b2 4068 && block_stack->data.block.outer_cleanups == 0)
28d81abb
RK
4069 return 0;
4070
4071 for (block = block_stack->next; block; block = block->next)
4072 if (block->data.block.cleanups != 0)
4073 return 1;
4074
4075 return 0;
4076}
4077\f
4078/* Enter a case (Pascal) or switch (C) statement.
4079 Push a block onto case_stack and nesting_stack
4080 to accumulate the case-labels that are seen
4081 and to record the labels generated for the statement.
4082
4083 EXIT_FLAG is nonzero if `exit_something' should exit this case stmt.
4084 Otherwise, this construct is transparent for `exit_something'.
4085
4086 EXPR is the index-expression to be dispatched on.
4087 TYPE is its nominal type. We could simply convert EXPR to this type,
4088 but instead we take short cuts. */
4089
4090void
4091expand_start_case (exit_flag, expr, type, printname)
4092 int exit_flag;
4093 tree expr;
4094 tree type;
4095 char *printname;
4096{
4097 register struct nesting *thiscase = ALLOC_NESTING ();
4098
4099 /* Make an entry on case_stack for the case we are entering. */
4100
4101 thiscase->next = case_stack;
4102 thiscase->all = nesting_stack;
4103 thiscase->depth = ++nesting_depth;
4104 thiscase->exit_label = exit_flag ? gen_label_rtx () : 0;
4105 thiscase->data.case_stmt.case_list = 0;
4106 thiscase->data.case_stmt.index_expr = expr;
4107 thiscase->data.case_stmt.nominal_type = type;
4108 thiscase->data.case_stmt.default_label = 0;
4109 thiscase->data.case_stmt.num_ranges = 0;
4110 thiscase->data.case_stmt.printname = printname;
a11759a3 4111 thiscase->data.case_stmt.line_number_status = force_line_numbers ();
28d81abb
RK
4112 case_stack = thiscase;
4113 nesting_stack = thiscase;
4114
4115 do_pending_stack_adjust ();
4116
4117 /* Make sure case_stmt.start points to something that won't
4118 need any transformation before expand_end_case. */
4119 if (GET_CODE (get_last_insn ()) != NOTE)
37366632 4120 emit_note (NULL_PTR, NOTE_INSN_DELETED);
28d81abb
RK
4121
4122 thiscase->data.case_stmt.start = get_last_insn ();
4c581243 4123
956d6950 4124 start_cleanup_deferral ();
28d81abb
RK
4125}
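
/* Roughly, a C front end turns

	switch (expr) { case 1: ... default: ... }

   into the sequence (a sketch):

	expand_start_case (1, expr, TREE_TYPE (expr), "switch statement");
	  ... one pushcase/pushcase_range call per label, with the
	      statements of the body expanded in between ...
	expand_end_case (expr);

   expand_end_case below then emits the actual dispatch code.  */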
4126
ca695ac9 4127
28d81abb
RK
4128/* Start a "dummy case statement" within which case labels are invalid
4129 and are not connected to any larger real case statement.
4130 This can be used if you don't want to let a case statement jump
4131 into the middle of certain kinds of constructs. */
4132
4133void
4134expand_start_case_dummy ()
4135{
4136 register struct nesting *thiscase = ALLOC_NESTING ();
4137
4138 /* Make an entry on case_stack for the dummy. */
4139
4140 thiscase->next = case_stack;
4141 thiscase->all = nesting_stack;
4142 thiscase->depth = ++nesting_depth;
4143 thiscase->exit_label = 0;
4144 thiscase->data.case_stmt.case_list = 0;
4145 thiscase->data.case_stmt.start = 0;
4146 thiscase->data.case_stmt.nominal_type = 0;
4147 thiscase->data.case_stmt.default_label = 0;
4148 thiscase->data.case_stmt.num_ranges = 0;
4149 case_stack = thiscase;
4150 nesting_stack = thiscase;
956d6950 4151 start_cleanup_deferral ();
28d81abb
RK
4152}
4153
4154/* End a dummy case statement. */
4155
4156void
4157expand_end_case_dummy ()
4158{
956d6950 4159 end_cleanup_deferral ();
28d81abb
RK
4160 POPSTACK (case_stack);
4161}
4162
4163/* Return the data type of the index-expression
4164 of the innermost case statement, or null if none. */
4165
4166tree
4167case_index_expr_type ()
4168{
4169 if (case_stack)
4170 return TREE_TYPE (case_stack->data.case_stmt.index_expr);
4171 return 0;
4172}
4173\f
a11759a3
JR
4174static void
4175check_seenlabel ()
4176{
4177 /* If this is the first label, warn if any insns have been emitted. */
4178 if (case_stack->data.case_stmt.line_number_status >= 0)
4179 {
4180 rtx insn;
4181
4182 restore_line_number_status
4183 (case_stack->data.case_stmt.line_number_status);
4184 case_stack->data.case_stmt.line_number_status = -1;
4185
4186 for (insn = case_stack->data.case_stmt.start;
4187 insn;
4188 insn = NEXT_INSN (insn))
4189 {
4190 if (GET_CODE (insn) == CODE_LABEL)
4191 break;
4192 if (GET_CODE (insn) != NOTE
4193 && (GET_CODE (insn) != INSN || GET_CODE (PATTERN (insn)) != USE))
4194 {
4195 do
4196 insn = PREV_INSN (insn);
0dacbd0e
JW
4197 while (insn && (GET_CODE (insn) != NOTE || NOTE_LINE_NUMBER (insn) < 0));
4198
4199 /* If insn is zero, then there must have been a syntax error. */
4200 if (insn)
4201		warning_with_file_and_line (NOTE_SOURCE_FILE (insn),
4202					    NOTE_LINE_NUMBER (insn),
4203 "unreachable code at beginning of %s",
4204 case_stack->data.case_stmt.printname);
a11759a3
JR
4205 break;
4206 }
4207 }
4208 }
4209}
4210
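
/* The warning above fires for statements placed before the first label
   of a switch body, which can never execute; e.g. (illustrative):

	switch (x)
	  {
	    int i = init ();	(skipped: control jumps to a case label)
	  case 0:
	    use (i);		(i is in scope but never initialized)
	  }
*/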
28d81abb
RK
4211/* Accumulate one case or default label inside a case or switch statement.
4212 VALUE is the value of the case (a null pointer, for a default label).
f52fba84
PE
4213 The function CONVERTER, when applied to arguments T and V,
4214 converts the value V to the type T.
28d81abb
RK
4215
4216 If not currently inside a case or switch statement, return 1 and do
4217 nothing. The caller will print a language-specific error message.
4218 If VALUE is a duplicate or overlaps, return 2 and do nothing
4219 except store the (first) duplicate node in *DUPLICATE.
4220 If VALUE is out of range, return 3 and do nothing.
e976b8b2 4221 If we are jumping into the scope of a cleanup or var-sized array, return 5.
28d81abb
RK
4222 Return 0 on success.
4223
4224 Extended to handle range statements. */
4225
4226int
f52fba84 4227pushcase (value, converter, label, duplicate)
28d81abb 4228 register tree value;
f52fba84 4229 tree (*converter) PROTO((tree, tree));
28d81abb
RK
4230 register tree label;
4231 tree *duplicate;
4232{
28d81abb
RK
4233 tree index_type;
4234 tree nominal_type;
4235
4236 /* Fail if not inside a real case statement. */
4237 if (! (case_stack && case_stack->data.case_stmt.start))
4238 return 1;
4239
4240 if (stack_block_stack
4241 && stack_block_stack->depth > case_stack->depth)
4242 return 5;
4243
4244 index_type = TREE_TYPE (case_stack->data.case_stmt.index_expr);
4245 nominal_type = case_stack->data.case_stmt.nominal_type;
4246
4247 /* If the index is erroneous, avoid more problems: pretend to succeed. */
4248 if (index_type == error_mark_node)
4249 return 0;
4250
4251 /* Convert VALUE to the type in which the comparisons are nominally done. */
4252 if (value != 0)
f52fba84 4253 value = (*converter) (nominal_type, value);
28d81abb 4254
feb60352
R
4255 check_seenlabel ();
4256
28d81abb
RK
4257 /* Fail if this value is out of range for the actual type of the index
4258 (which may be narrower than NOMINAL_TYPE). */
4259 if (value != 0 && ! int_fits_type_p (value, index_type))
4260 return 3;
4261
4262 /* Fail if this is a duplicate or overlaps another entry. */
4263 if (value == 0)
4264 {
4265 if (case_stack->data.case_stmt.default_label != 0)
4266 {
4267 *duplicate = case_stack->data.case_stmt.default_label;
4268 return 2;
4269 }
4270 case_stack->data.case_stmt.default_label = label;
4271 }
4272 else
57641239 4273 return add_case_node (value, value, label, duplicate);
28d81abb
RK
4274
4275 expand_label (label);
4276 return 0;
4277}
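
/* A front-end caller maps these return codes to diagnostics; a rough
   sketch (the messages are illustrative, not the front end's actual
   wording):

	tree duplicate;
	switch (pushcase (value, convert, label, &duplicate))
	  {
	  case 1: error ("case label not within a switch statement"); break;
	  case 2: error_with_decl (duplicate, "duplicate case value"); break;
	  case 3: error ("case label value out of range"); break;
	  case 5: error ("case label within scope of cleanup or variable array"); break;
	  }
*/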
4278
956d6950
JL
4279/* Like pushcase but this case applies to all values between VALUE1 and
4280 VALUE2 (inclusive). If VALUE1 is NULL, the range starts at the lowest
4281 value of the index type and ends at VALUE2. If VALUE2 is NULL, the range
4282 starts at VALUE1 and ends at the highest value of the index type.
4283 If both are NULL, this case applies to all values.
4284
4285 The return value is the same as that of pushcase but there is one
4286 additional error code: 4 means the specified range was empty. */
28d81abb
RK
4287
4288int
f52fba84 4289pushcase_range (value1, value2, converter, label, duplicate)
28d81abb 4290 register tree value1, value2;
f52fba84 4291 tree (*converter) PROTO((tree, tree));
28d81abb
RK
4292 register tree label;
4293 tree *duplicate;
4294{
28d81abb
RK
4295 tree index_type;
4296 tree nominal_type;
4297
4298 /* Fail if not inside a real case statement. */
4299 if (! (case_stack && case_stack->data.case_stmt.start))
4300 return 1;
4301
4302 if (stack_block_stack
4303 && stack_block_stack->depth > case_stack->depth)
4304 return 5;
4305
4306 index_type = TREE_TYPE (case_stack->data.case_stmt.index_expr);
4307 nominal_type = case_stack->data.case_stmt.nominal_type;
4308
4309 /* If the index is erroneous, avoid more problems: pretend to succeed. */
4310 if (index_type == error_mark_node)
4311 return 0;
4312
a11759a3 4313 check_seenlabel ();
28d81abb 4314
956d6950
JL
4315 /* Convert VALUEs to type in which the comparisons are nominally done
4316 and replace any unspecified value with the corresponding bound. */
4317 if (value1 == 0)
1974bfb1 4318 value1 = TYPE_MIN_VALUE (index_type);
956d6950 4319 if (value2 == 0)
1974bfb1 4320 value2 = TYPE_MAX_VALUE (index_type);
956d6950
JL
4321
4322 /* Fail if the range is empty. Do this before any conversion since
4323 we want to allow out-of-range empty ranges. */
e1ee5cdc 4324 if (value2 && tree_int_cst_lt (value2, value1))
956d6950
JL
4325 return 4;
4326
4327 value1 = (*converter) (nominal_type, value1);
e1ee5cdc
RH
4328
4329 /* If the max was unbounded, use the max of the nominal_type we are
4330 converting to. Do this after the < check above to suppress false
4331 positives. */
4332 if (!value2)
4333 value2 = TYPE_MAX_VALUE (nominal_type);
f52fba84 4334 value2 = (*converter) (nominal_type, value2);
28d81abb
RK
4335
4336 /* Fail if these values are out of range. */
956d6950
JL
4337 if (TREE_CONSTANT_OVERFLOW (value1)
4338 || ! int_fits_type_p (value1, index_type))
28d81abb
RK
4339 return 3;
4340
956d6950
JL
4341 if (TREE_CONSTANT_OVERFLOW (value2)
4342 || ! int_fits_type_p (value2, index_type))
28d81abb
RK
4343 return 3;
4344
57641239
RK
4345 return add_case_node (value1, value2, label, duplicate);
4346}
4347
4348/* Do the actual insertion of a case label for pushcase and pushcase_range
4349 into case_stack->data.case_stmt.case_list. Use an AVL tree to avoid
4350 slowdown for large switch statements. */
4351
4352static int
4353add_case_node (low, high, label, duplicate)
4354 tree low, high;
4355 tree label;
4356 tree *duplicate;
4357{
4358 struct case_node *p, **q, *r;
4359
4360 q = &case_stack->data.case_stmt.case_list;
4361 p = *q;
4362
69d4ca36 4363 while ((r = *q))
28d81abb 4364 {
57641239
RK
4365 p = r;
4366
4367 /* Keep going past elements distinctly greater than HIGH. */
4368 if (tree_int_cst_lt (high, p->low))
4369 q = &p->left;
4370
4371 /* or distinctly less than LOW. */
4372 else if (tree_int_cst_lt (p->high, low))
4373 q = &p->right;
4374
4375 else
28d81abb 4376 {
57641239
RK
4377 /* We have an overlap; this is an error. */
4378 *duplicate = p->code_label;
28d81abb
RK
4379 return 2;
4380 }
4381 }
4382
4383 /* Add this label to the chain, and succeed.
57641239 4385     Copy LOW, HIGH so they are on the temporary rather than the momentary
28d81abb
RK
4385 obstack and will thus survive till the end of the case statement. */
4386
57641239
RK
4387 r = (struct case_node *) oballoc (sizeof (struct case_node));
4388 r->low = copy_node (low);
28d81abb 4389
57641239
RK
4390 /* If the bounds are equal, turn this into the one-value case. */
4391
4392 if (tree_int_cst_equal (low, high))
4393 r->high = r->low;
4394 else
4395 {
4396 r->high = copy_node (high);
4397 case_stack->data.case_stmt.num_ranges++;
4398 }
4399
4400 r->code_label = label;
28d81abb
RK
4401 expand_label (label);
4402
57641239
RK
4403 *q = r;
4404 r->parent = p;
4405 r->left = 0;
4406 r->right = 0;
4407 r->balance = 0;
4408
4409 while (p)
4410 {
4411 struct case_node *s;
4412
4413 if (r == p->left)
4414 {
4415 int b;
4416
4417 if (! (b = p->balance))
4418 /* Growth propagation from left side. */
4419 p->balance = -1;
4420 else if (b < 0)
4421 {
4422 if (r->balance < 0)
4423 {
4424 /* R-Rotation */
51723711 4425 if ((p->left = s = r->right))
57641239
RK
4426 s->parent = p;
4427
4428 r->right = p;
4429 p->balance = 0;
4430 r->balance = 0;
4431 s = p->parent;
4432 p->parent = r;
4433
51723711 4434 if ((r->parent = s))
57641239
RK
4435 {
4436 if (s->left == p)
4437 s->left = r;
4438 else
4439 s->right = r;
4440 }
4441 else
4442 case_stack->data.case_stmt.case_list = r;
4443 }
4444 else
4445 /* r->balance == +1 */
4446 {
5720c7e7
RK
4447 /* LR-Rotation */
4448
57641239
RK
4449 int b2;
4450 struct case_node *t = r->right;
4451
51723711 4452 if ((p->left = s = t->right))
57641239
RK
4453 s->parent = p;
4454
4455 t->right = p;
51723711 4456 if ((r->right = s = t->left))
57641239
RK
4457 s->parent = r;
4458
4459 t->left = r;
4460 b = t->balance;
4461 b2 = b < 0;
4462 p->balance = b2;
4463 b2 = -b2 - b;
4464 r->balance = b2;
4465 t->balance = 0;
4466 s = p->parent;
4467 p->parent = t;
4468 r->parent = t;
4469
51723711 4470 if ((t->parent = s))
57641239
RK
4471 {
4472 if (s->left == p)
4473 s->left = t;
4474 else
4475 s->right = t;
4476 }
4477 else
4478 case_stack->data.case_stmt.case_list = t;
4479 }
4480 break;
4481 }
4482
4483 else
4484 {
4485 /* p->balance == +1; growth of left side balances the node. */
4486 p->balance = 0;
4487 break;
4488 }
4489 }
4490 else
4491 /* r == p->right */
4492 {
4493 int b;
4494
4495 if (! (b = p->balance))
4496 /* Growth propagation from right side. */
4497 p->balance++;
4498 else if (b > 0)
4499 {
4500 if (r->balance > 0)
4501 {
4502 /* L-Rotation */
4503
51723711 4504 if ((p->right = s = r->left))
57641239
RK
4505 s->parent = p;
4506
4507 r->left = p;
4508 p->balance = 0;
4509 r->balance = 0;
4510 s = p->parent;
4511 p->parent = r;
51723711 4512 if ((r->parent = s))
57641239
RK
4513 {
4514 if (s->left == p)
4515 s->left = r;
4516 else
4517 s->right = r;
4518 }
4519
4520 else
4521 case_stack->data.case_stmt.case_list = r;
4522 }
4523
4524 else
4525 /* r->balance == -1 */
4526 {
4527 /* RL-Rotation */
4528 int b2;
4529 struct case_node *t = r->left;
4530
51723711 4531 if ((p->right = s = t->left))
57641239
RK
4532 s->parent = p;
4533
4534 t->left = p;
4535
51723711 4536 if ((r->left = s = t->right))
57641239
RK
4537 s->parent = r;
4538
4539 t->right = r;
4540 b = t->balance;
4541 b2 = b < 0;
4542 r->balance = b2;
4543 b2 = -b2 - b;
4544 p->balance = b2;
4545 t->balance = 0;
4546 s = p->parent;
4547 p->parent = t;
4548 r->parent = t;
4549
51723711 4550 if ((t->parent = s))
57641239
RK
4551 {
4552 if (s->left == p)
4553 s->left = t;
4554 else
4555 s->right = t;
4556 }
4557
4558 else
4559 case_stack->data.case_stmt.case_list = t;
4560 }
4561 break;
4562 }
4563 else
4564 {
4565 /* p->balance == -1; growth of right side balances the node. */
4566 p->balance = 0;
4567 break;
4568 }
4569 }
4570
4571 r = p;
4572 p = p->parent;
4573 }
28d81abb
RK
4574
4575 return 0;
4576}
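
/* The balance field follows the usual AVL convention: height of the
   right subtree minus height of the left, kept within -1..+1.  An
   insertion that would push a node to -2 or +2 triggers one of the
   four rotations above.  E.g. adding the one-value cases 1, 2, 3 in
   that order:

	1	      1
	 \	       \	 L-rotation	  2
	  2		2	 at node 1 ->	 / \
			 \			1   3
			  3

   after which every node is balanced again, keeping lookups and
   further insertions logarithmic in the number of case labels.  */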
ca695ac9 4577
28d81abb 4578\f
94d6511c
PB
4579/* Returns the number of possible values of TYPE.
4580 Returns -1 if the number is unknown or variable.
4581 Returns -2 if the number does not fit in a HOST_WIDE_INT.
4582 Sets *SPARENESS to 2 if TYPE is an ENUMERAL_TYPE whose values
4583 do not increase monotonically (there may be duplicates);
4584 to 1 if the values increase monotonically, but not always by 1;
4585 otherwise sets it to 0. */
4586
4587HOST_WIDE_INT
4588all_cases_count (type, spareness)
4589 tree type;
4590 int *spareness;
4591{
69d4ca36 4592 HOST_WIDE_INT count;
94d6511c
PB
4593 *spareness = 0;
4594
4595 switch (TREE_CODE (type))
4596 {
4597 tree t;
4598 case BOOLEAN_TYPE:
4599 count = 2;
4600 break;
4601 case CHAR_TYPE:
4602 count = 1 << BITS_PER_UNIT;
4603 break;
4604 default:
4605 case INTEGER_TYPE:
4606 if (TREE_CODE (TYPE_MIN_VALUE (type)) != INTEGER_CST
e1ee5cdc 4607 || TYPE_MAX_VALUE (type) == NULL
c02aebe2 4608 || TREE_CODE (TYPE_MAX_VALUE (type)) != INTEGER_CST)
94d6511c
PB
4609 return -1;
4610 else
4611 {
4612 /* count
4613 = TREE_INT_CST_LOW (TYPE_MAX_VALUE (type))
4614 - TREE_INT_CST_LOW (TYPE_MIN_VALUE (type)) + 1
0f41302f 4615 but with overflow checking. */
94d6511c
PB
4616 tree mint = TYPE_MIN_VALUE (type);
4617 tree maxt = TYPE_MAX_VALUE (type);
4618 HOST_WIDE_INT lo, hi;
4619	  neg_double (TREE_INT_CST_LOW (mint), TREE_INT_CST_HIGH (mint),
4620		      &lo, &hi);
4621	  add_double (TREE_INT_CST_LOW (maxt), TREE_INT_CST_HIGH (maxt),
4622 lo, hi, &lo, &hi);
4623 add_double (lo, hi, 1, 0, &lo, &hi);
4624 if (hi != 0 || lo < 0)
4625 return -2;
4626 count = lo;
4627 }
4628 break;
4629 case ENUMERAL_TYPE:
4630 count = 0;
4631 for (t = TYPE_VALUES (type); t != NULL_TREE; t = TREE_CHAIN (t))
4632 {
4633 if (TREE_CODE (TYPE_MIN_VALUE (type)) != INTEGER_CST
4634 || TREE_CODE (TREE_VALUE (t)) != INTEGER_CST
4635 || TREE_INT_CST_LOW (TYPE_MIN_VALUE (type)) + count
4636 != TREE_INT_CST_LOW (TREE_VALUE (t)))
4637 *spareness = 1;
4638 count++;
4639 }
4640 if (*spareness == 1)
4641 {
4642 tree prev = TREE_VALUE (TYPE_VALUES (type));
4643 for (t = TYPE_VALUES (type); t = TREE_CHAIN (t), t != NULL_TREE; )
4644 {
4645 if (! tree_int_cst_lt (prev, TREE_VALUE (t)))
4646 {
4647 *spareness = 2;
4648 break;
4649 }
4650 prev = TREE_VALUE (t);
4651 }
4652
4653 }
4654 }
4655 return count;
4656}
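
/* Illustrative classifications, assuming a C front end's usual enum
   layout:

	enum a { X, Y, Z };		(returns 3, *spareness == 0)
	enum b { P = 1, Q = 4 };	(returns 2, *spareness == 1)
	enum c { M = 2, N = 1 };	(returns 2, *spareness == 2)

   For plain integer index types the count is TYPE_MAX_VALUE -
   TYPE_MIN_VALUE + 1, computed with the double-word arithmetic above
   so that overflow yields -2 rather than a wrong answer.  */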
4657
4658
4659#define BITARRAY_TEST(ARRAY, INDEX) \
0f41302f
MS
4660 ((ARRAY)[(unsigned) (INDEX) / HOST_BITS_PER_CHAR]\
4661 & (1 << ((unsigned) (INDEX) % HOST_BITS_PER_CHAR)))
94d6511c 4662#define BITARRAY_SET(ARRAY, INDEX) \
0f41302f
MS
4663 ((ARRAY)[(unsigned) (INDEX) / HOST_BITS_PER_CHAR]\
4664 |= 1 << ((unsigned) (INDEX) % HOST_BITS_PER_CHAR))
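
/* These treat a plain char array as a flat bit vector; a usage sketch
   (HOST_BITS_PER_CHAR is normally 8):

	unsigned char seen[(count + HOST_BITS_PER_CHAR - 1)
			   / HOST_BITS_PER_CHAR];
	bzero (seen, sizeof seen);
	BITARRAY_SET (seen, 12);
	if (BITARRAY_TEST (seen, 12))	(nonzero after the set above)
	  ...
*/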
94d6511c
PB
4665
4666/* Set the elements of the bitstring CASES_SEEN (which has length COUNT),
4667 with the case values we have seen, assuming the case expression
4668 has the given TYPE.
4669 SPARSENESS is as determined by all_cases_count.
4670
9faa82d8 4671 The time needed is proportional to COUNT, unless
94d6511c
PB
4672 SPARSENESS is 2, in which case quadratic time is needed. */
4673
df03cab5 4674void
94d6511c
PB
4675mark_seen_cases (type, cases_seen, count, sparseness)
4676 tree type;
4677 unsigned char *cases_seen;
4678 long count;
4679 int sparseness;
4680{
94d6511c
PB
4681 tree next_node_to_try = NULL_TREE;
4682 long next_node_offset = 0;
4683
5720c7e7 4684 register struct case_node *n, *root = case_stack->data.case_stmt.case_list;
94d6511c
PB
4685 tree val = make_node (INTEGER_CST);
4686 TREE_TYPE (val) = type;
5720c7e7
RK
4687 if (! root)
4688 ; /* Do nothing */
4689 else if (sparseness == 2)
94d6511c 4690 {
5720c7e7
RK
4691 tree t;
4692 HOST_WIDE_INT xlo;
4693
4694 /* This less efficient loop is only needed to handle
4695 duplicate case values (multiple enum constants
4696 with the same value). */
4697 TREE_TYPE (val) = TREE_TYPE (root->low);
4698 for (t = TYPE_VALUES (type), xlo = 0; t != NULL_TREE;
4699 t = TREE_CHAIN (t), xlo++)
94d6511c 4700 {
5720c7e7
RK
4701 TREE_INT_CST_LOW (val) = TREE_INT_CST_LOW (TREE_VALUE (t));
4702 TREE_INT_CST_HIGH (val) = TREE_INT_CST_HIGH (TREE_VALUE (t));
4703 n = root;
4704 do
94d6511c 4705 {
5720c7e7
RK
4706 /* Keep going past elements distinctly greater than VAL. */
4707 if (tree_int_cst_lt (val, n->low))
4708 n = n->left;
4709
4710 /* or distinctly less than VAL. */
4711 else if (tree_int_cst_lt (n->high, val))
4712 n = n->right;
4713
4714 else
94d6511c 4715 {
5720c7e7
RK
4716 /* We have found a matching range. */
4717 BITARRAY_SET (cases_seen, xlo);
4718 break;
94d6511c
PB
4719 }
4720 }
5720c7e7
RK
4721 while (n);
4722 }
4723 }
4724 else
4725 {
4726 if (root->left)
4727 case_stack->data.case_stmt.case_list = root = case_tree2list (root, 0);
4728 for (n = root; n; n = n->right)
4729 {
4730 TREE_INT_CST_LOW (val) = TREE_INT_CST_LOW (n->low);
4731 TREE_INT_CST_HIGH (val) = TREE_INT_CST_HIGH (n->low);
4732	  while (! tree_int_cst_lt (n->high, val))
94d6511c 4733 {
5720c7e7
RK
4734 /* Calculate (into xlo) the "offset" of the integer (val).
4735 The element with lowest value has offset 0, the next smallest
4736 element has offset 1, etc. */
4737
4738 HOST_WIDE_INT xlo, xhi;
4739 tree t;
94d6511c
PB
4740 if (sparseness && TYPE_VALUES (type) != NULL_TREE)
4741 {
4742 /* The TYPE_VALUES will be in increasing order, so
4743 starting searching where we last ended. */
4744 t = next_node_to_try;
4745 xlo = next_node_offset;
4746 xhi = 0;
4747 for (;;)
4748 {
4749 if (t == NULL_TREE)
4750 {
4751 t = TYPE_VALUES (type);
4752 xlo = 0;
4753 }
4754 if (tree_int_cst_equal (val, TREE_VALUE (t)))
4755 {
4756 next_node_to_try = TREE_CHAIN (t);
4757 next_node_offset = xlo + 1;
4758 break;
4759 }
4760 xlo++;
4761 t = TREE_CHAIN (t);
4762 if (t == next_node_to_try)
5720c7e7
RK
4763 {
4764 xlo = -1;
4765 break;
4766 }
94d6511c
PB
4767 }
4768 }
4769 else
4770 {
4771 t = TYPE_MIN_VALUE (type);
4772 if (t)
4773 neg_double (TREE_INT_CST_LOW (t), TREE_INT_CST_HIGH (t),
4774 &xlo, &xhi);
4775 else
4776 xlo = xhi = 0;
4777 add_double (xlo, xhi,
4778 TREE_INT_CST_LOW (val), TREE_INT_CST_HIGH (val),
4779 &xlo, &xhi);
4780 }
4781
9dd53f1e 4782 if (xhi == 0 && xlo >= 0 && xlo < count)
94d6511c 4783 BITARRAY_SET (cases_seen, xlo);
5720c7e7
RK
4784 add_double (TREE_INT_CST_LOW (val), TREE_INT_CST_HIGH (val),
4785 1, 0,
4786 &TREE_INT_CST_LOW (val), &TREE_INT_CST_HIGH (val));
94d6511c 4787 }
94d6511c
PB
4788 }
4789 }
4790}
4791
28d81abb
RK
4792/* Called when the index of a switch statement is an enumerated type
4793 and there is no default label.
4794
4795 Checks that all enumeration literals are covered by the case
4796 expressions of a switch. Also, warn if there are any extra
4797 switch cases that are *not* elements of the enumerated type.
4798
4799 If all enumeration literals were covered by the case expressions,
4800 turn one of the expressions into the default expression since it should
4801 not be possible to fall through such a switch. */
4802
4803void
4804check_for_full_enumeration_handling (type)
4805 tree type;
4806{
4807 register struct case_node *n;
28d81abb 4808 register tree chain;
69d4ca36
RL
4809#if 0 /* variable used by 'if 0'ed code below. */
4810 register struct case_node **l;
28d81abb 4811 int all_values = 1;
69d4ca36 4812#endif
28d81abb 4813
0f41302f 4814 /* True iff the selector type is a numbered set mode. */
94d6511c
PB
4815 int sparseness = 0;
4816
0f41302f 4817 /* The number of possible selector values. */
94d6511c
PB
4818 HOST_WIDE_INT size;
4819
4820  /* For each possible selector value, a one iff it has been matched
0f41302f 4821 by a case value alternative. */
94d6511c
PB
4822 unsigned char *cases_seen;
4823
0f41302f 4824 /* The allocated size of cases_seen, in chars. */
94d6511c 4825 long bytes_needed;
94d6511c 4826
94d6511c
PB
4827 if (! warn_switch)
4828 return;
4829
4830 size = all_cases_count (type, &sparseness);
4831 bytes_needed = (size + HOST_BITS_PER_CHAR) / HOST_BITS_PER_CHAR;
28d81abb 4832
94d6511c 4833 if (size > 0 && size < 600000
0f41302f 4834 /* We deliberately use malloc here - not xmalloc. */
ad03007a 4835 && (cases_seen = (unsigned char *) malloc (bytes_needed)) != NULL)
28d81abb 4836 {
94d6511c
PB
4837 long i;
4838 tree v = TYPE_VALUES (type);
4839 bzero (cases_seen, bytes_needed);
28d81abb 4840
94d6511c
PB
4841 /* The time complexity of this code is normally O(N), where
4842 N being the number of members in the enumerated type.
4843 However, if type is a ENUMERAL_TYPE whose values do not
0f41302f 4844 increase monotonically, O(N*log(N)) time may be needed. */
94d6511c
PB
4845
4846 mark_seen_cases (type, cases_seen, size, sparseness);
4847
4848 for (i = 0; v != NULL_TREE && i < size; i++, v = TREE_CHAIN (v))
28d81abb 4849 {
94d6511c 4850	  if (BITARRAY_TEST (cases_seen, i) == 0)
1ddde1cd 4851 warning ("enumeration value `%s' not handled in switch",
94d6511c 4852 IDENTIFIER_POINTER (TREE_PURPOSE (v)));
28d81abb 4853 }
94d6511c
PB
4854
4855 free (cases_seen);
28d81abb
RK
4856 }
4857
4858 /* Now we go the other way around; we warn if there are case
ac2a9454 4859 expressions that don't correspond to enumerators. This can
28d81abb 4860 occur since C and C++ don't enforce type-checking of
0f41302f 4861 assignments to enumeration variables. */
28d81abb 4862
5720c7e7
RK
4863 if (case_stack->data.case_stmt.case_list
4864 && case_stack->data.case_stmt.case_list->left)
4865 case_stack->data.case_stmt.case_list
4866 = case_tree2list (case_stack->data.case_stmt.case_list, 0);
28d81abb
RK
4867 if (warn_switch)
4868 for (n = case_stack->data.case_stmt.case_list; n; n = n->right)
4869 {
4870 for (chain = TYPE_VALUES (type);
4871 chain && !tree_int_cst_equal (n->low, TREE_VALUE (chain));
4872 chain = TREE_CHAIN (chain))
4873 ;
4874
4875 if (!chain)
3b24f55b
RS
4876 {
4877 if (TYPE_NAME (type) == 0)
e016950d
KG
4878 warning ("case value `%ld' not in enumerated type",
4879 (long) TREE_INT_CST_LOW (n->low));
3b24f55b 4880 else
e016950d
KG
4881 warning ("case value `%ld' not in enumerated type `%s'",
4882 (long) TREE_INT_CST_LOW (n->low),
3b24f55b
RS
4883 IDENTIFIER_POINTER ((TREE_CODE (TYPE_NAME (type))
4884 == IDENTIFIER_NODE)
4885 ? TYPE_NAME (type)
4886 : DECL_NAME (TYPE_NAME (type))));
4887 }
1ddde1cd
RS
4888 if (!tree_int_cst_equal (n->low, n->high))
4889 {
4890 for (chain = TYPE_VALUES (type);
4891 chain && !tree_int_cst_equal (n->high, TREE_VALUE (chain));
4892 chain = TREE_CHAIN (chain))
4893 ;
4894
4895 if (!chain)
3b24f55b
RS
4896 {
4897 if (TYPE_NAME (type) == 0)
e016950d
KG
4898 warning ("case value `%ld' not in enumerated type",
4899 (long) TREE_INT_CST_LOW (n->high));
3b24f55b 4900 else
e016950d
KG
4901 warning ("case value `%ld' not in enumerated type `%s'",
4902 (long) TREE_INT_CST_LOW (n->high),
3b24f55b
RS
4903 IDENTIFIER_POINTER ((TREE_CODE (TYPE_NAME (type))
4904 == IDENTIFIER_NODE)
4905 ? TYPE_NAME (type)
4906 : DECL_NAME (TYPE_NAME (type))));
4907 }
1ddde1cd 4908 }
28d81abb
RK
4909 }
4910
ae8cb346
RS
4911#if 0
4912 /* ??? This optimization is disabled because it causes valid programs to
4913 fail. ANSI C does not guarantee that an expression with enum type
9faa82d8 4914 will have a value that is the same as one of the enumeration literals. */
ae8cb346 4915
28d81abb
RK
4916 /* If all values were found as case labels, make one of them the default
4917 label. Thus, this switch will never fall through. We arbitrarily pick
4918 the last one to make the default since this is likely the most
4919 efficient choice. */
4920
4921 if (all_values)
4922 {
4923 for (l = &case_stack->data.case_stmt.case_list;
4924 (*l)->right != 0;
4925 l = &(*l)->right)
4926 ;
4927
4928 case_stack->data.case_stmt.default_label = (*l)->code_label;
4929 *l = 0;
4930 }
ae8cb346 4931#endif /* 0 */
28d81abb 4932}
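
/* Concretely, with -Wswitch enabled:

	enum color { RED, GREEN, BLUE };
	int
	f (enum color c)
	{
	  switch (c)
	    {
	    case RED: return 0;
	    case 7:   return 1;		(7 is not an enumerator)
	    }
	  return 2;
	}

   draws "enumeration value `GREEN' not handled in switch" (likewise
   for BLUE) from the first loop above, and "case value `7' not in
   enumerated type `color'" from the second.  */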
ca695ac9 4933
28d81abb
RK
4934\f
4935/* Terminate a case (Pascal) or switch (C) statement
9ab0ddd7 4936 in which ORIG_INDEX is the expression to be tested.
28d81abb
RK
4937 Generate the code to test it and jump to the right place. */
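/* A typical call sequence from a front end looks roughly like this
   (a sketch only; expand_start_case and pushcase are defined earlier
   in this file, and the exact arguments depend on the front end):

	expand_start_case (1, index_expr, TREE_TYPE (index_expr),
			   "switch statement");
	... pushcase (value, converter, label_decl, &duplicate)
	    once per case label while expanding the switch body ...
	expand_end_case (index_expr);
 */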
4938
4939void
4940expand_end_case (orig_index)
4941 tree orig_index;
4942{
3474db0e 4943 tree minval, maxval, range, orig_minval;
28d81abb
RK
4944 rtx default_label = 0;
4945 register struct case_node *n;
85066503 4946 unsigned int count;
28d81abb 4947 rtx index;
ca695ac9 4948 rtx table_label;
28d81abb
RK
4949 int ncases;
4950 rtx *labelvec;
4951 register int i;
4952 rtx before_case;
4953 register struct nesting *thiscase = case_stack;
1b0cb6fc 4954 tree index_expr, index_type;
ca695ac9
JB
4955 int unsignedp;
4956
ca695ac9
JB
4957 table_label = gen_label_rtx ();
4958 index_expr = thiscase->data.case_stmt.index_expr;
1b0cb6fc
RK
4959 index_type = TREE_TYPE (index_expr);
4960 unsignedp = TREE_UNSIGNED (index_type);
28d81abb
RK
4961
4962 do_pending_stack_adjust ();
4963
feb60352
R
 4964	  /* This might get a spurious warning in the presence of a syntax error;
 4965	     it could be fixed by moving the call to check_seenlabel after the
 4966	     check for error_mark_node, and copying the code of check_seenlabel that
 4967	     deals with case_stack->data.case_stmt.line_number_status /
 4968	     restore_line_number_status in front of the call to end_cleanup_deferral;
 4969	     however, this might miss some useful warnings in the presence of
 4970	     non-syntax errors.  */
a11759a3
JR
4971 check_seenlabel ();
4972
28d81abb 4973 /* An ERROR_MARK occurs for various reasons including invalid data type. */
1b0cb6fc 4974 if (index_type != error_mark_node)
28d81abb
RK
4975 {
 4976	      /* If the switch expression was an enumerated type, check that all
 4977	 enumeration literals are covered by the cases.
 4978	 No sense trying this if there's a default case, however. */
4979
4980 if (!thiscase->data.case_stmt.default_label
4981 && TREE_CODE (TREE_TYPE (orig_index)) == ENUMERAL_TYPE
4982 && TREE_CODE (index_expr) != INTEGER_CST)
4983 check_for_full_enumeration_handling (TREE_TYPE (orig_index));
4984
28d81abb
RK
4985 /* If we don't have a default-label, create one here,
4986 after the body of the switch. */
4987 if (thiscase->data.case_stmt.default_label == 0)
4988 {
4989 thiscase->data.case_stmt.default_label
4990 = build_decl (LABEL_DECL, NULL_TREE, NULL_TREE);
4991 expand_label (thiscase->data.case_stmt.default_label);
4992 }
4993 default_label = label_rtx (thiscase->data.case_stmt.default_label);
4994
4995 before_case = get_last_insn ();
4996
5720c7e7
RK
4997 if (thiscase->data.case_stmt.case_list
4998 && thiscase->data.case_stmt.case_list->left)
b059139c
RK
4999 thiscase->data.case_stmt.case_list
 5000	 = case_tree2list (thiscase->data.case_stmt.case_list, 0);
5001
28d81abb
RK
5002 /* Simplify the case-list before we count it. */
5003 group_case_nodes (thiscase->data.case_stmt.case_list);
5004
5005 /* Get upper and lower bounds of case values.
5006 Also convert all the case values to the index expr's data type. */
5007
5008 count = 0;
5009 for (n = thiscase->data.case_stmt.case_list; n; n = n->right)
5010 {
5011 /* Check low and high label values are integers. */
5012 if (TREE_CODE (n->low) != INTEGER_CST)
5013 abort ();
5014 if (TREE_CODE (n->high) != INTEGER_CST)
5015 abort ();
5016
1b0cb6fc
RK
5017 n->low = convert (index_type, n->low);
5018 n->high = convert (index_type, n->high);
28d81abb
RK
5019
5020 /* Count the elements and track the largest and smallest
5021 of them (treating them as signed even if they are not). */
5022 if (count++ == 0)
5023 {
5024 minval = n->low;
5025 maxval = n->high;
5026 }
5027 else
5028 {
5029 if (INT_CST_LT (n->low, minval))
5030 minval = n->low;
5031 if (INT_CST_LT (maxval, n->high))
5032 maxval = n->high;
5033 }
5034 /* A range counts double, since it requires two compares. */
5035 if (! tree_int_cst_equal (n->low, n->high))
5036 count++;
5037 }
5038
3474db0e
RS
5039 orig_minval = minval;
5040
28d81abb
RK
5041 /* Compute span of values. */
5042 if (count != 0)
1b0cb6fc 5043 range = fold (build (MINUS_EXPR, index_type, maxval, minval));
28d81abb 5044
956d6950 5045 end_cleanup_deferral ();
4c581243 5046
1b0cb6fc 5047 if (count == 0)
28d81abb
RK
5048 {
5049 expand_expr (index_expr, const0_rtx, VOIDmode, 0);
5050 emit_queue ();
5051 emit_jump (default_label);
5052 }
3474db0e 5053
28d81abb
RK
 5054	      /* If the range of values is much bigger than the number of values,
 5055	 make a sequence of conditional branches instead of a dispatch table.
 5056	 If the switch-index is a constant, do it this way
 5057	 because we can optimize it.  */
4f73c5dd
TW
5058
5059#ifndef CASE_VALUES_THRESHOLD
28d81abb 5060#ifdef HAVE_casesi
4f73c5dd 5061#define CASE_VALUES_THRESHOLD (HAVE_casesi ? 4 : 5)
28d81abb 5062#else
4f73c5dd
TW
 5063   /* If the machine does not have a case insn that compares the
 5064      bounds, dispatch tables incur extra overhead, which raises
 5065      the threshold for using them. */
5066#define CASE_VALUES_THRESHOLD 5
5067#endif /* HAVE_casesi */
5068#endif /* CASE_VALUES_THRESHOLD */
5069
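      /* A worked example of the test below (a sketch; the threshold
	 depends on the target): case values {1, 2, 3, 4, 5, 1000} give
	 count == 6 and range == 999, and 999 > 10 * 6, so the dispatch
	 table would be mostly default entries and a sequence of
	 conditional branches is emitted instead.  A dense switch over
	 0..9 (count == 10, range == 9) falls through to the
	 dispatch-table code further down.  */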
5070 else if (TREE_INT_CST_HIGH (range) != 0
c84e2712 5071 || count < (unsigned int) CASE_VALUES_THRESHOLD
37366632
RK
5072 || ((unsigned HOST_WIDE_INT) (TREE_INT_CST_LOW (range))
5073 > 10 * count)
3f6fe18e
RK
5074#ifndef ASM_OUTPUT_ADDR_DIFF_ELT
5075 || flag_pic
5076#endif
28d81abb 5077 || TREE_CODE (index_expr) == INTEGER_CST
b4ac57ab 5078 /* These will reduce to a constant. */
28d81abb 5079 || (TREE_CODE (index_expr) == CALL_EXPR
de14fd73 5080 && TREE_CODE (TREE_OPERAND (index_expr, 0)) == ADDR_EXPR
28d81abb 5081 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (index_expr, 0), 0)) == FUNCTION_DECL
b4ac57ab
RS
5082 && DECL_FUNCTION_CODE (TREE_OPERAND (TREE_OPERAND (index_expr, 0), 0)) == BUILT_IN_CLASSIFY_TYPE)
5083 || (TREE_CODE (index_expr) == COMPOUND_EXPR
5084 && TREE_CODE (TREE_OPERAND (index_expr, 1)) == INTEGER_CST))
28d81abb 5085 {
37366632 5086 index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0);
28d81abb
RK
5087
 5088	  /* If the index is a short or char for which we do not have
 5089	     an insn to handle comparisons directly, convert it to
 5090	     a full integer now, rather than letting each comparison
 5091	     generate the conversion. */
5092
5093 if (GET_MODE_CLASS (GET_MODE (index)) == MODE_INT
5094 && (cmp_optab->handlers[(int) GET_MODE(index)].insn_code
5095 == CODE_FOR_nothing))
5096 {
5097 enum machine_mode wider_mode;
5098 for (wider_mode = GET_MODE (index); wider_mode != VOIDmode;
5099 wider_mode = GET_MODE_WIDER_MODE (wider_mode))
5100 if (cmp_optab->handlers[(int) wider_mode].insn_code
5101 != CODE_FOR_nothing)
5102 {
5103 index = convert_to_mode (wider_mode, index, unsignedp);
5104 break;
5105 }
5106 }
5107
5108 emit_queue ();
5109 do_pending_stack_adjust ();
5110
5111 index = protect_from_queue (index, 0);
5112 if (GET_CODE (index) == MEM)
5113 index = copy_to_reg (index);
5114 if (GET_CODE (index) == CONST_INT
5115 || TREE_CODE (index_expr) == INTEGER_CST)
5116 {
5117 /* Make a tree node with the proper constant value
5118 if we don't already have one. */
5119 if (TREE_CODE (index_expr) != INTEGER_CST)
5120 {
5121 index_expr
5122 = build_int_2 (INTVAL (index),
e9a042b6 5123 unsignedp || INTVAL (index) >= 0 ? 0 : -1);
1b0cb6fc 5124 index_expr = convert (index_type, index_expr);
28d81abb
RK
5125 }
5126
 5127	      /* For constant index expressions we need only
 5128	 issue an unconditional branch to the appropriate
 5129	 target code. The job of removing any unreachable
 5130	 code is left to the optimization phase if the
 5131	 "-O" option is specified. */
1b0cb6fc
RK
5132 for (n = thiscase->data.case_stmt.case_list; n; n = n->right)
5133 if (! tree_int_cst_lt (index_expr, n->low)
5134 && ! tree_int_cst_lt (n->high, index_expr))
5135 break;
5136
28d81abb
RK
5137 if (n)
5138 emit_jump (label_rtx (n->code_label));
5139 else
5140 emit_jump (default_label);
5141 }
5142 else
5143 {
5144 /* If the index expression is not constant we generate
5145 a binary decision tree to select the appropriate
5146 target code. This is done as follows:
5147
5148 The list of cases is rearranged into a binary tree,
5149 nearly optimal assuming equal probability for each case.
5150
5151 The tree is transformed into RTL, eliminating
5152 redundant test conditions at the same time.
5153
 5154	 If program flow could reach the end of the
 5155	 decision tree, an unconditional jump to the
 5156	 default code is emitted. */
5157
5158 use_cost_table
5159 = (TREE_CODE (TREE_TYPE (orig_index)) != ENUMERAL_TYPE
28d81abb 5160 && estimate_case_costs (thiscase->data.case_stmt.case_list));
37366632
RK
5161 balance_case_nodes (&thiscase->data.case_stmt.case_list,
5162 NULL_PTR);
28d81abb 5163 emit_case_nodes (index, thiscase->data.case_stmt.case_list,
1b0cb6fc 5164 default_label, index_type);
28d81abb
RK
5165 emit_jump_if_reachable (default_label);
5166 }
5167 }
5168 else
5169 {
5170 int win = 0;
5171#ifdef HAVE_casesi
5172 if (HAVE_casesi)
5173 {
c4fcf531 5174 enum machine_mode index_mode = SImode;
5130a5cc 5175 int index_bits = GET_MODE_BITSIZE (index_mode);
086f237d
JW
5176 rtx op1, op2;
5177 enum machine_mode op_mode;
c4fcf531 5178
28d81abb 5179 /* Convert the index to SImode. */
1b0cb6fc 5180 if (GET_MODE_BITSIZE (TYPE_MODE (index_type))
c4fcf531 5181 > GET_MODE_BITSIZE (index_mode))
28d81abb 5182 {
1b0cb6fc 5183 enum machine_mode omode = TYPE_MODE (index_type);
37366632 5184 rtx rangertx = expand_expr (range, NULL_RTX, VOIDmode, 0);
af2682ef
RS
5185
5186 /* We must handle the endpoints in the original mode. */
1b0cb6fc 5187 index_expr = build (MINUS_EXPR, index_type,
28d81abb
RK
5188 index_expr, minval);
5189 minval = integer_zero_node;
37366632 5190 index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0);
c5d5d461
JL
5191 emit_cmp_and_jump_insns (rangertx, index, LTU, NULL_RTX,
5192 omode, 1, 0, default_label);
af2682ef
RS
5193 /* Now we can safely truncate. */
5194 index = convert_to_mode (index_mode, index, 0);
5195 }
5196 else
5197 {
1b0cb6fc 5198 if (TYPE_MODE (index_type) != index_mode)
d3b35d75
RK
5199 {
5200 index_expr = convert (type_for_size (index_bits, 0),
5201 index_expr);
5202 index_type = TREE_TYPE (index_expr);
5203 }
5204
37366632 5205 index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0);
28d81abb 5206 }
28d81abb
RK
5207 emit_queue ();
5208 index = protect_from_queue (index, 0);
5209 do_pending_stack_adjust ();
5210
086f237d
JW
5211 op_mode = insn_operand_mode[(int)CODE_FOR_casesi][0];
5212 if (! (*insn_operand_predicate[(int)CODE_FOR_casesi][0])
5213 (index, op_mode))
5214 index = copy_to_mode_reg (op_mode, index);
5215
5216 op1 = expand_expr (minval, NULL_RTX, VOIDmode, 0);
5217
5218 op_mode = insn_operand_mode[(int)CODE_FOR_casesi][1];
5219 if (! (*insn_operand_predicate[(int)CODE_FOR_casesi][1])
5220 (op1, op_mode))
5221 op1 = copy_to_mode_reg (op_mode, op1);
5222
5223 op2 = expand_expr (range, NULL_RTX, VOIDmode, 0);
5224
5225 op_mode = insn_operand_mode[(int)CODE_FOR_casesi][2];
5226 if (! (*insn_operand_predicate[(int)CODE_FOR_casesi][2])
5227 (op2, op_mode))
5228 op2 = copy_to_mode_reg (op_mode, op2);
5229
5230 emit_jump_insn (gen_casesi (index, op1, op2,
28d81abb
RK
5231 table_label, default_label));
5232 win = 1;
5233 }
5234#endif
5235#ifdef HAVE_tablejump
5236 if (! win && HAVE_tablejump)
5237 {
5238 index_expr = convert (thiscase->data.case_stmt.nominal_type,
1b0cb6fc 5239 fold (build (MINUS_EXPR, index_type,
b4ac57ab 5240 index_expr, minval)));
d3b35d75 5241 index_type = TREE_TYPE (index_expr);
37366632 5242 index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0);
28d81abb 5243 emit_queue ();
af2682ef 5244 index = protect_from_queue (index, 0);
28d81abb
RK
5245 do_pending_stack_adjust ();
5246
1b0cb6fc 5247 do_tablejump (index, TYPE_MODE (index_type),
37366632 5248 expand_expr (range, NULL_RTX, VOIDmode, 0),
28d81abb
RK
5249 table_label, default_label);
5250 win = 1;
5251 }
5252#endif
5253 if (! win)
5254 abort ();
5255
5256 /* Get table of labels to jump to, in order of case index. */
5257
5258 ncases = TREE_INT_CST_LOW (range) + 1;
5259 labelvec = (rtx *) alloca (ncases * sizeof (rtx));
4c9a05bc 5260 bzero ((char *) labelvec, ncases * sizeof (rtx));
28d81abb
RK
5261
5262 for (n = thiscase->data.case_stmt.case_list; n; n = n->right)
5263 {
37366632 5264 register HOST_WIDE_INT i
3474db0e 5265 = TREE_INT_CST_LOW (n->low) - TREE_INT_CST_LOW (orig_minval);
28d81abb
RK
5266
5267 while (1)
5268 {
5269 labelvec[i]
38a448ca 5270 = gen_rtx_LABEL_REF (Pmode, label_rtx (n->code_label));
3474db0e 5271 if (i + TREE_INT_CST_LOW (orig_minval)
28d81abb
RK
5272 == TREE_INT_CST_LOW (n->high))
5273 break;
5274 i++;
5275 }
5276 }
5277
5278 /* Fill in the gaps with the default. */
5279 for (i = 0; i < ncases; i++)
5280 if (labelvec[i] == 0)
38a448ca 5281 labelvec[i] = gen_rtx_LABEL_REF (Pmode, default_label);
28d81abb
RK
5282
 5283	      /* Output the table. */
5284 emit_label (table_label);
5285
18543a22 5286 if (CASE_VECTOR_PC_RELATIVE || flag_pic)
38a448ca
RH
5287 emit_jump_insn (gen_rtx_ADDR_DIFF_VEC (CASE_VECTOR_MODE,
5288 gen_rtx_LABEL_REF (Pmode, table_label),
33f7f353
JR
5289 gen_rtvec_v (ncases, labelvec),
5290 const0_rtx, const0_rtx, 0));
28d81abb 5291 else
38a448ca
RH
5292 emit_jump_insn (gen_rtx_ADDR_VEC (CASE_VECTOR_MODE,
5293 gen_rtvec_v (ncases, labelvec)));
28d81abb
RK
5294
5295 /* If the case insn drops through the table,
5296 after the table we must jump to the default-label.
5297 Otherwise record no drop-through after the table. */
5298#ifdef CASE_DROPS_THROUGH
5299 emit_jump (default_label);
5300#else
5301 emit_barrier ();
5302#endif
5303 }
5304
915f619f
JW
5305 before_case = squeeze_notes (NEXT_INSN (before_case), get_last_insn ());
5306 reorder_insns (before_case, get_last_insn (),
28d81abb
RK
5307 thiscase->data.case_stmt.start);
5308 }
4c581243 5309 else
956d6950 5310 end_cleanup_deferral ();
1b0cb6fc 5311
28d81abb
RK
5312 if (thiscase->exit_label)
5313 emit_label (thiscase->exit_label);
5314
5315 POPSTACK (case_stack);
5316
5317 free_temp_slots ();
5318}
5319
57641239
RK
5320/* Convert the tree NODE into a list linked by the right field, with the left
5321 field zeroed. RIGHT is used for recursion; it is a list to be placed
5322 rightmost in the resulting list. */
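/* For example (numbers standing for whole case nodes), the tree

		4
	       / \
	      2   6

   becomes the list 2 -> 4 -> 6: an in-order walk chained through the
   right fields, with every left field cleared.  */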
5323
5324static struct case_node *
5325case_tree2list (node, right)
5326 struct case_node *node, *right;
5327{
5328 struct case_node *left;
5329
5330 if (node->right)
5331 right = case_tree2list (node->right, right);
5332
5333 node->right = right;
51723711 5334 if ((left = node->left))
57641239
RK
5335 {
5336 node->left = 0;
5337 return case_tree2list (left, node);
5338 }
5339
5340 return node;
5341}
ca695ac9 5342
28d81abb
RK
5343/* Generate code to jump to LABEL if OP1 and OP2 are equal. */
5344
5345static void
5346do_jump_if_equal (op1, op2, label, unsignedp)
5347 rtx op1, op2, label;
5348 int unsignedp;
5349{
5350 if (GET_CODE (op1) == CONST_INT
5351 && GET_CODE (op2) == CONST_INT)
5352 {
5353 if (INTVAL (op1) == INTVAL (op2))
5354 emit_jump (label);
5355 }
5356 else
5357 {
5358 enum machine_mode mode = GET_MODE (op1);
5359 if (mode == VOIDmode)
5360 mode = GET_MODE (op2);
c5d5d461
JL
5361 emit_cmp_and_jump_insns (op1, op2, EQ, NULL_RTX, mode, unsignedp,
5362 0, label);
28d81abb
RK
5363 }
5364}
5365\f
5366/* Not all case values are encountered equally. This function
5367 uses a heuristic to weight case labels, in cases where that
5368 looks like a reasonable thing to do.
5369
5370 Right now, all we try to guess is text, and we establish the
5371 following weights:
5372
5373 chars above space: 16
5374 digits: 16
5375 default: 12
5376 space, punct: 8
5377 tab: 4
5378 newline: 2
5379 other "\" chars: 1
5380 remaining chars: 0
5381
5382 If we find any cases in the switch that are not either -1 or in the range
5383 of valid ASCII characters, or are control characters other than those
5384 commonly used with "\", don't treat this switch scanning text.
5385
5386 Return 1 if these nodes are suitable for cost estimation, otherwise
5387 return 0. */
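/* For instance, a switch whose case values are '0'..'9', 'a' and '\n'
   is taken to be scanning text and is weighted as above, while a single
   case value of 200 -- outside the -1..127 window -- makes this
   function return 0, disabling the cost table for the whole switch.  */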
5388
5389static int
5390estimate_case_costs (node)
5391 case_node_ptr node;
5392{
5393 tree min_ascii = build_int_2 (-1, -1);
5394 tree max_ascii = convert (TREE_TYPE (node->high), build_int_2 (127, 0));
5395 case_node_ptr n;
5396 int i;
5397
5398 /* If we haven't already made the cost table, make it now. Note that the
5399 lower bound of the table is -1, not zero. */
5400
5401 if (cost_table == NULL)
5402 {
5403 cost_table = ((short *) xmalloc (129 * sizeof (short))) + 1;
4c9a05bc 5404 bzero ((char *) (cost_table - 1), 129 * sizeof (short));
28d81abb
RK
5405
5406 for (i = 0; i < 128; i++)
5407 {
e9a780ec 5408 if (ISALNUM (i))
28d81abb 5409 cost_table[i] = 16;
e9a780ec 5410 else if (ISPUNCT (i))
28d81abb 5411 cost_table[i] = 8;
e9a780ec 5412 else if (ISCNTRL (i))
28d81abb
RK
5413 cost_table[i] = -1;
5414 }
5415
5416 cost_table[' '] = 8;
5417 cost_table['\t'] = 4;
5418 cost_table['\0'] = 4;
5419 cost_table['\n'] = 2;
5420 cost_table['\f'] = 1;
5421 cost_table['\v'] = 1;
5422 cost_table['\b'] = 1;
5423 }
5424
5425 /* See if all the case expressions look like text. It is text if the
5426 constant is >= -1 and the highest constant is <= 127. Do all comparisons
5427 as signed arithmetic since we don't want to ever access cost_table with a
5428 value less than -1. Also check that none of the constants in a range
5429 are strange control characters. */
5430
5431 for (n = node; n; n = n->right)
5432 {
5433 if ((INT_CST_LT (n->low, min_ascii)) || INT_CST_LT (max_ascii, n->high))
5434 return 0;
5435
5436 for (i = TREE_INT_CST_LOW (n->low); i <= TREE_INT_CST_LOW (n->high); i++)
5437 if (cost_table[i] < 0)
5438 return 0;
5439 }
5440
5441 /* All interesting values are within the range of interesting
5442 ASCII characters. */
5443 return 1;
5444}
5445
5446/* Scan an ordered list of case nodes
5447 combining those with consecutive values or ranges.
5448
 5449	   E.g., three separate entries 1: 2: 3: become one entry 1..3:  */
5450
5451static void
5452group_case_nodes (head)
5453 case_node_ptr head;
5454{
5455 case_node_ptr node = head;
5456
5457 while (node)
5458 {
5459 rtx lb = next_real_insn (label_rtx (node->code_label));
ad7e369f 5460 rtx lb2;
28d81abb
RK
5461 case_node_ptr np = node;
5462
5463 /* Try to group the successors of NODE with NODE. */
5464 while (((np = np->right) != 0)
5465 /* Do they jump to the same place? */
ad7e369f
JL
5466 && ((lb2 = next_real_insn (label_rtx (np->code_label))) == lb
5467 || (lb != 0 && lb2 != 0
5468 && simplejump_p (lb)
5469 && simplejump_p (lb2)
5470 && rtx_equal_p (SET_SRC (PATTERN (lb)),
5471 SET_SRC (PATTERN (lb2)))))
28d81abb
RK
5472 /* Are their ranges consecutive? */
5473 && tree_int_cst_equal (np->low,
5474 fold (build (PLUS_EXPR,
5475 TREE_TYPE (node->high),
5476 node->high,
5477 integer_one_node)))
5478 /* An overflow is not consecutive. */
5479 && tree_int_cst_lt (node->high,
5480 fold (build (PLUS_EXPR,
5481 TREE_TYPE (node->high),
5482 node->high,
5483 integer_one_node))))
5484 {
5485 node->high = np->high;
5486 }
5487 /* NP is the first node after NODE which can't be grouped with it.
5488 Delete the nodes in between, and move on to that node. */
5489 node->right = np;
5490 node = np;
5491 }
5492}
5493
5494/* Take an ordered list of case nodes
5495 and transform them into a near optimal binary tree,
6dc42e49 5496 on the assumption that any target code selection value is as
28d81abb
RK
5497 likely as any other.
5498
5499 The transformation is performed by splitting the ordered
5500 list into two equal sections plus a pivot. The parts are
5501 then attached to the pivot as left and right branches. Each
38e01259 5502 branch is then transformed recursively. */
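/* For example (ignoring the cost table), the ordered list of
   single-value nodes 1 -> 2 -> 3 -> 4 -> 5 is split at 3, giving

		  3
		 / \
	    1 -> 2   4 -> 5

   and the two halves are then balanced recursively in the same way.  */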
28d81abb
RK
5503
5504static void
5505balance_case_nodes (head, parent)
5506 case_node_ptr *head;
5507 case_node_ptr parent;
5508{
5509 register case_node_ptr np;
5510
5511 np = *head;
5512 if (np)
5513 {
5514 int cost = 0;
5515 int i = 0;
5516 int ranges = 0;
5517 register case_node_ptr *npp;
5518 case_node_ptr left;
5519
5520 /* Count the number of entries on branch. Also count the ranges. */
5521
5522 while (np)
5523 {
5524 if (!tree_int_cst_equal (np->low, np->high))
5525 {
5526 ranges++;
5527 if (use_cost_table)
5528 cost += cost_table[TREE_INT_CST_LOW (np->high)];
5529 }
5530
5531 if (use_cost_table)
5532 cost += cost_table[TREE_INT_CST_LOW (np->low)];
5533
5534 i++;
5535 np = np->right;
5536 }
5537
5538 if (i > 2)
5539 {
5540 /* Split this list if it is long enough for that to help. */
5541 npp = head;
5542 left = *npp;
5543 if (use_cost_table)
5544 {
 5545	  /* Find the place in the list that bisects the list's total cost;
 5546	     here I gets half the total cost. */
5547 int n_moved = 0;
5548 i = (cost + 1) / 2;
5549 while (1)
5550 {
5551 /* Skip nodes while their cost does not reach that amount. */
5552 if (!tree_int_cst_equal ((*npp)->low, (*npp)->high))
5553 i -= cost_table[TREE_INT_CST_LOW ((*npp)->high)];
5554 i -= cost_table[TREE_INT_CST_LOW ((*npp)->low)];
5555 if (i <= 0)
5556 break;
5557 npp = &(*npp)->right;
5558 n_moved += 1;
5559 }
5560 if (n_moved == 0)
5561 {
5562 /* Leave this branch lopsided, but optimize left-hand
5563 side and fill in `parent' fields for right-hand side. */
5564 np = *head;
5565 np->parent = parent;
5566 balance_case_nodes (&np->left, np);
5567 for (; np->right; np = np->right)
5568 np->right->parent = np;
5569 return;
5570 }
5571 }
5572 /* If there are just three nodes, split at the middle one. */
5573 else if (i == 3)
5574 npp = &(*npp)->right;
5575 else
5576 {
5577 /* Find the place in the list that bisects the list's total cost,
5578 where ranges count as 2.
5579 Here I gets half the total cost. */
5580 i = (i + ranges + 1) / 2;
5581 while (1)
5582 {
5583 /* Skip nodes while their cost does not reach that amount. */
5584 if (!tree_int_cst_equal ((*npp)->low, (*npp)->high))
5585 i--;
5586 i--;
5587 if (i <= 0)
5588 break;
5589 npp = &(*npp)->right;
5590 }
5591 }
5592 *head = np = *npp;
5593 *npp = 0;
5594 np->parent = parent;
5595 np->left = left;
5596
5597 /* Optimize each of the two split parts. */
5598 balance_case_nodes (&np->left, np);
5599 balance_case_nodes (&np->right, np);
5600 }
5601 else
5602 {
5603 /* Else leave this branch as one level,
5604 but fill in `parent' fields. */
5605 np = *head;
5606 np->parent = parent;
5607 for (; np->right; np = np->right)
5608 np->right->parent = np;
5609 }
5610 }
5611}
5612\f
5613/* Search the parent sections of the case node tree
5614 to see if a test for the lower bound of NODE would be redundant.
5615 INDEX_TYPE is the type of the index expression.
5616
5617 The instructions to generate the case decision tree are
5618 output in the same order as nodes are processed so it is
5619 known that if a parent node checks the range of the current
5620 node minus one that the current node is bounded at its lower
5621 span. Thus the test would be redundant. */
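/* For example, if an ancestor node covered the range 47..49 and
   branched here for larger values, then for a node covering 50..60
   the ancestor's high bound of 49 is exactly low - 1, and the
   `index >= 50' test can be omitted.  */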
5622
5623static int
5624node_has_low_bound (node, index_type)
5625 case_node_ptr node;
5626 tree index_type;
5627{
5628 tree low_minus_one;
5629 case_node_ptr pnode;
5630
5631 /* If the lower bound of this node is the lowest value in the index type,
5632 we need not test it. */
5633
5634 if (tree_int_cst_equal (node->low, TYPE_MIN_VALUE (index_type)))
5635 return 1;
5636
5637 /* If this node has a left branch, the value at the left must be less
5638 than that at this node, so it cannot be bounded at the bottom and
5639 we need not bother testing any further. */
5640
5641 if (node->left)
5642 return 0;
5643
5644 low_minus_one = fold (build (MINUS_EXPR, TREE_TYPE (node->low),
5645 node->low, integer_one_node));
5646
5647 /* If the subtraction above overflowed, we can't verify anything.
5648 Otherwise, look for a parent that tests our value - 1. */
5649
5650 if (! tree_int_cst_lt (low_minus_one, node->low))
5651 return 0;
5652
5653 for (pnode = node->parent; pnode; pnode = pnode->parent)
5654 if (tree_int_cst_equal (low_minus_one, pnode->high))
5655 return 1;
5656
5657 return 0;
5658}
5659
5660/* Search the parent sections of the case node tree
5661 to see if a test for the upper bound of NODE would be redundant.
5662 INDEX_TYPE is the type of the index expression.
5663
5664 The instructions to generate the case decision tree are
5665 output in the same order as nodes are processed so it is
5666 known that if a parent node checks the range of the current
5667 node plus one that the current node is bounded at its upper
5668 span. Thus the test would be redundant. */
5669
5670static int
5671node_has_high_bound (node, index_type)
5672 case_node_ptr node;
5673 tree index_type;
5674{
5675 tree high_plus_one;
5676 case_node_ptr pnode;
5677
e1ee5cdc
RH
5678 /* If there is no upper bound, obviously no test is needed. */
5679
5680 if (TYPE_MAX_VALUE (index_type) == NULL)
5681 return 1;
5682
28d81abb
RK
5683 /* If the upper bound of this node is the highest value in the type
5684 of the index expression, we need not test against it. */
5685
5686 if (tree_int_cst_equal (node->high, TYPE_MAX_VALUE (index_type)))
5687 return 1;
5688
5689 /* If this node has a right branch, the value at the right must be greater
5690 than that at this node, so it cannot be bounded at the top and
5691 we need not bother testing any further. */
5692
5693 if (node->right)
5694 return 0;
5695
5696 high_plus_one = fold (build (PLUS_EXPR, TREE_TYPE (node->high),
5697 node->high, integer_one_node));
5698
5699 /* If the addition above overflowed, we can't verify anything.
5700 Otherwise, look for a parent that tests our value + 1. */
5701
5702 if (! tree_int_cst_lt (node->high, high_plus_one))
5703 return 0;
5704
5705 for (pnode = node->parent; pnode; pnode = pnode->parent)
5706 if (tree_int_cst_equal (high_plus_one, pnode->low))
5707 return 1;
5708
5709 return 0;
5710}
5711
5712/* Search the parent sections of the
5713 case node tree to see if both tests for the upper and lower
5714 bounds of NODE would be redundant. */
5715
5716static int
5717node_is_bounded (node, index_type)
5718 case_node_ptr node;
5719 tree index_type;
5720{
5721 return (node_has_low_bound (node, index_type)
5722 && node_has_high_bound (node, index_type));
5723}
5724
5725/* Emit an unconditional jump to LABEL unless it would be dead code. */
5726
5727static void
5728emit_jump_if_reachable (label)
5729 rtx label;
5730{
5731 if (GET_CODE (get_last_insn ()) != BARRIER)
5732 emit_jump (label);
5733}
5734\f
5735/* Emit step-by-step code to select a case for the value of INDEX.
5736 The thus generated decision tree follows the form of the
5737 case-node binary tree NODE, whose nodes represent test conditions.
5738 INDEX_TYPE is the type of the index of the switch.
5739
5740 Care is taken to prune redundant tests from the decision tree
5741 by detecting any boundary conditions already checked by
5742 emitted rtx. (See node_has_high_bound, node_has_low_bound
5743 and node_is_bounded, above.)
5744
5745 Where the test conditions can be shown to be redundant we emit
5746 an unconditional jump to the target code. As a further
5747 optimization, the subordinates of a tree node are examined to
5748 check for bounded nodes. In this case conditional and/or
5749 unconditional jumps as a result of the boundary check for the
 5750	   current node are arranged to target the subordinate's associated
38e01259 5751	   code for out-of-bound conditions on the current node.
28d81abb 5752
f72aed24 5753 We can assume that when control reaches the code generated here,
28d81abb
RK
5754 the index value has already been compared with the parents
5755 of this node, and determined to be on the same side of each parent
5756 as this node is. Thus, if this node tests for the value 51,
5757 and a parent tested for 52, we don't need to consider
5758 the possibility of a value greater than 51. If another parent
5759 tests for the value 50, then this node need not test anything. */
5760
5761static void
5762emit_case_nodes (index, node, default_label, index_type)
5763 rtx index;
5764 case_node_ptr node;
5765 rtx default_label;
5766 tree index_type;
5767{
5768 /* If INDEX has an unsigned type, we must make unsigned branches. */
5769 int unsignedp = TREE_UNSIGNED (index_type);
ea90cb62 5770 typedef rtx rtx_fn ();
28d81abb
RK
5771 enum machine_mode mode = GET_MODE (index);
5772
5773 /* See if our parents have already tested everything for us.
5774 If they have, emit an unconditional jump for this node. */
5775 if (node_is_bounded (node, index_type))
5776 emit_jump (label_rtx (node->code_label));
5777
5778 else if (tree_int_cst_equal (node->low, node->high))
5779 {
5780 /* Node is single valued. First see if the index expression matches
0f41302f 5781 this node and then check our children, if any. */
28d81abb 5782
37366632 5783 do_jump_if_equal (index, expand_expr (node->low, NULL_RTX, VOIDmode, 0),
28d81abb
RK
5784 label_rtx (node->code_label), unsignedp);
5785
5786 if (node->right != 0 && node->left != 0)
5787 {
5788 /* This node has children on both sides.
5789 Dispatch to one side or the other
5790 by comparing the index value with this node's value.
5791 If one subtree is bounded, check that one first,
5792 so we can avoid real branches in the tree. */
5793
5794 if (node_is_bounded (node->right, index_type))
5795 {
c5d5d461
JL
5796 emit_cmp_and_jump_insns (index, expand_expr (node->high, NULL_RTX,
5797 VOIDmode, 0),
5798 GT, NULL_RTX, mode, unsignedp, 0,
5799 label_rtx (node->right->code_label));
28d81abb
RK
5800 emit_case_nodes (index, node->left, default_label, index_type);
5801 }
5802
5803 else if (node_is_bounded (node->left, index_type))
5804 {
c5d5d461
JL
5805 emit_cmp_and_jump_insns (index, expand_expr (node->high, NULL_RTX,
5806 VOIDmode, 0),
5807 LT, NULL_RTX, mode, unsignedp, 0,
5808 label_rtx (node->left->code_label));
28d81abb
RK
5809 emit_case_nodes (index, node->right, default_label, index_type);
5810 }
5811
5812 else
5813 {
5814 /* Neither node is bounded. First distinguish the two sides;
5815 then emit the code for one side at a time. */
5816
5817 tree test_label
5818 = build_decl (LABEL_DECL, NULL_TREE, NULL_TREE);
5819
5820 /* See if the value is on the right. */
c5d5d461
JL
5821 emit_cmp_and_jump_insns (index, expand_expr (node->high, NULL_RTX,
5822 VOIDmode, 0),
5823 GT, NULL_RTX, mode, unsignedp, 0,
5824 label_rtx (test_label));
28d81abb
RK
5825
5826 /* Value must be on the left.
5827 Handle the left-hand subtree. */
5828 emit_case_nodes (index, node->left, default_label, index_type);
5829 /* If left-hand subtree does nothing,
5830 go to default. */
5831 emit_jump_if_reachable (default_label);
5832
5833 /* Code branches here for the right-hand subtree. */
5834 expand_label (test_label);
5835 emit_case_nodes (index, node->right, default_label, index_type);
5836 }
5837 }
5838
5839 else if (node->right != 0 && node->left == 0)
5840 {
 5841	  /* Here we have a right child but no left so we issue a conditional
 5842	     branch to default and process the right child.
 5843
 5844	     Omit the conditional branch to default if the right child is a
 5845	     single-valued leaf; it costs too much space to save so little time. */
5846
de14fd73 5847 if (node->right->right || node->right->left
28d81abb
RK
5848 || !tree_int_cst_equal (node->right->low, node->right->high))
5849 {
5850 if (!node_has_low_bound (node, index_type))
5851 {
c5d5d461
JL
5852 emit_cmp_and_jump_insns (index, expand_expr (node->high,
5853 NULL_RTX,
5854 VOIDmode, 0),
5855 LT, NULL_RTX, mode, unsignedp, 0,
5856 default_label);
28d81abb
RK
5857 }
5858
5859 emit_case_nodes (index, node->right, default_label, index_type);
5860 }
5861 else
5862 /* We cannot process node->right normally
5863 since we haven't ruled out the numbers less than
5864 this node's value. So handle node->right explicitly. */
5865 do_jump_if_equal (index,
37366632
RK
5866 expand_expr (node->right->low, NULL_RTX,
5867 VOIDmode, 0),
28d81abb
RK
5868 label_rtx (node->right->code_label), unsignedp);
5869 }
5870
5871 else if (node->right == 0 && node->left != 0)
5872 {
5873 /* Just one subtree, on the left. */
5874
de14fd73
RK
5875#if 0 /* The following code and comment were formerly part
5876 of the condition here, but they didn't work
5877 and I don't understand what the idea was. -- rms. */
5878 /* If our "most probable entry" is less probable
28d81abb
RK
5879 than the default label, emit a jump to
5880 the default label using condition codes
5881 already lying around. With no right branch,
5882 a branch-greater-than will get us to the default
5883 label correctly. */
de14fd73
RK
5884 if (use_cost_table
5885 && cost_table[TREE_INT_CST_LOW (node->high)] < 12)
5886 ;
5887#endif /* 0 */
5888 if (node->left->left || node->left->right
28d81abb
RK
5889 || !tree_int_cst_equal (node->left->low, node->left->high))
5890 {
5891 if (!node_has_high_bound (node, index_type))
5892 {
c5d5d461
JL
5893 emit_cmp_and_jump_insns (index, expand_expr (node->high,
5894 NULL_RTX,
5895 VOIDmode, 0),
5896 GT, NULL_RTX, mode, unsignedp, 0,
5897 default_label);
28d81abb
RK
5898 }
5899
5900 emit_case_nodes (index, node->left, default_label, index_type);
5901 }
5902 else
 5903	    /* We cannot process node->left normally
 5904	       since we haven't ruled out the numbers greater than
 5905	       this node's value.  So handle node->left explicitly.  */
5906 do_jump_if_equal (index,
37366632
RK
5907 expand_expr (node->left->low, NULL_RTX,
5908 VOIDmode, 0),
28d81abb
RK
5909 label_rtx (node->left->code_label), unsignedp);
5910 }
5911 }
5912 else
5913 {
5914 /* Node is a range. These cases are very similar to those for a single
5915 value, except that we do not start by testing whether this node
5916 is the one to branch to. */
5917
5918 if (node->right != 0 && node->left != 0)
5919 {
5920 /* Node has subtrees on both sides.
5921 If the right-hand subtree is bounded,
5922 test for it first, since we can go straight there.
5923 Otherwise, we need to make a branch in the control structure,
5924 then handle the two subtrees. */
5925 tree test_label = 0;
5926
28d81abb
RK
5927
5928 if (node_is_bounded (node->right, index_type))
5929 /* Right hand node is fully bounded so we can eliminate any
5930 testing and branch directly to the target code. */
c5d5d461
JL
5931 emit_cmp_and_jump_insns (index, expand_expr (node->high, NULL_RTX,
5932 VOIDmode, 0),
5933 GT, NULL_RTX, mode, unsignedp, 0,
5934 label_rtx (node->right->code_label));
28d81abb
RK
5935 else
5936 {
5937 /* Right hand node requires testing.
5938 Branch to a label where we will handle it later. */
5939
5940 test_label = build_decl (LABEL_DECL, NULL_TREE, NULL_TREE);
c5d5d461
JL
5941 emit_cmp_and_jump_insns (index, expand_expr (node->high, NULL_RTX,
5942 VOIDmode, 0),
5943 GT, NULL_RTX, mode, unsignedp, 0,
5944 label_rtx (test_label));
28d81abb
RK
5945 }
5946
5947 /* Value belongs to this node or to the left-hand subtree. */
5948
c5d5d461
JL
5949 emit_cmp_and_jump_insns (index, expand_expr (node->low, NULL_RTX,
5950 VOIDmode, 0),
5951 GE, NULL_RTX, mode, unsignedp, 0,
5952 label_rtx (node->code_label));
28d81abb
RK
5953
5954 /* Handle the left-hand subtree. */
5955 emit_case_nodes (index, node->left, default_label, index_type);
5956
5957 /* If right node had to be handled later, do that now. */
5958
5959 if (test_label)
5960 {
5961 /* If the left-hand subtree fell through,
5962 don't let it fall into the right-hand subtree. */
5963 emit_jump_if_reachable (default_label);
5964
5965 expand_label (test_label);
5966 emit_case_nodes (index, node->right, default_label, index_type);
5967 }
5968 }
5969
5970 else if (node->right != 0 && node->left == 0)
5971 {
5972 /* Deal with values to the left of this node,
5973 if they are possible. */
5974 if (!node_has_low_bound (node, index_type))
5975 {
c5d5d461
JL
5976 emit_cmp_and_jump_insns (index, expand_expr (node->low, NULL_RTX,
5977 VOIDmode, 0),
5978 LT, NULL_RTX, mode, unsignedp, 0,
5979 default_label);
28d81abb
RK
5980 }
5981
5982 /* Value belongs to this node or to the right-hand subtree. */
5983
c5d5d461
JL
5984 emit_cmp_and_jump_insns (index, expand_expr (node->high, NULL_RTX,
5985 VOIDmode, 0),
5986 LE, NULL_RTX, mode, unsignedp, 0,
5987 label_rtx (node->code_label));
28d81abb
RK
5988
5989 emit_case_nodes (index, node->right, default_label, index_type);
5990 }
5991
5992 else if (node->right == 0 && node->left != 0)
5993 {
5994 /* Deal with values to the right of this node,
5995 if they are possible. */
5996 if (!node_has_high_bound (node, index_type))
5997 {
c5d5d461
JL
5998 emit_cmp_and_jump_insns (index, expand_expr (node->high, NULL_RTX,
5999 VOIDmode, 0),
6000 GT, NULL_RTX, mode, unsignedp, 0,
6001 default_label);
28d81abb
RK
6002 }
6003
6004 /* Value belongs to this node or to the left-hand subtree. */
6005
c5d5d461
JL
6006 emit_cmp_and_jump_insns (index, expand_expr (node->low, NULL_RTX,
6007 VOIDmode, 0),
6008 GE, NULL_RTX, mode, unsignedp, 0,
6009 label_rtx (node->code_label));
28d81abb
RK
6010
6011 emit_case_nodes (index, node->left, default_label, index_type);
6012 }
6013
6014 else
6015 {
 6016	  /* Node has no children so we check low and high bounds to remove
 6017	     redundant tests.  At most one of the bounds can already be known,
 6018	     since otherwise this node would be bounded--a case tested already. */
6019
6020 if (!node_has_high_bound (node, index_type))
6021 {
c5d5d461
JL
6022 emit_cmp_and_jump_insns (index, expand_expr (node->high, NULL_RTX,
6023 VOIDmode, 0),
6024 GT, NULL_RTX, mode, unsignedp, 0,
6025 default_label);
28d81abb
RK
6026 }
6027
6028 if (!node_has_low_bound (node, index_type))
6029 {
c5d5d461
JL
6030 emit_cmp_and_jump_insns (index, expand_expr (node->low, NULL_RTX,
6031 VOIDmode, 0),
6032 LT, NULL_RTX, mode, unsignedp, 0,
6033 default_label);
28d81abb
RK
6034 }
6035
6036 emit_jump (label_rtx (node->code_label));
6037 }
6038 }
6039}
6040\f
6041/* These routines are used by the loop unrolling code. They copy BLOCK trees
6042 so that the debugging info will be correct for the unrolled loop. */
6043
65d98182
JL
6044/* Indexed by block number, contains a pointer to the N'th block node.
6045
6046 Allocated by the call to identify_blocks, then released after the call
6047 to reorder_blocks in the function unroll_block_trees. */
28d81abb 6048
94dc8b56 6049static tree *block_vector;
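/* Intended call protocol (a sketch inferred from the comments above):
   the unroller calls find_loop_tree_blocks once before duplicating any
   insns and unroll_block_trees once afterwards, which rebuilds the
   BLOCK tree and frees this vector.  */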
28d81abb
RK
6050
6051void
94dc8b56 6052find_loop_tree_blocks ()
28d81abb 6053{
94dc8b56 6054 tree block = DECL_INITIAL (current_function_decl);
28d81abb 6055
94dc8b56 6056 block_vector = identify_blocks (block, get_insns ());
28d81abb
RK
6057}
6058
28d81abb 6059void
94dc8b56 6060unroll_block_trees ()
28d81abb 6061{
94dc8b56 6062 tree block = DECL_INITIAL (current_function_decl);
28d81abb 6063
94dc8b56 6064 reorder_blocks (block_vector, block, get_insns ());
65d98182
JL
6065
6066 /* Release any memory allocated by identify_blocks. */
6067 if (block_vector)
6068 free (block_vector);
28d81abb 6069}