/* Expands front end tree to back end RTL for GNU C-Compiler
   Copyright (C) 1987, 88, 89, 92-97, 1998 Free Software Foundation, Inc.

This file is part of GNU CC.

GNU CC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2, or (at your option)
any later version.

GNU CC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GNU CC; see the file COPYING.  If not, write to
the Free Software Foundation, 59 Temple Place - Suite 330,
Boston, MA 02111-1307, USA.  */


/* This file handles the generation of rtl code from tree structure
   above the level of expressions, using subroutines in exp*.c and emit-rtl.c.
   It also creates the rtl expressions for parameters and auto variables
   and has full responsibility for allocating stack slots.

   The functions whose names start with `expand_' are called by the
   parser to generate RTL instructions for various kinds of constructs.

   Some control and binding constructs require calling several such
   functions at different times.  For example, a simple if-then
   is expanded by calling `expand_start_cond' (with the condition-expression
   as argument) before parsing the then-clause and calling `expand_end_cond'
   after parsing the then-clause.  */
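
/* A rough sketch of that calling sequence for the C statement
   `if (cond) stmt;' (the exact call sites live in each front end):

	expand_start_cond (cond, 0);   -- emits the test of COND
	...expand STMT here...
	expand_end_cond ();            -- emits the label for the false arm
 */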

#include "config.h"
#include "system.h"

#include "rtl.h"
#include "tree.h"
#include "flags.h"
#include "except.h"
#include "function.h"
#include "insn-flags.h"
#include "insn-config.h"
#include "insn-codes.h"
#include "expr.h"
#include "hard-reg-set.h"
#include "obstack.h"
#include "loop.h"
#include "recog.h"
#include "machmode.h"
#include "toplev.h"

#define obstack_chunk_alloc xmalloc
#define obstack_chunk_free free
struct obstack stmt_obstack;

/* Assume that case vectors are not pc-relative.  */
#ifndef CASE_VECTOR_PC_RELATIVE
#define CASE_VECTOR_PC_RELATIVE 0
#endif

/* Filename and line number of last line-number note,
   whether we actually emitted it or not.  */
char *emit_filename;
int emit_lineno;

/* Nonzero if within a ({...}) grouping, in which case we must
   always compute a value for each expr-stmt in case it is the last one.  */

int expr_stmts_for_value;

/* Each time we expand an expression-statement,
   record the expr's type and its RTL value here.  */

static tree last_expr_type;
static rtx last_expr_value;

/* Each time we expand the end of a binding contour (in `expand_end_bindings')
   and we emit a new NOTE_INSN_BLOCK_END note, we save a pointer to it here.
   This is used by the `remember_end_note' function to record the endpoint
   of each generated block in its associated BLOCK node.  */

static rtx last_block_end_note;

/* Number of binding contours started so far in this function.  */

int block_start_count;

/* Nonzero if function being compiled needs to
   return the address of where it has put a structure value.  */

extern int current_function_returns_pcc_struct;

/* Label that will go on parm cleanup code, if any.
   Jumping to this label runs cleanup code for parameters, if
   such code must be run.  Following this code is the logical return label.  */

extern rtx cleanup_label;

/* Label that will go on function epilogue.
   Jumping to this label serves as a "return" instruction
   on machines which require execution of the epilogue on all returns.  */

extern rtx return_label;

/* Offset to end of allocated area of stack frame.
   If stack grows down, this is the address of the last stack slot allocated.
   If stack grows up, this is the address for the next slot.  */
extern int frame_offset;

/* Label to jump back to for tail recursion, or 0 if we have
   not yet needed one for this function.  */
extern rtx tail_recursion_label;

/* Place after which to insert the tail_recursion_label if we need one.  */
extern rtx tail_recursion_reentry;

/* Location at which to save the argument pointer if it will need to be
   referenced.  There are two cases where this is done: if nonlocal gotos
   exist, or if vars stored at an offset from the argument pointer will be
   needed by inner routines.  */

extern rtx arg_pointer_save_area;

/* Chain of all RTL_EXPRs that have insns in them.  */
extern tree rtl_expr_chain;
\f
/* Functions and data structures for expanding case statements.  */

/* Case label structure, used to hold info on labels within case
   statements.  We handle "range" labels; for a single-value label
   as in C, the high and low limits are the same.

   An AVL tree of case nodes is initially created, and later transformed
   to a list linked via the RIGHT fields in the nodes.  Nodes with
   higher case values are later in the list.

   Switch statements can be output in one of two forms.  A branch table
   is used if there are more than a few labels and the labels are dense
   within the range between the smallest and largest case value.  If a
   branch table is used, no further manipulations are done with the case
   node chain.

   The alternative to the use of a branch table is to generate a series
   of compare and jump insns.  When that is done, we use the LEFT, RIGHT,
   and PARENT fields to hold a binary tree.  Initially the tree is
   totally unbalanced, with everything on the right.  We balance the tree
   with nodes on the left having lower case values than the parent
   and nodes on the right having higher values.  We then output the tree
   in order.  */

struct case_node
{
  struct case_node *left;     /* Left son in binary tree */
  struct case_node *right;    /* Right son in binary tree; also node chain */
  struct case_node *parent;   /* Parent of node in binary tree */
  tree low;                   /* Lowest index value for this label */
  tree high;                  /* Highest index value for this label */
  tree code_label;            /* Label to jump to when node matches */
  int balance;
};

typedef struct case_node case_node;
typedef struct case_node *case_node_ptr;
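
/* A rough sketch of how case labels become case_nodes: with the GNU C
   range extension, `case 1 ... 3:' becomes one node with LOW = 1 and
   HIGH = 3, while a plain `case 5:' gets LOW == HIGH == 5.  A front
   end might enter the first of these into the AVL tree with

	tree duplicate;
	add_case_node (build_int_2 (1, 0), build_int_2 (3, 0),
		       label_decl, &duplicate);

   where LABEL_DECL is the label's LABEL_DECL and DUPLICATE reports
   any overlap with an existing label.  */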

/* These are used by estimate_case_costs and balance_case_nodes.  */

/* This must be a signed type, and non-ANSI compilers lack signed char.  */
static short *cost_table;
static int use_cost_table;
\f
/* Stack of control and binding constructs we are currently inside.

   These constructs begin when you call `expand_start_WHATEVER'
   and end when you call `expand_end_WHATEVER'.  This stack records
   info about how the construct began that tells the end-function
   what to do.  It also may provide information about the construct
   to alter the behavior of other constructs within the body.
   For example, they may affect the behavior of C `break' and `continue'.

   Each construct gets one `struct nesting' object.
   All of these objects are chained through the `all' field.
   `nesting_stack' points to the first object (innermost construct).
   The position of an entry on `nesting_stack' is in its `depth' field.

   Each type of construct has its own individual stack.
   For example, loops have `loop_stack'.  Each object points to the
   next object of the same type through the `next' field.

   Some constructs are visible to `break' exit-statements and others
   are not.  Which constructs are visible depends on the language.
   Therefore, the data structure allows each construct to be visible
   or not, according to the args given when the construct is started.
   The construct is visible if the `exit_label' field is non-null.
   In that case, the value should be a CODE_LABEL rtx.  */

struct nesting
{
  struct nesting *all;
  struct nesting *next;
  int depth;
  rtx exit_label;
  union
    {
      /* For conds (if-then and if-then-else statements).  */
      struct
	{
	  /* Label for the end of the if construct.
	     There is none if EXITFLAG was not set
	     and no `else' has been seen yet.  */
	  rtx endif_label;
	  /* Label for the end of this alternative.
	     This may be the end of the if or the next else/elseif.  */
	  rtx next_label;
	} cond;
      /* For loops.  */
      struct
	{
	  /* Label at the top of the loop; place to loop back to.  */
	  rtx start_label;
	  /* Label at the end of the whole construct.  */
	  rtx end_label;
	  /* Label before a jump that branches to the end of the whole
	     construct.  This is where destructors go if any.  */
	  rtx alt_end_label;
	  /* Label for `continue' statement to jump to;
	     this is in front of the stepper of the loop.  */
	  rtx continue_label;
	} loop;
      /* For variable binding contours.  */
      struct
	{
	  /* Sequence number of this binding contour within the function,
	     in order of entry.  */
	  int block_start_count;
	  /* Nonzero => value to restore stack to on exit.  */
	  rtx stack_level;
	  /* The NOTE that starts this contour.
	     Used by expand_goto to check whether the destination
	     is within each contour or not.  */
	  rtx first_insn;
	  /* Innermost containing binding contour that has a stack level.  */
	  struct nesting *innermost_stack_block;
	  /* List of cleanups to be run on exit from this contour.
	     This is a list of expressions to be evaluated.
	     The TREE_PURPOSE of each link is the ..._DECL node
	     which the cleanup pertains to.  */
	  tree cleanups;
	  /* List of cleanup-lists of blocks containing this block,
	     as they were at the locus where this block appears.
	     There is an element for each containing block,
	     ordered innermost containing block first.
	     The tail of this list can be 0,
	     if all remaining elements would be empty lists.
	     The element's TREE_VALUE is the cleanup-list of that block,
	     which may be null.  */
	  tree outer_cleanups;
	  /* Chain of labels defined inside this binding contour.
	     For contours that have stack levels or cleanups.  */
	  struct label_chain *label_chain;
	  /* Number of function calls seen, as of start of this block.  */
	  int function_call_count;
	  /* Nonzero if this is associated with an EH region.  */
	  int exception_region;
	  /* The saved target_temp_slot_level from our outer block.
	     We may reset target_temp_slot_level to be the level of
	     this block; if that is done, target_temp_slot_level
	     reverts to the saved target_temp_slot_level at the very
	     end of the block.  */
	  int target_temp_slot_level;
	  /* True if we are currently emitting insns in an area of
	     output code that is controlled by a conditional
	     expression.  This is used by the cleanup handling code to
	     generate conditional cleanup actions.  */
	  int conditional_code;
	  /* A place to move the start of the exception region for any
	     of the conditional cleanups, must be at the end or after
	     the start of the last unconditional cleanup, and before any
	     conditional branch points.  */
	  rtx last_unconditional_cleanup;
	  /* When in a conditional context, this is the specific
	     cleanup list associated with last_unconditional_cleanup,
	     where we place the conditionalized cleanups.  */
	  tree *cleanup_ptr;
	} block;
      /* For switch (C) or case (Pascal) statements,
	 and also for dummies (see `expand_start_case_dummy').  */
      struct
	{
	  /* The insn after which the case dispatch should finally
	     be emitted.  Zero for a dummy.  */
	  rtx start;
	  /* A list of case labels; it is first built as an AVL tree.
	     During expand_end_case, this is converted to a list, and may be
	     rearranged into a nearly balanced binary tree.  */
	  struct case_node *case_list;
	  /* Label to jump to if no case matches.  */
	  tree default_label;
	  /* The expression to be dispatched on.  */
	  tree index_expr;
	  /* Type that INDEX_EXPR should be converted to.  */
	  tree nominal_type;
	  /* Number of range exprs in case statement.  */
	  int num_ranges;
	  /* Name of this kind of statement, for warnings.  */
	  char *printname;
	  /* Used to save no_line_numbers till we see the first case label.
	     We set this to -1 when we see the first case label in this
	     case statement.  */
	  int line_number_status;
	} case_stmt;
    } data;
};
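
/* A rough sketch of how this structure is used: each
   expand_start_WHATEVER allocates one entry with the ALLOC_NESTING
   macro below and pushes it on both its own stack and nesting_stack,
   along these lines (for a loop):

	struct nesting *thisloop = ALLOC_NESTING ();
	thisloop->next = loop_stack;
	thisloop->all = nesting_stack;
	thisloop->depth = ++nesting_depth;
	thisloop->exit_label = exit_flag ? gen_label_rtx () : 0;
	loop_stack = thisloop;
	nesting_stack = thisloop;

   and the matching expand_end_WHATEVER pops it with POPSTACK.  */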

/* Chain of all pending binding contours.  */
struct nesting *block_stack;

/* If any new stacks are added here, add them to POPSTACKS too.  */

/* Chain of all pending binding contours that restore stack levels
   or have cleanups.  */
struct nesting *stack_block_stack;

/* Chain of all pending conditional statements.  */
struct nesting *cond_stack;

/* Chain of all pending loops.  */
struct nesting *loop_stack;

/* Chain of all pending case or switch statements.  */
struct nesting *case_stack;

/* Separate chain including all of the above,
   chained through the `all' field.  */
struct nesting *nesting_stack;

/* Number of entries on nesting_stack now.  */
int nesting_depth;

/* Allocate and return a new `struct nesting'.  */

#define ALLOC_NESTING() \
  (struct nesting *) obstack_alloc (&stmt_obstack, sizeof (struct nesting))

/* Pop the nesting stack element by element until we pop off
   the element which is at the top of STACK.
   Update all the other stacks, popping off elements from them
   as we pop them from nesting_stack.  */

#define POPSTACK(STACK)					\
do { struct nesting *target = STACK;			\
     struct nesting *this;				\
     do { this = nesting_stack;				\
	  if (loop_stack == this)			\
	    loop_stack = loop_stack->next;		\
	  if (cond_stack == this)			\
	    cond_stack = cond_stack->next;		\
	  if (block_stack == this)			\
	    block_stack = block_stack->next;		\
	  if (stack_block_stack == this)		\
	    stack_block_stack = stack_block_stack->next; \
	  if (case_stack == this)			\
	    case_stack = case_stack->next;		\
	  nesting_depth = nesting_stack->depth - 1;	\
	  nesting_stack = this->all;			\
	  obstack_free (&stmt_obstack, this); }		\
     while (this != target); } while (0)
\f
/* In some cases it is impossible to generate code for a forward goto
   until the label definition is seen.  This happens when it may be necessary
   for the goto to reset the stack pointer: we don't yet know how to do that.
   So expand_goto puts an entry on this fixup list.
   Each time a binding contour that resets the stack is exited,
   we check each fixup.
   If the target label has now been defined, we can insert the proper code.  */

struct goto_fixup
{
  /* Points to following fixup.  */
  struct goto_fixup *next;
  /* Points to the insn before the jump insn.
     If more code must be inserted, it goes after this insn.  */
  rtx before_jump;
  /* The LABEL_DECL that this jump is jumping to, or 0
     for break, continue or return.  */
  tree target;
  /* The BLOCK for the place where this goto was found.  */
  tree context;
  /* The CODE_LABEL rtx that this is jumping to.  */
  rtx target_rtl;
  /* Number of binding contours started in current function
     before the label reference.  */
  int block_start_count;
  /* The outermost stack level that should be restored for this jump.
     Each time a binding contour that resets the stack is exited,
     if the target label is *not* yet defined, this slot is updated.  */
  rtx stack_level;
  /* List of lists of cleanup expressions to be run by this goto.
     There is one element for each block that this goto is within.
     The tail of this list can be 0,
     if all remaining elements would be empty.
     The TREE_VALUE contains the cleanup list of that block as of the
     time this goto was seen.
     The TREE_ADDRESSABLE flag is 1 for a block that has been exited.  */
  tree cleanup_list_list;
};

static struct goto_fixup *goto_fixup_chain;
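
/* A rough sketch of code that creates a fixup:

	{
	  char buf[n];	-- variable size, so this contour
	  goto out;	-- saves and restores a stack level
	}
      out:;

   The goto cannot restore the stack pointer until `out' is defined,
   so expand_fixup records it here and fixup_gotos (below) inserts the
   restoring insns once the contour is exited.  */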

/* Within any binding contour that must restore a stack level,
   all labels are recorded with a chain of these structures.  */

struct label_chain
{
  /* Points to following fixup.  */
  struct label_chain *next;
  tree label;
};


/* Non-zero if we are using EH to handle cleanups.  */
static int using_eh_for_cleanups_p = 0;


static void expand_goto_internal	PROTO((tree, rtx, rtx));
static int expand_fixup			PROTO((tree, rtx, rtx));
static void fixup_gotos			PROTO((struct nesting *, rtx, tree,
					       rtx, int));
static void expand_null_return_1	PROTO((rtx, int));
static void expand_value_return		PROTO((rtx));
static int tail_recursion_args		PROTO((tree, tree));
static void expand_cleanups		PROTO((tree, tree, int, int));
static void check_seenlabel		PROTO((void));
static void do_jump_if_equal		PROTO((rtx, rtx, rtx, int));
static int estimate_case_costs		PROTO((case_node_ptr));
static void group_case_nodes		PROTO((case_node_ptr));
static void balance_case_nodes		PROTO((case_node_ptr *,
					       case_node_ptr));
static int node_has_low_bound		PROTO((case_node_ptr, tree));
static int node_has_high_bound		PROTO((case_node_ptr, tree));
static int node_is_bounded		PROTO((case_node_ptr, tree));
static void emit_jump_if_reachable	PROTO((rtx));
static void emit_case_nodes		PROTO((rtx, case_node_ptr, rtx, tree));
static int add_case_node		PROTO((tree, tree, tree, tree *));
static struct case_node *case_tree2list PROTO((case_node *, case_node *));
static void mark_seen_cases		PROTO((tree, unsigned char *,
					       long, int));
\f
void
using_eh_for_cleanups ()
{
  using_eh_for_cleanups_p = 1;
}

void
init_stmt ()
{
  gcc_obstack_init (&stmt_obstack);
  init_eh ();
}

void
init_stmt_for_function ()
{
  /* We are not currently within any block, conditional, loop or case.  */
  block_stack = 0;
  stack_block_stack = 0;
  loop_stack = 0;
  case_stack = 0;
  cond_stack = 0;
  nesting_stack = 0;
  nesting_depth = 0;

  block_start_count = 0;

  /* No gotos have been expanded yet.  */
  goto_fixup_chain = 0;

  /* We are not processing a ({...}) grouping.  */
  expr_stmts_for_value = 0;
  last_expr_type = 0;

  init_eh_for_function ();
}

void
save_stmt_status (p)
     struct function *p;
{
  p->block_stack = block_stack;
  p->stack_block_stack = stack_block_stack;
  p->cond_stack = cond_stack;
  p->loop_stack = loop_stack;
  p->case_stack = case_stack;
  p->nesting_stack = nesting_stack;
  p->nesting_depth = nesting_depth;
  p->block_start_count = block_start_count;
  p->last_expr_type = last_expr_type;
  p->last_expr_value = last_expr_value;
  p->expr_stmts_for_value = expr_stmts_for_value;
  p->emit_filename = emit_filename;
  p->emit_lineno = emit_lineno;
  p->goto_fixup_chain = goto_fixup_chain;
  save_eh_status (p);
}

void
restore_stmt_status (p)
     struct function *p;
{
  block_stack = p->block_stack;
  stack_block_stack = p->stack_block_stack;
  cond_stack = p->cond_stack;
  loop_stack = p->loop_stack;
  case_stack = p->case_stack;
  nesting_stack = p->nesting_stack;
  nesting_depth = p->nesting_depth;
  block_start_count = p->block_start_count;
  last_expr_type = p->last_expr_type;
  last_expr_value = p->last_expr_value;
  expr_stmts_for_value = p->expr_stmts_for_value;
  emit_filename = p->emit_filename;
  emit_lineno = p->emit_lineno;
  goto_fixup_chain = p->goto_fixup_chain;
  restore_eh_status (p);
}
\f
/* Emit a no-op instruction.  */

void
emit_nop ()
{
  rtx last_insn;

  last_insn = get_last_insn ();
  if (!optimize
      && (GET_CODE (last_insn) == CODE_LABEL
	  || (GET_CODE (last_insn) == NOTE
	      && prev_real_insn (last_insn) == 0)))
    emit_insn (gen_nop ());
}
\f
/* Return the rtx-label that corresponds to a LABEL_DECL,
   creating it if necessary.  */

rtx
label_rtx (label)
     tree label;
{
  if (TREE_CODE (label) != LABEL_DECL)
    abort ();

  if (DECL_RTL (label))
    return DECL_RTL (label);

  return DECL_RTL (label) = gen_label_rtx ();
}

/* Add an unconditional jump to LABEL as the next sequential instruction.  */

void
emit_jump (label)
     rtx label;
{
  do_pending_stack_adjust ();
  emit_jump_insn (gen_jump (label));
  emit_barrier ();
}

/* Emit code to jump to the address
   specified by the pointer expression EXP.  */

void
expand_computed_goto (exp)
     tree exp;
{
  rtx x = expand_expr (exp, NULL_RTX, VOIDmode, 0);

#ifdef POINTERS_EXTEND_UNSIGNED
  x = convert_memory_address (Pmode, x);
#endif

  emit_queue ();
  /* Be sure the function is executable.  */
  if (flag_check_memory_usage)
    emit_library_call (chkr_check_exec_libfunc, 1,
		       VOIDmode, 1, x, ptr_mode);

  do_pending_stack_adjust ();
  emit_indirect_jump (x);
}
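
/* A rough sketch of what reaches this function: the GNU C computed
   goto

	void *ptr = &&target;
	goto *ptr;

   arrives as a call to expand_computed_goto on the pointer expression
   and ends in the emit_indirect_jump above.  */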
\f
/* Handle goto statements and the labels that they can go to.  */

/* Specify the location in the RTL code of a label LABEL,
   which is a LABEL_DECL tree node.

   This is used for the kind of label that the user can jump to with a
   goto statement, and for alternatives of a switch or case statement.
   RTL labels generated for loops and conditionals don't go through here;
   they are generated directly at the RTL level, by other functions below.

   Note that this has nothing to do with defining label *names*.
   Languages vary in how they do that and what that even means.  */

void
expand_label (label)
     tree label;
{
  struct label_chain *p;

  do_pending_stack_adjust ();
  emit_label (label_rtx (label));
  if (DECL_NAME (label))
    LABEL_NAME (DECL_RTL (label)) = IDENTIFIER_POINTER (DECL_NAME (label));

  if (stack_block_stack != 0)
    {
      p = (struct label_chain *) oballoc (sizeof (struct label_chain));
      p->next = stack_block_stack->data.block.label_chain;
      stack_block_stack->data.block.label_chain = p;
      p->label = label;
    }
}

/* Declare that LABEL (a LABEL_DECL) may be used for nonlocal gotos
   from nested functions.  */

void
declare_nonlocal_label (label)
     tree label;
{
  nonlocal_labels = tree_cons (NULL_TREE, label, nonlocal_labels);
  LABEL_PRESERVE_P (label_rtx (label)) = 1;
  if (nonlocal_goto_handler_slot == 0)
    {
      nonlocal_goto_handler_slot
	= assign_stack_local (Pmode, GET_MODE_SIZE (Pmode), 0);
      emit_stack_save (SAVE_NONLOCAL,
		       &nonlocal_goto_stack_level,
		       PREV_INSN (tail_recursion_reentry));
    }
}

/* Generate RTL code for a `goto' statement with target label LABEL.
   LABEL should be a LABEL_DECL tree node that was or will later be
   defined with `expand_label'.  */

void
expand_goto (label)
     tree label;
{
  tree context;

  /* Check for a nonlocal goto to a containing function.  */
  context = decl_function_context (label);
  if (context != 0 && context != current_function_decl)
    {
      struct function *p = find_function_data (context);
      rtx label_ref = gen_rtx_LABEL_REF (Pmode, label_rtx (label));
      rtx temp;

      p->has_nonlocal_label = 1;
      current_function_has_nonlocal_goto = 1;
      LABEL_REF_NONLOCAL_P (label_ref) = 1;

      /* Copy the rtl for the slots so that they won't be shared in
	 case the virtual stack vars register gets instantiated differently
	 in the parent than in the child.  */

#if HAVE_nonlocal_goto
      if (HAVE_nonlocal_goto)
	emit_insn (gen_nonlocal_goto (lookup_static_chain (label),
				      copy_rtx (p->nonlocal_goto_handler_slot),
				      copy_rtx (p->nonlocal_goto_stack_level),
				      label_ref));
      else
#endif
	{
	  rtx addr;

	  /* Restore frame pointer for containing function.
	     This sets the actual hard register used for the frame pointer
	     to the location of the function's incoming static chain info.
	     The non-local goto handler will then adjust it to contain the
	     proper value and reload the argument pointer, if needed.  */
	  emit_move_insn (hard_frame_pointer_rtx, lookup_static_chain (label));

	  /* We have now loaded the frame pointer hardware register with
	     the address that corresponds to the start of the virtual
	     stack vars.  So replace virtual_stack_vars_rtx in all
	     addresses we use with hard_frame_pointer_rtx.  */

	  /* Get addr of containing function's current nonlocal goto handler,
	     which will do any cleanups and then jump to the label.  */
	  addr = copy_rtx (p->nonlocal_goto_handler_slot);
	  temp = copy_to_reg (replace_rtx (addr, virtual_stack_vars_rtx,
					   hard_frame_pointer_rtx));

	  /* Restore the stack pointer.  Note this uses fp just restored.  */
	  addr = p->nonlocal_goto_stack_level;
	  if (addr)
	    addr = replace_rtx (copy_rtx (addr),
				virtual_stack_vars_rtx,
				hard_frame_pointer_rtx);

	  emit_stack_restore (SAVE_NONLOCAL, addr, NULL_RTX);

	  /* Put in the static chain register the nonlocal label address.  */
	  emit_move_insn (static_chain_rtx, label_ref);
	  /* USE of hard_frame_pointer_rtx added for consistency; not clear if
	     really needed.  */
	  emit_insn (gen_rtx_USE (VOIDmode, hard_frame_pointer_rtx));
	  emit_insn (gen_rtx_USE (VOIDmode, stack_pointer_rtx));
	  emit_insn (gen_rtx_USE (VOIDmode, static_chain_rtx));
	  emit_indirect_jump (temp);
	}
    }
  else
    expand_goto_internal (label, label_rtx (label), NULL_RTX);
}

/* Generate RTL code for a `goto' statement with target label BODY.
   LABEL should be a CODE_LABEL rtx.
   LAST_INSN, if non-0, is the rtx we should consider as the last
   insn emitted (for the purposes of cleaning up a return).  */

static void
expand_goto_internal (body, label, last_insn)
     tree body;
     rtx label;
     rtx last_insn;
{
  struct nesting *block;
  rtx stack_level = 0;

  if (GET_CODE (label) != CODE_LABEL)
    abort ();

  /* If label has already been defined, we can tell now
     whether and how we must alter the stack level.  */

  if (PREV_INSN (label) != 0)
    {
      /* Find the innermost pending block that contains the label.
	 (Check containment by comparing insn-uids.)
	 Then restore the outermost stack level within that block,
	 and do cleanups of all blocks contained in it.  */
      for (block = block_stack; block; block = block->next)
	{
	  if (INSN_UID (block->data.block.first_insn) < INSN_UID (label))
	    break;
	  if (block->data.block.stack_level != 0)
	    stack_level = block->data.block.stack_level;
	  /* Execute the cleanups for blocks we are exiting.  */
	  if (block->data.block.cleanups != 0)
	    {
	      expand_cleanups (block->data.block.cleanups, NULL_TREE, 1, 1);
	      do_pending_stack_adjust ();
	    }
	}

      if (stack_level)
	{
	  /* Ensure stack adjust isn't done by emit_jump, as this
	     would clobber the stack pointer.  This one should be
	     deleted as dead by flow.  */
	  clear_pending_stack_adjust ();
	  do_pending_stack_adjust ();
	  emit_stack_restore (SAVE_BLOCK, stack_level, NULL_RTX);
	}

      if (body != 0 && DECL_TOO_LATE (body))
	error ("jump to `%s' invalidly jumps into binding contour",
	       IDENTIFIER_POINTER (DECL_NAME (body)));
    }
  /* Label not yet defined: may need to put this goto
     on the fixup list.  */
  else if (! expand_fixup (body, label, last_insn))
    {
      /* No fixup needed.  Record that the label is the target
	 of at least one goto that has no fixup.  */
      if (body != 0)
	TREE_ADDRESSABLE (body) = 1;
    }

  emit_jump (label);
}
\f
/* Generate if necessary a fixup for a goto
   whose target label in tree structure (if any) is TREE_LABEL
   and whose target in rtl is RTL_LABEL.

   If LAST_INSN is nonzero, we pretend that the jump appears
   after insn LAST_INSN instead of at the current point in the insn stream.

   The fixup will be used later to insert insns just before the goto.
   Those insns will restore the stack level as appropriate for the
   target label, and will (in the case of C++) also invoke any object
   destructors which have to be invoked when we exit the scopes which
   are exited by the goto.

   Value is nonzero if a fixup is made.  */

static int
expand_fixup (tree_label, rtl_label, last_insn)
     tree tree_label;
     rtx rtl_label;
     rtx last_insn;
{
  struct nesting *block, *end_block;

  /* See if we can recognize which block the label will be output in.
     This is possible in some very common cases.
     If we succeed, set END_BLOCK to that block.
     Otherwise, set it to 0.  */

  if (cond_stack
      && (rtl_label == cond_stack->data.cond.endif_label
	  || rtl_label == cond_stack->data.cond.next_label))
    end_block = cond_stack;
  /* If we are in a loop, recognize certain labels which
     are likely targets.  This reduces the number of fixups
     we need to create.  */
  else if (loop_stack
	   && (rtl_label == loop_stack->data.loop.start_label
	       || rtl_label == loop_stack->data.loop.end_label
	       || rtl_label == loop_stack->data.loop.continue_label))
    end_block = loop_stack;
  else
    end_block = 0;

  /* Now set END_BLOCK to the binding level to which we will return.  */

  if (end_block)
    {
      struct nesting *next_block = end_block->all;
      block = block_stack;

      /* First see if the END_BLOCK is inside the innermost binding level.
	 If so, then no cleanups or stack levels are relevant.  */
      while (next_block && next_block != block)
	next_block = next_block->all;

      if (next_block)
	return 0;

      /* Otherwise, set END_BLOCK to the innermost binding level
	 which is outside the relevant control-structure nesting.  */
      next_block = block_stack->next;
      for (block = block_stack; block != end_block; block = block->all)
	if (block == next_block)
	  next_block = next_block->next;
      end_block = next_block;
    }

  /* Does any containing block have a stack level or cleanups?
     If not, no fixup is needed, and that is the normal case
     (the only case, for standard C).  */
  for (block = block_stack; block != end_block; block = block->next)
    if (block->data.block.stack_level != 0
	|| block->data.block.cleanups != 0)
      break;

  if (block != end_block)
    {
      /* Ok, a fixup is needed.  Add a fixup to the list of such.  */
      struct goto_fixup *fixup
	= (struct goto_fixup *) oballoc (sizeof (struct goto_fixup));
      /* In case an old stack level is restored, make sure that comes
	 after any pending stack adjust.  */
      /* ?? If the fixup isn't to come at the present position,
	 doing the stack adjust here isn't useful.  Doing it with our
	 settings at that location isn't useful either.  Let's hope
	 someone does it!  */
      if (last_insn == 0)
	do_pending_stack_adjust ();
      fixup->target = tree_label;
      fixup->target_rtl = rtl_label;

      /* Create a BLOCK node and a corresponding matched set of
	 NOTE_INSN_BEGIN_BLOCK and NOTE_INSN_END_BLOCK notes at
	 this point.  The notes will encapsulate any and all fixup
	 code which we might later insert at this point in the insn
	 stream.  Also, the BLOCK node will be the parent (i.e. the
	 `SUPERBLOCK') of any other BLOCK nodes which we might create
	 later on when we are expanding the fixup code.  */

      {
	register rtx original_before_jump
	  = last_insn ? last_insn : get_last_insn ();

	start_sequence ();
	pushlevel (0);
	fixup->before_jump = emit_note (NULL_PTR, NOTE_INSN_BLOCK_BEG);
	last_block_end_note = emit_note (NULL_PTR, NOTE_INSN_BLOCK_END);
	fixup->context = poplevel (1, 0, 0);  /* Create the BLOCK node now!  */
	end_sequence ();
	emit_insns_after (fixup->before_jump, original_before_jump);
      }

      fixup->block_start_count = block_start_count;
      fixup->stack_level = 0;
      fixup->cleanup_list_list
	= ((block->data.block.outer_cleanups
	    || block->data.block.cleanups)
	   ? tree_cons (NULL_TREE, block->data.block.cleanups,
			block->data.block.outer_cleanups)
	   : 0);
      fixup->next = goto_fixup_chain;
      goto_fixup_chain = fixup;
    }

  return block != 0;
}


\f
/* Expand any needed fixups in the outermost binding level of the
   function.  FIRST_INSN is the first insn in the function.  */

void
expand_fixups (first_insn)
     rtx first_insn;
{
  fixup_gotos (NULL_PTR, NULL_RTX, NULL_TREE, first_insn, 0);
}

/* When exiting a binding contour, process all pending gotos requiring fixups.
   THISBLOCK is the structure that describes the block being exited.
   STACK_LEVEL is the rtx for the stack level to restore exiting this contour.
   CLEANUP_LIST is a list of expressions to evaluate on exiting this contour.
   FIRST_INSN is the insn that began this contour.

   Gotos that jump out of this contour must restore the
   stack level and do the cleanups before actually jumping.

   DONT_JUMP_IN nonzero means report an error if there is a jump into this
   contour from before the beginning of the contour.
   This is also done if STACK_LEVEL is nonzero.  */

static void
fixup_gotos (thisblock, stack_level, cleanup_list, first_insn, dont_jump_in)
     struct nesting *thisblock;
     rtx stack_level;
     tree cleanup_list;
     rtx first_insn;
     int dont_jump_in;
{
  register struct goto_fixup *f, *prev;

  /* F is the fixup we are considering; PREV is the previous one.  */
  /* We run this loop in two passes so that cleanups of exited blocks
     are run first, and blocks that are exited are marked so
     afterwards.  */

  for (prev = 0, f = goto_fixup_chain; f; prev = f, f = f->next)
    {
      /* Test for a fixup that is inactive because it is already handled.  */
      if (f->before_jump == 0)
	{
	  /* Delete inactive fixup from the chain, if that is easy to do.  */
	  if (prev != 0)
	    prev->next = f->next;
	}
      /* Has this fixup's target label been defined?
	 If so, we can finalize it.  */
      else if (PREV_INSN (f->target_rtl) != 0)
	{
	  register rtx cleanup_insns;

	  /* Get the first non-label after the label
	     this goto jumps to.  If that's before this scope begins,
	     we don't have a jump into the scope.  */
	  rtx after_label = f->target_rtl;
	  while (after_label != 0 && GET_CODE (after_label) == CODE_LABEL)
	    after_label = NEXT_INSN (after_label);

	  /* If this fixup jumped into this contour from before the beginning
	     of this contour, report an error.  */
	  /* ??? Bug: this does not detect jumping in through intermediate
	     blocks that have stack levels or cleanups.
	     It detects only a problem with the innermost block
	     around the label.  */
	  if (f->target != 0
	      && (dont_jump_in || stack_level || cleanup_list)
	      /* If AFTER_LABEL is 0, it means the jump goes to the end
		 of the rtl, which means it jumps into this scope.  */
	      && (after_label == 0
		  || INSN_UID (first_insn) < INSN_UID (after_label))
	      && INSN_UID (first_insn) > INSN_UID (f->before_jump)
	      && ! DECL_ERROR_ISSUED (f->target))
	    {
	      error_with_decl (f->target,
			       "label `%s' used before containing binding contour");
	      /* Prevent multiple errors for one label.  */
	      DECL_ERROR_ISSUED (f->target) = 1;
	    }

	  /* We will expand the cleanups into a sequence of their own and
	     then later on we will attach this new sequence to the insn
	     stream just ahead of the actual jump insn.  */

	  start_sequence ();

	  /* Temporarily restore the lexical context where we will
	     logically be inserting the fixup code.  We do this for the
	     sake of getting the debugging information right.  */

	  pushlevel (0);
	  set_block (f->context);

	  /* Expand the cleanups for blocks this jump exits.  */
	  if (f->cleanup_list_list)
	    {
	      tree lists;
	      for (lists = f->cleanup_list_list; lists; lists = TREE_CHAIN (lists))
		/* Marked elements correspond to blocks that have been closed.
		   Do their cleanups.  */
		if (TREE_ADDRESSABLE (lists)
		    && TREE_VALUE (lists) != 0)
		  {
		    expand_cleanups (TREE_VALUE (lists), NULL_TREE, 1, 1);
		    /* Pop any pushes done in the cleanups,
		       in case function is about to return.  */
		    do_pending_stack_adjust ();
		  }
	    }

	  /* Restore stack level for the biggest contour that this
	     jump jumps out of.  */
	  if (f->stack_level)
	    emit_stack_restore (SAVE_BLOCK, f->stack_level, f->before_jump);

	  /* Finish up the sequence containing the insns which implement the
	     necessary cleanups, and then attach that whole sequence to the
	     insn stream just ahead of the actual jump insn.  Attaching it
	     at that point ensures that any cleanups which are in fact
	     implicit C++ object destructions (which must be executed upon
	     leaving the block) appear (to the debugger) to be taking place
	     in an area of the generated code where the object(s) being
	     destructed are still "in scope".  */

	  cleanup_insns = get_insns ();
	  poplevel (1, 0, 0);

	  end_sequence ();
	  emit_insns_after (cleanup_insns, f->before_jump);

	  f->before_jump = 0;
	}
    }

  /* For any still-undefined labels, do the cleanups for this block now.
     We must do this now since items in the cleanup list may go out
     of scope when the block ends.  */
  for (prev = 0, f = goto_fixup_chain; f; prev = f, f = f->next)
    if (f->before_jump != 0
	&& PREV_INSN (f->target_rtl) == 0
	/* Label has still not appeared.  If we are exiting a block with
	   a stack level to restore, that started before the fixup,
	   mark this stack level as needing restoration
	   when the fixup is later finalized.  */
	&& thisblock != 0
	/* Note: if THISBLOCK == 0 and we have a label that hasn't appeared, it
	   means the label is undefined.  That's erroneous, but possible.  */
	&& (thisblock->data.block.block_start_count
	    <= f->block_start_count))
      {
	tree lists = f->cleanup_list_list;
	rtx cleanup_insns;

	for (; lists; lists = TREE_CHAIN (lists))
	  /* If the following elt. corresponds to our containing block
	     then the elt. must be for this block.  */
	  if (TREE_CHAIN (lists) == thisblock->data.block.outer_cleanups)
	    {
	      start_sequence ();
	      pushlevel (0);
	      set_block (f->context);
	      expand_cleanups (TREE_VALUE (lists), NULL_TREE, 1, 1);
	      do_pending_stack_adjust ();
	      cleanup_insns = get_insns ();
	      poplevel (1, 0, 0);
	      end_sequence ();
	      if (cleanup_insns != 0)
		f->before_jump
		  = emit_insns_after (cleanup_insns, f->before_jump);

	      f->cleanup_list_list = TREE_CHAIN (lists);
	    }

	if (stack_level)
	  f->stack_level = stack_level;
      }
}


\f
/* Generate RTL for an asm statement (explicit assembler code).
   BODY is a STRING_CST node containing the assembler code text,
   or an ADDR_EXPR containing a STRING_CST.  */

void
expand_asm (body)
     tree body;
{
  if (flag_check_memory_usage)
    {
      error ("`asm' cannot be used with `-fcheck-memory-usage'");
      return;
    }

  if (TREE_CODE (body) == ADDR_EXPR)
    body = TREE_OPERAND (body, 0);

  emit_insn (gen_rtx_ASM_INPUT (VOIDmode,
				TREE_STRING_POINTER (body)));
  last_expr_type = 0;
}
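
/* A rough sketch of what this handles: a statement such as

	asm ("nop");

   arrives as a STRING_CST and is emitted as a bare ASM_INPUT rtx;
   asms with operands go through expand_asm_operands below.  */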

/* Generate RTL for an asm statement with arguments.
   STRING is the instruction template.
   OUTPUTS is a list of output arguments (lvalues); INPUTS a list of inputs.
   Each output or input has an expression in the TREE_VALUE and
   a constraint-string in the TREE_PURPOSE.
   CLOBBERS is a list of STRING_CST nodes each naming a hard register
   that is clobbered by this insn.

   Not all kinds of lvalue that may appear in OUTPUTS can be stored directly.
   Some elements of OUTPUTS may be replaced with trees representing temporary
   values.  The caller should copy those temporary values to the originally
   specified lvalues.

   VOL nonzero means the insn is volatile; don't optimize it.  */

void
expand_asm_operands (string, outputs, inputs, clobbers, vol, filename, line)
     tree string, outputs, inputs, clobbers;
     int vol;
     char *filename;
     int line;
{
  rtvec argvec, constraints;
  rtx body;
  int ninputs = list_length (inputs);
  int noutputs = list_length (outputs);
  int ninout = 0;
  int nclobbers;
  tree tail;
  register int i;
  /* Vector of RTX's of evaluated output operands.  */
  rtx *output_rtx = (rtx *) alloca (noutputs * sizeof (rtx));
  int *inout_opnum = (int *) alloca (noutputs * sizeof (int));
  enum machine_mode *inout_mode
    = (enum machine_mode *) alloca (noutputs * sizeof (enum machine_mode));
  /* The insn we have emitted.  */
  rtx insn;

  /* An ASM with no outputs needs to be treated as volatile, for now.  */
  if (noutputs == 0)
    vol = 1;

  if (flag_check_memory_usage)
    {
      error ("`asm' cannot be used with `-fcheck-memory-usage'");
      return;
    }

  /* Count the number of meaningful clobbered registers, ignoring what
     we would ignore later.  */
  nclobbers = 0;
  for (tail = clobbers; tail; tail = TREE_CHAIN (tail))
    {
      char *regname = TREE_STRING_POINTER (TREE_VALUE (tail));
      i = decode_reg_name (regname);
      if (i >= 0 || i == -4)
	++nclobbers;
      else if (i == -2)
	error ("unknown register name `%s' in `asm'", regname);
    }

  last_expr_type = 0;

  for (i = 0, tail = outputs; tail; tail = TREE_CHAIN (tail), i++)
    {
      tree val = TREE_VALUE (tail);
      tree type = TREE_TYPE (val);
      int j;
      int found_equal = 0;
      int found_plus = 0;
      int allows_reg = 0;

      /* If there's an erroneous arg, emit no insn.  */
      if (TREE_TYPE (val) == error_mark_node)
	return;

      /* Make sure constraint has `=' and does not have `+'.  Also, see
	 if it allows any register.  Be liberal on the latter test, since
	 the worst that happens if we get it wrong is we issue an error
	 message.  */

      for (j = 0; j < TREE_STRING_LENGTH (TREE_PURPOSE (tail)) - 1; j++)
	switch (TREE_STRING_POINTER (TREE_PURPOSE (tail))[j])
	  {
	  case '+':
	    /* Make sure we can specify the matching operand.  */
	    if (i > 9)
	      {
		error ("output operand constraint %d contains `+'", i);
		return;
	      }

	    /* Replace '+' with '='.  */
	    TREE_STRING_POINTER (TREE_PURPOSE (tail))[j] = '=';
	    found_plus = 1;
	    break;

	  case '=':
	    found_equal = 1;
	    break;

	  case '?':  case '!':  case '*':  case '%':  case '&':
	  case 'V':  case 'm':  case 'o':  case '<':  case '>':
	  case 'E':  case 'F':  case 'G':  case 'H':  case 'X':
	  case 's':  case 'i':  case 'n':
	  case 'I':  case 'J':  case 'K':  case 'L':  case 'M':
	  case 'N':  case 'O':  case 'P':  case ',':
#ifdef EXTRA_CONSTRAINT
	  case 'Q':  case 'R':  case 'S':  case 'T':  case 'U':
#endif
	    break;

	  case '0':  case '1':  case '2':  case '3':  case '4':
	  case '5':  case '6':  case '7':  case '8':  case '9':
	    error ("matching constraint not valid in output operand");
	    break;

	  case 'p':  case 'g':  case 'r':
	  default:
	    allows_reg = 1;
	    break;
	  }

      if (! found_equal && ! found_plus)
	{
	  error ("output operand constraint lacks `='");
	  return;
	}

      /* If an output operand is not a decl or indirect ref and our constraint
	 allows a register, make a temporary to act as an intermediate.
	 Make the asm insn write into that, then our caller will copy it to
	 the real output operand.  Likewise for promoted variables.  */

      if (TREE_CODE (val) == INDIRECT_REF
	  || (TREE_CODE_CLASS (TREE_CODE (val)) == 'd'
	      && ! (GET_CODE (DECL_RTL (val)) == REG
		    && GET_MODE (DECL_RTL (val)) != TYPE_MODE (type)))
	  || ! allows_reg
	  || found_plus)
	{
	  if (! allows_reg)
	    mark_addressable (TREE_VALUE (tail));

	  output_rtx[i]
	    = expand_expr (TREE_VALUE (tail), NULL_RTX, VOIDmode,
			   EXPAND_MEMORY_USE_WO);

	  if (! allows_reg && GET_CODE (output_rtx[i]) != MEM)
	    error ("output number %d not directly addressable", i);
	}
      else
	{
	  output_rtx[i] = assign_temp (type, 0, 0, 0);
	  TREE_VALUE (tail) = make_tree (type, output_rtx[i]);
	}

      if (found_plus)
	{
	  inout_mode[ninout] = TYPE_MODE (TREE_TYPE (TREE_VALUE (tail)));
	  inout_opnum[ninout++] = i;
	}
    }

  ninputs += ninout;
  if (ninputs + noutputs > MAX_RECOG_OPERANDS)
    {
      error ("more than %d operands in `asm'", MAX_RECOG_OPERANDS);
      return;
    }

  /* Make vectors for the expression-rtx and constraint strings.  */

  argvec = rtvec_alloc (ninputs);
  constraints = rtvec_alloc (ninputs);

  body = gen_rtx_ASM_OPERANDS (VOIDmode,
			       TREE_STRING_POINTER (string), "", 0, argvec,
			       constraints, filename, line);

  MEM_VOLATILE_P (body) = vol;

  /* Eval the inputs and put them into ARGVEC.
     Put their constraints into ASM_INPUTs and store in CONSTRAINTS.  */

  i = 0;
  for (tail = inputs; tail; tail = TREE_CHAIN (tail))
    {
      int j;
      int allows_reg = 0;

      /* If there's an erroneous arg, emit no insn,
	 because the ASM_INPUT would get VOIDmode
	 and that could cause a crash in reload.  */
      if (TREE_TYPE (TREE_VALUE (tail)) == error_mark_node)
	return;
      if (TREE_PURPOSE (tail) == NULL_TREE)
	{
	  error ("hard register `%s' listed as input operand to `asm'",
		 TREE_STRING_POINTER (TREE_VALUE (tail)));
	  return;
	}

      /* Make sure constraint has neither `=' nor `+'.  */

      for (j = 0; j < TREE_STRING_LENGTH (TREE_PURPOSE (tail)) - 1; j++)
	switch (TREE_STRING_POINTER (TREE_PURPOSE (tail))[j])
	  {
	  case '+':  case '=':
	    error ("input operand constraint contains `%c'",
		   TREE_STRING_POINTER (TREE_PURPOSE (tail))[j]);
	    return;

	  case '?':  case '!':  case '*':  case '%':  case '&':
	  case 'V':  case 'm':  case 'o':  case '<':  case '>':
	  case 'E':  case 'F':  case 'G':  case 'H':  case 'X':
	  case 's':  case 'i':  case 'n':
	  case 'I':  case 'J':  case 'K':  case 'L':  case 'M':
	  case 'N':  case 'O':  case 'P':  case ',':
#ifdef EXTRA_CONSTRAINT
	  case 'Q':  case 'R':  case 'S':  case 'T':  case 'U':
#endif
	    break;

	    /* Whether or not a numeric constraint allows a register is
	       decided by the matching constraint, and so there is no need
	       to do anything special with them.  We must handle them in
	       the default case, so that we don't unnecessarily force
	       operands to memory.  */
	  case '0':  case '1':  case '2':  case '3':  case '4':
	  case '5':  case '6':  case '7':  case '8':  case '9':
	    if (TREE_STRING_POINTER (TREE_PURPOSE (tail))[j]
		>= '0' + noutputs)
	      {
		error
		  ("matching constraint references invalid operand number");
		return;
	      }

	    /* ... fall through ...  */

	  case 'p':  case 'g':  case 'r':
	  default:
	    allows_reg = 1;
	    break;
	  }

      if (! allows_reg)
	mark_addressable (TREE_VALUE (tail));

      XVECEXP (body, 3, i)      /* argvec */
	= expand_expr (TREE_VALUE (tail), NULL_RTX, VOIDmode, 0);
      if (CONSTANT_P (XVECEXP (body, 3, i))
	  && ! general_operand (XVECEXP (body, 3, i),
				TYPE_MODE (TREE_TYPE (TREE_VALUE (tail)))))
	{
	  if (allows_reg)
	    XVECEXP (body, 3, i)
	      = force_reg (TYPE_MODE (TREE_TYPE (TREE_VALUE (tail))),
			   XVECEXP (body, 3, i));
	  else
	    XVECEXP (body, 3, i)
	      = force_const_mem (TYPE_MODE (TREE_TYPE (TREE_VALUE (tail))),
				 XVECEXP (body, 3, i));
	}

      if (! allows_reg
	  && (GET_CODE (XVECEXP (body, 3, i)) == REG
	      || GET_CODE (XVECEXP (body, 3, i)) == SUBREG
	      || GET_CODE (XVECEXP (body, 3, i)) == CONCAT))
	{
	  tree type = TREE_TYPE (TREE_VALUE (tail));
	  rtx memloc = assign_temp (type, 1, 1, 1);

	  emit_move_insn (memloc, XVECEXP (body, 3, i));
	  XVECEXP (body, 3, i) = memloc;
	}

      XVECEXP (body, 4, i)      /* constraints */
	= gen_rtx_ASM_INPUT (TYPE_MODE (TREE_TYPE (TREE_VALUE (tail))),
			     TREE_STRING_POINTER (TREE_PURPOSE (tail)));
      i++;
    }

  /* Protect all the operands from the queue,
     now that they have all been evaluated.  */

  for (i = 0; i < ninputs - ninout; i++)
    XVECEXP (body, 3, i) = protect_from_queue (XVECEXP (body, 3, i), 0);

  for (i = 0; i < noutputs; i++)
    output_rtx[i] = protect_from_queue (output_rtx[i], 1);

  /* For in-out operands, copy output rtx to input rtx.  */
  for (i = 0; i < ninout; i++)
    {
      static char match[9+1][2]
	= {"0", "1", "2", "3", "4", "5", "6", "7", "8", "9"};
      int j = inout_opnum[i];

      XVECEXP (body, 3, ninputs - ninout + i)      /* argvec */
	= output_rtx[j];
      XVECEXP (body, 4, ninputs - ninout + i)      /* constraints */
	= gen_rtx_ASM_INPUT (inout_mode[j], match[j]);
    }

  /* Now, for each output, construct an rtx
     (set OUTPUT (asm_operands INSN OUTPUTNUMBER OUTPUTCONSTRAINT
			       ARGVEC CONSTRAINTS))
     If there is more than one, put them inside a PARALLEL.  */

  if (noutputs == 1 && nclobbers == 0)
    {
      XSTR (body, 1) = TREE_STRING_POINTER (TREE_PURPOSE (outputs));
      insn = emit_insn (gen_rtx_SET (VOIDmode, output_rtx[0], body));
    }
  else if (noutputs == 0 && nclobbers == 0)
    {
      /* No output operands: put in a raw ASM_OPERANDS rtx.  */
      insn = emit_insn (body);
    }
  else
    {
      rtx obody = body;
      int num = noutputs;
      if (num == 0) num = 1;
      body = gen_rtx_PARALLEL (VOIDmode, rtvec_alloc (num + nclobbers));

      /* For each output operand, store a SET.  */

      for (i = 0, tail = outputs; tail; tail = TREE_CHAIN (tail), i++)
	{
	  XVECEXP (body, 0, i)
	    = gen_rtx_SET (VOIDmode,
			   output_rtx[i],
			   gen_rtx_ASM_OPERANDS (VOIDmode,
						 TREE_STRING_POINTER (string),
						 TREE_STRING_POINTER (TREE_PURPOSE (tail)),
						 i, argvec, constraints,
						 filename, line));
	  MEM_VOLATILE_P (SET_SRC (XVECEXP (body, 0, i))) = vol;
	}

      /* If there are no outputs (but there are some clobbers)
	 store the bare ASM_OPERANDS into the PARALLEL.  */

      if (i == 0)
	XVECEXP (body, 0, i++) = obody;

      /* Store (clobber REG) for each clobbered register specified.  */

      for (tail = clobbers; tail; tail = TREE_CHAIN (tail))
	{
	  char *regname = TREE_STRING_POINTER (TREE_VALUE (tail));
	  int j = decode_reg_name (regname);

	  if (j < 0)
	    {
	      if (j == -3)	/* `cc', which is not a register */
		continue;

	      if (j == -4)	/* `memory', don't cache memory across asm */
		{
		  XVECEXP (body, 0, i++)
		    = gen_rtx_CLOBBER (VOIDmode,
				       gen_rtx_MEM (BLKmode,
						    gen_rtx_SCRATCH (VOIDmode)));
		  continue;
		}

	      /* Ignore unknown register, error already signaled.  */
	      continue;
	    }

	  /* Use QImode since that's guaranteed to clobber just one reg.  */
	  XVECEXP (body, 0, i++)
	    = gen_rtx_CLOBBER (VOIDmode, gen_rtx_REG (QImode, j));
	}

      insn = emit_insn (body);
    }

  free_temp_slots ();
}
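
/* A rough sketch of the kind of statement handled above: the
   extended asm

	asm volatile ("addl %2,%0" : "=r" (x) : "0" (x), "g" (y));

   reaches this function with one output whose constraint is "=r",
   two inputs, and VOL set; the "0" matching constraint is checked by
   the numeric cases in the input loop above.  */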
\f
/* Generate RTL to evaluate the expression EXP and remember it
   in case this is the VALUE in a ({... VALUE; }) construct.  */

void
expand_expr_stmt (exp)
     tree exp;
{
  /* If -W, warn about statements with no side effects,
     except for an explicit cast to void (e.g. for assert()), and
     except inside a ({...}) where they may be useful.  */
  if (expr_stmts_for_value == 0 && exp != error_mark_node)
    {
      if (! TREE_SIDE_EFFECTS (exp) && (extra_warnings || warn_unused)
	  && !(TREE_CODE (exp) == CONVERT_EXPR
	       && TREE_TYPE (exp) == void_type_node))
	warning_with_file_and_line (emit_filename, emit_lineno,
				    "statement with no effect");
      else if (warn_unused)
	warn_if_unused_value (exp);
    }

  /* If EXP is of function type and we are expanding statements for
     value, convert it to pointer-to-function.  */
  if (expr_stmts_for_value && TREE_CODE (TREE_TYPE (exp)) == FUNCTION_TYPE)
    exp = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (exp)), exp);

  last_expr_type = TREE_TYPE (exp);
  if (! flag_syntax_only || expr_stmts_for_value)
    last_expr_value = expand_expr (exp,
				   (expr_stmts_for_value
				    ? NULL_RTX : const0_rtx),
				   VOIDmode, 0);

  /* If all we do is reference a volatile value in memory,
     copy it to a register to be sure it is actually touched.  */
  if (last_expr_value != 0 && GET_CODE (last_expr_value) == MEM
      && TREE_THIS_VOLATILE (exp))
    {
      if (TYPE_MODE (TREE_TYPE (exp)) == VOIDmode)
	;
      else if (TYPE_MODE (TREE_TYPE (exp)) != BLKmode)
	copy_to_reg (last_expr_value);
      else
	{
	  rtx lab = gen_label_rtx ();

	  /* Compare the value with itself to reference it.  */
	  emit_cmp_insn (last_expr_value, last_expr_value, EQ,
			 expand_expr (TYPE_SIZE (last_expr_type),
				      NULL_RTX, VOIDmode, 0),
			 BLKmode, 0,
			 TYPE_ALIGN (last_expr_type) / BITS_PER_UNIT);
	  emit_jump_insn ((*bcc_gen_fctn[(int) EQ]) (lab));
	  emit_label (lab);
	}
    }

  /* If this expression is part of a ({...}) and is in memory, we may have
     to preserve temporaries.  */
  preserve_temp_slots (last_expr_value);

  /* Free any temporaries used to evaluate this expression.  Any temporary
     used as a result of this expression will already have been preserved
     above.  */
  free_temp_slots ();

  emit_queue ();
}
1578
1579 /* Warn if EXP contains any computations whose results are not used.
1580 Return 1 if a warning is printed; 0 otherwise. */
1581
1582 int
1583 warn_if_unused_value (exp)
1584 tree exp;
1585 {
1586 if (TREE_USED (exp))
1587 return 0;
1588
1589 switch (TREE_CODE (exp))
1590 {
1591 case PREINCREMENT_EXPR:
1592 case POSTINCREMENT_EXPR:
1593 case PREDECREMENT_EXPR:
1594 case POSTDECREMENT_EXPR:
1595 case MODIFY_EXPR:
1596 case INIT_EXPR:
1597 case TARGET_EXPR:
1598 case CALL_EXPR:
1599 case METHOD_CALL_EXPR:
1600 case RTL_EXPR:
1601 case TRY_CATCH_EXPR:
1602 case WITH_CLEANUP_EXPR:
1603 case EXIT_EXPR:
1604 /* We don't warn about COND_EXPR because it may be a useful
1605 construct if either arm contains a side effect. */
1606 case COND_EXPR:
1607 return 0;
1608
1609 case BIND_EXPR:
1610 /* For a binding, warn if no side effect within it. */
1611 return warn_if_unused_value (TREE_OPERAND (exp, 1));
1612
1613 case SAVE_EXPR:
1614 return warn_if_unused_value (TREE_OPERAND (exp, 1));
1615
1616 case TRUTH_ORIF_EXPR:
1617 case TRUTH_ANDIF_EXPR:
1618 /* In && or ||, warn if 2nd operand has no side effect. */
1619 return warn_if_unused_value (TREE_OPERAND (exp, 1));
1620
1621 case COMPOUND_EXPR:
1622 if (TREE_NO_UNUSED_WARNING (exp))
1623 return 0;
1624 if (warn_if_unused_value (TREE_OPERAND (exp, 0)))
1625 return 1;
1626 /* Let people do `(foo (), 0)' without a warning. */
1627 if (TREE_CONSTANT (TREE_OPERAND (exp, 1)))
1628 return 0;
1629 return warn_if_unused_value (TREE_OPERAND (exp, 1));
1630
1631 case NOP_EXPR:
1632 case CONVERT_EXPR:
1633 case NON_LVALUE_EXPR:
1634 /* Don't warn about values cast to void. */
1635 if (TREE_TYPE (exp) == void_type_node)
1636 return 0;
1637 /* Don't warn about conversions not explicit in the user's program. */
1638 if (TREE_NO_UNUSED_WARNING (exp))
1639 return 0;
1640 /* Assignment to a cast usually results in a cast of a modify.
1641 Don't complain about that. There can be an arbitrary number of
1642 casts before the modify, so we must loop until we find the first
1643 non-cast expression and then test to see if that is a modify. */
1644 {
1645 tree tem = TREE_OPERAND (exp, 0);
1646
1647 while (TREE_CODE (tem) == CONVERT_EXPR || TREE_CODE (tem) == NOP_EXPR)
1648 tem = TREE_OPERAND (tem, 0);
1649
1650 if (TREE_CODE (tem) == MODIFY_EXPR || TREE_CODE (tem) == INIT_EXPR
1651 || TREE_CODE (tem) == CALL_EXPR)
1652 return 0;
1653 }
1654 goto warn;
1655
1656 case INDIRECT_REF:
1657 /* Don't warn about automatic dereferencing of references, since
1658 the user cannot control it. */
1659 if (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == REFERENCE_TYPE)
1660 return warn_if_unused_value (TREE_OPERAND (exp, 0));
1661 /* ... fall through ... */
1662
1663 default:
1664 /* Referencing a volatile value is a side effect, so don't warn. */
1665 if ((TREE_CODE_CLASS (TREE_CODE (exp)) == 'd'
1666 || TREE_CODE_CLASS (TREE_CODE (exp)) == 'r')
1667 && TREE_THIS_VOLATILE (exp))
1668 return 0;
1669 warn:
1670 warning_with_file_and_line (emit_filename, emit_lineno,
1671 "value computed is not used");
1672 return 1;
1673 }
1674 }
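
/* For example, a statement such as

       x + y;

   computes a sum that is never used, so warn_if_unused_value prints
   "value computed is not used" and returns 1.  By contrast,

       (void) (x + y);   x = y;   f (x);

   produce no warning: the cast to void, the assignment and the call
   are all accepted by the cases above.  */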
1675
1676 /* Clear out the memory of the last expression evaluated. */
1677
1678 void
1679 clear_last_expr ()
1680 {
1681 last_expr_type = 0;
1682 }
1683
1684 /* Begin a statement which will return a value.
1685 Return the RTL_EXPR for this statement expr.
1686 The caller must save that value and pass it to expand_end_stmt_expr. */
1687
1688 tree
1689 expand_start_stmt_expr ()
1690 {
1691 int momentary;
1692 tree t;
1693
1694 /* Make the RTL_EXPR node temporary, not momentary,
1695 so that rtl_expr_chain doesn't become garbage. */
1696 momentary = suspend_momentary ();
1697 t = make_node (RTL_EXPR);
1698 resume_momentary (momentary);
1699 do_pending_stack_adjust ();
1700 start_sequence_for_rtl_expr (t);
1701 NO_DEFER_POP;
1702 expr_stmts_for_value++;
1703 return t;
1704 }
1705
1706 /* Restore the previous state at the end of a statement that returns a value.
1707 Returns a tree node representing the statement's value and the
1708 insns to compute the value.
1709
1710 The nodes of that expression have been freed by now, so we cannot use them.
1711 But we don't want to do that anyway; the expression has already been
1712 evaluated and now we just want to use the value. So generate an RTL_EXPR
1713 with the proper type and RTL value.
1714
1715 If the last substatement was not an expression,
1716 return something with type `void'. */
1717
1718 tree
1719 expand_end_stmt_expr (t)
1720 tree t;
1721 {
1722 OK_DEFER_POP;
1723
1724 if (last_expr_type == 0)
1725 {
1726 last_expr_type = void_type_node;
1727 last_expr_value = const0_rtx;
1728 }
1729 else if (last_expr_value == 0)
1730 /* There are some cases where this can happen, such as when the
1731 statement is of void type. */
1732 last_expr_value = const0_rtx;
1733 else if (GET_CODE (last_expr_value) != REG && ! CONSTANT_P (last_expr_value))
1734 /* Remove any possible QUEUED. */
1735 last_expr_value = protect_from_queue (last_expr_value, 0);
1736
1737 emit_queue ();
1738
1739 TREE_TYPE (t) = last_expr_type;
1740 RTL_EXPR_RTL (t) = last_expr_value;
1741 RTL_EXPR_SEQUENCE (t) = get_insns ();
1742
1743 rtl_expr_chain = tree_cons (NULL_TREE, t, rtl_expr_chain);
1744
1745 end_sequence ();
1746
1747 /* Don't consider deleting this expr or containing exprs at tree level. */
1748 TREE_SIDE_EFFECTS (t) = 1;
1749 /* Propagate volatility of the actual RTL expr. */
1750 TREE_THIS_VOLATILE (t) = volatile_refs_p (last_expr_value);
1751
1752 last_expr_type = 0;
1753 expr_stmts_for_value--;
1754
1755 return t;
1756 }
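
/* A front end expanding the GNU statement expression

       ({ int i = f (); i + 1; })

   would bracket it roughly like this (a sketch; the real callers are
   in the language parsers):

       tree t = expand_start_stmt_expr ();
       ... expand each statement inside the braces ...
       t = expand_end_stmt_expr (t);

   The resulting RTL_EXPR carries the generated insns and the value of
   the last expression statement, here `i + 1'.  */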
1757 \f
1758 /* Generate RTL for the start of an if-then. COND is the expression
1759 whose truth should be tested.
1760
1761 If EXITFLAG is nonzero, this conditional is visible to
1762 `exit_something'. */
1763
1764 void
1765 expand_start_cond (cond, exitflag)
1766 tree cond;
1767 int exitflag;
1768 {
1769 struct nesting *thiscond = ALLOC_NESTING ();
1770
1771 /* Make an entry on cond_stack for the cond we are entering. */
1772
1773 thiscond->next = cond_stack;
1774 thiscond->all = nesting_stack;
1775 thiscond->depth = ++nesting_depth;
1776 thiscond->data.cond.next_label = gen_label_rtx ();
1777 /* Before we encounter an `else', we don't need a separate exit label
1778 unless there are supposed to be exit statements
1779 to exit this conditional. */
1780 thiscond->exit_label = exitflag ? gen_label_rtx () : 0;
1781 thiscond->data.cond.endif_label = thiscond->exit_label;
1782 cond_stack = thiscond;
1783 nesting_stack = thiscond;
1784
1785 do_jump (cond, thiscond->data.cond.next_label, NULL_RTX);
1786 }
1787
1788 /* Generate RTL between the then-clause and the elseif-clause
1789 of an if-then-elseif-.... */
1790
1791 void
1792 expand_start_elseif (cond)
1793 tree cond;
1794 {
1795 if (cond_stack->data.cond.endif_label == 0)
1796 cond_stack->data.cond.endif_label = gen_label_rtx ();
1797 emit_jump (cond_stack->data.cond.endif_label);
1798 emit_label (cond_stack->data.cond.next_label);
1799 cond_stack->data.cond.next_label = gen_label_rtx ();
1800 do_jump (cond, cond_stack->data.cond.next_label, NULL_RTX);
1801 }
1802
1803 /* Generate RTL between the then-clause and the else-clause
1804 of an if-then-else. */
1805
1806 void
1807 expand_start_else ()
1808 {
1809 if (cond_stack->data.cond.endif_label == 0)
1810 cond_stack->data.cond.endif_label = gen_label_rtx ();
1811
1812 emit_jump (cond_stack->data.cond.endif_label);
1813 emit_label (cond_stack->data.cond.next_label);
1814 cond_stack->data.cond.next_label = 0; /* No more _else or _elseif calls. */
1815 }
1816
1817 /* After calling expand_start_else, turn this "else" into an "else if"
1818 by providing another condition. */
1819
1820 void
1821 expand_elseif (cond)
1822 tree cond;
1823 {
1824 cond_stack->data.cond.next_label = gen_label_rtx ();
1825 do_jump (cond, cond_stack->data.cond.next_label, NULL_RTX);
1826 }
1827
1828 /* Generate RTL for the end of an if-then.
1829 Pop the record for it off of cond_stack. */
1830
1831 void
1832 expand_end_cond ()
1833 {
1834 struct nesting *thiscond = cond_stack;
1835
1836 do_pending_stack_adjust ();
1837 if (thiscond->data.cond.next_label)
1838 emit_label (thiscond->data.cond.next_label);
1839 if (thiscond->data.cond.endif_label)
1840 emit_label (thiscond->data.cond.endif_label);
1841
1842 POPSTACK (cond_stack);
1843 last_expr_type = 0;
1844 }
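
/* Taken together, these routines let a front end expand

       if (a) s1; else if (b) s2; else s3;

   with a call sequence along these lines (a sketch):

       expand_start_cond (a, 0);
       ... expand s1 ...
       expand_start_elseif (b);
       ... expand s2 ...
       expand_start_else ();
       ... expand s3 ...
       expand_end_cond ();  */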
1845
1846
1847 \f
1848 /* Generate RTL for the start of a loop. EXIT_FLAG is nonzero if this
1849 loop should be exited by `exit_something'. This is a loop for which
1850 `expand_continue' will jump to the top of the loop.
1851
1852 Make an entry on loop_stack to record the labels associated with
1853 this loop. */
1854
1855 struct nesting *
1856 expand_start_loop (exit_flag)
1857 int exit_flag;
1858 {
1859 register struct nesting *thisloop = ALLOC_NESTING ();
1860
1861 /* Make an entry on loop_stack for the loop we are entering. */
1862
1863 thisloop->next = loop_stack;
1864 thisloop->all = nesting_stack;
1865 thisloop->depth = ++nesting_depth;
1866 thisloop->data.loop.start_label = gen_label_rtx ();
1867 thisloop->data.loop.end_label = gen_label_rtx ();
1868 thisloop->data.loop.alt_end_label = 0;
1869 thisloop->data.loop.continue_label = thisloop->data.loop.start_label;
1870 thisloop->exit_label = exit_flag ? thisloop->data.loop.end_label : 0;
1871 loop_stack = thisloop;
1872 nesting_stack = thisloop;
1873
1874 do_pending_stack_adjust ();
1875 emit_queue ();
1876 emit_note (NULL_PTR, NOTE_INSN_LOOP_BEG);
1877 emit_label (thisloop->data.loop.start_label);
1878
1879 return thisloop;
1880 }
1881
1882 /* Like expand_start_loop but for a loop where the continuation point
1883 (for expand_continue_loop) will be specified explicitly. */
1884
1885 struct nesting *
1886 expand_start_loop_continue_elsewhere (exit_flag)
1887 int exit_flag;
1888 {
1889 struct nesting *thisloop = expand_start_loop (exit_flag);
1890 loop_stack->data.loop.continue_label = gen_label_rtx ();
1891 return thisloop;
1892 }
1893
1894 /* Specify the continuation point for a loop started with
1895 expand_start_loop_continue_elsewhere.
1896 Use this at the point in the code to which a continue statement
1897 should jump. */
1898
1899 void
1900 expand_loop_continue_here ()
1901 {
1902 do_pending_stack_adjust ();
1903 emit_note (NULL_PTR, NOTE_INSN_LOOP_CONT);
1904 emit_label (loop_stack->data.loop.continue_label);
1905 }
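
/* For a loop whose continue point is not at the top, such as

       for (init; cond; incr) body

   a front end might proceed roughly as follows (a sketch):

       ... expand init ...
       expand_start_loop_continue_elsewhere (1);
       ... expand the exit test on cond ...
       ... expand body; `continue' jumps to the continue label ...
       expand_loop_continue_here ();
       ... expand incr ...
       expand_end_loop ();  */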
1906
1907 /* Finish a loop. Generate a jump back to the top and the loop-exit label.
1908 Pop the block off of loop_stack. */
1909
1910 void
1911 expand_end_loop ()
1912 {
1913 register rtx insn;
1914 register rtx start_label;
1915 rtx last_test_insn = 0;
1916 int num_insns = 0;
1917
1918 insn = get_last_insn ();
1919 start_label = loop_stack->data.loop.start_label;
1920
1921 /* Mark the continue-point at the top of the loop if none elsewhere. */
1922 if (start_label == loop_stack->data.loop.continue_label)
1923 emit_note_before (NOTE_INSN_LOOP_CONT, start_label);
1924
1925 do_pending_stack_adjust ();
1926
1927 /* If optimizing, perhaps reorder the loop. If the loop
1928 starts with a conditional exit, roll that to the end
1929 where it will optimize together with the jump back.
1930
1931 We look for the last conditional branch to the exit that we encounter
1932 before hitting 30 insns or a CALL_INSN. If we see an unconditional
1933 branch to the exit first, use it.
1934
1935 We must also stop at NOTE_INSN_BLOCK_BEG and NOTE_INSN_BLOCK_END notes
1936 because moving them is not valid. */
1937
1938 if (optimize
1939 &&
1940 ! (GET_CODE (insn) == JUMP_INSN
1941 && GET_CODE (PATTERN (insn)) == SET
1942 && SET_DEST (PATTERN (insn)) == pc_rtx
1943 && GET_CODE (SET_SRC (PATTERN (insn))) == IF_THEN_ELSE))
1944 {
1945 /* Scan insns from the top of the loop looking for a qualified
1946 conditional exit. */
1947 for (insn = NEXT_INSN (loop_stack->data.loop.start_label); insn;
1948 insn = NEXT_INSN (insn))
1949 {
1950 if (GET_CODE (insn) == CALL_INSN || GET_CODE (insn) == CODE_LABEL)
1951 break;
1952
1953 if (GET_CODE (insn) == NOTE
1954 && (NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_BEG
1955 || NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_END))
1956 break;
1957
1958 if (GET_CODE (insn) == JUMP_INSN || GET_CODE (insn) == INSN)
1959 num_insns++;
1960
1961 if (last_test_insn && num_insns > 30)
1962 break;
1963
1964 if (GET_CODE (insn) == JUMP_INSN && GET_CODE (PATTERN (insn)) == SET
1965 && SET_DEST (PATTERN (insn)) == pc_rtx
1966 && GET_CODE (SET_SRC (PATTERN (insn))) == IF_THEN_ELSE
1967 && ((GET_CODE (XEXP (SET_SRC (PATTERN (insn)), 1)) == LABEL_REF
1968 && ((XEXP (XEXP (SET_SRC (PATTERN (insn)), 1), 0)
1969 == loop_stack->data.loop.end_label)
1970 || (XEXP (XEXP (SET_SRC (PATTERN (insn)), 1), 0)
1971 == loop_stack->data.loop.alt_end_label)))
1972 || (GET_CODE (XEXP (SET_SRC (PATTERN (insn)), 2)) == LABEL_REF
1973 && ((XEXP (XEXP (SET_SRC (PATTERN (insn)), 2), 0)
1974 == loop_stack->data.loop.end_label)
1975 || (XEXP (XEXP (SET_SRC (PATTERN (insn)), 2), 0)
1976 == loop_stack->data.loop.alt_end_label)))))
1977 last_test_insn = insn;
1978
1979 if (last_test_insn == 0 && GET_CODE (insn) == JUMP_INSN
1980 && GET_CODE (PATTERN (insn)) == SET
1981 && SET_DEST (PATTERN (insn)) == pc_rtx
1982 && GET_CODE (SET_SRC (PATTERN (insn))) == LABEL_REF
1983 && ((XEXP (SET_SRC (PATTERN (insn)), 0)
1984 == loop_stack->data.loop.end_label)
1985 || (XEXP (SET_SRC (PATTERN (insn)), 0)
1986 == loop_stack->data.loop.alt_end_label)))
1987 /* Include BARRIER. */
1988 last_test_insn = NEXT_INSN (insn);
1989 }
1990
1991 if (last_test_insn != 0 && last_test_insn != get_last_insn ())
1992 {
1993 /* We found one. Move everything from there up
1994 to the end of the loop, and add a jump into the loop
1995 to jump to there. */
1996 register rtx newstart_label = gen_label_rtx ();
1997 register rtx start_move = start_label;
1998
1999 /* If the start label is preceded by a NOTE_INSN_LOOP_CONT note,
2000 then we want to move this note also. */
2001 if (GET_CODE (PREV_INSN (start_move)) == NOTE
2002 && (NOTE_LINE_NUMBER (PREV_INSN (start_move))
2003 == NOTE_INSN_LOOP_CONT))
2004 start_move = PREV_INSN (start_move);
2005
2006 emit_label_after (newstart_label, PREV_INSN (start_move));
2007 reorder_insns (start_move, last_test_insn, get_last_insn ());
2008 emit_jump_insn_after (gen_jump (start_label),
2009 PREV_INSN (newstart_label));
2010 emit_barrier_after (PREV_INSN (newstart_label));
2011 start_label = newstart_label;
2012 }
2013 }
2014
2015 emit_jump (start_label);
2016 emit_note (NULL_PTR, NOTE_INSN_LOOP_END);
2017 emit_label (loop_stack->data.loop.end_label);
2018
2019 POPSTACK (loop_stack);
2020
2021 last_expr_type = 0;
2022 }
2023
2024 /* Generate a jump to the current loop's continue-point.
2025 This is usually the top of the loop, but may be specified
2026 explicitly elsewhere. If not currently inside a loop,
2027 return 0 and do nothing; caller will print an error message. */
2028
2029 int
2030 expand_continue_loop (whichloop)
2031 struct nesting *whichloop;
2032 {
2033 last_expr_type = 0;
2034 if (whichloop == 0)
2035 whichloop = loop_stack;
2036 if (whichloop == 0)
2037 return 0;
2038 expand_goto_internal (NULL_TREE, whichloop->data.loop.continue_label,
2039 NULL_RTX);
2040 return 1;
2041 }
2042
2043 /* Generate a jump to exit the current loop. If not currently inside a loop,
2044 return 0 and do nothing; caller will print an error message. */
2045
2046 int
2047 expand_exit_loop (whichloop)
2048 struct nesting *whichloop;
2049 {
2050 last_expr_type = 0;
2051 if (whichloop == 0)
2052 whichloop = loop_stack;
2053 if (whichloop == 0)
2054 return 0;
2055 expand_goto_internal (NULL_TREE, whichloop->data.loop.end_label, NULL_RTX);
2056 return 1;
2057 }
2058
2059 /* Generate a conditional jump to exit the current loop if COND
2060 evaluates to zero. If not currently inside a loop,
2061 return 0 and do nothing; caller will print an error message. */
2062
2063 int
2064 expand_exit_loop_if_false (whichloop, cond)
2065 struct nesting *whichloop;
2066 tree cond;
2067 {
2068 rtx label = gen_label_rtx ();
2069 rtx last_insn;
2070 last_expr_type = 0;
2071
2072 if (whichloop == 0)
2073 whichloop = loop_stack;
2074 if (whichloop == 0)
2075 return 0;
2076 /* In order to handle fixups, we actually create a conditional jump
2077 around an unconditional branch to exit the loop. If fixups are
2078 necessary, they go before the unconditional branch. */
2079
2080
2081 do_jump (cond, NULL_RTX, label);
2082 last_insn = get_last_insn ();
2083 if (GET_CODE (last_insn) == CODE_LABEL)
2084 whichloop->data.loop.alt_end_label = last_insn;
2085 expand_goto_internal (NULL_TREE, whichloop->data.loop.end_label,
2086 NULL_RTX);
2087 emit_label (label);
2088
2089 return 1;
2090 }
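
/* So a simple

       while (cond) body

   can be expanded as (a sketch):

       struct nesting *loop = expand_start_loop (1);
       expand_exit_loop_if_false (loop, cond);
       ... expand body ...
       expand_end_loop ();

   The conditional exit emitted at the top of the loop is exactly the
   kind of jump that expand_end_loop tries to roll to the bottom.  */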
2091
2092 /* Return non-zero if we should preserve sub-expressions as separate
2093 pseudos. We never do so if we aren't optimizing. We always do so
2094 if -fexpensive-optimizations.
2095
2096 Otherwise, we only do so if we are in the "early" part of a loop. I.e.,
2097 the loop may still be a small one. */
2098
2099 int
2100 preserve_subexpressions_p ()
2101 {
2102 rtx insn;
2103
2104 if (flag_expensive_optimizations)
2105 return 1;
2106
2107 if (optimize == 0 || loop_stack == 0)
2108 return 0;
2109
2110 insn = get_last_insn_anywhere ();
2111
2112 return (insn
2113 && (INSN_UID (insn) - INSN_UID (loop_stack->data.loop.start_label)
2114 < n_non_fixed_regs * 3));
2115
2116 }
2117
2118 /* Generate a jump to exit the current loop, conditional, binding contour
2119 or case statement. Not all such constructs are visible to this function,
2120 only those started with EXIT_FLAG nonzero. Individual languages use
2121 the EXIT_FLAG parameter to control which kinds of constructs you can
2122 exit this way.
2123
2124 If not currently inside anything that can be exited,
2125 return 0 and do nothing; caller will print an error message. */
2126
2127 int
2128 expand_exit_something ()
2129 {
2130 struct nesting *n;
2131 last_expr_type = 0;
2132 for (n = nesting_stack; n; n = n->all)
2133 if (n->exit_label != 0)
2134 {
2135 expand_goto_internal (NULL_TREE, n->exit_label, NULL_RTX);
2136 return 1;
2137 }
2138
2139 return 0;
2140 }
2141 \f
2142 /* Generate RTL to return from the current function, with no value.
2143 (That is, we do not do anything about returning any value.) */
2144
2145 void
2146 expand_null_return ()
2147 {
2148 struct nesting *block = block_stack;
2149 rtx last_insn = 0;
2150
2151 /* Does any pending block have cleanups? */
2152
2153 while (block && block->data.block.cleanups == 0)
2154 block = block->next;
2155
2156 /* If yes, use a goto to return, since that runs cleanups. */
2157
2158 expand_null_return_1 (last_insn, block != 0);
2159 }
2160
2161 /* Generate RTL to return from the current function, with value VAL. */
2162
2163 static void
2164 expand_value_return (val)
2165 rtx val;
2166 {
2167 struct nesting *block = block_stack;
2168 rtx last_insn = get_last_insn ();
2169 rtx return_reg = DECL_RTL (DECL_RESULT (current_function_decl));
2170
2171 /* Copy the value to the return location
2172 unless it's already there. */
2173
2174 if (return_reg != val)
2175 {
2176 #ifdef PROMOTE_FUNCTION_RETURN
2177 tree type = TREE_TYPE (DECL_RESULT (current_function_decl));
2178 int unsignedp = TREE_UNSIGNED (type);
2179 enum machine_mode mode
2180 = promote_mode (type, DECL_MODE (DECL_RESULT (current_function_decl)),
2181 &unsignedp, 1);
2182
2183 if (GET_MODE (val) != VOIDmode && GET_MODE (val) != mode)
2184 convert_move (return_reg, val, unsignedp);
2185 else
2186 #endif
2187 emit_move_insn (return_reg, val);
2188 }
2189 if (GET_CODE (return_reg) == REG
2190 && REGNO (return_reg) < FIRST_PSEUDO_REGISTER)
2191 emit_insn (gen_rtx_USE (VOIDmode, return_reg));
2192 /* Handle calls that return values in multiple non-contiguous locations.
2193 The Irix 6 ABI has examples of this. */
2194 else if (GET_CODE (return_reg) == PARALLEL)
2195 {
2196 int i;
2197
2198 for (i = 0; i < XVECLEN (return_reg, 0); i++)
2199 {
2200 rtx x = XEXP (XVECEXP (return_reg, 0, i), 0);
2201
2202 if (GET_CODE (x) == REG
2203 && REGNO (x) < FIRST_PSEUDO_REGISTER)
2204 emit_insn (gen_rtx_USE (VOIDmode, x));
2205 }
2206 }
2207
2208 /* Does any pending block have cleanups? */
2209
2210 while (block && block->data.block.cleanups == 0)
2211 block = block->next;
2212
2213 /* If yes, use a goto to return, since that runs cleanups.
2214 Use LAST_INSN to put cleanups *before* the move insn emitted above. */
2215
2216 expand_null_return_1 (last_insn, block != 0);
2217 }
2218
2219 /* Output a return with no value. If LAST_INSN is nonzero,
2220 pretend that the return takes place after LAST_INSN.
2221 If USE_GOTO is nonzero then don't use a return instruction;
2222 go to the return label instead. This causes any cleanups
2223 of pending blocks to be executed normally. */
2224
2225 static void
2226 expand_null_return_1 (last_insn, use_goto)
2227 rtx last_insn;
2228 int use_goto;
2229 {
2230 rtx end_label = cleanup_label ? cleanup_label : return_label;
2231
2232 clear_pending_stack_adjust ();
2233 do_pending_stack_adjust ();
2234 last_expr_type = 0;
2235
2236 /* PCC-struct return always uses an epilogue. */
2237 if (current_function_returns_pcc_struct || use_goto)
2238 {
2239 if (end_label == 0)
2240 end_label = return_label = gen_label_rtx ();
2241 expand_goto_internal (NULL_TREE, end_label, last_insn);
2242 return;
2243 }
2244
2245 /* Otherwise output a simple return-insn if one is available,
2246 unless it won't do the job. */
2247 #ifdef HAVE_return
2248 if (HAVE_return && use_goto == 0 && cleanup_label == 0)
2249 {
2250 emit_jump_insn (gen_return ());
2251 emit_barrier ();
2252 return;
2253 }
2254 #endif
2255
2256 /* Otherwise jump to the epilogue. */
2257 expand_goto_internal (NULL_TREE, end_label, last_insn);
2258 }
2259 \f
2260 /* Generate RTL to evaluate the expression RETVAL and return it
2261 from the current function. */
2262
2263 void
2264 expand_return (retval)
2265 tree retval;
2266 {
2267 /* If there are any cleanups to be performed, then they will
2268 be inserted following LAST_INSN. It is desirable
2269 that the last_insn, for such purposes, should be the
2270 last insn before computing the return value. Otherwise, cleanups
2271 which call functions can clobber the return value. */
2272 /* ??? rms: I think that is erroneous, because in C++ it would
2273 run destructors on variables that might be used in the subsequent
2274 computation of the return value. */
2275 rtx last_insn = 0;
2276 register rtx val = 0;
2277 register rtx op0;
2278 tree retval_rhs;
2279 int cleanups;
2280
2281 /* If function wants no value, give it none. */
2282 if (TREE_CODE (TREE_TYPE (TREE_TYPE (current_function_decl))) == VOID_TYPE)
2283 {
2284 expand_expr (retval, NULL_RTX, VOIDmode, 0);
2285 emit_queue ();
2286 expand_null_return ();
2287 return;
2288 }
2289
2290 /* Are any cleanups needed? E.g. C++ destructors to be run? */
2291 /* This is not sufficient. We also need to watch for cleanups of the
2292 expression we are about to expand. Unfortunately, we cannot know
2293 if it has cleanups until we expand it, and we want to change how we
2294 expand it depending upon if we need cleanups. We can't win. */
2295 #if 0
2296 cleanups = any_pending_cleanups (1);
2297 #else
2298 cleanups = 1;
2299 #endif
2300
2301 if (TREE_CODE (retval) == RESULT_DECL)
2302 retval_rhs = retval;
2303 else if ((TREE_CODE (retval) == MODIFY_EXPR || TREE_CODE (retval) == INIT_EXPR)
2304 && TREE_CODE (TREE_OPERAND (retval, 0)) == RESULT_DECL)
2305 retval_rhs = TREE_OPERAND (retval, 1);
2306 else if (TREE_TYPE (retval) == void_type_node)
2307 /* Recognize tail-recursive call to void function. */
2308 retval_rhs = retval;
2309 else
2310 retval_rhs = NULL_TREE;
2311
2312 /* Only use `last_insn' if there are cleanups which must be run. */
2313 if (cleanups || cleanup_label != 0)
2314 last_insn = get_last_insn ();
2315
2316 /* Distribute return down conditional expr if either of the sides
2317 may involve tail recursion (see test below). This enhances the number
2318 of tail recursions we see. Don't do this always since it can produce
2319 sub-optimal code in some cases and we distribute assignments into
2320 conditional expressions when it would help. */
2321
2322 if (optimize && retval_rhs != 0
2323 && frame_offset == 0
2324 && TREE_CODE (retval_rhs) == COND_EXPR
2325 && (TREE_CODE (TREE_OPERAND (retval_rhs, 1)) == CALL_EXPR
2326 || TREE_CODE (TREE_OPERAND (retval_rhs, 2)) == CALL_EXPR))
2327 {
2328 rtx label = gen_label_rtx ();
2329 tree expr;
2330
2331 do_jump (TREE_OPERAND (retval_rhs, 0), label, NULL_RTX);
2332 expr = build (MODIFY_EXPR, TREE_TYPE (TREE_TYPE (current_function_decl)),
2333 DECL_RESULT (current_function_decl),
2334 TREE_OPERAND (retval_rhs, 1));
2335 TREE_SIDE_EFFECTS (expr) = 1;
2336 expand_return (expr);
2337 emit_label (label);
2338
2339 expr = build (MODIFY_EXPR, TREE_TYPE (TREE_TYPE (current_function_decl)),
2340 DECL_RESULT (current_function_decl),
2341 TREE_OPERAND (retval_rhs, 2));
2342 TREE_SIDE_EFFECTS (expr) = 1;
2343 expand_return (expr);
2344 return;
2345 }
2346
2347 /* For tail-recursive call to current function,
2348 just jump back to the beginning.
2349 It's unsafe if any auto variable in this function
2350 has its address taken; for simplicity,
2351 require the stack frame to be empty. */
2352 if (optimize && retval_rhs != 0
2353 && frame_offset == 0
2354 && TREE_CODE (retval_rhs) == CALL_EXPR
2355 && TREE_CODE (TREE_OPERAND (retval_rhs, 0)) == ADDR_EXPR
2356 && TREE_OPERAND (TREE_OPERAND (retval_rhs, 0), 0) == current_function_decl
2357 /* Finish checking validity, and if valid emit code
2358 to set the argument variables for the new call. */
2359 && tail_recursion_args (TREE_OPERAND (retval_rhs, 1),
2360 DECL_ARGUMENTS (current_function_decl)))
2361 {
2362 if (tail_recursion_label == 0)
2363 {
2364 tail_recursion_label = gen_label_rtx ();
2365 emit_label_after (tail_recursion_label,
2366 tail_recursion_reentry);
2367 }
2368 emit_queue ();
2369 expand_goto_internal (NULL_TREE, tail_recursion_label, last_insn);
2370 emit_barrier ();
2371 return;
2372 }
2373 #ifdef HAVE_return
2374 /* This optimization is safe if there are local cleanups
2375 because expand_null_return takes care of them.
2376 ??? I think it should also be safe when there is a cleanup label,
2377 because expand_null_return takes care of them, too.
2378 Any reason why not? */
2379 if (HAVE_return && cleanup_label == 0
2380 && ! current_function_returns_pcc_struct
2381 && BRANCH_COST <= 1)
2382 {
2383 /* If this is return x == y; then generate
2384 if (x == y) return 1; else return 0;
2385 if we can do it with explicit return insns and branches are cheap,
2386 but not if we have the corresponding scc insn. */
2387 int has_scc = 0;
2388 if (retval_rhs)
2389 switch (TREE_CODE (retval_rhs))
2390 {
2391 case EQ_EXPR:
2392 #ifdef HAVE_seq
2393 has_scc = HAVE_seq;
2394 #endif
2395 case NE_EXPR:
2396 #ifdef HAVE_sne
2397 has_scc = HAVE_sne;
2398 #endif
2399 case GT_EXPR:
2400 #ifdef HAVE_sgt
2401 has_scc = HAVE_sgt;
2402 #endif
2403 case GE_EXPR:
2404 #ifdef HAVE_sge
2405 has_scc = HAVE_sge;
2406 #endif
2407 case LT_EXPR:
2408 #ifdef HAVE_slt
2409 has_scc = HAVE_slt;
2410 #endif
2411 case LE_EXPR:
2412 #ifdef HAVE_sle
2413 has_scc = HAVE_sle;
2414 #endif
2415 case TRUTH_ANDIF_EXPR:
2416 case TRUTH_ORIF_EXPR:
2417 case TRUTH_AND_EXPR:
2418 case TRUTH_OR_EXPR:
2419 case TRUTH_NOT_EXPR:
2420 case TRUTH_XOR_EXPR:
2421 if (! has_scc)
2422 {
2423 op0 = gen_label_rtx ();
2424 jumpifnot (retval_rhs, op0);
2425 expand_value_return (const1_rtx);
2426 emit_label (op0);
2427 expand_value_return (const0_rtx);
2428 return;
2429 }
2430 break;
2431
2432 default:
2433 break;
2434 }
2435 }
2436 #endif /* HAVE_return */
2437
2438 /* If the result is an aggregate that is being returned in one (or more)
2439 registers, load the registers here. The compiler currently can't handle
2440 copying a BLKmode value into registers. We could put this code in a
2441 more general area (for use by everyone instead of just function
2442 call/return), but until this feature is generally usable it is kept here
2443 (and in expand_call). The value must go into a pseudo in case there
2444 are cleanups that will clobber the real return register. */
2445
2446 if (retval_rhs != 0
2447 && TYPE_MODE (TREE_TYPE (retval_rhs)) == BLKmode
2448 && GET_CODE (DECL_RTL (DECL_RESULT (current_function_decl))) == REG)
2449 {
2450 int i, bitpos, xbitpos;
2451 int big_endian_correction = 0;
2452 int bytes = int_size_in_bytes (TREE_TYPE (retval_rhs));
2453 int n_regs = (bytes + UNITS_PER_WORD - 1) / UNITS_PER_WORD;
2454 int bitsize = MIN (TYPE_ALIGN (TREE_TYPE (retval_rhs)),BITS_PER_WORD);
2455 rtx *result_pseudos = (rtx *) alloca (sizeof (rtx) * n_regs);
2456 rtx result_reg, src, dst;
2457 rtx result_val = expand_expr (retval_rhs, NULL_RTX, VOIDmode, 0);
2458 enum machine_mode tmpmode, result_reg_mode;
2459
2460 /* Structures whose size is not a multiple of a word are aligned
2461 to the least significant byte (to the right). On a BYTES_BIG_ENDIAN
2462 machine, this means we must skip the empty high order bytes when
2463 calculating the bit offset. */
2464 if (BYTES_BIG_ENDIAN && bytes % UNITS_PER_WORD)
2465 big_endian_correction = (BITS_PER_WORD - ((bytes % UNITS_PER_WORD)
2466 * BITS_PER_UNIT));
2467
2468 /* Copy the structure BITSIZE bits at a time. */
2469 for (bitpos = 0, xbitpos = big_endian_correction;
2470 bitpos < bytes * BITS_PER_UNIT;
2471 bitpos += bitsize, xbitpos += bitsize)
2472 {
2473 /* We need a new destination pseudo each time xbitpos is
2474 on a word boundary and when xbitpos == big_endian_correction
2475 (the first time through). */
2476 if (xbitpos % BITS_PER_WORD == 0
2477 || xbitpos == big_endian_correction)
2478 {
2479 /* Generate an appropriate register. */
2480 dst = gen_reg_rtx (word_mode);
2481 result_pseudos[xbitpos / BITS_PER_WORD] = dst;
2482
2483 /* Clobber the destination before we move anything into it. */
2484 emit_insn (gen_rtx_CLOBBER (VOIDmode, dst));
2485 }
2486
2487 /* We need a new source operand each time bitpos is on a word
2488 boundary. */
2489 if (bitpos % BITS_PER_WORD == 0)
2490 src = operand_subword_force (result_val,
2491 bitpos / BITS_PER_WORD,
2492 BLKmode);
2493
2494 /* Use bitpos for the source extraction (left justified) and
2495 xbitpos for the destination store (right justified). */
2496 store_bit_field (dst, bitsize, xbitpos % BITS_PER_WORD, word_mode,
2497 extract_bit_field (src, bitsize,
2498 bitpos % BITS_PER_WORD, 1,
2499 NULL_RTX, word_mode,
2500 word_mode,
2501 bitsize / BITS_PER_UNIT,
2502 BITS_PER_WORD),
2503 bitsize / BITS_PER_UNIT, BITS_PER_WORD);
2504 }
2505
2506 /* Find the smallest integer mode large enough to hold the
2507 entire structure and use that mode instead of BLKmode
2508 on the USE insn for the return register. */
2509 bytes = int_size_in_bytes (TREE_TYPE (retval_rhs));
2510 for (tmpmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2511 tmpmode != MAX_MACHINE_MODE;
2512 tmpmode = GET_MODE_WIDER_MODE (tmpmode))
2513 {
2514 /* Have we found a large enough mode? */
2515 if (GET_MODE_SIZE (tmpmode) >= bytes)
2516 break;
2517 }
2518
2519 /* No suitable mode found. */
2520 if (tmpmode == MAX_MACHINE_MODE)
2521 abort ();
2522
2523 PUT_MODE (DECL_RTL (DECL_RESULT (current_function_decl)), tmpmode);
2524
2525 if (GET_MODE_SIZE (tmpmode) < GET_MODE_SIZE (word_mode))
2526 result_reg_mode = word_mode;
2527 else
2528 result_reg_mode = tmpmode;
2529 result_reg = gen_reg_rtx (result_reg_mode);
2530
2531 emit_queue ();
2532 for (i = 0; i < n_regs; i++)
2533 emit_move_insn (operand_subword (result_reg, i, 0, result_reg_mode),
2534 result_pseudos[i]);
2535
2536 if (tmpmode != result_reg_mode)
2537 result_reg = gen_lowpart (tmpmode, result_reg);
2538
2539 expand_value_return (result_reg);
2540 }
2541 else if (cleanups
2542 && retval_rhs != 0
2543 && TREE_TYPE (retval_rhs) != void_type_node
2544 && GET_CODE (DECL_RTL (DECL_RESULT (current_function_decl))) == REG)
2545 {
2546 /* Calculate the return value into a pseudo reg. */
2547 val = gen_reg_rtx (DECL_MODE (DECL_RESULT (current_function_decl)));
2548 val = expand_expr (retval_rhs, val, GET_MODE (val), 0);
2549 val = force_not_mem (val);
2550 emit_queue ();
2551 /* Return the calculated value, doing cleanups first. */
2552 expand_value_return (val);
2553 }
2554 else
2555 {
2556 /* No cleanups or no hard reg used;
2557 calculate value into hard return reg. */
2558 expand_expr (retval, const0_rtx, VOIDmode, 0);
2559 emit_queue ();
2560 expand_value_return (DECL_RTL (DECL_RESULT (current_function_decl)));
2561 }
2562 }
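
/* As an example of the tail-recursion case above, a definition like

       int f (n, acc) int n, acc;
       { return n == 0 ? acc : f (n - 1, acc * n); }

   is compiled, when the frame is empty and tail_recursion_args
   succeeds, into moves that update the parameters followed by a jump
   back to tail_recursion_label, instead of a recursive call.  */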
2563
2564 /* Return 1 if the end of the generated RTX is not a barrier.
2565 This means code already compiled can drop through. */
2566
2567 int
2568 drop_through_at_end_p ()
2569 {
2570 rtx insn = get_last_insn ();
2571 while (insn && GET_CODE (insn) == NOTE)
2572 insn = PREV_INSN (insn);
2573 return insn && GET_CODE (insn) != BARRIER;
2574 }
2575 \f
2576 /* Emit code to alter this function's formal parms for a tail-recursive call.
2577 ACTUALS is a list of actual parameter expressions (chain of TREE_LISTs).
2578 FORMALS is the chain of decls of formals.
2579 Return 1 if this can be done;
2580 otherwise return 0 and do not emit any code. */
2581
2582 static int
2583 tail_recursion_args (actuals, formals)
2584 tree actuals, formals;
2585 {
2586 register tree a = actuals, f = formals;
2587 register int i;
2588 register rtx *argvec;
2589
2590 /* Check that number and types of actuals are compatible
2591 with the formals. This is not always true in valid C code.
2592 Also check that no formal needs to be addressable
2593 and that all formals are scalars. */
2594
2595 /* Also count the args. */
2596
2597 for (a = actuals, f = formals, i = 0; a && f; a = TREE_CHAIN (a), f = TREE_CHAIN (f), i++)
2598 {
2599 if (TYPE_MAIN_VARIANT (TREE_TYPE (TREE_VALUE (a)))
2600 != TYPE_MAIN_VARIANT (TREE_TYPE (f)))
2601 return 0;
2602 if (GET_CODE (DECL_RTL (f)) != REG || DECL_MODE (f) == BLKmode)
2603 return 0;
2604 }
2605 if (a != 0 || f != 0)
2606 return 0;
2607
2608 /* Compute all the actuals. */
2609
2610 argvec = (rtx *) alloca (i * sizeof (rtx));
2611
2612 for (a = actuals, i = 0; a; a = TREE_CHAIN (a), i++)
2613 argvec[i] = expand_expr (TREE_VALUE (a), NULL_RTX, VOIDmode, 0);
2614
2615 /* Find which actual values refer to current values of previous formals.
2616 Copy each of them now, before any formal is changed. */
2617
2618 for (a = actuals, i = 0; a; a = TREE_CHAIN (a), i++)
2619 {
2620 int copy = 0;
2621 register int j;
2622 for (f = formals, j = 0; j < i; f = TREE_CHAIN (f), j++)
2623 if (reg_mentioned_p (DECL_RTL (f), argvec[i]))
2624 { copy = 1; break; }
2625 if (copy)
2626 argvec[i] = copy_to_reg (argvec[i]);
2627 }
2628
2629 /* Store the values of the actuals into the formals. */
2630
2631 for (f = formals, a = actuals, i = 0; f;
2632 f = TREE_CHAIN (f), a = TREE_CHAIN (a), i++)
2633 {
2634 if (GET_MODE (DECL_RTL (f)) == GET_MODE (argvec[i]))
2635 emit_move_insn (DECL_RTL (f), argvec[i]);
2636 else
2637 convert_move (DECL_RTL (f), argvec[i],
2638 TREE_UNSIGNED (TREE_TYPE (TREE_VALUE (a))));
2639 }
2640
2641 free_temp_slots ();
2642 return 1;
2643 }
2644 \f
2645 /* Generate the RTL code for entering a binding contour.
2646 The variables are declared one by one, by calls to `expand_decl'.
2647
2648 EXIT_FLAG is nonzero if this construct should be visible to
2649 `exit_something'. */
2650
2651 void
2652 expand_start_bindings (exit_flag)
2653 int exit_flag;
2654 {
2655 struct nesting *thisblock = ALLOC_NESTING ();
2656 rtx note = emit_note (NULL_PTR, NOTE_INSN_BLOCK_BEG);
2657
2658 /* Make an entry on block_stack for the block we are entering. */
2659
2660 thisblock->next = block_stack;
2661 thisblock->all = nesting_stack;
2662 thisblock->depth = ++nesting_depth;
2663 thisblock->data.block.stack_level = 0;
2664 thisblock->data.block.cleanups = 0;
2665 thisblock->data.block.function_call_count = 0;
2666 thisblock->data.block.exception_region = 0;
2667 thisblock->data.block.target_temp_slot_level = target_temp_slot_level;
2668
2669 thisblock->data.block.conditional_code = 0;
2670 thisblock->data.block.last_unconditional_cleanup = note;
2671 thisblock->data.block.cleanup_ptr = &thisblock->data.block.cleanups;
2672
2673 if (block_stack
2674 && !(block_stack->data.block.cleanups == NULL_TREE
2675 && block_stack->data.block.outer_cleanups == NULL_TREE))
2676 thisblock->data.block.outer_cleanups
2677 = tree_cons (NULL_TREE, block_stack->data.block.cleanups,
2678 block_stack->data.block.outer_cleanups);
2679 else
2680 thisblock->data.block.outer_cleanups = 0;
2681 thisblock->data.block.label_chain = 0;
2682 thisblock->data.block.innermost_stack_block = stack_block_stack;
2683 thisblock->data.block.first_insn = note;
2684 thisblock->data.block.block_start_count = ++block_start_count;
2685 thisblock->exit_label = exit_flag ? gen_label_rtx () : 0;
2686 block_stack = thisblock;
2687 nesting_stack = thisblock;
2688
2689 /* Make a new level for allocating stack slots. */
2690 push_temp_slots ();
2691 }
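
/* A brace-enclosed block in the source, say

       { int v = f (); ... }

   is expanded by bracketing calls of roughly this shape (a sketch;
   DECLS stands for the block's chain of VAR_DECLs):

       expand_start_bindings (0);
       expand_decl (v);
       expand_decl_init (v);
       ... expand the statements ...
       expand_end_bindings (decls, 1, 0);  */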
2692
2693 /* Specify the scope of temporaries created by TARGET_EXPRs. Similar
2694 to CLEANUP_POINT_EXPR, but handles cases when a series of calls to
2695 expand_expr are made. After we end the region, we know that all
2696 space for all temporaries that were created by TARGET_EXPRs will be
2697 destroyed and their space freed for reuse. */
2698
2699 void
2700 expand_start_target_temps ()
2701 {
2702 /* This is so that even if the result is preserved, the space
2703 allocated will be freed, as we know that it is no longer in use. */
2704 push_temp_slots ();
2705
2706 /* Start a new binding layer that will keep track of all cleanup
2707 actions to be performed. */
2708 expand_start_bindings (0);
2709
2710 target_temp_slot_level = temp_slot_level;
2711 }
2712
2713 void
2714 expand_end_target_temps ()
2715 {
2716 expand_end_bindings (NULL_TREE, 0, 0);
2717
2718 /* This is so that even if the result is preserved, the space
2719 allocated will be freed, as we know that it is no longer in use. */
2720 pop_temp_slots ();
2721 }
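
/* Callers bracket the expansion of a full expression whose
   TARGET_EXPR temporaries should die afterwards, roughly (a sketch):

       expand_start_target_temps ();
       expand_expr (exp, const0_rtx, VOIDmode, 0);
       expand_end_target_temps ();  */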
2722
2723 /* Mark top block of block_stack as an implicit binding for an
2724 exception region. This is used to prevent infinite recursion when
2725 ending a binding with expand_end_bindings. It is only ever called
2726 by expand_eh_region_start, as that is the only way to create a
2727 block stack for an exception region. */
2728
2729 void
2730 mark_block_as_eh_region ()
2731 {
2732 block_stack->data.block.exception_region = 1;
2733 if (block_stack->next
2734 && block_stack->next->data.block.conditional_code)
2735 {
2736 block_stack->data.block.conditional_code
2737 = block_stack->next->data.block.conditional_code;
2738 block_stack->data.block.last_unconditional_cleanup
2739 = block_stack->next->data.block.last_unconditional_cleanup;
2740 block_stack->data.block.cleanup_ptr
2741 = block_stack->next->data.block.cleanup_ptr;
2742 }
2743 }
2744
2745 /* True if we are currently emitting insns in an area of output code
2746 that is controlled by a conditional expression. This is used by
2747 the cleanup handling code to generate conditional cleanup actions. */
2748
2749 int
2750 conditional_context ()
2751 {
2752 return block_stack && block_stack->data.block.conditional_code;
2753 }
2754
2755 /* Mark top block of block_stack as not for an implicit binding for an
2756 exception region. This is only ever done by expand_eh_region_end
2757 to let expand_end_bindings know that it is being called explicitly
2758 to end just the binding layer associated with
2759 the exception region; otherwise expand_end_bindings would try to
2760 end all implicit binding layers for exception regions, and then
2761 one normal binding layer. */
2762
2763 void
2764 mark_block_as_not_eh_region ()
2765 {
2766 block_stack->data.block.exception_region = 0;
2767 }
2768
2769 /* True if the top block of block_stack was marked as for an exception
2770 region by mark_block_as_eh_region. */
2771
2772 int
2773 is_eh_region ()
2774 {
2775 return block_stack && block_stack->data.block.exception_region;
2776 }
2777
2778 /* Given a pointer to a BLOCK node, save a pointer to the most recently
2779 generated NOTE_INSN_BLOCK_END in the BLOCK_END_NOTE field of the given
2780 BLOCK node. */
2781
2782 void
2783 remember_end_note (block)
2784 register tree block;
2785 {
2786 BLOCK_END_NOTE (block) = last_block_end_note;
2787 last_block_end_note = NULL_RTX;
2788 }
2789
2790 /* Generate RTL code to terminate a binding contour.
2791 VARS is the chain of VAR_DECL nodes
2792 for the variables bound in this contour.
2793 MARK_ENDS is nonzero if we should put a note at the beginning
2794 and end of this binding contour.
2795
2796 DONT_JUMP_IN is nonzero if it is not valid to jump into this contour.
2797 (That is true automatically if the contour has a saved stack level.) */
2798
2799 void
2800 expand_end_bindings (vars, mark_ends, dont_jump_in)
2801 tree vars;
2802 int mark_ends;
2803 int dont_jump_in;
2804 {
2805 register struct nesting *thisblock;
2806 register tree decl;
2807
2808 while (block_stack->data.block.exception_region)
2809 {
2810 /* Because we don't need or want a new temporary level and
2811 because we didn't create one in expand_eh_region_start,
2812 create a fake one now to avoid removing one in
2813 expand_end_bindings. */
2814 push_temp_slots ();
2815
2816 block_stack->data.block.exception_region = 0;
2817
2818 expand_end_bindings (NULL_TREE, 0, 0);
2819 }
2820
2821 /* Since expand_eh_region_start does an expand_start_bindings, we
2822 have to first end all the bindings that were created by
2823 expand_eh_region_start. */
2824
2825 thisblock = block_stack;
2826
2827 if (warn_unused)
2828 for (decl = vars; decl; decl = TREE_CHAIN (decl))
2829 if (! TREE_USED (decl) && TREE_CODE (decl) == VAR_DECL
2830 && ! DECL_IN_SYSTEM_HEADER (decl)
2831 && DECL_NAME (decl) && ! DECL_ARTIFICIAL (decl))
2832 warning_with_decl (decl, "unused variable `%s'");
2833
2834 if (thisblock->exit_label)
2835 {
2836 do_pending_stack_adjust ();
2837 emit_label (thisblock->exit_label);
2838 }
2839
2840 /* If necessary, make a handler for nonlocal gotos taking
2841 place in the function calls in this block. */
2842 if (function_call_count != thisblock->data.block.function_call_count
2843 && nonlocal_labels
2844 /* Make handler for outermost block
2845 if there were any nonlocal gotos to this function. */
2846 && (thisblock->next == 0 ? current_function_has_nonlocal_label
2847 /* Make handler for inner block if it has something
2848 special to do when you jump out of it. */
2849 : (thisblock->data.block.cleanups != 0
2850 || thisblock->data.block.stack_level != 0)))
2851 {
2852 tree link;
2853 rtx afterward = gen_label_rtx ();
2854 rtx handler_label = gen_label_rtx ();
2855 rtx save_receiver = gen_reg_rtx (Pmode);
2856 rtx insns;
2857
2858 /* Don't let jump_optimize delete the handler. */
2859 LABEL_PRESERVE_P (handler_label) = 1;
2860
2861 /* Record the handler address in the stack slot for that purpose,
2862 during this block, saving and restoring the outer value. */
2863 if (thisblock->next != 0)
2864 {
2865 emit_move_insn (nonlocal_goto_handler_slot, save_receiver);
2866
2867 start_sequence ();
2868 emit_move_insn (save_receiver, nonlocal_goto_handler_slot);
2869 insns = get_insns ();
2870 end_sequence ();
2871 emit_insns_before (insns, thisblock->data.block.first_insn);
2872 }
2873
2874 start_sequence ();
2875 emit_move_insn (nonlocal_goto_handler_slot,
2876 gen_rtx_LABEL_REF (Pmode, handler_label));
2877 insns = get_insns ();
2878 end_sequence ();
2879 emit_insns_before (insns, thisblock->data.block.first_insn);
2880
2881 /* Jump around the handler; it runs only when specially invoked. */
2882 emit_jump (afterward);
2883 emit_label (handler_label);
2884
2885 #ifdef HAVE_nonlocal_goto
2886 if (! HAVE_nonlocal_goto)
2887 #endif
2888 /* First adjust our frame pointer to its actual value. It was
2889 previously set to the start of the virtual area corresponding to
2890 the stacked variables when we branched here and now needs to be
2891 adjusted to the actual hardware fp value.
2892
2893 Assignments to virtual registers are converted by
2894 instantiate_virtual_regs into the corresponding assignment
2895 to the underlying register (fp in this case) that makes
2896 the original assignment true.
2897 So the following insn will actually be
2898 decrementing fp by STARTING_FRAME_OFFSET. */
2899 emit_move_insn (virtual_stack_vars_rtx, hard_frame_pointer_rtx);
2900
2901 #if ARG_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
2902 if (fixed_regs[ARG_POINTER_REGNUM])
2903 {
2904 #ifdef ELIMINABLE_REGS
2905 /* If the argument pointer can be eliminated in favor of the
2906 frame pointer, we don't need to restore it. We assume here
2907 that if such an elimination is present, it can always be used.
2908 This is the case on all known machines; if we don't make this
2909 assumption, we do unnecessary saving on many machines. */
2910 static struct elims {int from, to;} elim_regs[] = ELIMINABLE_REGS;
2911 size_t i;
2912
2913 for (i = 0; i < sizeof elim_regs / sizeof elim_regs[0]; i++)
2914 if (elim_regs[i].from == ARG_POINTER_REGNUM
2915 && elim_regs[i].to == HARD_FRAME_POINTER_REGNUM)
2916 break;
2917
2918 if (i == sizeof elim_regs / sizeof elim_regs [0])
2919 #endif
2920 {
2921 /* Now restore our arg pointer from the address at which it
2922 was saved in our stack frame.
2923 If there hasn't been space allocated for it yet, make
2924 some now. */
2925 if (arg_pointer_save_area == 0)
2926 arg_pointer_save_area
2927 = assign_stack_local (Pmode, GET_MODE_SIZE (Pmode), 0);
2928 emit_move_insn (virtual_incoming_args_rtx,
2929 /* We need a pseudo here, or else
2930 instantiate_virtual_regs_1 complains. */
2931 copy_to_reg (arg_pointer_save_area));
2932 }
2933 }
2934 #endif
2935
2936 #ifdef HAVE_nonlocal_goto_receiver
2937 if (HAVE_nonlocal_goto_receiver)
2938 emit_insn (gen_nonlocal_goto_receiver ());
2939 #endif
2940
2941 /* The handler expects the desired label address in the static chain
2942 register. It tests the address and does an appropriate jump
2943 to whatever label is desired. */
2944 for (link = nonlocal_labels; link; link = TREE_CHAIN (link))
2945 /* Skip any labels we shouldn't be able to jump to from here. */
2946 if (! DECL_TOO_LATE (TREE_VALUE (link)))
2947 {
2948 rtx not_this = gen_label_rtx ();
2949 rtx this = gen_label_rtx ();
2950 do_jump_if_equal (static_chain_rtx,
2951 gen_rtx_LABEL_REF (Pmode, DECL_RTL (TREE_VALUE (link))),
2952 this, 0);
2953 emit_jump (not_this);
2954 emit_label (this);
2955 expand_goto (TREE_VALUE (link));
2956 emit_label (not_this);
2957 }
2958 /* If label is not recognized, abort. */
2959 emit_library_call (gen_rtx_SYMBOL_REF (Pmode, "abort"), 0,
2960 VOIDmode, 0);
2961 emit_barrier ();
2962 emit_label (afterward);
2963 }
2964
2965 /* Don't allow jumping into a block that has a stack level.
2966 Cleanups are allowed, though. */
2967 if (dont_jump_in
2968 || thisblock->data.block.stack_level != 0)
2969 {
2970 struct label_chain *chain;
2971
2972 /* Any labels in this block are no longer valid to go to.
2973 Mark them to cause an error message. */
2974 for (chain = thisblock->data.block.label_chain; chain; chain = chain->next)
2975 {
2976 DECL_TOO_LATE (chain->label) = 1;
2977 /* If any goto without a fixup came to this label,
2978 that must be an error, because gotos without fixups
2979 come from outside all saved stack-levels. */
2980 if (TREE_ADDRESSABLE (chain->label))
2981 error_with_decl (chain->label,
2982 "label `%s' used before containing binding contour");
2983 }
2984 }
2985
2986 /* Restore stack level in effect before the block
2987 (only if variable-size objects allocated). */
2988 /* Perform any cleanups associated with the block. */
2989
2990 if (thisblock->data.block.stack_level != 0
2991 || thisblock->data.block.cleanups != 0)
2992 {
2993 /* Only clean up here if this point can actually be reached. */
2994 int reachable = GET_CODE (get_last_insn ()) != BARRIER;
2995
2996 /* Don't let cleanups affect ({...}) constructs. */
2997 int old_expr_stmts_for_value = expr_stmts_for_value;
2998 rtx old_last_expr_value = last_expr_value;
2999 tree old_last_expr_type = last_expr_type;
3000 expr_stmts_for_value = 0;
3001
3002 /* Do the cleanups. */
3003 expand_cleanups (thisblock->data.block.cleanups, NULL_TREE, 0, reachable);
3004 if (reachable)
3005 do_pending_stack_adjust ();
3006
3007 expr_stmts_for_value = old_expr_stmts_for_value;
3008 last_expr_value = old_last_expr_value;
3009 last_expr_type = old_last_expr_type;
3010
3011 /* Restore the stack level. */
3012
3013 if (reachable && thisblock->data.block.stack_level != 0)
3014 {
3015 emit_stack_restore (thisblock->next ? SAVE_BLOCK : SAVE_FUNCTION,
3016 thisblock->data.block.stack_level, NULL_RTX);
3017 if (nonlocal_goto_handler_slot != 0)
3018 emit_stack_save (SAVE_NONLOCAL, &nonlocal_goto_stack_level,
3019 NULL_RTX);
3020 }
3021
3022 /* Any gotos out of this block must also do these things.
3023 Also report any gotos with fixups that came to labels in this
3024 level. */
3025 fixup_gotos (thisblock,
3026 thisblock->data.block.stack_level,
3027 thisblock->data.block.cleanups,
3028 thisblock->data.block.first_insn,
3029 dont_jump_in);
3030 }
3031
3032 /* Mark the beginning and end of the scope if requested.
3033 We do this now, after running cleanups on the variables
3034 just going out of scope, so they are in scope for their cleanups. */
3035
3036 if (mark_ends)
3037 last_block_end_note = emit_note (NULL_PTR, NOTE_INSN_BLOCK_END);
3038 else
3039 /* Get rid of the beginning-mark if we don't make an end-mark. */
3040 NOTE_LINE_NUMBER (thisblock->data.block.first_insn) = NOTE_INSN_DELETED;
3041
3042 /* If doing stupid register allocation, make sure lives of all
3043 register variables declared here extend thru end of scope. */
3044
3045 if (obey_regdecls)
3046 for (decl = vars; decl; decl = TREE_CHAIN (decl))
3047 {
3048 rtx rtl = DECL_RTL (decl);
3049 if (TREE_CODE (decl) == VAR_DECL && rtl != 0)
3050 use_variable (rtl);
3051 }
3052
3053 /* Restore the temporary level of TARGET_EXPRs. */
3054 target_temp_slot_level = thisblock->data.block.target_temp_slot_level;
3055
3056 /* Restore block_stack level for containing block. */
3057
3058 stack_block_stack = thisblock->data.block.innermost_stack_block;
3059 POPSTACK (block_stack);
3060
3061 /* Pop the stack slot nesting and free any slots at this level. */
3062 pop_temp_slots ();
3063 }
3064
3065
3066 \f
3067 /* Generate RTL for the automatic variable declaration DECL.
3068 (Other kinds of declarations are simply ignored if seen here.) */
3069
3070 void
3071 expand_decl (decl)
3072 register tree decl;
3073 {
3074 struct nesting *thisblock = block_stack;
3075 tree type;
3076
3077 type = TREE_TYPE (decl);
3078
3079 /* Only automatic variables need any expansion done.
3080 Static and external variables, and external functions,
3081 will be handled by `assemble_variable' (called from finish_decl).
3082 TYPE_DECL and CONST_DECL require nothing.
3083 PARM_DECLs are handled in `assign_parms'. */
3084
3085 if (TREE_CODE (decl) != VAR_DECL)
3086 return;
3087 if (TREE_STATIC (decl) || DECL_EXTERNAL (decl))
3088 return;
3089
3090 /* Create the RTL representation for the variable. */
3091
3092 if (type == error_mark_node)
3093 DECL_RTL (decl) = gen_rtx_MEM (BLKmode, const0_rtx);
3094 else if (DECL_SIZE (decl) == 0)
3095 /* Variable with incomplete type. */
3096 {
3097 if (DECL_INITIAL (decl) == 0)
3098 /* Error message was already done; now avoid a crash. */
3099 DECL_RTL (decl) = assign_stack_temp (DECL_MODE (decl), 0, 1);
3100 else
3101 /* An initializer is going to decide the size of this array.
3102 Until we know the size, represent its address with a reg. */
3103 DECL_RTL (decl) = gen_rtx_MEM (BLKmode, gen_reg_rtx (Pmode));
3104 MEM_IN_STRUCT_P (DECL_RTL (decl)) = AGGREGATE_TYPE_P (type);
3105 }
3106 else if (DECL_MODE (decl) != BLKmode
3107 /* If -ffloat-store, don't put explicit float vars
3108 into regs. */
3109 && !(flag_float_store
3110 && TREE_CODE (type) == REAL_TYPE)
3111 && ! TREE_THIS_VOLATILE (decl)
3112 && ! TREE_ADDRESSABLE (decl)
3113 && (DECL_REGISTER (decl) || ! obey_regdecls)
3114 /* if -fcheck-memory-usage, check all variables. */
3115 && ! flag_check_memory_usage)
3116 {
3117 /* Automatic variable that can go in a register. */
3118 int unsignedp = TREE_UNSIGNED (type);
3119 enum machine_mode reg_mode
3120 = promote_mode (type, DECL_MODE (decl), &unsignedp, 0);
3121
3122 DECL_RTL (decl) = gen_reg_rtx (reg_mode);
3123 mark_user_reg (DECL_RTL (decl));
3124
3125 if (POINTER_TYPE_P (type))
3126 mark_reg_pointer (DECL_RTL (decl),
3127 (TYPE_ALIGN (TREE_TYPE (TREE_TYPE (decl)))
3128 / BITS_PER_UNIT));
3129 }
3130
3131 else if (TREE_CODE (DECL_SIZE (decl)) == INTEGER_CST
3132 && ! (flag_stack_check && ! STACK_CHECK_BUILTIN
3133 && (TREE_INT_CST_HIGH (DECL_SIZE (decl)) != 0
3134 || (TREE_INT_CST_LOW (DECL_SIZE (decl))
3135 > STACK_CHECK_MAX_VAR_SIZE * BITS_PER_UNIT))))
3136 {
3137 /* Variable of fixed size that goes on the stack. */
3138 rtx oldaddr = 0;
3139 rtx addr;
3140
3141 /* If we previously made RTL for this decl, it must be an array
3142 whose size was determined by the initializer.
3143 The old address was a register; set that register now
3144 to the proper address. */
3145 if (DECL_RTL (decl) != 0)
3146 {
3147 if (GET_CODE (DECL_RTL (decl)) != MEM
3148 || GET_CODE (XEXP (DECL_RTL (decl), 0)) != REG)
3149 abort ();
3150 oldaddr = XEXP (DECL_RTL (decl), 0);
3151 }
3152
3153 DECL_RTL (decl)
3154 = assign_stack_temp (DECL_MODE (decl),
3155 ((TREE_INT_CST_LOW (DECL_SIZE (decl))
3156 + BITS_PER_UNIT - 1)
3157 / BITS_PER_UNIT),
3158 1);
3159 MEM_IN_STRUCT_P (DECL_RTL (decl)) = AGGREGATE_TYPE_P (TREE_TYPE (decl));
3160
3161 /* Set alignment we actually gave this decl. */
3162 DECL_ALIGN (decl) = (DECL_MODE (decl) == BLKmode ? BIGGEST_ALIGNMENT
3163 : GET_MODE_BITSIZE (DECL_MODE (decl)));
3164
3165 if (oldaddr)
3166 {
3167 addr = force_operand (XEXP (DECL_RTL (decl), 0), oldaddr);
3168 if (addr != oldaddr)
3169 emit_move_insn (oldaddr, addr);
3170 }
3171
3172 /* If this is a memory ref that contains aggregate components,
3173 mark it as such for cse and loop optimize. */
3174 MEM_IN_STRUCT_P (DECL_RTL (decl)) = AGGREGATE_TYPE_P (TREE_TYPE (decl));
3175 #if 0
3176 /* If this is in memory because of -ffloat-store,
3177 set the volatile bit, to prevent optimizations from
3178 undoing the effects. */
3179 if (flag_float_store && TREE_CODE (type) == REAL_TYPE)
3180 MEM_VOLATILE_P (DECL_RTL (decl)) = 1;
3181 #endif
3182 }
3183 else
3184 /* Dynamic-size object: must push space on the stack. */
3185 {
3186 rtx address, size;
3187
3188 /* Record the stack pointer on entry to this block, if we have
3189 not already done so. */
3190 if (thisblock->data.block.stack_level == 0)
3191 {
3192 do_pending_stack_adjust ();
3193 emit_stack_save (thisblock->next ? SAVE_BLOCK : SAVE_FUNCTION,
3194 &thisblock->data.block.stack_level,
3195 thisblock->data.block.first_insn);
3196 stack_block_stack = thisblock;
3197 }
3198
3199 /* Compute the variable's size, in bytes. */
3200 size = expand_expr (size_binop (CEIL_DIV_EXPR,
3201 DECL_SIZE (decl),
3202 size_int (BITS_PER_UNIT)),
3203 NULL_RTX, VOIDmode, 0);
3204 free_temp_slots ();
3205
3206 /* Allocate space on the stack for the variable. Note that
3207 DECL_ALIGN says how the variable is to be aligned and we
3208 cannot use it to conclude anything about the alignment of
3209 the size. */
3210 address = allocate_dynamic_stack_space (size, NULL_RTX,
3211 TYPE_ALIGN (TREE_TYPE (decl)));
3212
3213 /* Reference the variable indirect through that rtx. */
3214 DECL_RTL (decl) = gen_rtx_MEM (DECL_MODE (decl), address);
3215
3216 /* If this is a memory ref that contains aggregate components,
3217 mark it as such for cse and loop optimize. */
3218 MEM_IN_STRUCT_P (DECL_RTL (decl)) = AGGREGATE_TYPE_P (TREE_TYPE (decl));
3219
3220 /* Indicate the alignment we actually gave this variable. */
3221 #ifdef STACK_BOUNDARY
3222 DECL_ALIGN (decl) = STACK_BOUNDARY;
3223 #else
3224 DECL_ALIGN (decl) = BIGGEST_ALIGNMENT;
3225 #endif
3226 }
3227
3228 if (TREE_THIS_VOLATILE (decl))
3229 MEM_VOLATILE_P (DECL_RTL (decl)) = 1;
3230 #if 0 /* A variable is not necessarily unchanging
3231 just because it is const. RTX_UNCHANGING_P
3232 means no change in the function,
3233 not merely no change in the variable's scope.
3234 It is correct to set RTX_UNCHANGING_P if the variable's scope
3235 is the whole function. There's no convenient way to test that. */
3236 if (TREE_READONLY (decl))
3237 RTX_UNCHANGING_P (DECL_RTL (decl)) = 1;
3238 #endif
3239
3240 /* If doing stupid register allocation, make sure life of any
3241 register variable starts here, at the start of its scope. */
3242
3243 if (obey_regdecls)
3244 use_variable (DECL_RTL (decl));
3245 }
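
/* To illustrate the cases above: given the declarations

       int i;             (fixed size, not volatile or addressable)
       volatile int v;
       char buf[n];       (size not a compile-time constant)

   `i' will normally get a pseudo register, `v' gets a stack slot with
   MEM_VOLATILE_P set, and `buf' takes the dynamic-size path, saving
   the stack level and calling allocate_dynamic_stack_space.  */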
3246
3247
3248 \f
3249 /* Emit code to perform the initialization of a declaration DECL. */
3250
3251 void
3252 expand_decl_init (decl)
3253 tree decl;
3254 {
3255 int was_used = TREE_USED (decl);
3256
3257 /* If this is a CONST_DECL, we don't have to generate any code, but
3258 if DECL_INITIAL is a constant, call expand_expr to force TREE_CST_RTL
3259 to be set while in the obstack containing the constant. If we don't
3260 do this, we can lose if we have functions nested three deep and the middle
3261 function makes a CONST_DECL whose DECL_INITIAL is a STRING_CST while
3262 the innermost function is the first to expand that STRING_CST. */
3263 if (TREE_CODE (decl) == CONST_DECL)
3264 {
3265 if (DECL_INITIAL (decl) && TREE_CONSTANT (DECL_INITIAL (decl)))
3266 expand_expr (DECL_INITIAL (decl), NULL_RTX, VOIDmode,
3267 EXPAND_INITIALIZER);
3268 return;
3269 }
3270
3271 if (TREE_STATIC (decl))
3272 return;
3273
3274 /* Compute and store the initial value now. */
3275
3276 if (DECL_INITIAL (decl) == error_mark_node)
3277 {
3278 enum tree_code code = TREE_CODE (TREE_TYPE (decl));
3279
3280 if (code == INTEGER_TYPE || code == REAL_TYPE || code == ENUMERAL_TYPE
3281 || code == POINTER_TYPE || code == REFERENCE_TYPE)
3282 expand_assignment (decl, convert (TREE_TYPE (decl), integer_zero_node),
3283 0, 0);
3284 emit_queue ();
3285 }
3286 else if (DECL_INITIAL (decl) && TREE_CODE (DECL_INITIAL (decl)) != TREE_LIST)
3287 {
3288 emit_line_note (DECL_SOURCE_FILE (decl), DECL_SOURCE_LINE (decl));
3289 expand_assignment (decl, DECL_INITIAL (decl), 0, 0);
3290 emit_queue ();
3291 }
3292
3293 /* Don't let the initialization count as "using" the variable. */
3294 TREE_USED (decl) = was_used;
3295
3296 /* Free any temporaries we made while initializing the decl. */
3297 preserve_temp_slots (NULL_RTX);
3298 free_temp_slots ();
3299 }
3300
3301 /* CLEANUP is an expression to be executed at exit from this binding contour;
3302 for example, in C++, it might call the destructor for this variable.
3303
3304 We wrap CLEANUP in an UNSAVE_EXPR node, so that we can expand the
3305 CLEANUP multiple times, and have the correct semantics. This
3306 happens in exception handling, and for gotos, returns, and breaks
3307 that leave the current scope.
3308
3309 If CLEANUP is nonzero and DECL is zero, we record a cleanup
3310 that is not associated with any particular variable. */
3311
3312 int
3313 expand_decl_cleanup (decl, cleanup)
3314 tree decl, cleanup;
3315 {
3316 struct nesting *thisblock = block_stack;
3317
3318 /* Error if we are not in any block. */
3319 if (thisblock == 0)
3320 return 0;
3321
3322 /* Record the cleanup if there is one. */
3323
3324 if (cleanup != 0)
3325 {
3326 tree t;
3327 rtx seq;
3328 tree *cleanups = &thisblock->data.block.cleanups;
3329 int cond_context = conditional_context ();
3330
3331 if (cond_context)
3332 {
3333 rtx flag = gen_reg_rtx (word_mode);
3334 rtx set_flag_0;
3335 tree cond;
3336
3337 start_sequence ();
3338 emit_move_insn (flag, const0_rtx);
3339 set_flag_0 = get_insns ();
3340 end_sequence ();
3341
3342 thisblock->data.block.last_unconditional_cleanup
3343 = emit_insns_after (set_flag_0,
3344 thisblock->data.block.last_unconditional_cleanup);
3345
3346 emit_move_insn (flag, const1_rtx);
3347
3348 /* All cleanups must be on the function_obstack. */
3349 push_obstacks_nochange ();
3350 resume_temporary_allocation ();
3351
3352 cond = build_decl (VAR_DECL, NULL_TREE, type_for_mode (word_mode, 1));
3353 DECL_RTL (cond) = flag;
3354
3355 /* Conditionalize the cleanup. */
3356 cleanup = build (COND_EXPR, void_type_node,
3357 truthvalue_conversion (cond),
3358 cleanup, integer_zero_node);
3359 cleanup = fold (cleanup);
3360
3361 pop_obstacks ();
3362
3363 cleanups = thisblock->data.block.cleanup_ptr;
3364 }
3365
3366 /* All cleanups must be on the function_obstack. */
3367 push_obstacks_nochange ();
3368 resume_temporary_allocation ();
3369 cleanup = unsave_expr (cleanup);
3370 pop_obstacks ();
3371
3372 t = *cleanups = temp_tree_cons (decl, cleanup, *cleanups);
3373
3374 if (! cond_context)
3375 /* If this block has a cleanup, it belongs in stack_block_stack. */
3376 stack_block_stack = thisblock;
3377
3378 if (cond_context)
3379 {
3380 start_sequence ();
3381 }
3382
3383 /* If this was optimized so that there is no exception region for the
3384 cleanup, then mark the TREE_LIST node, so that we can later tell
3385 if we need to call expand_eh_region_end. */
3386 if (! using_eh_for_cleanups_p
3387 || expand_eh_region_start_tree (decl, cleanup))
3388 TREE_ADDRESSABLE (t) = 1;
3389 /* If that started a new EH region, we're in a new block. */
3390 thisblock = block_stack;
3391
3392 if (cond_context)
3393 {
3394 seq = get_insns ();
3395 end_sequence ();
3396 if (seq)
3397 thisblock->data.block.last_unconditional_cleanup
3398 = emit_insns_after (seq,
3399 thisblock->data.block.last_unconditional_cleanup);
3400 }
3401 else
3402 {
3403 thisblock->data.block.last_unconditional_cleanup
3404 = get_last_insn ();
3405 thisblock->data.block.cleanup_ptr = &thisblock->data.block.cleanups;
3406 }
3407 }
3408 return 1;
3409 }
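
/* Illustrative sketch only (not in the original file): the run-time
effect of the conditional-cleanup flag used above, expressed as C.
All names here are hypothetical. */
#if 0
  int flag = 0;             /* Emitted at last_unconditional_cleanup,
                               so it executes on every path through
                               the block.  */
  if (cond)
    {
      flag = 1;             /* Emitted where the cleanup is
                               registered.  */
      /* ... code that required the cleanup ... */
    }
  /* At exit from the binding contour the registered cleanup has been
     wrapped in (COND_EXPR flag cleanup 0), which amounts to:  */
  if (flag)
    run_cleanup ();
#endif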
3410
3411 /* Like expand_decl_cleanup, but suppress generating an exception handler
3412 to perform the cleanup. */
3413
3414 int
3415 expand_decl_cleanup_no_eh (decl, cleanup)
3416 tree decl, cleanup;
3417 {
3418 int save_eh = using_eh_for_cleanups_p;
3419 int result;
3420
3421 using_eh_for_cleanups_p = 0;
3422 result = expand_decl_cleanup (decl, cleanup);
3423 using_eh_for_cleanups_p = save_eh;
3424
3425 return result;
3426 }
3427
3428 /* Arrange for the top element of the dynamic cleanup chain to be
3429 popped if we exit the current binding contour. DECL is the
3430 associated declaration, if any, otherwise NULL_TREE. If the
3431 current contour is left via an exception, then __sjthrow will pop
3432 the top element off the dynamic cleanup chain. The code in
3433 expand_cleanups avoids performing the action we push onto the
3434 cleanup chain when the contour is left exceptionally.
3435
3436 This routine is only used by expand_eh_region_start, and that is
3437 the only way in which an exception region should be started. This
3438 routine is only used when using the setjmp/longjmp codegen method
3439 for exception handling. */
3440
3441 int
3442 expand_dcc_cleanup (decl)
3443 tree decl;
3444 {
3445 struct nesting *thisblock = block_stack;
3446 tree cleanup;
3447
3448 /* Error if we are not in any block. */
3449 if (thisblock == 0)
3450 return 0;
3451
3452 /* Record the cleanup for the dynamic handler chain. */
3453
3454 /* All cleanups must be on the function_obstack. */
3455 push_obstacks_nochange ();
3456 resume_temporary_allocation ();
3457 cleanup = make_node (POPDCC_EXPR);
3458 pop_obstacks ();
3459
3460 /* Add the cleanup in a manner similar to expand_decl_cleanup. */
3461 thisblock->data.block.cleanups
3462 = temp_tree_cons (decl, cleanup, thisblock->data.block.cleanups);
3463
3464 /* If this block has a cleanup, it belongs in stack_block_stack. */
3465 stack_block_stack = thisblock;
3466 return 1;
3467 }
3468
3469 /* Arrange for the top element of the dynamic handler chain to be
3470 popped if we exit the current binding contour. DECL is the
3471 associated declaration, if any, otherwise NULL_TREE. If the current
3472 contour is left via an exception, then __sjthrow will pop the top
3473 element off the dynamic handler chain. The code in expand_cleanups
3474 avoids performing the action we push onto the handler chain when
3475 the contour is left exceptionally.
3476
3477 This routine is only used by expand_eh_region_start, and that is
3478 the only way in which an exception region should be started. This
3479 routine is only used when using the setjmp/longjmp codegen method
3480 for exception handling. */
3481
3482 int
3483 expand_dhc_cleanup (decl)
3484 tree decl;
3485 {
3486 struct nesting *thisblock = block_stack;
3487 tree cleanup;
3488
3489 /* Error if we are not in any block. */
3490 if (thisblock == 0)
3491 return 0;
3492
3493 /* Record the cleanup for the dynamic handler chain. */
3494
3495 /* All cleanups must be on the function_obstack. */
3496 push_obstacks_nochange ();
3497 resume_temporary_allocation ();
3498 cleanup = make_node (POPDHC_EXPR);
3499 pop_obstacks ();
3500
3501 /* Add the cleanup in a manner similar to expand_decl_cleanup. */
3502 thisblock->data.block.cleanups
3503 = temp_tree_cons (decl, cleanup, thisblock->data.block.cleanups);
3504
3505 /* If this block has a cleanup, it belongs in stack_block_stack. */
3506 stack_block_stack = thisblock;
3507 return 1;
3508 }
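
/* Illustration only: how the two routines above cooperate with the
setjmp/longjmp EH machinery. expand_eh_region_start pushes an
entry onto the dynamic cleanup (or handler) chain and records a
POPDCC_EXPR (or POPDHC_EXPR) here, so a normal exit from the
contour pops that entry again; on an exceptional exit __sjthrow
does the popping itself, which is why expand_cleanups treats these
two node kinds specially (see the POPDHC_EXPR / POPDCC_EXPR checks
there). */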
3509 \f
3510 /* DECL is an anonymous union. CLEANUP is a cleanup for DECL.
3511 DECL_ELTS is the list of elements that belong to DECL's type.
3512 In each, the TREE_VALUE is a VAR_DECL, and the TREE_PURPOSE a cleanup. */
3513
3514 void
3515 expand_anon_union_decl (decl, cleanup, decl_elts)
3516 tree decl, cleanup, decl_elts;
3517 {
3518 struct nesting *thisblock = block_stack;
3519 rtx x;
3520
3521 expand_decl (decl);
3522 expand_decl_cleanup (decl, cleanup);
3523 x = DECL_RTL (decl);
3524
3525 while (decl_elts)
3526 {
3527 tree decl_elt = TREE_VALUE (decl_elts);
3528 tree cleanup_elt = TREE_PURPOSE (decl_elts);
3529 enum machine_mode mode = TYPE_MODE (TREE_TYPE (decl_elt));
3530
3531 /* Propagate the union's alignment to the elements. */
3532 DECL_ALIGN (decl_elt) = DECL_ALIGN (decl);
3533
3534 /* If the element has BLKmode and the union doesn't, the union is
3535 aligned such that the element doesn't need to have BLKmode, so
3536 change the element's mode to the appropriate one for its size. */
3537 if (mode == BLKmode && DECL_MODE (decl) != BLKmode)
3538 DECL_MODE (decl_elt) = mode
3539 = mode_for_size (TREE_INT_CST_LOW (DECL_SIZE (decl_elt)),
3540 MODE_INT, 1);
3541
3542 /* (SUBREG (MEM ...)) at RTL generation time is invalid, so we
3543 instead create a new MEM rtx with the proper mode. */
3544 if (GET_CODE (x) == MEM)
3545 {
3546 if (mode == GET_MODE (x))
3547 DECL_RTL (decl_elt) = x;
3548 else
3549 {
3550 DECL_RTL (decl_elt) = gen_rtx_MEM (mode, copy_rtx (XEXP (x, 0)));
3551 MEM_IN_STRUCT_P (DECL_RTL (decl_elt)) = MEM_IN_STRUCT_P (x);
3552 RTX_UNCHANGING_P (DECL_RTL (decl_elt)) = RTX_UNCHANGING_P (x);
3553 }
3554 }
3555 else if (GET_CODE (x) == REG)
3556 {
3557 if (mode == GET_MODE (x))
3558 DECL_RTL (decl_elt) = x;
3559 else
3560 DECL_RTL (decl_elt) = gen_rtx_SUBREG (mode, x, 0);
3561 }
3562 else
3563 abort ();
3564
3565 /* Record the cleanup if there is one. */
3566
3567 if (cleanup != 0)
3568 thisblock->data.block.cleanups
3569 = temp_tree_cons (decl_elt, cleanup_elt,
3570 thisblock->data.block.cleanups);
3571
3572 decl_elts = TREE_CHAIN (decl_elts);
3573 }
3574 }
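
/* Illustrative example only: for a C++ anonymous union such as

     union { int i; float f; };

the union DECL gets a single piece of storage, and the loop above
gives `i' and `f' a MEM (or SUBREG) over that same storage, each
in its own machine mode, so accesses through either name alias the
same bytes. */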
3575 \f
3576 /* Expand a list of cleanups LIST.
3577 Elements may be expressions or may be nested lists.
3578
3579 If DONT_DO is nonnull, then any list-element
3580 whose TREE_PURPOSE matches DONT_DO is omitted.
3581 This is sometimes used to avoid a cleanup associated with
3582 a value that is being returned out of the scope.
3583
3584 If IN_FIXUP is non-zero, we are generating this cleanup for a fixup
3585 goto and handle protection regions specially in that case.
3586
3587 If REACHABLE, we emit code, otherwise just inform the exception handling
3588 code about this finalization. */
3589
3590 static void
3591 expand_cleanups (list, dont_do, in_fixup, reachable)
3592 tree list;
3593 tree dont_do;
3594 int in_fixup;
3595 int reachable;
3596 {
3597 tree tail;
3598 for (tail = list; tail; tail = TREE_CHAIN (tail))
3599 if (dont_do == 0 || TREE_PURPOSE (tail) != dont_do)
3600 {
3601 if (TREE_CODE (TREE_VALUE (tail)) == TREE_LIST)
3602 expand_cleanups (TREE_VALUE (tail), dont_do, in_fixup, reachable);
3603 else
3604 {
3605 if (! in_fixup)
3606 {
3607 tree cleanup = TREE_VALUE (tail);
3608
3609 /* See expand_d{h,c}c_cleanup for why we avoid this. */
3610 if (TREE_CODE (cleanup) != POPDHC_EXPR
3611 && TREE_CODE (cleanup) != POPDCC_EXPR
3612 /* See expand_eh_region_start_tree for this case. */
3613 && ! TREE_ADDRESSABLE (tail))
3614 {
3615 cleanup = protect_with_terminate (cleanup);
3616 expand_eh_region_end (cleanup);
3617 }
3618 }
3619
3620 if (reachable)
3621 {
3622 /* Cleanups may be run multiple times. For example,
3623 when exiting a binding contour, we expand the
3624 cleanups associated with that contour. When a goto
3625 within that binding contour has a target outside that
3626 contour, it will expand all cleanups from its scope to
3627 the target. Though the cleanups are expanded multiple
3628 times, the control paths are non-overlapping so the
3629 cleanups will not be executed twice. */
3630
3631 /* We may need to protect fixups with rethrow regions. */
3632 int protect = (in_fixup && ! TREE_ADDRESSABLE (tail));
3633
3634 if (protect)
3635 expand_fixup_region_start ();
3636
3637 expand_expr (TREE_VALUE (tail), const0_rtx, VOIDmode, 0);
3638 if (protect)
3639 expand_fixup_region_end (TREE_VALUE (tail));
3640 free_temp_slots ();
3641 }
3642 }
3643 }
3644 }
3645
3646 /* Mark the context we are emitting RTL for as a conditional
3647 context, so that any cleanup actions we register with
3648 expand_decl_cleanup will be properly conditionalized when those
3649 cleanup actions are later performed. Must be called before any
3650 expression (tree) is expanded that is within a conditional context. */
3651
3652 void
3653 start_cleanup_deferral ()
3654 {
3655 /* block_stack can be NULL if we are inside the parameter list. It is
3656 OK to do nothing, because cleanups aren't possible here. */
3657 if (block_stack)
3658 ++block_stack->data.block.conditional_code;
3659 }
3660
3661 /* Mark the end of a conditional region of code. Because cleanup
3662 deferrals may be nested, we may still be in a conditional region
3663 after we end the currently deferred cleanups; only after we end all
3664 deferred cleanups are we back in unconditional code. */
3665
3666 void
3667 end_cleanup_deferral ()
3668 {
3669 /* block_stack can be NULL if we are inside the parameter list. It is
3670 OK to do nothing, because cleanups aren't possible here. */
3671 if (block_stack)
3672 --block_stack->data.block.conditional_code;
3673 }
3674
3675 /* Move all cleanups from the current block_stack
3676 to the containing block_stack, where they are assumed to
3677 have been created. If anything can cause a temporary to
3678 be created, but not expanded for more than one level of
3679 block_stacks, then this code will have to change. */
3680
3681 void
3682 move_cleanups_up ()
3683 {
3684 struct nesting *block = block_stack;
3685 struct nesting *outer = block->next;
3686
3687 outer->data.block.cleanups
3688 = chainon (block->data.block.cleanups,
3689 outer->data.block.cleanups);
3690 block->data.block.cleanups = 0;
3691 }
3692
3693 tree
3694 last_cleanup_this_contour ()
3695 {
3696 if (block_stack == 0)
3697 return 0;
3698
3699 return block_stack->data.block.cleanups;
3700 }
3701
3702 /* Return 1 if there are any pending cleanups at this point.
3703 If THIS_CONTOUR is nonzero, check the current contour as well.
3704 Otherwise, look only at the contours that enclose this one. */
3705
3706 int
3707 any_pending_cleanups (this_contour)
3708 int this_contour;
3709 {
3710 struct nesting *block;
3711
3712 if (block_stack == 0)
3713 return 0;
3714
3715 if (this_contour && block_stack->data.block.cleanups != NULL)
3716 return 1;
3717 if (block_stack->data.block.cleanups == 0
3718 && block_stack->data.block.outer_cleanups == 0)
3719 return 0;
3720
3721 for (block = block_stack->next; block; block = block->next)
3722 if (block->data.block.cleanups != 0)
3723 return 1;
3724
3725 return 0;
3726 }
3727 \f
3728 /* Enter a case (Pascal) or switch (C) statement.
3729 Push a block onto case_stack and nesting_stack
3730 to accumulate the case-labels that are seen
3731 and to record the labels generated for the statement.
3732
3733 EXIT_FLAG is nonzero if `exit_something' should exit this case stmt.
3734 Otherwise, this construct is transparent for `exit_something'.
3735
3736 EXPR is the index-expression to be dispatched on.
3737 TYPE is its nominal type. We could simply convert EXPR to this type,
3738 but instead we take shortcuts. */
3739
3740 void
3741 expand_start_case (exit_flag, expr, type, printname)
3742 int exit_flag;
3743 tree expr;
3744 tree type;
3745 char *printname;
3746 {
3747 register struct nesting *thiscase = ALLOC_NESTING ();
3748
3749 /* Make an entry on case_stack for the case we are entering. */
3750
3751 thiscase->next = case_stack;
3752 thiscase->all = nesting_stack;
3753 thiscase->depth = ++nesting_depth;
3754 thiscase->exit_label = exit_flag ? gen_label_rtx () : 0;
3755 thiscase->data.case_stmt.case_list = 0;
3756 thiscase->data.case_stmt.index_expr = expr;
3757 thiscase->data.case_stmt.nominal_type = type;
3758 thiscase->data.case_stmt.default_label = 0;
3759 thiscase->data.case_stmt.num_ranges = 0;
3760 thiscase->data.case_stmt.printname = printname;
3761 thiscase->data.case_stmt.line_number_status = force_line_numbers ();
3762 case_stack = thiscase;
3763 nesting_stack = thiscase;
3764
3765 do_pending_stack_adjust ();
3766
3767 /* Make sure case_stmt.start points to something that won't
3768 need any transformation before expand_end_case. */
3769 if (GET_CODE (get_last_insn ()) != NOTE)
3770 emit_note (NULL_PTR, NOTE_INSN_DELETED);
3771
3772 thiscase->data.case_stmt.start = get_last_insn ();
3773
3774 start_cleanup_deferral ();
3775 }
3776
3777
3778 /* Start a "dummy case statement" within which case labels are invalid
3779 and are not connected to any larger real case statement.
3780 This can be used if you don't want to let a case statement jump
3781 into the middle of certain kinds of constructs. */
3782
3783 void
3784 expand_start_case_dummy ()
3785 {
3786 register struct nesting *thiscase = ALLOC_NESTING ();
3787
3788 /* Make an entry on case_stack for the dummy. */
3789
3790 thiscase->next = case_stack;
3791 thiscase->all = nesting_stack;
3792 thiscase->depth = ++nesting_depth;
3793 thiscase->exit_label = 0;
3794 thiscase->data.case_stmt.case_list = 0;
3795 thiscase->data.case_stmt.start = 0;
3796 thiscase->data.case_stmt.nominal_type = 0;
3797 thiscase->data.case_stmt.default_label = 0;
3798 thiscase->data.case_stmt.num_ranges = 0;
3799 case_stack = thiscase;
3800 nesting_stack = thiscase;
3801 start_cleanup_deferral ();
3802 }
3803
3804 /* End a dummy case statement. */
3805
3806 void
3807 expand_end_case_dummy ()
3808 {
3809 end_cleanup_deferral ();
3810 POPSTACK (case_stack);
3811 }
3812
3813 /* Return the data type of the index-expression
3814 of the innermost case statement, or null if none. */
3815
3816 tree
3817 case_index_expr_type ()
3818 {
3819 if (case_stack)
3820 return TREE_TYPE (case_stack->data.case_stmt.index_expr);
3821 return 0;
3822 }
3823 \f
3824 static void
3825 check_seenlabel ()
3826 {
3827 /* If this is the first label, warn if any insns have been emitted. */
3828 if (case_stack->data.case_stmt.line_number_status >= 0)
3829 {
3830 rtx insn;
3831
3832 restore_line_number_status
3833 (case_stack->data.case_stmt.line_number_status);
3834 case_stack->data.case_stmt.line_number_status = -1;
3835
3836 for (insn = case_stack->data.case_stmt.start;
3837 insn;
3838 insn = NEXT_INSN (insn))
3839 {
3840 if (GET_CODE (insn) == CODE_LABEL)
3841 break;
3842 if (GET_CODE (insn) != NOTE
3843 && (GET_CODE (insn) != INSN || GET_CODE (PATTERN (insn)) != USE))
3844 {
3845 do
3846 insn = PREV_INSN (insn);
3847 while (insn && (GET_CODE (insn) != NOTE || NOTE_LINE_NUMBER (insn) < 0));
3848
3849 /* If insn is zero, then there must have been a syntax error. */
3850 if (insn)
3851 warning_with_file_and_line (NOTE_SOURCE_FILE(insn),
3852 NOTE_LINE_NUMBER(insn),
3853 "unreachable code at beginning of %s",
3854 case_stack->data.case_stmt.printname);
3855 break;
3856 }
3857 }
3858 }
3859 }
3860
3861 /* Accumulate one case or default label inside a case or switch statement.
3862 VALUE is the value of the case (a null pointer, for a default label).
3863 The function CONVERTER, when applied to arguments T and V,
3864 converts the value V to the type T.
3865
3866 If not currently inside a case or switch statement, return 1 and do
3867 nothing. The caller will print a language-specific error message.
3868 If VALUE is a duplicate or overlaps, return 2 and do nothing
3869 except store the (first) duplicate node in *DUPLICATE.
3870 If VALUE is out of range, return 3 and do nothing.
3871 If we are jumping into the scope of a cleanup or var-sized array, return 5.
3872 Return 0 on success.
3873
3874 Extended to handle range statements. */
3875
3876 int
3877 pushcase (value, converter, label, duplicate)
3878 register tree value;
3879 tree (*converter) PROTO((tree, tree));
3880 register tree label;
3881 tree *duplicate;
3882 {
3883 tree index_type;
3884 tree nominal_type;
3885
3886 /* Fail if not inside a real case statement. */
3887 if (! (case_stack && case_stack->data.case_stmt.start))
3888 return 1;
3889
3890 if (stack_block_stack
3891 && stack_block_stack->depth > case_stack->depth)
3892 return 5;
3893
3894 index_type = TREE_TYPE (case_stack->data.case_stmt.index_expr);
3895 nominal_type = case_stack->data.case_stmt.nominal_type;
3896
3897 /* If the index is erroneous, avoid more problems: pretend to succeed. */
3898 if (index_type == error_mark_node)
3899 return 0;
3900
3901 /* Convert VALUE to the type in which the comparisons are nominally done. */
3902 if (value != 0)
3903 value = (*converter) (nominal_type, value);
3904
3905 check_seenlabel ();
3906
3907 /* Fail if this value is out of range for the actual type of the index
3908 (which may be narrower than NOMINAL_TYPE). */
3909 if (value != 0 && ! int_fits_type_p (value, index_type))
3910 return 3;
3911
3912 /* Fail if this is a duplicate or overlaps another entry. */
3913 if (value == 0)
3914 {
3915 if (case_stack->data.case_stmt.default_label != 0)
3916 {
3917 *duplicate = case_stack->data.case_stmt.default_label;
3918 return 2;
3919 }
3920 case_stack->data.case_stmt.default_label = label;
3921 }
3922 else
3923 return add_case_node (value, value, label, duplicate);
3924
3925 expand_label (label);
3926 return 0;
3927 }
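
/* Illustrative sketch only: roughly how a front end decodes the
return value of pushcase. The diagnostics shown are examples, not
the exact messages of any particular front end. */
#if 0
  tree duplicate;

  switch (pushcase (value, convert, label, &duplicate))
    {
    case 0:                 /* Success.  */
      break;
    case 1:
      error ("case label not within a switch statement");
      break;
    case 2:
      error ("duplicate case value");
      break;
    case 3:
      error ("case value out of range");
      break;
    case 5:
      error ("case label within scope of cleanup or variable array");
      break;
    }
#endif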
3928
3929 /* Like pushcase but this case applies to all values between VALUE1 and
3930 VALUE2 (inclusive). If VALUE1 is NULL, the range starts at the lowest
3931 value of the index type and ends at VALUE2. If VALUE2 is NULL, the range
3932 starts at VALUE1 and ends at the highest value of the index type.
3933 If both are NULL, this case applies to all values.
3934
3935 The return value is the same as that of pushcase but there is one
3936 additional error code: 4 means the specified range was empty. */
3937
3938 int
3939 pushcase_range (value1, value2, converter, label, duplicate)
3940 register tree value1, value2;
3941 tree (*converter) PROTO((tree, tree));
3942 register tree label;
3943 tree *duplicate;
3944 {
3945 tree index_type;
3946 tree nominal_type;
3947
3948 /* Fail if not inside a real case statement. */
3949 if (! (case_stack && case_stack->data.case_stmt.start))
3950 return 1;
3951
3952 if (stack_block_stack
3953 && stack_block_stack->depth > case_stack->depth)
3954 return 5;
3955
3956 index_type = TREE_TYPE (case_stack->data.case_stmt.index_expr);
3957 nominal_type = case_stack->data.case_stmt.nominal_type;
3958
3959 /* If the index is erroneous, avoid more problems: pretend to succeed. */
3960 if (index_type == error_mark_node)
3961 return 0;
3962
3963 check_seenlabel ();
3964
3965 /* Convert VALUEs to type in which the comparisons are nominally done
3966 and replace any unspecified value with the corresponding bound. */
3967 if (value1 == 0)
3968 value1 = TYPE_MIN_VALUE (index_type);
3969 if (value2 == 0)
3970 value2 = TYPE_MAX_VALUE (index_type);
3971
3972 /* Fail if the range is empty. Do this before any conversion since
3973 we want to allow out-of-range empty ranges. */
3974 if (value2 && tree_int_cst_lt (value2, value1))
3975 return 4;
3976
3977 value1 = (*converter) (nominal_type, value1);
3978
3979 /* If the max was unbounded, use the max of the nominal_type we are
3980 converting to. Do this after the < check above to suppress false
3981 positives. */
3982 if (!value2)
3983 value2 = TYPE_MAX_VALUE (nominal_type);
3984 value2 = (*converter) (nominal_type, value2);
3985
3986 /* Fail if these values are out of range. */
3987 if (TREE_CONSTANT_OVERFLOW (value1)
3988 || ! int_fits_type_p (value1, index_type))
3989 return 3;
3990
3991 if (TREE_CONSTANT_OVERFLOW (value2)
3992 || ! int_fits_type_p (value2, index_type))
3993 return 3;
3994
3995 return add_case_node (value1, value2, label, duplicate);
3996 }
3997
3998 /* Do the actual insertion of a case label for pushcase and pushcase_range
3999 into case_stack->data.case_stmt.case_list. Use an AVL tree to avoid
4000 slowdown for large switch statements. */
4001
4002 static int
4003 add_case_node (low, high, label, duplicate)
4004 tree low, high;
4005 tree label;
4006 tree *duplicate;
4007 {
4008 struct case_node *p, **q, *r;
4009
4010 q = &case_stack->data.case_stmt.case_list;
4011 p = *q;
4012
4013 while ((r = *q))
4014 {
4015 p = r;
4016
4017 /* Keep going past elements distinctly greater than HIGH. */
4018 if (tree_int_cst_lt (high, p->low))
4019 q = &p->left;
4020
4021 /* or distinctly less than LOW. */
4022 else if (tree_int_cst_lt (p->high, low))
4023 q = &p->right;
4024
4025 else
4026 {
4027 /* We have an overlap; this is an error. */
4028 *duplicate = p->code_label;
4029 return 2;
4030 }
4031 }
4032
4033 /* Add this label to the chain, and succeed.
4034 Copy LOW, HIGH so they are on the temporary rather than the momentary
4035 obstack and will thus survive till the end of the case statement. */
4036
4037 r = (struct case_node *) oballoc (sizeof (struct case_node));
4038 r->low = copy_node (low);
4039
4040 /* If the bounds are equal, turn this into the one-value case. */
4041
4042 if (tree_int_cst_equal (low, high))
4043 r->high = r->low;
4044 else
4045 {
4046 r->high = copy_node (high);
4047 case_stack->data.case_stmt.num_ranges++;
4048 }
4049
4050 r->code_label = label;
4051 expand_label (label);
4052
4053 *q = r;
4054 r->parent = p;
4055 r->left = 0;
4056 r->right = 0;
4057 r->balance = 0;
4058
4059 while (p)
4060 {
4061 struct case_node *s;
4062
4063 if (r == p->left)
4064 {
4065 int b;
4066
4067 if (! (b = p->balance))
4068 /* Growth propagation from left side. */
4069 p->balance = -1;
4070 else if (b < 0)
4071 {
4072 if (r->balance < 0)
4073 {
4074 /* R-Rotation */
4075 if ((p->left = s = r->right))
4076 s->parent = p;
4077
4078 r->right = p;
4079 p->balance = 0;
4080 r->balance = 0;
4081 s = p->parent;
4082 p->parent = r;
4083
4084 if ((r->parent = s))
4085 {
4086 if (s->left == p)
4087 s->left = r;
4088 else
4089 s->right = r;
4090 }
4091 else
4092 case_stack->data.case_stmt.case_list = r;
4093 }
4094 else
4095 /* r->balance == +1 */
4096 {
4097 /* LR-Rotation */
4098
4099 int b2;
4100 struct case_node *t = r->right;
4101
4102 if ((p->left = s = t->right))
4103 s->parent = p;
4104
4105 t->right = p;
4106 if ((r->right = s = t->left))
4107 s->parent = r;
4108
4109 t->left = r;
4110 b = t->balance;
4111 b2 = b < 0;
4112 p->balance = b2;
4113 b2 = -b2 - b;
4114 r->balance = b2;
4115 t->balance = 0;
4116 s = p->parent;
4117 p->parent = t;
4118 r->parent = t;
4119
4120 if ((t->parent = s))
4121 {
4122 if (s->left == p)
4123 s->left = t;
4124 else
4125 s->right = t;
4126 }
4127 else
4128 case_stack->data.case_stmt.case_list = t;
4129 }
4130 break;
4131 }
4132
4133 else
4134 {
4135 /* p->balance == +1; growth of left side balances the node. */
4136 p->balance = 0;
4137 break;
4138 }
4139 }
4140 else
4141 /* r == p->right */
4142 {
4143 int b;
4144
4145 if (! (b = p->balance))
4146 /* Growth propagation from right side. */
4147 p->balance++;
4148 else if (b > 0)
4149 {
4150 if (r->balance > 0)
4151 {
4152 /* L-Rotation */
4153
4154 if ((p->right = s = r->left))
4155 s->parent = p;
4156
4157 r->left = p;
4158 p->balance = 0;
4159 r->balance = 0;
4160 s = p->parent;
4161 p->parent = r;
4162 if ((r->parent = s))
4163 {
4164 if (s->left == p)
4165 s->left = r;
4166 else
4167 s->right = r;
4168 }
4169
4170 else
4171 case_stack->data.case_stmt.case_list = r;
4172 }
4173
4174 else
4175 /* r->balance == -1 */
4176 {
4177 /* RL-Rotation */
4178 int b2;
4179 struct case_node *t = r->left;
4180
4181 if ((p->right = s = t->left))
4182 s->parent = p;
4183
4184 t->left = p;
4185
4186 if ((r->left = s = t->right))
4187 s->parent = r;
4188
4189 t->right = r;
4190 b = t->balance;
4191 b2 = b < 0;
4192 r->balance = b2;
4193 b2 = -b2 - b;
4194 p->balance = b2;
4195 t->balance = 0;
4196 s = p->parent;
4197 p->parent = t;
4198 r->parent = t;
4199
4200 if ((t->parent = s))
4201 {
4202 if (s->left == p)
4203 s->left = t;
4204 else
4205 s->right = t;
4206 }
4207
4208 else
4209 case_stack->data.case_stmt.case_list = t;
4210 }
4211 break;
4212 }
4213 else
4214 {
4215 /* p->balance == -1; growth of right side balances the node. */
4216 p->balance = 0;
4217 break;
4218 }
4219 }
4220
4221 r = p;
4222 p = p->parent;
4223 }
4224
4225 return 0;
4226 }
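
/* Illustration only (not in the original file): the shapes of the
AVL rotations performed above. R-rotation, used when both P and
its left child R lean left (L-rotation is the mirror image):

            p                    r
           / \                  / \
          r   C      ==>       A   p
         / \                      / \
        A   B                    B   C

LR-rotation, used when P leans left but R leans right, with
T = R's right child (RL-rotation is the mirror image):

            p                        t
           / \                     /   \
          r   D                   r     p
         / \         ==>         / \   / \
        A   t                   A   B C   D
           / \
          B   C                                                     */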
4227
4228 \f
4229 /* Returns the number of possible values of TYPE.
4230 Returns -1 if the number is unknown or variable.
4231 Returns -2 if the number does not fit in a HOST_WIDE_INT.
4232 Sets *SPARSENESS to 2 if TYPE is an ENUMERAL_TYPE whose values
4233 do not increase monotonically (there may be duplicates);
4234 to 1 if the values increase monotonically, but not always by 1;
4235 otherwise sets it to 0. */
4236
4237 HOST_WIDE_INT
4238 all_cases_count (type, sparseness)
4239 tree type;
4240 int *sparseness;
4241 {
4242 HOST_WIDE_INT count;
4243 *sparseness = 0;
4244
4245 switch (TREE_CODE (type))
4246 {
4247 tree t;
4248 case BOOLEAN_TYPE:
4249 count = 2;
4250 break;
4251 case CHAR_TYPE:
4252 count = 1 << BITS_PER_UNIT;
4253 break;
4254 default:
4255 case INTEGER_TYPE:
4256 if (TREE_CODE (TYPE_MIN_VALUE (type)) != INTEGER_CST
4257 || TYPE_MAX_VALUE (type) == NULL
4258 || TREE_CODE (TYPE_MAX_VALUE (type)) != INTEGER_CST)
4259 return -1;
4260 else
4261 {
4262 /* count
4263 = TREE_INT_CST_LOW (TYPE_MAX_VALUE (type))
4264 - TREE_INT_CST_LOW (TYPE_MIN_VALUE (type)) + 1
4265 but with overflow checking. */
4266 tree mint = TYPE_MIN_VALUE (type);
4267 tree maxt = TYPE_MAX_VALUE (type);
4268 HOST_WIDE_INT lo, hi;
4269 neg_double(TREE_INT_CST_LOW (mint), TREE_INT_CST_HIGH (mint),
4270 &lo, &hi);
4271 add_double(TREE_INT_CST_LOW (maxt), TREE_INT_CST_HIGH (maxt),
4272 lo, hi, &lo, &hi);
4273 add_double (lo, hi, 1, 0, &lo, &hi);
4274 if (hi != 0 || lo < 0)
4275 return -2;
4276 count = lo;
4277 }
4278 break;
4279 case ENUMERAL_TYPE:
4280 count = 0;
4281 for (t = TYPE_VALUES (type); t != NULL_TREE; t = TREE_CHAIN (t))
4282 {
4283 if (TREE_CODE (TYPE_MIN_VALUE (type)) != INTEGER_CST
4284 || TREE_CODE (TREE_VALUE (t)) != INTEGER_CST
4285 || TREE_INT_CST_LOW (TYPE_MIN_VALUE (type)) + count
4286 != TREE_INT_CST_LOW (TREE_VALUE (t)))
4287 *sparseness = 1;
4288 count++;
4289 }
4290 if (*sparseness == 1)
4291 {
4292 tree prev = TREE_VALUE (TYPE_VALUES (type));
4293 for (t = TYPE_VALUES (type); t = TREE_CHAIN (t), t != NULL_TREE; )
4294 {
4295 if (! tree_int_cst_lt (prev, TREE_VALUE (t)))
4296 {
4297 *sparseness = 2;
4298 break;
4299 }
4300 prev = TREE_VALUE (t);
4301 }
4302
4303 }
4304 }
4305 return count;
4306 }
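
/* Worked example for the INTEGER_TYPE arm above: with
TYPE_MIN_VALUE == -128 and TYPE_MAX_VALUE == 127 (a signed char),
neg_double yields 128, the first add_double yields 127 + 128 = 255,
and the second add_double yields 256, all in double-word arithmetic,
so COUNT is 256 with no possibility of host-side overflow. */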
4307
4308
4309 #define BITARRAY_TEST(ARRAY, INDEX) \
4310 ((ARRAY)[(unsigned) (INDEX) / HOST_BITS_PER_CHAR]\
4311 & (1 << ((unsigned) (INDEX) % HOST_BITS_PER_CHAR)))
4312 #define BITARRAY_SET(ARRAY, INDEX) \
4313 ((ARRAY)[(unsigned) (INDEX) / HOST_BITS_PER_CHAR]\
4314 |= 1 << ((unsigned) (INDEX) % HOST_BITS_PER_CHAR))
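
/* Illustrative use of the two macros above (a sketch only; SEEN and
COUNT are hypothetical): */
#if 0
  long bytes = (count + HOST_BITS_PER_CHAR) / HOST_BITS_PER_CHAR;
  unsigned char *seen = (unsigned char *) xmalloc (bytes);

  bzero ((char *) seen, bytes);
  BITARRAY_SET (seen, 3);
  if (! BITARRAY_TEST (seen, 3))
    abort ();               /* Cannot happen: bit 3 was just set.  */
#endif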
4315
4316 /* Set the elements of the bitstring CASES_SEEN (which has length COUNT)
4317 with the case values we have seen, assuming the case expression
4318 has the given TYPE.
4319 SPARSENESS is as determined by all_cases_count.
4320
4321 The time needed is proportional to COUNT, unless
4322 SPARSENESS is 2, in which case quadratic time is needed. */
4323
4324 static void
4325 mark_seen_cases (type, cases_seen, count, sparseness)
4326 tree type;
4327 unsigned char *cases_seen;
4328 long count;
4329 int sparseness;
4330 {
4331 tree next_node_to_try = NULL_TREE;
4332 long next_node_offset = 0;
4333
4334 register struct case_node *n, *root = case_stack->data.case_stmt.case_list;
4335 tree val = make_node (INTEGER_CST);
4336 TREE_TYPE (val) = type;
4337 if (! root)
4338 ; /* Do nothing */
4339 else if (sparseness == 2)
4340 {
4341 tree t;
4342 HOST_WIDE_INT xlo;
4343
4344 /* This less efficient loop is only needed to handle
4345 duplicate case values (multiple enum constants
4346 with the same value). */
4347 TREE_TYPE (val) = TREE_TYPE (root->low);
4348 for (t = TYPE_VALUES (type), xlo = 0; t != NULL_TREE;
4349 t = TREE_CHAIN (t), xlo++)
4350 {
4351 TREE_INT_CST_LOW (val) = TREE_INT_CST_LOW (TREE_VALUE (t));
4352 TREE_INT_CST_HIGH (val) = TREE_INT_CST_HIGH (TREE_VALUE (t));
4353 n = root;
4354 do
4355 {
4356 /* Keep going past elements distinctly greater than VAL. */
4357 if (tree_int_cst_lt (val, n->low))
4358 n = n->left;
4359
4360 /* or distinctly less than VAL. */
4361 else if (tree_int_cst_lt (n->high, val))
4362 n = n->right;
4363
4364 else
4365 {
4366 /* We have found a matching range. */
4367 BITARRAY_SET (cases_seen, xlo);
4368 break;
4369 }
4370 }
4371 while (n);
4372 }
4373 }
4374 else
4375 {
4376 if (root->left)
4377 case_stack->data.case_stmt.case_list = root = case_tree2list (root, 0);
4378 for (n = root; n; n = n->right)
4379 {
4380 TREE_INT_CST_LOW (val) = TREE_INT_CST_LOW (n->low);
4381 TREE_INT_CST_HIGH (val) = TREE_INT_CST_HIGH (n->low);
4382 while ( ! tree_int_cst_lt (n->high, val))
4383 {
4384 /* Calculate (into xlo) the "offset" of the integer (val).
4385 The element with lowest value has offset 0, the next smallest
4386 element has offset 1, etc. */
4387
4388 HOST_WIDE_INT xlo, xhi;
4389 tree t;
4390 if (sparseness && TYPE_VALUES (type) != NULL_TREE)
4391 {
4392 /* The TYPE_VALUES will be in increasing order, so
4393 start searching where we last ended. */
4394 t = next_node_to_try;
4395 xlo = next_node_offset;
4396 xhi = 0;
4397 for (;;)
4398 {
4399 if (t == NULL_TREE)
4400 {
4401 t = TYPE_VALUES (type);
4402 xlo = 0;
4403 }
4404 if (tree_int_cst_equal (val, TREE_VALUE (t)))
4405 {
4406 next_node_to_try = TREE_CHAIN (t);
4407 next_node_offset = xlo + 1;
4408 break;
4409 }
4410 xlo++;
4411 t = TREE_CHAIN (t);
4412 if (t == next_node_to_try)
4413 {
4414 xlo = -1;
4415 break;
4416 }
4417 }
4418 }
4419 else
4420 {
4421 t = TYPE_MIN_VALUE (type);
4422 if (t)
4423 neg_double (TREE_INT_CST_LOW (t), TREE_INT_CST_HIGH (t),
4424 &xlo, &xhi);
4425 else
4426 xlo = xhi = 0;
4427 add_double (xlo, xhi,
4428 TREE_INT_CST_LOW (val), TREE_INT_CST_HIGH (val),
4429 &xlo, &xhi);
4430 }
4431
4432 if (xhi == 0 && xlo >= 0 && xlo < count)
4433 BITARRAY_SET (cases_seen, xlo);
4434 add_double (TREE_INT_CST_LOW (val), TREE_INT_CST_HIGH (val),
4435 1, 0,
4436 &TREE_INT_CST_LOW (val), &TREE_INT_CST_HIGH (val));
4437 }
4438 }
4439 }
4440 }
4441
4442 /* Called when the index of a switch statement is an enumerated type
4443 and there is no default label.
4444
4445 Checks that all enumeration literals are covered by the case
4446 expressions of a switch. Also, warn if there are any extra
4447 switch cases that are *not* elements of the enumerated type.
4448
4449 If all enumeration literals were covered by the case expressions,
4450 turn one of the expressions into the default expression since it should
4451 not be possible to fall through such a switch. */
4452
4453 void
4454 check_for_full_enumeration_handling (type)
4455 tree type;
4456 {
4457 register struct case_node *n;
4458 register tree chain;
4459 #if 0 /* variables used by the '#if 0'ed code below. */
4460 register struct case_node **l;
4461 int all_values = 1;
4462 #endif
4463
4464 /* True iff the selector type is a numbered set mode. */
4465 int sparseness = 0;
4466
4467 /* The number of possible selector values. */
4468 HOST_WIDE_INT size;
4469
4470 /* For each possible selector value, a one iff it has been matched
4471 by a case value alternative. */
4472 unsigned char *cases_seen;
4473
4474 /* The allocated size of cases_seen, in chars. */
4475 long bytes_needed;
4476
4477 if (! warn_switch)
4478 return;
4479
4480 size = all_cases_count (type, &sparseness);
4481 bytes_needed = (size + HOST_BITS_PER_CHAR) / HOST_BITS_PER_CHAR;
4482
4483 if (size > 0 && size < 600000
4484 /* We deliberately use malloc here - not xmalloc. */
4485 && (cases_seen = (unsigned char *) malloc (bytes_needed)) != NULL)
4486 {
4487 long i;
4488 tree v = TYPE_VALUES (type);
4489 bzero (cases_seen, bytes_needed);
4490
4491 /* The time complexity of this code is normally O(N), where
4492 N is the number of members in the enumerated type.
4493 However, if type is an ENUMERAL_TYPE whose values do not
4494 increase monotonically, O(N*log(N)) time may be needed. */
4495
4496 mark_seen_cases (type, cases_seen, size, sparseness);
4497
4498 for (i = 0; v != NULL_TREE && i < size; i++, v = TREE_CHAIN (v))
4499 {
4500 if (BITARRAY_TEST(cases_seen, i) == 0)
4501 warning ("enumeration value `%s' not handled in switch",
4502 IDENTIFIER_POINTER (TREE_PURPOSE (v)));
4503 }
4504
4505 free (cases_seen);
4506 }
4507
4508 /* Now we go the other way around; we warn if there are case
4509 expressions that don't correspond to enumerators. This can
4510 occur since C and C++ don't enforce type-checking of
4511 assignments to enumeration variables. */
4512
4513 if (case_stack->data.case_stmt.case_list
4514 && case_stack->data.case_stmt.case_list->left)
4515 case_stack->data.case_stmt.case_list
4516 = case_tree2list (case_stack->data.case_stmt.case_list, 0);
4517 if (warn_switch)
4518 for (n = case_stack->data.case_stmt.case_list; n; n = n->right)
4519 {
4520 for (chain = TYPE_VALUES (type);
4521 chain && !tree_int_cst_equal (n->low, TREE_VALUE (chain));
4522 chain = TREE_CHAIN (chain))
4523 ;
4524
4525 if (!chain)
4526 {
4527 if (TYPE_NAME (type) == 0)
4528 warning ("case value `%d' not in enumerated type",
4529 TREE_INT_CST_LOW (n->low));
4530 else
4531 warning ("case value `%d' not in enumerated type `%s'",
4532 TREE_INT_CST_LOW (n->low),
4533 IDENTIFIER_POINTER ((TREE_CODE (TYPE_NAME (type))
4534 == IDENTIFIER_NODE)
4535 ? TYPE_NAME (type)
4536 : DECL_NAME (TYPE_NAME (type))));
4537 }
4538 if (!tree_int_cst_equal (n->low, n->high))
4539 {
4540 for (chain = TYPE_VALUES (type);
4541 chain && !tree_int_cst_equal (n->high, TREE_VALUE (chain));
4542 chain = TREE_CHAIN (chain))
4543 ;
4544
4545 if (!chain)
4546 {
4547 if (TYPE_NAME (type) == 0)
4548 warning ("case value `%d' not in enumerated type",
4549 TREE_INT_CST_LOW (n->high));
4550 else
4551 warning ("case value `%d' not in enumerated type `%s'",
4552 TREE_INT_CST_LOW (n->high),
4553 IDENTIFIER_POINTER ((TREE_CODE (TYPE_NAME (type))
4554 == IDENTIFIER_NODE)
4555 ? TYPE_NAME (type)
4556 : DECL_NAME (TYPE_NAME (type))));
4557 }
4558 }
4559 }
4560
4561 #if 0
4562 /* ??? This optimization is disabled because it causes valid programs to
4563 fail. ANSI C does not guarantee that an expression with enum type
4564 will have a value that is the same as one of the enumeration literals. */
4565
4566 /* If all values were found as case labels, make one of them the default
4567 label. Thus, this switch will never fall through. We arbitrarily pick
4568 the last one to make the default since this is likely the most
4569 efficient choice. */
4570
4571 if (all_values)
4572 {
4573 for (l = &case_stack->data.case_stmt.case_list;
4574 (*l)->right != 0;
4575 l = &(*l)->right)
4576 ;
4577
4578 case_stack->data.case_stmt.default_label = (*l)->code_label;
4579 *l = 0;
4580 }
4581 #endif /* 0 */
4582 }
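
/* Illustrative example only: with -Wswitch, given

     enum color { RED, GREEN, BLUE };

a switch over an `enum color' value with no default label and only
`case RED:' and `case GREEN:' draws

     warning: enumeration value `BLUE' not handled in switch

while an extra `case 42:' draws

     warning: case value `42' not in enumerated type `color'  */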
4583
4584 \f
4585 /* Terminate a case (Pascal) or switch (C) statement
4586 in which ORIG_INDEX is the expression to be tested.
4587 Generate the code to test it and jump to the right place. */
4588
4589 void
4590 expand_end_case (orig_index)
4591 tree orig_index;
4592 {
4593 tree minval, maxval, range, orig_minval;
4594 rtx default_label = 0;
4595 register struct case_node *n;
4596 unsigned int count;
4597 rtx index;
4598 rtx table_label;
4599 int ncases;
4600 rtx *labelvec;
4601 register int i;
4602 rtx before_case;
4603 register struct nesting *thiscase = case_stack;
4604 tree index_expr, index_type;
4605 int unsignedp;
4606
4607 table_label = gen_label_rtx ();
4608 index_expr = thiscase->data.case_stmt.index_expr;
4609 index_type = TREE_TYPE (index_expr);
4610 unsignedp = TREE_UNSIGNED (index_type);
4611
4612 do_pending_stack_adjust ();
4613
4614 /* This might get a spurious warning in the presence of a syntax error;
4615 it could be fixed by moving the call to check_seenlabel after the
4616 check for error_mark_node, and copying the code of check_seenlabel that
4617 deals with case_stack->data.case_stmt.line_number_status /
4618 restore_line_number_status in front of the call to end_cleanup_deferral.
4619 However, this might miss some useful warnings in the presence of
4620 non-syntax errors. */
4621 check_seenlabel ();
4622
4623 /* An ERROR_MARK occurs for various reasons including invalid data type. */
4624 if (index_type != error_mark_node)
4625 {
4626 /* If switch expression was an enumerated type, check that all
4627 enumeration literals are covered by the cases.
4628 No sense trying this if there's a default case, however. */
4629
4630 if (!thiscase->data.case_stmt.default_label
4631 && TREE_CODE (TREE_TYPE (orig_index)) == ENUMERAL_TYPE
4632 && TREE_CODE (index_expr) != INTEGER_CST)
4633 check_for_full_enumeration_handling (TREE_TYPE (orig_index));
4634
4635 /* If we don't have a default-label, create one here,
4636 after the body of the switch. */
4637 if (thiscase->data.case_stmt.default_label == 0)
4638 {
4639 thiscase->data.case_stmt.default_label
4640 = build_decl (LABEL_DECL, NULL_TREE, NULL_TREE);
4641 expand_label (thiscase->data.case_stmt.default_label);
4642 }
4643 default_label = label_rtx (thiscase->data.case_stmt.default_label);
4644
4645 before_case = get_last_insn ();
4646
4647 if (thiscase->data.case_stmt.case_list
4648 && thiscase->data.case_stmt.case_list->left)
4649 thiscase->data.case_stmt.case_list
4650 = case_tree2list(thiscase->data.case_stmt.case_list, 0);
4651
4652 /* Simplify the case-list before we count it. */
4653 group_case_nodes (thiscase->data.case_stmt.case_list);
4654
4655 /* Get upper and lower bounds of case values.
4656 Also convert all the case values to the index expr's data type. */
4657
4658 count = 0;
4659 for (n = thiscase->data.case_stmt.case_list; n; n = n->right)
4660 {
4661 /* Check low and high label values are integers. */
4662 if (TREE_CODE (n->low) != INTEGER_CST)
4663 abort ();
4664 if (TREE_CODE (n->high) != INTEGER_CST)
4665 abort ();
4666
4667 n->low = convert (index_type, n->low);
4668 n->high = convert (index_type, n->high);
4669
4670 /* Count the elements and track the largest and smallest
4671 of them (treating them as signed even if they are not). */
4672 if (count++ == 0)
4673 {
4674 minval = n->low;
4675 maxval = n->high;
4676 }
4677 else
4678 {
4679 if (INT_CST_LT (n->low, minval))
4680 minval = n->low;
4681 if (INT_CST_LT (maxval, n->high))
4682 maxval = n->high;
4683 }
4684 /* A range counts double, since it requires two compares. */
4685 if (! tree_int_cst_equal (n->low, n->high))
4686 count++;
4687 }
4688
4689 orig_minval = minval;
4690
4691 /* Compute span of values. */
4692 if (count != 0)
4693 range = fold (build (MINUS_EXPR, index_type, maxval, minval));
4694
4695 end_cleanup_deferral ();
4696
4697 if (count == 0)
4698 {
4699 expand_expr (index_expr, const0_rtx, VOIDmode, 0);
4700 emit_queue ();
4701 emit_jump (default_label);
4702 }
4703
4704 /* If the range of values is much bigger than the number of values,
4705 make a sequence of conditional branches instead of a dispatch.
4706 If the switch-index is a constant, do it this way
4707 because we can optimize it. */
4708
4709 #ifndef CASE_VALUES_THRESHOLD
4710 #ifdef HAVE_casesi
4711 #define CASE_VALUES_THRESHOLD (HAVE_casesi ? 4 : 5)
4712 #else
4713 /* If the machine does not have a case insn that compares the
4714 bounds, this means extra overhead for dispatch tables
4715 which raises the threshold for using them. */
4716 #define CASE_VALUES_THRESHOLD 5
4717 #endif /* HAVE_casesi */
4718 #endif /* CASE_VALUES_THRESHOLD */
4719
4720 else if (TREE_INT_CST_HIGH (range) != 0
4721 || count < CASE_VALUES_THRESHOLD
4722 || ((unsigned HOST_WIDE_INT) (TREE_INT_CST_LOW (range))
4723 > 10 * count)
4724 #ifndef ASM_OUTPUT_ADDR_DIFF_ELT
4725 || flag_pic
4726 #endif
4727 || TREE_CODE (index_expr) == INTEGER_CST
4728 /* These will reduce to a constant. */
4729 || (TREE_CODE (index_expr) == CALL_EXPR
4730 && TREE_CODE (TREE_OPERAND (index_expr, 0)) == ADDR_EXPR
4731 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (index_expr, 0), 0)) == FUNCTION_DECL
4732 && DECL_FUNCTION_CODE (TREE_OPERAND (TREE_OPERAND (index_expr, 0), 0)) == BUILT_IN_CLASSIFY_TYPE)
4733 || (TREE_CODE (index_expr) == COMPOUND_EXPR
4734 && TREE_CODE (TREE_OPERAND (index_expr, 1)) == INTEGER_CST))
4735 {
4736 index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0);
4737
4738 /* If the index is a short or char for which we do not have
4739 an insn to handle comparisons directly, convert it to
4740 a full integer now, rather than letting each comparison
4741 generate the conversion. */
4742
4743 if (GET_MODE_CLASS (GET_MODE (index)) == MODE_INT
4744 && (cmp_optab->handlers[(int) GET_MODE(index)].insn_code
4745 == CODE_FOR_nothing))
4746 {
4747 enum machine_mode wider_mode;
4748 for (wider_mode = GET_MODE (index); wider_mode != VOIDmode;
4749 wider_mode = GET_MODE_WIDER_MODE (wider_mode))
4750 if (cmp_optab->handlers[(int) wider_mode].insn_code
4751 != CODE_FOR_nothing)
4752 {
4753 index = convert_to_mode (wider_mode, index, unsignedp);
4754 break;
4755 }
4756 }
4757
4758 emit_queue ();
4759 do_pending_stack_adjust ();
4760
4761 index = protect_from_queue (index, 0);
4762 if (GET_CODE (index) == MEM)
4763 index = copy_to_reg (index);
4764 if (GET_CODE (index) == CONST_INT
4765 || TREE_CODE (index_expr) == INTEGER_CST)
4766 {
4767 /* Make a tree node with the proper constant value
4768 if we don't already have one. */
4769 if (TREE_CODE (index_expr) != INTEGER_CST)
4770 {
4771 index_expr
4772 = build_int_2 (INTVAL (index),
4773 unsignedp || INTVAL (index) >= 0 ? 0 : -1);
4774 index_expr = convert (index_type, index_expr);
4775 }
4776
4777 /* For constant index expressions we need only
4778 issue an unconditional branch to the appropriate
4779 target code. The job of removing any unreachable
4780 code is left to the optimization phase if the
4781 "-O" option is specified. */
4782 for (n = thiscase->data.case_stmt.case_list; n; n = n->right)
4783 if (! tree_int_cst_lt (index_expr, n->low)
4784 && ! tree_int_cst_lt (n->high, index_expr))
4785 break;
4786
4787 if (n)
4788 emit_jump (label_rtx (n->code_label));
4789 else
4790 emit_jump (default_label);
4791 }
4792 else
4793 {
4794 /* If the index expression is not constant we generate
4795 a binary decision tree to select the appropriate
4796 target code. This is done as follows:
4797
4798 The list of cases is rearranged into a binary tree,
4799 nearly optimal assuming equal probability for each case.
4800
4801 The tree is transformed into RTL, eliminating
4802 redundant test conditions at the same time.
4803
4804 If program flow could reach the end of the
4805 decision tree an unconditional jump to the
4806 default code is emitted. */
4807
4808 use_cost_table
4809 = (TREE_CODE (TREE_TYPE (orig_index)) != ENUMERAL_TYPE
4810 && estimate_case_costs (thiscase->data.case_stmt.case_list));
4811 balance_case_nodes (&thiscase->data.case_stmt.case_list,
4812 NULL_PTR);
4813 emit_case_nodes (index, thiscase->data.case_stmt.case_list,
4814 default_label, index_type);
4815 emit_jump_if_reachable (default_label);
4816 }
4817 }
4818 else
4819 {
4820 int win = 0;
4821 #ifdef HAVE_casesi
4822 if (HAVE_casesi)
4823 {
4824 enum machine_mode index_mode = SImode;
4825 int index_bits = GET_MODE_BITSIZE (index_mode);
4826 rtx op1, op2;
4827 enum machine_mode op_mode;
4828
4829 /* Convert the index to SImode. */
4830 if (GET_MODE_BITSIZE (TYPE_MODE (index_type))
4831 > GET_MODE_BITSIZE (index_mode))
4832 {
4833 enum machine_mode omode = TYPE_MODE (index_type);
4834 rtx rangertx = expand_expr (range, NULL_RTX, VOIDmode, 0);
4835
4836 /* We must handle the endpoints in the original mode. */
4837 index_expr = build (MINUS_EXPR, index_type,
4838 index_expr, minval);
4839 minval = integer_zero_node;
4840 index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0);
4841 emit_cmp_insn (rangertx, index, LTU, NULL_RTX, omode, 1, 0);
4842 emit_jump_insn (gen_bltu (default_label));
4843 /* Now we can safely truncate. */
4844 index = convert_to_mode (index_mode, index, 0);
4845 }
4846 else
4847 {
4848 if (TYPE_MODE (index_type) != index_mode)
4849 {
4850 index_expr = convert (type_for_size (index_bits, 0),
4851 index_expr);
4852 index_type = TREE_TYPE (index_expr);
4853 }
4854
4855 index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0);
4856 }
4857 emit_queue ();
4858 index = protect_from_queue (index, 0);
4859 do_pending_stack_adjust ();
4860
4861 op_mode = insn_operand_mode[(int)CODE_FOR_casesi][0];
4862 if (! (*insn_operand_predicate[(int)CODE_FOR_casesi][0])
4863 (index, op_mode))
4864 index = copy_to_mode_reg (op_mode, index);
4865
4866 op1 = expand_expr (minval, NULL_RTX, VOIDmode, 0);
4867
4868 op_mode = insn_operand_mode[(int)CODE_FOR_casesi][1];
4869 if (! (*insn_operand_predicate[(int)CODE_FOR_casesi][1])
4870 (op1, op_mode))
4871 op1 = copy_to_mode_reg (op_mode, op1);
4872
4873 op2 = expand_expr (range, NULL_RTX, VOIDmode, 0);
4874
4875 op_mode = insn_operand_mode[(int)CODE_FOR_casesi][2];
4876 if (! (*insn_operand_predicate[(int)CODE_FOR_casesi][2])
4877 (op2, op_mode))
4878 op2 = copy_to_mode_reg (op_mode, op2);
4879
4880 emit_jump_insn (gen_casesi (index, op1, op2,
4881 table_label, default_label));
4882 win = 1;
4883 }
4884 #endif
4885 #ifdef HAVE_tablejump
4886 if (! win && HAVE_tablejump)
4887 {
4888 index_expr = convert (thiscase->data.case_stmt.nominal_type,
4889 fold (build (MINUS_EXPR, index_type,
4890 index_expr, minval)));
4891 index_type = TREE_TYPE (index_expr);
4892 index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0);
4893 emit_queue ();
4894 index = protect_from_queue (index, 0);
4895 do_pending_stack_adjust ();
4896
4897 do_tablejump (index, TYPE_MODE (index_type),
4898 expand_expr (range, NULL_RTX, VOIDmode, 0),
4899 table_label, default_label);
4900 win = 1;
4901 }
4902 #endif
4903 if (! win)
4904 abort ();
4905
4906 /* Get table of labels to jump to, in order of case index. */
4907
4908 ncases = TREE_INT_CST_LOW (range) + 1;
4909 labelvec = (rtx *) alloca (ncases * sizeof (rtx));
4910 bzero ((char *) labelvec, ncases * sizeof (rtx));
4911
4912 for (n = thiscase->data.case_stmt.case_list; n; n = n->right)
4913 {
4914 register HOST_WIDE_INT i
4915 = TREE_INT_CST_LOW (n->low) - TREE_INT_CST_LOW (orig_minval);
4916
4917 while (1)
4918 {
4919 labelvec[i]
4920 = gen_rtx_LABEL_REF (Pmode, label_rtx (n->code_label));
4921 if (i + TREE_INT_CST_LOW (orig_minval)
4922 == TREE_INT_CST_LOW (n->high))
4923 break;
4924 i++;
4925 }
4926 }
4927
4928 /* Fill in the gaps with the default. */
4929 for (i = 0; i < ncases; i++)
4930 if (labelvec[i] == 0)
4931 labelvec[i] = gen_rtx_LABEL_REF (Pmode, default_label);
4932
4933 /* Output the table. */
4934 emit_label (table_label);
4935
4936 if (CASE_VECTOR_PC_RELATIVE || flag_pic)
4937 emit_jump_insn (gen_rtx_ADDR_DIFF_VEC (CASE_VECTOR_MODE,
4938 gen_rtx_LABEL_REF (Pmode, table_label),
4939 gen_rtvec_v (ncases, labelvec),
4940 const0_rtx, const0_rtx, 0));
4941 else
4942 emit_jump_insn (gen_rtx_ADDR_VEC (CASE_VECTOR_MODE,
4943 gen_rtvec_v (ncases, labelvec)));
4944
4945 /* If the case insn drops through the table,
4946 after the table we must jump to the default-label.
4947 Otherwise record no drop-through after the table. */
4948 #ifdef CASE_DROPS_THROUGH
4949 emit_jump (default_label);
4950 #else
4951 emit_barrier ();
4952 #endif
4953 }
4954
4955 before_case = squeeze_notes (NEXT_INSN (before_case), get_last_insn ());
4956 reorder_insns (before_case, get_last_insn (),
4957 thiscase->data.case_stmt.start);
4958 }
4959 else
4960 end_cleanup_deferral ();
4961
4962 if (thiscase->exit_label)
4963 emit_label (thiscase->exit_label);
4964
4965 POPSTACK (case_stack);
4966
4967 free_temp_slots ();
4968 }
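
/* Worked example of the dispatch heuristic above, assuming a
non-constant index: for

     case 1: case 2: case 3: case 1000:

COUNT is 4 and RANGE is 999; since 999 > 10 * 4, a binary tree of
compares is emitted. For `case 1' through `case 8' (COUNT 8,
RANGE 7), COUNT reaches CASE_VALUES_THRESHOLD and the range is
small relative to the count, so a casesi or tablejump dispatch is
used instead. */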
4969
4970 /* Convert the tree NODE into a list linked by the right field, with the left
4971 field zeroed. RIGHT is used for recursion; it is a list to be placed
4972 rightmost in the resulting list. */
4973
4974 static struct case_node *
4975 case_tree2list (node, right)
4976 struct case_node *node, *right;
4977 {
4978 struct case_node *left;
4979
4980 if (node->right)
4981 right = case_tree2list (node->right, right);
4982
4983 node->right = right;
4984 if ((left = node->left))
4985 {
4986 node->left = 0;
4987 return case_tree2list (left, node);
4988 }
4989
4990 return node;
4991 }
4992
4993 /* Generate code to jump to LABEL if OP1 and OP2 are equal. */
4994
4995 static void
4996 do_jump_if_equal (op1, op2, label, unsignedp)
4997 rtx op1, op2, label;
4998 int unsignedp;
4999 {
5000 if (GET_CODE (op1) == CONST_INT
5001 && GET_CODE (op2) == CONST_INT)
5002 {
5003 if (INTVAL (op1) == INTVAL (op2))
5004 emit_jump (label);
5005 }
5006 else
5007 {
5008 enum machine_mode mode = GET_MODE (op1);
5009 if (mode == VOIDmode)
5010 mode = GET_MODE (op2);
5011 emit_cmp_insn (op1, op2, EQ, NULL_RTX, mode, unsignedp, 0);
5012 emit_jump_insn (gen_beq (label));
5013 }
5014 }
5015 \f
5016 /* Not all case values are encountered equally. This function
5017 uses a heuristic to weight case labels, in cases where that
5018 looks like a reasonable thing to do.
5019
5020 Right now, all we try to guess is text, and we establish the
5021 following weights:
5022
5023 chars above space: 16
5024 digits: 16
5025 default: 12
5026 space, punct: 8
5027 tab: 4
5028 newline: 2
5029 other "\" chars: 1
5030 remaining chars: 0
5031
5032 If we find any cases in the switch that are not either -1 or in the range
5033 of valid ASCII characters, or are control characters other than those
5034 commonly used with "\", don't treat this switch as scanning text.
5035
5036 Return 1 if these nodes are suitable for cost estimation, otherwise
5037 return 0. */
5038
5039 static int
5040 estimate_case_costs (node)
5041 case_node_ptr node;
5042 {
5043 tree min_ascii = build_int_2 (-1, -1);
5044 tree max_ascii = convert (TREE_TYPE (node->high), build_int_2 (127, 0));
5045 case_node_ptr n;
5046 int i;
5047
5048 /* If we haven't already made the cost table, make it now. Note that the
5049 lower bound of the table is -1, not zero. */
5050
5051 if (cost_table == NULL)
5052 {
5053 cost_table = ((short *) xmalloc (129 * sizeof (short))) + 1;
5054 bzero ((char *) (cost_table - 1), 129 * sizeof (short));
5055
5056 for (i = 0; i < 128; i++)
5057 {
5058 if (isalnum (i))
5059 cost_table[i] = 16;
5060 else if (ispunct (i))
5061 cost_table[i] = 8;
5062 else if (iscntrl (i))
5063 cost_table[i] = -1;
5064 }
5065
5066 cost_table[' '] = 8;
5067 cost_table['\t'] = 4;
5068 cost_table['\0'] = 4;
5069 cost_table['\n'] = 2;
5070 cost_table['\f'] = 1;
5071 cost_table['\v'] = 1;
5072 cost_table['\b'] = 1;
5073 }
5074
5075 /* See if all the case expressions look like text. It is text if the
5076 constant is >= -1 and the highest constant is <= 127. Do all comparisons
5077 as signed arithmetic since we don't want to ever access cost_table with a
5078 value less than -1. Also check that none of the constants in a range
5079 are strange control characters. */
5080
5081 for (n = node; n; n = n->right)
5082 {
5083 if ((INT_CST_LT (n->low, min_ascii)) || INT_CST_LT (max_ascii, n->high))
5084 return 0;
5085
5086 for (i = TREE_INT_CST_LOW (n->low); i <= TREE_INT_CST_LOW (n->high); i++)
5087 if (cost_table[i] < 0)
5088 return 0;
5089 }
5090
5091 /* All interesting values are within the range of interesting
5092 ASCII characters. */
5093 return 1;
5094 }
5095
5096 /* Scan an ordered list of case nodes
5097 combining those with consecutive values or ranges.
5098
5099 E.g. three separate entries 1: 2: 3: become one entry 1..3: */
5100
5101 static void
5102 group_case_nodes (head)
5103 case_node_ptr head;
5104 {
5105 case_node_ptr node = head;
5106
5107 while (node)
5108 {
5109 rtx lb = next_real_insn (label_rtx (node->code_label));
5110 rtx lb2;
5111 case_node_ptr np = node;
5112
5113 /* Try to group the successors of NODE with NODE. */
5114 while (((np = np->right) != 0)
5115 /* Do they jump to the same place? */
5116 && ((lb2 = next_real_insn (label_rtx (np->code_label))) == lb
5117 || (lb != 0 && lb2 != 0
5118 && simplejump_p (lb)
5119 && simplejump_p (lb2)
5120 && rtx_equal_p (SET_SRC (PATTERN (lb)),
5121 SET_SRC (PATTERN (lb2)))))
5122 /* Are their ranges consecutive? */
5123 && tree_int_cst_equal (np->low,
5124 fold (build (PLUS_EXPR,
5125 TREE_TYPE (node->high),
5126 node->high,
5127 integer_one_node)))
5128 /* An overflow is not consecutive. */
5129 && tree_int_cst_lt (node->high,
5130 fold (build (PLUS_EXPR,
5131 TREE_TYPE (node->high),
5132 node->high,
5133 integer_one_node))))
5134 {
5135 node->high = np->high;
5136 }
5137 /* NP is the first node after NODE which can't be grouped with it.
5138 Delete the nodes in between, and move on to that node. */
5139 node->right = np;
5140 node = np;
5141 }
5142 }
5143
5144 /* Take an ordered list of case nodes
5145 and transform them into a near optimal binary tree,
5146 on the assumption that any target code selection value is as
5147 likely as any other.
5148
5149 The transformation is performed by splitting the ordered
5150 list into two equal sections plus a pivot. The parts are
5151 then attached to the pivot as left and right branches. Each
5152 branch is then transformed recursively. */
5153
5154 static void
5155 balance_case_nodes (head, parent)
5156 case_node_ptr *head;
5157 case_node_ptr parent;
5158 {
5159 register case_node_ptr np;
5160
5161 np = *head;
5162 if (np)
5163 {
5164 int cost = 0;
5165 int i = 0;
5166 int ranges = 0;
5167 register case_node_ptr *npp;
5168 case_node_ptr left;
5169
5170 /* Count the number of entries on the branch. Also count the ranges. */
5171
5172 while (np)
5173 {
5174 if (!tree_int_cst_equal (np->low, np->high))
5175 {
5176 ranges++;
5177 if (use_cost_table)
5178 cost += cost_table[TREE_INT_CST_LOW (np->high)];
5179 }
5180
5181 if (use_cost_table)
5182 cost += cost_table[TREE_INT_CST_LOW (np->low)];
5183
5184 i++;
5185 np = np->right;
5186 }
5187
5188 if (i > 2)
5189 {
5190 /* Split this list if it is long enough for that to help. */
5191 npp = head;
5192 left = *npp;
5193 if (use_cost_table)
5194 {
5195 /* Find the place in the list that bisects the list's total cost;
5196 here I gets half the total cost. */
5197 int n_moved = 0;
5198 i = (cost + 1) / 2;
5199 while (1)
5200 {
5201 /* Skip nodes while their cost does not reach that amount. */
5202 if (!tree_int_cst_equal ((*npp)->low, (*npp)->high))
5203 i -= cost_table[TREE_INT_CST_LOW ((*npp)->high)];
5204 i -= cost_table[TREE_INT_CST_LOW ((*npp)->low)];
5205 if (i <= 0)
5206 break;
5207 npp = &(*npp)->right;
5208 n_moved += 1;
5209 }
5210 if (n_moved == 0)
5211 {
5212 /* Leave this branch lopsided, but optimize left-hand
5213 side and fill in `parent' fields for right-hand side. */
5214 np = *head;
5215 np->parent = parent;
5216 balance_case_nodes (&np->left, np);
5217 for (; np->right; np = np->right)
5218 np->right->parent = np;
5219 return;
5220 }
5221 }
5222 /* If there are just three nodes, split at the middle one. */
5223 else if (i == 3)
5224 npp = &(*npp)->right;
5225 else
5226 {
5227 /* Find the place in the list that bisects the list's total cost,
5228 where ranges count as 2.
5229 Here I gets half the total cost. */
5230 i = (i + ranges + 1) / 2;
5231 while (1)
5232 {
5233 /* Skip nodes while their cost does not reach that amount. */
5234 if (!tree_int_cst_equal ((*npp)->low, (*npp)->high))
5235 i--;
5236 i--;
5237 if (i <= 0)
5238 break;
5239 npp = &(*npp)->right;
5240 }
5241 }
5242 *head = np = *npp;
5243 *npp = 0;
5244 np->parent = parent;
5245 np->left = left;
5246
5247 /* Optimize each of the two split parts. */
5248 balance_case_nodes (&np->left, np);
5249 balance_case_nodes (&np->right, np);
5250 }
5251 else
5252 {
5253 /* Else leave this branch as one level,
5254 but fill in `parent' fields. */
5255 np = *head;
5256 np->parent = parent;
5257 for (; np->right; np = np->right)
5258 np->right->parent = np;
5259 }
5260 }
5261 }
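
/* Editorial illustration (not part of the compiler): the same
   divide-and-conquer idea on a hypothetical plain linked list,
   ignoring the cost table and the range weighting.  The list is
   split at its midpoint; the midpoint becomes the subtree root and
   the two halves are balanced recursively.  Nodes are assumed to
   start with left and parent null, as case nodes do when the list
   is first built.  */
#if 0
struct demo_node
{
  struct demo_node *left, *right, *parent;
};

static void
demo_balance (head, parent)
     struct demo_node **head;
     struct demo_node *parent;
{
  struct demo_node *np = *head;
  struct demo_node **npp = head;
  struct demo_node *left = *head;
  int i = 0;

  if (np == 0)
    return;

  /* Count the nodes on this branch.  */
  for (; np; np = np->right)
    i++;

  if (i <= 2)
    {
      /* Too short to split: leave the branch linear,
	 but fill in the parent fields.  */
      np = *head;
      np->parent = parent;
      for (; np->right; np = np->right)
	np->right->parent = np;
      return;
    }

  /* Advance NPP to the node that bisects the list.  */
  for (i /= 2; i > 0; i--)
    npp = &(*npp)->right;

  /* Detach: the midpoint becomes the root of this subtree, with the
     first half hanging off its left pointer and the second half
     still chained through its right pointer.  */
  *head = np = *npp;
  *npp = 0;
  np->parent = parent;
  np->left = left;

  demo_balance (&np->left, np);
  demo_balance (&np->right, np);
}
#endif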
5262 \f
5263 /* Search the parent sections of the case node tree
5264 to see if a test for the lower bound of NODE would be redundant.
5265 INDEX_TYPE is the type of the index expression.
5266
5267 The instructions to generate the case decision tree are
5268 output in the same order as nodes are processed, so it is
5269 known that if a parent node has already checked against the
5270 current node's lower bound minus one, the current node is
5271 bounded at its lower end. Thus the test would be redundant. */
5272
5273 static int
5274 node_has_low_bound (node, index_type)
5275 case_node_ptr node;
5276 tree index_type;
5277 {
5278 tree low_minus_one;
5279 case_node_ptr pnode;
5280
5281 /* If the lower bound of this node is the lowest value in the index type,
5282 we need not test it. */
5283
5284 if (tree_int_cst_equal (node->low, TYPE_MIN_VALUE (index_type)))
5285 return 1;
5286
5287 /* If this node has a left branch, the value at the left must be less
5288 than that at this node, so it cannot be bounded at the bottom and
5289 we need not bother testing any further. */
5290
5291 if (node->left)
5292 return 0;
5293
5294 low_minus_one = fold (build (MINUS_EXPR, TREE_TYPE (node->low),
5295 node->low, integer_one_node));
5296
5297 /* If the subtraction above overflowed, we can't verify anything.
5298 Otherwise, look for a parent that tests our value - 1. */
5299
5300 if (! tree_int_cst_lt (low_minus_one, node->low))
5301 return 0;
5302
5303 for (pnode = node->parent; pnode; pnode = pnode->parent)
5304 if (tree_int_cst_equal (low_minus_one, pnode->high))
5305 return 1;
5306
5307 return 0;
5308 }
5309
5310 /* Search the parent sections of the case node tree
5311 to see if a test for the upper bound of NODE would be redundant.
5312 INDEX_TYPE is the type of the index expression.
5313
5314 The instructions to generate the case decision tree are
5315 output in the same order as nodes are processed, so it is
5316 known that if a parent node has already checked against the
5317 current node's upper bound plus one, the current node is
5318 bounded at its upper end. Thus the test would be redundant. */
5319
5320 static int
5321 node_has_high_bound (node, index_type)
5322 case_node_ptr node;
5323 tree index_type;
5324 {
5325 tree high_plus_one;
5326 case_node_ptr pnode;
5327
5328 /* If there is no upper bound, obviously no test is needed. */
5329
5330 if (TYPE_MAX_VALUE (index_type) == NULL)
5331 return 1;
5332
5333 /* If the upper bound of this node is the highest value in the type
5334 of the index expression, we need not test against it. */
5335
5336 if (tree_int_cst_equal (node->high, TYPE_MAX_VALUE (index_type)))
5337 return 1;
5338
5339 /* If this node has a right branch, the value at the right must be greater
5340 than that at this node, so it cannot be bounded at the top and
5341 we need not bother testing any further. */
5342
5343 if (node->right)
5344 return 0;
5345
5346 high_plus_one = fold (build (PLUS_EXPR, TREE_TYPE (node->high),
5347 node->high, integer_one_node));
5348
5349 /* If the addition above overflowed, we can't verify anything.
5350 Otherwise, look for a parent that tests our value + 1. */
5351
5352 if (! tree_int_cst_lt (node->high, high_plus_one))
5353 return 0;
5354
5355 for (pnode = node->parent; pnode; pnode = pnode->parent)
5356 if (tree_int_cst_equal (high_plus_one, pnode->low))
5357 return 1;
5358
5359 return 0;
5360 }
5361
5362 /* Search the parent sections of the
5363 case node tree to see if both tests for the upper and lower
5364 bounds of NODE would be redundant. */
5365
5366 static int
5367 node_is_bounded (node, index_type)
5368 case_node_ptr node;
5369 tree index_type;
5370 {
5371 return (node_has_low_bound (node, index_type)
5372 && node_has_high_bound (node, index_type));
5373 }
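
/* Editorial note: a concrete instance of the redundancy the three
   functions above detect.  Suppose a parent node covers the range
   4..9 and the current node covers 10..12.  Control reaches the
   current node's code only on the greater side of the parent's
   comparison, so index > 9, i.e. index >= 10, is already
   established; node_has_low_bound discovers this by finding a
   parent whose high bound equals the current low bound minus one,
   and the explicit lower-bound test can be dropped.  The
   upper-bound case is symmetric.  */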
5374
5375 /* Emit an unconditional jump to LABEL unless it would be dead code. */
5376
5377 static void
5378 emit_jump_if_reachable (label)
5379 rtx label;
5380 {
5381 if (GET_CODE (get_last_insn ()) != BARRIER)
5382 emit_jump (label);
5383 }
5384 \f
5385 /* Emit step-by-step code to select a case for the value of INDEX.
5386 The decision tree generated here follows the form of the
5387 case-node binary tree NODE, whose nodes represent test conditions.
5388 INDEX_TYPE is the type of the index of the switch.
5389
5390 Care is taken to prune redundant tests from the decision tree
5391 by detecting any boundary conditions already checked by
5392 emitted rtx. (See node_has_high_bound, node_has_low_bound
5393 and node_is_bounded, above.)
5394
5395 Where the test conditions can be shown to be redundant we emit
5396 an unconditional jump to the target code. As a further
5397 optimization, the subordinates of a tree node are examined to
5398 check for bounded nodes. In this case conditional and/or
5399 unconditional jumps as a result of the boundary check for the
5400 current node are arranged to target the subordinate's associated
5401 code for out-of-bound conditions on the current node.
5402
5403 We can assume that when control reaches the code generated here,
5404 the index value has already been compared with the parents
5405 of this node, and determined to be on the same side of each parent
5406 as this node is. Thus, if this node tests for the value 51,
5407 and a parent tested for 52, we don't need to consider
5408 the possibility of a value greater than 51. If another parent
5409 tests for the value 50, then this node need not test anything. */
5410
5411 static void
5412 emit_case_nodes (index, node, default_label, index_type)
5413 rtx index;
5414 case_node_ptr node;
5415 rtx default_label;
5416 tree index_type;
5417 {
5418 /* If INDEX has an unsigned type, we must make unsigned branches. */
5419 int unsignedp = TREE_UNSIGNED (index_type);
5420 typedef rtx rtx_function ();
5421 rtx_function *gen_bgt_pat = unsignedp ? gen_bgtu : gen_bgt;
5422 rtx_function *gen_bge_pat = unsignedp ? gen_bgeu : gen_bge;
5423 rtx_function *gen_blt_pat = unsignedp ? gen_bltu : gen_blt;
5424 rtx_function *gen_ble_pat = unsignedp ? gen_bleu : gen_ble;
5425 enum machine_mode mode = GET_MODE (index);
5426
5427 /* See if our parents have already tested everything for us.
5428 If they have, emit an unconditional jump for this node. */
5429 if (node_is_bounded (node, index_type))
5430 emit_jump (label_rtx (node->code_label));
5431
5432 else if (tree_int_cst_equal (node->low, node->high))
5433 {
5434 /* Node is single valued. First see if the index expression matches
5435 this node and then check our children, if any. */
5436
5437 do_jump_if_equal (index, expand_expr (node->low, NULL_RTX, VOIDmode, 0),
5438 label_rtx (node->code_label), unsignedp);
5439
5440 if (node->right != 0 && node->left != 0)
5441 {
5442 /* This node has children on both sides.
5443 Dispatch to one side or the other
5444 by comparing the index value with this node's value.
5445 If one subtree is bounded, check that one first,
5446 so we can avoid real branches in the tree. */
5447
5448 if (node_is_bounded (node->right, index_type))
5449 {
5450 emit_cmp_insn (index, expand_expr (node->high, NULL_RTX,
5451 VOIDmode, 0),
5452 GT, NULL_RTX, mode, unsignedp, 0);
5453
5454 emit_jump_insn ((*gen_bgt_pat) (label_rtx (node->right->code_label)));
5455 emit_case_nodes (index, node->left, default_label, index_type);
5456 }
5457
5458 else if (node_is_bounded (node->left, index_type))
5459 {
5460 emit_cmp_insn (index, expand_expr (node->high, NULL_RTX,
5461 VOIDmode, 0),
5462 LT, NULL_RTX, mode, unsignedp, 0);
5463 emit_jump_insn ((*gen_blt_pat) (label_rtx (node->left->code_label)));
5464 emit_case_nodes (index, node->right, default_label, index_type);
5465 }
5466
5467 else
5468 {
5469 /* Neither node is bounded. First distinguish the two sides;
5470 then emit the code for one side at a time. */
5471
5472 tree test_label
5473 = build_decl (LABEL_DECL, NULL_TREE, NULL_TREE);
5474
5475 /* See if the value is on the right. */
5476 emit_cmp_insn (index, expand_expr (node->high, NULL_RTX,
5477 VOIDmode, 0),
5478 GT, NULL_RTX, mode, unsignedp, 0);
5479 emit_jump_insn ((*gen_bgt_pat) (label_rtx (test_label)));
5480
5481 /* Value must be on the left.
5482 Handle the left-hand subtree. */
5483 emit_case_nodes (index, node->left, default_label, index_type);
5484 /* If left-hand subtree does nothing,
5485 go to default. */
5486 emit_jump_if_reachable (default_label);
5487
5488 /* Code branches here for the right-hand subtree. */
5489 expand_label (test_label);
5490 emit_case_nodes (index, node->right, default_label, index_type);
5491 }
5492 }
5493
5494 else if (node->right != 0 && node->left == 0)
5495 {
5496 /* Here we have a right child but no left, so we issue a
5497 conditional branch to default and process the right child.
5498
5499 Omit the conditional branch to default if it would avoid only
5500 one right child; it costs too much space to save so little time. */
5501
5502 if (node->right->right || node->right->left
5503 || !tree_int_cst_equal (node->right->low, node->right->high))
5504 {
5505 if (!node_has_low_bound (node, index_type))
5506 {
5507 emit_cmp_insn (index, expand_expr (node->high, NULL_RTX,
5508 VOIDmode, 0),
5509 LT, NULL_RTX, mode, unsignedp, 0);
5510 emit_jump_insn ((*gen_blt_pat) (default_label));
5511 }
5512
5513 emit_case_nodes (index, node->right, default_label, index_type);
5514 }
5515 else
5516 /* We cannot process node->right normally
5517 since we haven't ruled out the numbers less than
5518 this node's value. So handle node->right explicitly. */
5519 do_jump_if_equal (index,
5520 expand_expr (node->right->low, NULL_RTX,
5521 VOIDmode, 0),
5522 label_rtx (node->right->code_label), unsignedp);
5523 }
5524
5525 else if (node->right == 0 && node->left != 0)
5526 {
5527 /* Just one subtree, on the left. */
5528
5529 #if 0 /* The following code and comment were formerly part
5530 of the condition here, but they didn't work
5531 and I don't understand what the idea was. -- rms. */
5532 /* If our "most probable entry" is less probable
5533 than the default label, emit a jump to
5534 the default label using condition codes
5535 already lying around. With no right branch,
5536 a branch-greater-than will get us to the default
5537 label correctly. */
5538 if (use_cost_table
5539 && cost_table[TREE_INT_CST_LOW (node->high)] < 12)
5540 ;
5541 #endif /* 0 */
5542 if (node->left->left || node->left->right
5543 || !tree_int_cst_equal (node->left->low, node->left->high))
5544 {
5545 if (!node_has_high_bound (node, index_type))
5546 {
5547 emit_cmp_insn (index, expand_expr (node->high, NULL_RTX,
5548 VOIDmode, 0),
5549 GT, NULL_RTX, mode, unsignedp, 0);
5550 emit_jump_insn ((*gen_bgt_pat) (default_label));
5551 }
5552
5553 emit_case_nodes (index, node->left, default_label, index_type);
5554 }
5555 else
5556 /* We cannot process node->left normally
5557 since we haven't ruled out the numbers greater than
5558 this node's value. So handle node->left explicitly. */
5559 do_jump_if_equal (index,
5560 expand_expr (node->left->low, NULL_RTX,
5561 VOIDmode, 0),
5562 label_rtx (node->left->code_label), unsignedp);
5563 }
5564 }
5565 else
5566 {
5567 /* Node is a range. These cases are very similar to those for a single
5568 value, except that we do not start by testing whether this node
5569 is the one to branch to. */
5570
5571 if (node->right != 0 && node->left != 0)
5572 {
5573 /* Node has subtrees on both sides.
5574 If the right-hand subtree is bounded,
5575 test for it first, since we can go straight there.
5576 Otherwise, we need to make a branch in the control structure,
5577 then handle the two subtrees. */
5578 tree test_label = 0;
5579
5580 emit_cmp_insn (index, expand_expr (node->high, NULL_RTX,
5581 VOIDmode, 0),
5582 GT, NULL_RTX, mode, unsignedp, 0);
5583
5584 if (node_is_bounded (node->right, index_type))
5585 /* Right hand node is fully bounded so we can eliminate any
5586 testing and branch directly to the target code. */
5587 emit_jump_insn ((*gen_bgt_pat) (label_rtx (node->right->code_label)));
5588 else
5589 {
5590 /* Right hand node requires testing.
5591 Branch to a label where we will handle it later. */
5592
5593 test_label = build_decl (LABEL_DECL, NULL_TREE, NULL_TREE);
5594 emit_jump_insn ((*gen_bgt_pat) (label_rtx (test_label)));
5595 }
5596
5597 /* Value belongs to this node or to the left-hand subtree. */
5598
5599 emit_cmp_insn (index, expand_expr (node->low, NULL_RTX, VOIDmode, 0),
5600 GE, NULL_RTX, mode, unsignedp, 0);
5601 emit_jump_insn ((*gen_bge_pat) (label_rtx (node->code_label)));
5602
5603 /* Handle the left-hand subtree. */
5604 emit_case_nodes (index, node->left, default_label, index_type);
5605
5606 /* If right node had to be handled later, do that now. */
5607
5608 if (test_label)
5609 {
5610 /* If the left-hand subtree fell through,
5611 don't let it fall into the right-hand subtree. */
5612 emit_jump_if_reachable (default_label);
5613
5614 expand_label (test_label);
5615 emit_case_nodes (index, node->right, default_label, index_type);
5616 }
5617 }
5618
5619 else if (node->right != 0 && node->left == 0)
5620 {
5621 /* Deal with values to the left of this node,
5622 if they are possible. */
5623 if (!node_has_low_bound (node, index_type))
5624 {
5625 emit_cmp_insn (index, expand_expr (node->low, NULL_RTX,
5626 VOIDmode, 0),
5627 LT, NULL_RTX, mode, unsignedp, 0);
5628 emit_jump_insn ((*gen_blt_pat) (default_label));
5629 }
5630
5631 /* Value belongs to this node or to the right-hand subtree. */
5632
5633 emit_cmp_insn (index, expand_expr (node->high, NULL_RTX,
5634 VOIDmode, 0),
5635 LE, NULL_RTX, mode, unsignedp, 0);
5636 emit_jump_insn ((*gen_ble_pat) (label_rtx (node->code_label)));
5637
5638 emit_case_nodes (index, node->right, default_label, index_type);
5639 }
5640
5641 else if (node->right == 0 && node->left != 0)
5642 {
5643 /* Deal with values to the right of this node,
5644 if they are possible. */
5645 if (!node_has_high_bound (node, index_type))
5646 {
5647 emit_cmp_insn (index, expand_expr (node->high, NULL_RTX,
5648 VOIDmode, 0),
5649 GT, NULL_RTX, mode, unsignedp, 0);
5650 emit_jump_insn ((*gen_bgt_pat) (default_label));
5651 }
5652
5653 /* Value belongs to this node or to the left-hand subtree. */
5654
5655 emit_cmp_insn (index, expand_expr (node->low, NULL_RTX, VOIDmode, 0),
5656 GE, NULL_RTX, mode, unsignedp, 0);
5657 emit_jump_insn ((*gen_bge_pat) (label_rtx (node->code_label)));
5658
5659 emit_case_nodes (index, node->left, default_label, index_type);
5660 }
5661
5662 else
5663 {
5664 /* Node has no children, so we check low and high bounds to remove
5665 redundant tests. At most one of the bounds can already be known,
5666 since otherwise this node would be bounded--a case tested already. */
5667
5668 if (!node_has_high_bound (node, index_type))
5669 {
5670 emit_cmp_insn (index, expand_expr (node->high, NULL_RTX,
5671 VOIDmode, 0),
5672 GT, NULL_RTX, mode, unsignedp, 0);
5673 emit_jump_insn ((*gen_bgt_pat) (default_label));
5674 }
5675
5676 if (!node_has_low_bound (node, index_type))
5677 {
5678 emit_cmp_insn (index, expand_expr (node->low, NULL_RTX,
5679 VOIDmode, 0),
5680 LT, NULL_RTX, mode, unsignedp, 0);
5681 emit_jump_insn ((*gen_blt_pat) (default_label));
5682 }
5683
5684 emit_jump (label_rtx (node->code_label));
5685 }
5686 }
5687 }
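
/* Editorial note: a short worked example of the code emitted above,
   under the same 4..9 / 10..12 setup, with 4..9 as the root, 10..12
   as its right child, and an index of type int.  For the root,
   neither bound is known, so the emitted sequence is: branch to
   default if index < 4, branch to the root's label if index <= 9,
   then fall into the right child's code.  There, node_has_low_bound
   succeeds (the parent's high bound is 10 - 1), so only index > 12
   is tested against default before the jump to the child's label.  */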
5688 \f
5689 /* These routines are used by the loop unrolling code. They copy BLOCK trees
5690 so that the debugging info will be correct for the unrolled loop. */
5691
5692 /* Indexed by block number, contains a pointer to the N'th block node.
5693
5694 Allocated by the call to identify_blocks, then released after the call
5695 to reorder_blocks in the function unroll_block_trees. */
5696
5697 static tree *block_vector;
5698
5699 void
5700 find_loop_tree_blocks ()
5701 {
5702 tree block = DECL_INITIAL (current_function_decl);
5703
5704 block_vector = identify_blocks (block, get_insns ());
5705 }
5706
5707 void
5708 unroll_block_trees ()
5709 {
5710 tree block = DECL_INITIAL (current_function_decl);
5711
5712 reorder_blocks (block_vector, block, get_insns ());
5713
5714 /* Release any memory allocated by identify_blocks. */
5715 if (block_vector)
5716 free (block_vector);
5717 }
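
/* Editorial note on the intended calling sequence, as described
   above: a loop unroller is expected to call find_loop_tree_blocks
   once before it starts copying insns, so that block_vector is
   filled in by identify_blocks, and unroll_block_trees once it is
   done, so that reorder_blocks rebuilds the BLOCK tree for the
   copied code and the vector is released.  */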