/* Expands front end tree to back end RTL for GNU C-Compiler
   Copyright (C) 1987, 1988, 1989, 1992 Free Software Foundation, Inc.

This file is part of GNU CC.

GNU CC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2, or (at your option)
any later version.

GNU CC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GNU CC; see the file COPYING.  If not, write to
the Free Software Foundation, 675 Mass Ave, Cambridge, MA 02139, USA.  */
/* This file handles the generation of rtl code from tree structure
   above the level of expressions, using subroutines in exp*.c and emit-rtl.c.
   It also creates the rtl expressions for parameters and auto variables
   and has full responsibility for allocating stack slots.

   The functions whose names start with `expand_' are called by the
   parser to generate RTL instructions for various kinds of constructs.

   Some control and binding constructs require calling several such
   functions at different times.  For example, a simple if-then
   is expanded by calling `expand_start_cond' (with the condition-expression
   as argument) before parsing the then-clause and calling `expand_end_cond'
   after parsing the then-clause.  */
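/* As an illustration (not code from this file), a C front end expands
   `if (cond) stmt;' with a call sequence along these lines, where
   COND_TREE and STMT_TREE stand for the parsed condition and statement:

	expand_start_cond (cond_tree, 0);	-- emit test, jump past clause
	expand_expr_stmt (stmt_tree);		-- emit the then-clause
	expand_end_cond ();			-- emit the label jumped to

   The 0 passed for EXITFLAG means the conditional is not visible to
   `exit_something' (see the nesting-stack comments below).  */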
#include "insn-flags.h"
#include "insn-config.h"
#include "insn-codes.h"
#include "hard-reg-set.h"
#define obstack_chunk_alloc xmalloc
#define obstack_chunk_free free
struct obstack stmt_obstack;
/* Filename and line number of last line-number note,
   whether we actually emitted it or not.  */
char *emit_filename;
int emit_lineno;

/* Nonzero if within a ({...}) grouping, in which case we must
   always compute a value for each expr-stmt in case it is the last one.  */

int expr_stmts_for_value;

/* Each time we expand an expression-statement,
   record the expr's type and its RTL value here.  */

static tree last_expr_type;
static rtx last_expr_value;
/* Number of binding contours started so far in this function.  */

int block_start_count;

/* Nonzero if function being compiled needs to
   return the address of where it has put a structure value.  */

extern int current_function_returns_pcc_struct;

/* Label that will go on parm cleanup code, if any.
   Jumping to this label runs cleanup code for parameters, if
   such code must be run.  Following this code is the logical return label.  */

extern rtx cleanup_label;

/* Label that will go on function epilogue.
   Jumping to this label serves as a "return" instruction
   on machines which require execution of the epilogue on all returns.  */

extern rtx return_label;

/* List (chain of EXPR_LISTs) of pseudo-regs of SAVE_EXPRs.
   So we can mark them all live at the end of the function, if nonopt.  */

extern rtx save_expr_regs;

/* Offset to end of allocated area of stack frame.
   If stack grows down, this is the address of the last stack slot allocated.
   If stack grows up, this is the address for the next slot.  */

extern int frame_offset;

/* Label to jump back to for tail recursion, or 0 if we have
   not yet needed one for this function.  */

extern rtx tail_recursion_label;

/* Place after which to insert the tail_recursion_label if we need one.  */

extern rtx tail_recursion_reentry;

/* Location at which to save the argument pointer if it will need to be
   referenced.  There are two cases where this is done: if nonlocal gotos
   exist, or if vars whose addresses are offsets from the argument pointer
   will be needed by inner routines.  */

extern rtx arg_pointer_save_area;

/* Chain of all RTL_EXPRs that have insns in them.  */

extern tree rtl_expr_chain;
#if 0  /* Turned off because 0 seems to work just as well.  */
/* Cleanup lists are required for binding levels regardless of whether
   that binding level has cleanups or not.  This node serves as the
   cleanup list whenever an empty list is required.  */
static tree empty_cleanup_list;
#endif
/* Functions and data structures for expanding case statements.  */

/* Case label structure, used to hold info on labels within case
   statements.  We handle "range" labels; for a single-value label
   as in C, the high and low limits are the same.

   A chain of case nodes is initially maintained via the RIGHT fields
   in the nodes.  Nodes with higher case values are later in the list.

   Switch statements can be output in one of two forms.  A branch table
   is used if there are more than a few labels and the labels are dense
   within the range between the smallest and largest case value.  If a
   branch table is used, no further manipulations are done with the case
   node chain.

   The alternative to the use of a branch table is to generate a series
   of compare and jump insns.  When that is done, we use the LEFT, RIGHT,
   and PARENT fields to hold a binary tree.  Initially the tree is
   totally unbalanced, with everything on the right.  We balance the tree
   with nodes on the left having lower case values than the parent
   and nodes on the right having higher values.  We then output the tree
   in order.  */
struct case_node
{
  struct case_node *left;	/* Left son in binary tree */
  struct case_node *right;	/* Right son in binary tree; also node chain */
  struct case_node *parent;	/* Parent of node in binary tree */
  tree low;			/* Lowest index value for this label */
  tree high;			/* Highest index value for this label */
  tree code_label;		/* Label to jump to when node matches */
};

typedef struct case_node case_node;
typedef struct case_node *case_node_ptr;
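/* For instance (a sketch, not code from this file), a switch with the
   labels `case 1:', `case 3:' and the range `4 .. 7' is first kept as a
   chain through the RIGHT fields:

	{low=1, high=1} -> {low=3, high=3} -> {low=4, high=7}

   balance_case_nodes may later rebuild that chain into a binary search
   tree using the LEFT, RIGHT and PARENT fields as described above.  */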
/* These are used by estimate_case_costs and balance_case_nodes.  */

/* This must be a signed type, and non-ANSI compilers lack signed char.  */
static short *cost_table;
static int use_cost_table;

static int estimate_case_costs ();
static void balance_case_nodes ();
static void emit_case_nodes ();
static void group_case_nodes ();
static void emit_jump_if_reachable ();

static int warn_if_unused_value ();
static void expand_goto_internal ();
static int expand_fixup ();
void free_temp_slots ();
static void expand_cleanups ();
static void fixup_cleanups ();
static void expand_null_return_1 ();
static int tail_recursion_args ();
static void do_jump_if_equal ();
/* Stack of control and binding constructs we are currently inside.

   These constructs begin when you call `expand_start_WHATEVER'
   and end when you call `expand_end_WHATEVER'.  This stack records
   info about how the construct began that tells the end-function
   what to do.  It also may provide information about the construct
   to alter the behavior of other constructs within the body.
   For example, they may affect the behavior of C `break' and `continue'.

   Each construct gets one `struct nesting' object.
   All of these objects are chained through the `all' field.
   `nesting_stack' points to the first object (innermost construct).
   The position of an entry on `nesting_stack' is in its `depth' field.

   Each type of construct has its own individual stack.
   For example, loops have `loop_stack'.  Each object points to the
   next object of the same type through the `next' field.

   Some constructs are visible to `break' exit-statements and others
   are not.  Which constructs are visible depends on the language.
   Therefore, the data structure allows each construct to be visible
   or not, according to the args given when the construct is started.
   The construct is visible if the `exit_label' field is non-null.
   In that case, the value should be a CODE_LABEL rtx.  */
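/* For example, while expanding the body of `while (c) { if (d) ...; }'
   (an illustration only) there is one entry for the loop on loop_stack and
   one for the `if' on cond_stack, and nesting_stack chains both, innermost
   first.  An exit-statement is expanded by scanning nesting_stack for the
   innermost entry whose exit_label is nonzero; that is what
   expand_exit_something, defined below, does.  */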
  struct nesting *next;

  /* For conds (if-then and if-then-else statements).  */

      /* Label for the end of the if construct.
	 There is none if EXITFLAG was not set
	 and no `else' has been seen yet.  */
      /* Label for the end of this alternative.
	 This may be the end of the if or the next else/elseif.  */

      /* Label at the top of the loop; place to loop back to.  */
      /* Label at the end of the whole construct.  */
      /* Label for `continue' statement to jump to;
	 this is in front of the stepper of the loop.  */

  /* For variable binding contours.  */

      /* Sequence number of this binding contour within the function,
	 in order of entry.  */
      int block_start_count;
      /* Nonzero => value to restore stack to on exit.  */
      /* The NOTE that starts this contour.
	 Used by expand_goto to check whether the destination
	 is within each contour or not.  */
      /* Innermost containing binding contour that has a stack level.  */
      struct nesting *innermost_stack_block;
      /* List of cleanups to be run on exit from this contour.
	 This is a list of expressions to be evaluated.
	 The TREE_PURPOSE of each link is the ..._DECL node
	 which the cleanup pertains to.  */
      /* List of cleanup-lists of blocks containing this block,
	 as they were at the locus where this block appears.
	 There is an element for each containing block,
	 ordered innermost containing block first.
	 The tail of this list can be 0 (was empty_cleanup_list),
	 if all remaining elements would be empty lists.
	 The element's TREE_VALUE is the cleanup-list of that block,
	 which may be null.  */
      /* Chain of labels defined inside this binding contour.
	 For contours that have stack levels or cleanups.  */
      struct label_chain *label_chain;
      /* Number of function calls seen, as of start of this block.  */
      int function_call_count;

  /* For switch (C) or case (Pascal) statements,
     and also for dummies (see `expand_start_case_dummy').  */

      /* The insn after which the case dispatch should finally
	 be emitted.  Zero for a dummy.  */
      /* A list of case labels, kept in ascending order by value
	 as the list is built.
	 During expand_end_case, this list may be rearranged into a
	 nearly balanced binary tree.  */
      struct case_node *case_list;
      /* Label to jump to if no case matches.  */
      /* The expression to be dispatched on.  */
      /* Type that INDEX_EXPR should be converted to.  */
      /* Number of range exprs in case statement.  */
      /* Name of this kind of statement, for warnings.  */
      /* Nonzero if a case label has been seen in this case stmt.  */

  /* For exception contours.  */

      /* List of exceptions raised.  This is a TREE_LIST
	 of whatever you want.  */
      /* List of exceptions caught.  This is also a TREE_LIST
	 of whatever you want.  As a special case, it has the
	 value `void_type_node' if it handles default exceptions.  */
      /* First insn of TRY block, in case resumptive model is needed.  */
      /* Label for the catch clauses.  */
      /* Label for unhandled exceptions.  */
      /* Label at the end of whole construct.  */
      /* Label which "escapes" the exception construct.
	 Like EXIT_LABEL for BREAK construct, but for exceptions.  */
/* Chain of all pending binding contours.  */
struct nesting *block_stack;

/* Chain of all pending binding contours that restore stack levels
   or have cleanups.  */
struct nesting *stack_block_stack;

/* Chain of all pending conditional statements.  */
struct nesting *cond_stack;

/* Chain of all pending loops.  */
struct nesting *loop_stack;

/* Chain of all pending case or switch statements.  */
struct nesting *case_stack;

/* Chain of all pending exception contours.  */
struct nesting *except_stack;

/* Separate chain including all of the above,
   chained through the `all' field.  */
struct nesting *nesting_stack;

/* Number of entries on nesting_stack now.  */
int nesting_depth;
/* Allocate and return a new `struct nesting'.  */

#define ALLOC_NESTING() \
 (struct nesting *) obstack_alloc (&stmt_obstack, sizeof (struct nesting))

/* Pop one of the sub-stacks, such as `loop_stack' or `cond_stack';
   and pop off `nesting_stack' down to the same level.  */

#define POPSTACK(STACK)					\
do { int initial_depth = nesting_stack->depth;		\
     do { struct nesting *this = STACK;			\
	  STACK = this->next;				\
	  nesting_stack = this->all;			\
	  nesting_depth = this->depth;			\
	  obstack_free (&stmt_obstack, this); }		\
     while (nesting_depth > initial_depth); } while (0)
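/* Thus expand_end_cond finishes with POPSTACK (cond_stack) and
   expand_end_loop with POPSTACK (loop_stack); in both cases the same
   entries are also popped off nesting_stack and freed.  */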
/* In some cases it is impossible to generate code for a forward goto
   until the label definition is seen.  This happens when it may be necessary
   for the goto to reset the stack pointer: we don't yet know how to do that.
   So expand_goto puts an entry on this fixup list.
   Each time a binding contour that resets the stack is exited,
   we check each fixup.
   If the target label has now been defined, we can insert the proper code.  */
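/* A typical case needing a fixup (an illustration, not code from this file)
   is a forward goto out of a contour that allocates variable-sized storage:

	{
	  int n = f ();
	  char buf[n];		-- this block gets a stack level
	  if (g ())
	    goto done;		-- label not defined yet; expand_fixup records it
	  ...
	}
     done: ;			-- fixup_gotos later inserts the stack restore
				   and any cleanups before the recorded jump
   */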
  /* Points to following fixup.  */
  struct goto_fixup *next;
  /* Points to the insn before the jump insn.
     If more code must be inserted, it goes after this insn.  */
  /* The LABEL_DECL that this jump is jumping to, or 0
     for break, continue or return.  */
  /* The CODE_LABEL rtx that this is jumping to.  */
  /* Number of binding contours started in current function
     before the label reference.  */
  int block_start_count;
  /* The outermost stack level that should be restored for this jump.
     Each time a binding contour that resets the stack is exited,
     if the target label is *not* yet defined, this slot is updated.  */
  /* List of lists of cleanup expressions to be run by this goto.
     There is one element for each block that this goto is within.
     The tail of this list can be 0 (was empty_cleanup_list),
     if all remaining elements would be empty.
     The TREE_VALUE contains the cleanup list of that block as of the
     time this goto was seen.
     The TREE_ADDRESSABLE flag is 1 for a block that has been exited.  */
  tree cleanup_list_list;

static struct goto_fixup *goto_fixup_chain;
/* Within any binding contour that must restore a stack level,
   all labels are recorded with a chain of these structures.  */

  /* Points to following fixup.  */
  struct label_chain *next;

  gcc_obstack_init (&stmt_obstack);

  empty_cleanup_list = build_tree_list (NULL_TREE, NULL_TREE);
init_stmt_for_function ()

  /* We are not currently within any block, conditional, loop or case.  */

  block_start_count = 0;

  /* No gotos have been expanded yet.  */
  goto_fixup_chain = 0;

  /* We are not processing a ({...}) grouping.  */
  expr_stmts_for_value = 0;
save_stmt_status (p)

  p->block_stack = block_stack;
  p->stack_block_stack = stack_block_stack;
  p->cond_stack = cond_stack;
  p->loop_stack = loop_stack;
  p->case_stack = case_stack;
  p->nesting_stack = nesting_stack;
  p->nesting_depth = nesting_depth;
  p->block_start_count = block_start_count;
  p->last_expr_type = last_expr_type;
  p->last_expr_value = last_expr_value;
  p->expr_stmts_for_value = expr_stmts_for_value;
  p->emit_filename = emit_filename;
  p->emit_lineno = emit_lineno;
  p->goto_fixup_chain = goto_fixup_chain;
restore_stmt_status (p)

  block_stack = p->block_stack;
  stack_block_stack = p->stack_block_stack;
  cond_stack = p->cond_stack;
  loop_stack = p->loop_stack;
  case_stack = p->case_stack;
  nesting_stack = p->nesting_stack;
  nesting_depth = p->nesting_depth;
  block_start_count = p->block_start_count;
  last_expr_type = p->last_expr_type;
  last_expr_value = p->last_expr_value;
  expr_stmts_for_value = p->expr_stmts_for_value;
  emit_filename = p->emit_filename;
  emit_lineno = p->emit_lineno;
  goto_fixup_chain = p->goto_fixup_chain;
/* Emit a no-op instruction.  */

  rtx last_insn = get_last_insn ();

      && (GET_CODE (last_insn) == CODE_LABEL
	  || prev_real_insn (last_insn) == 0))
    emit_insn (gen_nop ());
/* Return the rtx-label that corresponds to a LABEL_DECL,
   creating it if necessary.  */

  if (TREE_CODE (label) != LABEL_DECL)
    abort ();

  if (DECL_RTL (label))
    return DECL_RTL (label);

  return DECL_RTL (label) = gen_label_rtx ();
/* Add an unconditional jump to LABEL as the next sequential instruction.  */

  do_pending_stack_adjust ();
  emit_jump_insn (gen_jump (label));

/* Emit code to jump to the address
   specified by the pointer expression EXP.  */

expand_computed_goto (exp)

  rtx x = expand_expr (exp, NULL_RTX, VOIDmode, 0);

  emit_indirect_jump (x);
/* Handle goto statements and the labels that they can go to.  */

/* Specify the location in the RTL code of a label LABEL,
   which is a LABEL_DECL tree node.

   This is used for the kind of label that the user can jump to with a
   goto statement, and for alternatives of a switch or case statement.
   RTL labels generated for loops and conditionals don't go through here;
   they are generated directly at the RTL level, by other functions below.

   Note that this has nothing to do with defining label *names*.
   Languages vary in how they do that and what that even means.  */
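/* So for a C function containing `goto lab; ... lab: ;' the parser calls
   expand_goto on the LABEL_DECL at the goto and expand_label at the
   definition point; a forward goto seen before its label is handled
   through the fixup machinery above.  (Illustration of intended use.)  */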
  struct label_chain *p;

  do_pending_stack_adjust ();
  emit_label (label_rtx (label));
  if (DECL_NAME (label))
    LABEL_NAME (DECL_RTL (label)) = IDENTIFIER_POINTER (DECL_NAME (label));

  if (stack_block_stack != 0)
    {
      p = (struct label_chain *) oballoc (sizeof (struct label_chain));
      p->next = stack_block_stack->data.block.label_chain;
      stack_block_stack->data.block.label_chain = p;
    }
/* Declare that LABEL (a LABEL_DECL) may be used for nonlocal gotos
   from nested functions.  */
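/* In GNU C this arises from a nested function jumping to a label declared
   in its containing function, e.g. (an illustration only):

	void f ()
	{
	  __label__ failed;
	  void helper () { if (something_wrong ()) goto failed; }
	  ...
	failed: ...;
	}

   The front end calls declare_nonlocal_label for `failed' in f, and the
   `goto' inside helper is expanded by expand_goto as a nonlocal goto.  */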
declare_nonlocal_label (label)

  nonlocal_labels = tree_cons (NULL_TREE, label, nonlocal_labels);
  LABEL_PRESERVE_P (label_rtx (label)) = 1;
  if (nonlocal_goto_handler_slot == 0)
    {
      nonlocal_goto_handler_slot
	= assign_stack_local (Pmode, GET_MODE_SIZE (Pmode), 0);
      emit_stack_save (SAVE_NONLOCAL,
		       &nonlocal_goto_stack_level,
		       PREV_INSN (tail_recursion_reentry));
    }
/* Generate RTL code for a `goto' statement with target label LABEL.
   LABEL should be a LABEL_DECL tree node that was or will later be
   defined with `expand_label'.  */
  /* Check for a nonlocal goto to a containing function.  */
  tree context = decl_function_context (label);
  if (context != 0 && context != current_function_decl)
    {
      struct function *p = find_function_data (context);

      p->has_nonlocal_label = 1;

      /* Copy the rtl for the slots so that they won't be shared in
	 case the virtual stack vars register gets instantiated differently
	 in the parent than in the child.  */

#if HAVE_nonlocal_goto
      if (HAVE_nonlocal_goto)
	emit_insn (gen_nonlocal_goto (lookup_static_chain (label),
				      copy_rtx (p->nonlocal_goto_handler_slot),
				      copy_rtx (p->nonlocal_goto_stack_level),
				      gen_rtx (LABEL_REF, Pmode,
					       label_rtx (label))));

      /* Restore frame pointer for containing function.
	 This sets the actual hard register used for the frame pointer
	 to the location of the function's incoming static chain info.
	 The non-local goto handler will then adjust it to contain the
	 proper value and reload the argument pointer, if needed.  */
      emit_move_insn (frame_pointer_rtx, lookup_static_chain (label));

      /* We have now loaded the frame pointer hardware register with
	 the address that corresponds to the start of the virtual
	 stack vars.  So replace virtual_stack_vars_rtx in all
	 addresses we use with stack_pointer_rtx.  */

      /* Get addr of containing function's current nonlocal goto handler,
	 which will do any cleanups and then jump to the label.  */
      addr = copy_rtx (p->nonlocal_goto_handler_slot);
      temp = copy_to_reg (replace_rtx (addr, virtual_stack_vars_rtx,
				       frame_pointer_rtx));

      /* Restore the stack pointer.  Note this uses fp just restored.  */
      addr = p->nonlocal_goto_stack_level;

      addr = replace_rtx (copy_rtx (addr),
			  virtual_stack_vars_rtx, frame_pointer_rtx);

      emit_stack_restore (SAVE_NONLOCAL, addr, NULL_RTX);

      /* Put in the static chain register the nonlocal label address.  */
      emit_move_insn (static_chain_rtx,
		      gen_rtx (LABEL_REF, Pmode, label_rtx (label)));
      /* USE of frame_pointer_rtx added for consistency; not clear if
	 really needed.  */
      emit_insn (gen_rtx (USE, VOIDmode, frame_pointer_rtx));
      emit_insn (gen_rtx (USE, VOIDmode, stack_pointer_rtx));
      emit_insn (gen_rtx (USE, VOIDmode, static_chain_rtx));
      emit_indirect_jump (temp);
    }
  else
    expand_goto_internal (label, label_rtx (label), NULL_RTX);
/* Generate RTL code for a `goto' statement with target label BODY.
   LABEL should be a LABEL_REF.
   LAST_INSN, if non-0, is the rtx we should consider as the last
   insn emitted (for the purposes of cleaning up a return).  */
expand_goto_internal (body, label, last_insn)

  struct nesting *block;

  if (GET_CODE (label) != CODE_LABEL)
    abort ();

  /* If label has already been defined, we can tell now
     whether and how we must alter the stack level.  */

  if (PREV_INSN (label) != 0)
    {
      /* Find the innermost pending block that contains the label.
	 (Check containment by comparing insn-uids.)
	 Then restore the outermost stack level within that block,
	 and do cleanups of all blocks contained in it.  */
      for (block = block_stack; block; block = block->next)
	{
	  if (INSN_UID (block->data.block.first_insn) < INSN_UID (label))
	    break;
	  if (block->data.block.stack_level != 0)
	    stack_level = block->data.block.stack_level;
	  /* Execute the cleanups for blocks we are exiting.  */
	  if (block->data.block.cleanups != 0)
	    {
	      expand_cleanups (block->data.block.cleanups, NULL_TREE);
	      do_pending_stack_adjust ();
	    }
	}

      /* Ensure stack adjust isn't done by emit_jump, as this would clobber
	 the stack pointer.  This one should be deleted as dead by flow.  */
      clear_pending_stack_adjust ();
      do_pending_stack_adjust ();
      emit_stack_restore (SAVE_BLOCK, stack_level, NULL_RTX);

      if (body != 0 && DECL_TOO_LATE (body))
	error ("jump to `%s' invalidly jumps into binding contour",
	       IDENTIFIER_POINTER (DECL_NAME (body)));
    }
  /* Label not yet defined: may need to put this goto
     on the fixup list.  */
  else if (! expand_fixup (body, label, last_insn))
    /* No fixup needed.  Record that the label is the target
       of at least one goto that has no fixup.  */
    TREE_ADDRESSABLE (body) = 1;
/* Generate if necessary a fixup for a goto
   whose target label in tree structure (if any) is TREE_LABEL
   and whose target in rtl is RTL_LABEL.

   If LAST_INSN is nonzero, we pretend that the jump appears
   after insn LAST_INSN instead of at the current point in the insn stream.

   The fixup will be used later to insert insns at this point
   to restore the stack level as appropriate for the target label.

   Value is nonzero if a fixup is made.  */
expand_fixup (tree_label, rtl_label, last_insn)

  struct nesting *block, *end_block;

  /* See if we can recognize which block the label will be output in.
     This is possible in some very common cases.
     If we succeed, set END_BLOCK to that block.
     Otherwise, set it to 0.  */

  if (cond_stack
      && (rtl_label == cond_stack->data.cond.endif_label
	  || rtl_label == cond_stack->data.cond.next_label))
    end_block = cond_stack;
  /* If we are in a loop, recognize certain labels which
     are likely targets.  This reduces the number of fixups
     we need to create.  */
  else if (loop_stack
	   && (rtl_label == loop_stack->data.loop.start_label
	       || rtl_label == loop_stack->data.loop.end_label
	       || rtl_label == loop_stack->data.loop.continue_label))
    end_block = loop_stack;
  else
    end_block = 0;

  /* Now set END_BLOCK to the binding level to which we will return.  */

      struct nesting *next_block = end_block->all;

      /* First see if the END_BLOCK is inside the innermost binding level.
	 If so, then no cleanups or stack levels are relevant.  */
      while (next_block && next_block != block)
	next_block = next_block->all;

      /* Otherwise, set END_BLOCK to the innermost binding level
	 which is outside the relevant control-structure nesting.  */
      next_block = block_stack->next;
      for (block = block_stack; block != end_block; block = block->all)
	if (block == next_block)
	  next_block = next_block->next;
      end_block = next_block;

  /* Does any containing block have a stack level or cleanups?
     If not, no fixup is needed, and that is the normal case
     (the only case, for standard C).  */
  for (block = block_stack; block != end_block; block = block->next)
    if (block->data.block.stack_level != 0
	|| block->data.block.cleanups != 0)
      break;

  if (block != end_block)
    {
      /* Ok, a fixup is needed.  Add a fixup to the list of such.  */
      struct goto_fixup *fixup
	= (struct goto_fixup *) oballoc (sizeof (struct goto_fixup));
      /* In case an old stack level is restored, make sure that comes
	 after any pending stack adjust.  */
      /* ?? If the fixup isn't to come at the present position,
	 doing the stack adjust here isn't useful.  Doing it with our
	 settings at that location isn't useful either.  Let's hope
	 someone does it!  */
      do_pending_stack_adjust ();
      fixup->before_jump = last_insn ? last_insn : get_last_insn ();
      fixup->target = tree_label;
      fixup->target_rtl = rtl_label;
      fixup->block_start_count = block_start_count;
      fixup->stack_level = 0;
      fixup->cleanup_list_list
	= (((block->data.block.outer_cleanups
	     && block->data.block.outer_cleanups != empty_cleanup_list)
	    || block->data.block.cleanups)
	   ? tree_cons (NULL_TREE, block->data.block.cleanups,
			block->data.block.outer_cleanups)
	   : 0);
      fixup->next = goto_fixup_chain;
      goto_fixup_chain = fixup;
    }
/* When exiting a binding contour, process all pending gotos requiring fixups.
   THISBLOCK is the structure that describes the block being exited.
   STACK_LEVEL is the rtx for the stack level to restore exiting this contour.
   CLEANUP_LIST is a list of expressions to evaluate on exiting this contour.
   FIRST_INSN is the insn that began this contour.

   Gotos that jump out of this contour must restore the
   stack level and do the cleanups before actually jumping.

   DONT_JUMP_IN nonzero means report error there is a jump into this
   contour from before the beginning of the contour.
   This is also done if STACK_LEVEL is nonzero.  */
fixup_gotos (thisblock, stack_level, cleanup_list, first_insn, dont_jump_in)
     struct nesting *thisblock;

  register struct goto_fixup *f, *prev;

  /* F is the fixup we are considering; PREV is the previous one.  */
  /* We run this loop in two passes so that cleanups of exited blocks
     are run first, and blocks that are exited are marked so
     afterwards.  */

  for (prev = 0, f = goto_fixup_chain; f; prev = f, f = f->next)
    {
      /* Test for a fixup that is inactive because it is already handled.  */
      if (f->before_jump == 0)
	/* Delete inactive fixup from the chain, if that is easy to do.  */
	prev->next = f->next;

      /* Has this fixup's target label been defined?
	 If so, we can finalize it.  */
      else if (PREV_INSN (f->target_rtl) != 0)
	{
	  /* Get the first non-label after the label
	     this goto jumps to.  If that's before this scope begins,
	     we don't have a jump into the scope.  */
	  rtx after_label = f->target_rtl;
	  while (after_label != 0 && GET_CODE (after_label) == CODE_LABEL)
	    after_label = NEXT_INSN (after_label);

	  /* If this fixup jumped into this contour from before the beginning
	     of this contour, report an error.  */
	  /* ??? Bug: this does not detect jumping in through intermediate
	     blocks that have stack levels or cleanups.
	     It detects only a problem with the innermost block
	     around the label.  */
	  if ((dont_jump_in || stack_level || cleanup_list)
	      /* If AFTER_LABEL is 0, it means the jump goes to the end
		 of the rtl, which means it jumps into this scope.  */
	      && (after_label == 0
		  || INSN_UID (first_insn) < INSN_UID (after_label))
	      && INSN_UID (first_insn) > INSN_UID (f->before_jump)
	      && ! DECL_REGISTER (f->target))
	    {
	      error_with_decl (f->target,
			       "label `%s' used before containing binding contour");
	      /* Prevent multiple errors for one label.  */
	      DECL_REGISTER (f->target) = 1;
	    }

	  /* Execute cleanups for blocks this jump exits.  */
	  if (f->cleanup_list_list)
	    {
	      tree lists;
	      for (lists = f->cleanup_list_list; lists; lists = TREE_CHAIN (lists))
		/* Marked elements correspond to blocks that have been closed.
		   Do their cleanups.  */
		if (TREE_ADDRESSABLE (lists)
		    && TREE_VALUE (lists) != 0)
		  fixup_cleanups (TREE_VALUE (lists), &f->before_jump);
	    }

	  /* Restore stack level for the biggest contour that this
	     jump jumps out of.  */
	  emit_stack_restore (SAVE_BLOCK, f->stack_level, f->before_jump);
	}
    }

  /* Mark the cleanups of exited blocks so that they are executed
     by the code above.  */
  for (prev = 0, f = goto_fixup_chain; f; prev = f, f = f->next)
    if (f->before_jump != 0
	&& PREV_INSN (f->target_rtl) == 0
	/* Label has still not appeared.  If we are exiting a block with
	   a stack level to restore, that started before the fixup,
	   mark this stack level as needing restoration
	   when the fixup is later finalized.
	   Also mark the cleanup_list_list element for F
	   that corresponds to this block, so that ultimately
	   this block's cleanups will be executed by the code above.  */
	/* Note: if THISBLOCK == 0 and we have a label that hasn't appeared,
	   it means the label is undefined.  That's erroneous, but possible.  */
	&& (thisblock->data.block.block_start_count
	    <= f->block_start_count))
      {
	tree lists = f->cleanup_list_list;
	for (; lists; lists = TREE_CHAIN (lists))
	  /* If the following elt. corresponds to our containing block
	     then the elt. must be for this block.  */
	  if (TREE_CHAIN (lists) == thisblock->data.block.outer_cleanups)
	    TREE_ADDRESSABLE (lists) = 1;

	f->stack_level = stack_level;
      }
/* Generate RTL for an asm statement (explicit assembler code).
   BODY is a STRING_CST node containing the assembler code text,
   or an ADDR_EXPR containing a STRING_CST.  */

  if (TREE_CODE (body) == ADDR_EXPR)
    body = TREE_OPERAND (body, 0);

  emit_insn (gen_rtx (ASM_INPUT, VOIDmode,
		      TREE_STRING_POINTER (body)));
/* Generate RTL for an asm statement with arguments.
   STRING is the instruction template.
   OUTPUTS is a list of output arguments (lvalues); INPUTS a list of inputs.
   Each output or input has an expression in the TREE_VALUE and
   a constraint-string in the TREE_PURPOSE.
   CLOBBERS is a list of STRING_CST nodes each naming a hard register
   that is clobbered by this insn.

   Not all kinds of lvalue that may appear in OUTPUTS can be stored directly.
   Some elements of OUTPUTS may be replaced with trees representing temporary
   values.  The caller should copy those temporary values to the originally
   specified lvalues.

   VOL nonzero means the insn is volatile; don't optimize it.  */
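/* For example, the GNU C statement

	asm volatile ("addl %2,%0" : "=r" (sum) : "0" (sum), "g" (inc) : "cc");

   arrives here with STRING holding the template, OUTPUTS and INPUTS holding
   one TREE_LIST node per operand (constraint string in TREE_PURPOSE,
   expression in TREE_VALUE), CLOBBERS holding the node for "cc", and VOL
   nonzero because of `volatile'.  (Illustrative only; `sum' and `inc' are
   stand-in variables.)  */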
expand_asm_operands (string, outputs, inputs, clobbers, vol, filename, line)
     tree string, outputs, inputs, clobbers;

  rtvec argvec, constraints;

  int ninputs = list_length (inputs);
  int noutputs = list_length (outputs);

  /* Vector of RTX's of evaluated output operands.  */
  rtx *output_rtx = (rtx *) alloca (noutputs * sizeof (rtx));
  /* The insn we have emitted.  */

  /* Count the number of meaningful clobbered registers, ignoring what
     we would ignore later.  */

  for (tail = clobbers; tail; tail = TREE_CHAIN (tail))
    {
      char *regname = TREE_STRING_POINTER (TREE_VALUE (tail));
      i = decode_reg_name (regname);
      if (i >= 0 || i == -4)
    }

  for (i = 0, tail = outputs; tail; tail = TREE_CHAIN (tail), i++)
    {
      tree val = TREE_VALUE (tail);

      /* If there's an erroneous arg, emit no insn.  */
      if (TREE_TYPE (val) == error_mark_node)

      /* Make sure constraint has `=' and does not have `+'.  */

      for (j = 0; j < TREE_STRING_LENGTH (TREE_PURPOSE (tail)); j++)
	{
	  if (TREE_STRING_POINTER (TREE_PURPOSE (tail))[j] == '+')
	    error ("output operand constraint contains `+'");

	  if (TREE_STRING_POINTER (TREE_PURPOSE (tail))[j] == '=')
	}

      error ("output operand constraint lacks `='");

      /* If an output operand is not a variable or indirect ref,
	 create a SAVE_EXPR which is a pseudo-reg
	 to act as an intermediate temporary.
	 Make the asm insn write into that, then copy it to
	 the real output operand.  */

      while (TREE_CODE (val) == COMPONENT_REF
	     || TREE_CODE (val) == ARRAY_REF)
	val = TREE_OPERAND (val, 0);

      if (TREE_CODE (val) != VAR_DECL
	  && TREE_CODE (val) != PARM_DECL
	  && TREE_CODE (val) != INDIRECT_REF)
	TREE_VALUE (tail) = save_expr (TREE_VALUE (tail));

      output_rtx[i] = expand_expr (TREE_VALUE (tail), NULL_RTX, VOIDmode, 0);
    }

  if (ninputs + noutputs > MAX_RECOG_OPERANDS)
    error ("more than %d operands in `asm'", MAX_RECOG_OPERANDS);
  /* Make vectors for the expression-rtx and constraint strings.  */

  argvec = rtvec_alloc (ninputs);
  constraints = rtvec_alloc (ninputs);

  body = gen_rtx (ASM_OPERANDS, VOIDmode,
		  TREE_STRING_POINTER (string), "", 0, argvec, constraints,

  MEM_VOLATILE_P (body) = vol;

  /* Eval the inputs and put them into ARGVEC.
     Put their constraints into ASM_INPUTs and store in CONSTRAINTS.  */

  for (tail = inputs; tail; tail = TREE_CHAIN (tail))
    {
      /* If there's an erroneous arg, emit no insn,
	 because the ASM_INPUT would get VOIDmode
	 and that could cause a crash in reload.  */
      if (TREE_TYPE (TREE_VALUE (tail)) == error_mark_node)

      if (TREE_PURPOSE (tail) == NULL_TREE)
	error ("hard register `%s' listed as input operand to `asm'",
	       TREE_STRING_POINTER (TREE_VALUE (tail)));

      /* Make sure constraint has neither `=' nor `+'.  */

      for (j = 0; j < TREE_STRING_LENGTH (TREE_PURPOSE (tail)); j++)
	if (TREE_STRING_POINTER (TREE_PURPOSE (tail))[j] == '='
	    || TREE_STRING_POINTER (TREE_PURPOSE (tail))[j] == '+')
	  error ("input operand constraint contains `%c'",
		 TREE_STRING_POINTER (TREE_PURPOSE (tail))[j]);

      XVECEXP (body, 3, i)	/* argvec */
	= expand_expr (TREE_VALUE (tail), NULL_RTX, VOIDmode, 0);
      XVECEXP (body, 4, i)	/* constraints */
	= gen_rtx (ASM_INPUT, TYPE_MODE (TREE_TYPE (TREE_VALUE (tail))),
		   TREE_STRING_POINTER (TREE_PURPOSE (tail)));
    }
  /* Protect all the operands from the queue,
     now that they have all been evaluated.  */

  for (i = 0; i < ninputs; i++)
    XVECEXP (body, 3, i) = protect_from_queue (XVECEXP (body, 3, i), 0);

  for (i = 0; i < noutputs; i++)
    output_rtx[i] = protect_from_queue (output_rtx[i], 1);

  /* Now, for each output, construct an rtx
     (set OUTPUT (asm_operands INSN OUTPUTNUMBER OUTPUTCONSTRAINT
			       ARGVEC CONSTRAINTS))
     If there is more than one, put them inside a PARALLEL.  */

  if (noutputs == 1 && nclobbers == 0)
    {
      XSTR (body, 1) = TREE_STRING_POINTER (TREE_PURPOSE (outputs));
      insn = emit_insn (gen_rtx (SET, VOIDmode, output_rtx[0], body));
    }
  else if (noutputs == 0 && nclobbers == 0)
    {
      /* No output operands: put in a raw ASM_OPERANDS rtx.  */
      insn = emit_insn (body);
    }

      if (num == 0)
	num = 1;
      body = gen_rtx (PARALLEL, VOIDmode, rtvec_alloc (num + nclobbers));

      /* For each output operand, store a SET.  */

      for (i = 0, tail = outputs; tail; tail = TREE_CHAIN (tail), i++)
	{
	  XVECEXP (body, 0, i)
	    = gen_rtx (SET, VOIDmode,
		       gen_rtx (ASM_OPERANDS, VOIDmode,
				TREE_STRING_POINTER (string),
				TREE_STRING_POINTER (TREE_PURPOSE (tail)),
				i, argvec, constraints,

	  MEM_VOLATILE_P (SET_SRC (XVECEXP (body, 0, i))) = vol;
	}

      /* If there are no outputs (but there are some clobbers)
	 store the bare ASM_OPERANDS into the PARALLEL.  */

      XVECEXP (body, 0, i++) = obody;

      /* Store (clobber REG) for each clobbered register specified.  */

      for (tail = clobbers; tail; tail = TREE_CHAIN (tail))
	{
	  char *regname = TREE_STRING_POINTER (TREE_VALUE (tail));
	  int j = decode_reg_name (regname);

	  if (j == -3)		/* `cc', which is not a register */

	  if (j == -4)		/* `memory', don't cache memory across asm */
	    XVECEXP (body, 0, i++) = gen_rtx (CLOBBER, VOIDmode, const0_rtx);

	  error ("unknown register name `%s' in `asm'", regname);

	  /* Use QImode since that's guaranteed to clobber just one reg.  */
	  XVECEXP (body, 0, i++)
	    = gen_rtx (CLOBBER, VOIDmode, gen_rtx (REG, QImode, j));
	}

      insn = emit_insn (body);
/* Generate RTL to evaluate the expression EXP
   and remember it in case this is the VALUE in a ({... VALUE; }) constr.  */

expand_expr_stmt (exp)

  /* If -W, warn about statements with no side effects,
     except for an explicit cast to void (e.g. for assert()), and
     except inside a ({...}) where they may be useful.  */
  if (expr_stmts_for_value == 0 && exp != error_mark_node)
    {
      if (! TREE_SIDE_EFFECTS (exp) && (extra_warnings || warn_unused)
	  && !(TREE_CODE (exp) == CONVERT_EXPR
	       && TREE_TYPE (exp) == void_type_node))
	warning_with_file_and_line (emit_filename, emit_lineno,
				    "statement with no effect");
      else if (warn_unused)
	warn_if_unused_value (exp);
    }

  last_expr_type = TREE_TYPE (exp);
  if (! flag_syntax_only)
    last_expr_value = expand_expr (exp,
				   (expr_stmts_for_value
				    ? NULL_RTX : const0_rtx),

  /* If all we do is reference a volatile value in memory,
     copy it to a register to be sure it is actually touched.  */
  if (last_expr_value != 0 && GET_CODE (last_expr_value) == MEM
      && TREE_THIS_VOLATILE (exp))
    {
      if (TYPE_MODE (TREE_TYPE (exp)) != BLKmode)
	copy_to_reg (last_expr_value);

	  rtx lab = gen_label_rtx ();

	  /* Compare the value with itself to reference it.  */
	  emit_cmp_insn (last_expr_value, last_expr_value, EQ,
			 expand_expr (TYPE_SIZE (last_expr_type),
				      NULL_RTX, VOIDmode, 0),
			 TYPE_ALIGN (last_expr_type) / BITS_PER_UNIT);
	  emit_jump_insn ((*bcc_gen_fctn[(int) EQ]) (lab));
    }

  /* If this expression is part of a ({...}) and is in memory, we may have
     to preserve temporaries.  */
  preserve_temp_slots (last_expr_value);

  /* Free any temporaries used to evaluate this expression.  Any temporary
     used as a result of this expression will already have been preserved
     above.  */
/* Warn if EXP contains any computations whose results are not used.
   Return 1 if a warning is printed; 0 otherwise.  */

warn_if_unused_value (exp)

  if (TREE_USED (exp))
    return 0;

  switch (TREE_CODE (exp))
    {
    case PREINCREMENT_EXPR:
    case POSTINCREMENT_EXPR:
    case PREDECREMENT_EXPR:
    case POSTDECREMENT_EXPR:
    case METHOD_CALL_EXPR:
    case ANTI_WRAPPER_EXPR:
    case WITH_CLEANUP_EXPR:
      return 0;

      /* We don't warn about COND_EXPR because it may be a useful
	 construct if either arm contains a side effect.  */

      /* For a binding, warn if no side effect within it.  */
      return warn_if_unused_value (TREE_OPERAND (exp, 1));

    case TRUTH_ORIF_EXPR:
    case TRUTH_ANDIF_EXPR:
      /* In && or ||, warn if 2nd operand has no side effect.  */
      return warn_if_unused_value (TREE_OPERAND (exp, 1));

      if (warn_if_unused_value (TREE_OPERAND (exp, 0)))
	return 1;
      /* Let people do `(foo (), 0)' without a warning.  */
      if (TREE_CONSTANT (TREE_OPERAND (exp, 1)))
	return 0;
      return warn_if_unused_value (TREE_OPERAND (exp, 1));

    case NON_LVALUE_EXPR:
      /* Don't warn about values cast to void.  */
      if (TREE_TYPE (exp) == void_type_node)
	return 0;
      /* Don't warn about conversions not explicit in the user's program.  */
      if (TREE_NO_UNUSED_WARNING (exp))
	return 0;
      /* Assignment to a cast usually results in a cast of a modify.
	 Don't complain about that.  */
      if (TREE_CODE (TREE_OPERAND (exp, 0)) == MODIFY_EXPR)
	return 0;
      /* Sometimes it results in a cast of a cast of a modify.
	 Don't complain about that.  */
      if ((TREE_CODE (TREE_OPERAND (exp, 0)) == CONVERT_EXPR
	   || TREE_CODE (TREE_OPERAND (exp, 0)) == NOP_EXPR)
	  && TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)) == MODIFY_EXPR)
	return 0;

      /* Referencing a volatile value is a side effect, so don't warn.  */
      if ((TREE_CODE_CLASS (TREE_CODE (exp)) == 'd'
	   || TREE_CODE_CLASS (TREE_CODE (exp)) == 'r')
	  && TREE_THIS_VOLATILE (exp))
	return 0;
      warning_with_file_and_line (emit_filename, emit_lineno,
				  "value computed is not used");
      return 1;
    }
/* Clear out the memory of the last expression evaluated.  */

/* Begin a statement which will return a value.
   Return the RTL_EXPR for this statement expr.
   The caller must save that value and pass it to expand_end_stmt_expr.  */

expand_start_stmt_expr ()

  /* Make the RTL_EXPR node temporary, not momentary,
     so that rtl_expr_chain doesn't become garbage.  */
  int momentary = suspend_momentary ();
  tree t = make_node (RTL_EXPR);
  resume_momentary (momentary);

  expr_stmts_for_value++;
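/* For a GNU C statement expression such as `({ foo (); bar (); })' the
   caller therefore does roughly this (a sketch; STMT1 and STMT2 stand for
   the parsed statements):

	tree t = expand_start_stmt_expr ();
	expand_expr_stmt (stmt1);
	expand_expr_stmt (stmt2);	-- its value becomes last_expr_value
	t = expand_end_stmt_expr (t);	-- RTL_EXPR carrying type and value
   */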
/* Restore the previous state at the end of a statement that returns a value.
   Returns a tree node representing the statement's value and the
   insns to compute the value.

   The nodes of that expression have been freed by now, so we cannot use them.
   But we don't want to do that anyway; the expression has already been
   evaluated and now we just want to use the value.  So generate a RTL_EXPR
   with the proper type and RTL value.

   If the last substatement was not an expression,
   return something with type `void'.  */

expand_end_stmt_expr (t)

  if (last_expr_type == 0)
    {
      last_expr_type = void_type_node;
      last_expr_value = const0_rtx;
    }
  else if (last_expr_value == 0)
    /* There are some cases where this can happen, such as when the
       statement is void type.  */
    last_expr_value = const0_rtx;
  else if (GET_CODE (last_expr_value) != REG && ! CONSTANT_P (last_expr_value))
    /* Remove any possible QUEUED.  */
    last_expr_value = protect_from_queue (last_expr_value, 0);

  TREE_TYPE (t) = last_expr_type;
  RTL_EXPR_RTL (t) = last_expr_value;
  RTL_EXPR_SEQUENCE (t) = get_insns ();

  rtl_expr_chain = tree_cons (NULL_TREE, t, rtl_expr_chain);

  /* Don't consider deleting this expr or containing exprs at tree level.  */
  TREE_SIDE_EFFECTS (t) = 1;
  /* Propagate volatility of the actual RTL expr.  */
  TREE_THIS_VOLATILE (t) = volatile_refs_p (last_expr_value);

  expr_stmts_for_value--;
/* The exception handling nesting looks like this:

	{		<-- exception handler block
			<-- in an exception handler
	   :		<-- in a TRY block
	   :		<-- in an exception handler

	   :		<-- in an except block
	   :		<-- in an exception handler
   */
/* Return nonzero iff in a try block at level LEVEL.  */

in_try_block (level)

  struct nesting *n = except_stack;

  while (n && n->data.except_stmt.after_label != 0)

/* Return nonzero iff in an except block at level LEVEL.  */

in_except_block (level)

  struct nesting *n = except_stack;

  while (n && n->data.except_stmt.after_label == 0)

/* Return nonzero iff in an exception handler at level LEVEL.  */

in_exception_handler (level)

  struct nesting *n = except_stack;
  while (n && level--)

/* Record the fact that the current exception nesting raises
   exception EX.  If not in an exception handler, return 0.  */

  if (except_stack == 0)

  raises_ptr = &except_stack->data.except_stmt.raised;
  if (! value_member (ex, *raises_ptr))
    *raises_ptr = tree_cons (NULL_TREE, ex, *raises_ptr);
/* Generate RTL for the start of a try block.

   TRY_CLAUSE is the condition to test to enter the try block.  */

expand_start_try (try_clause, exitflag, escapeflag)

  struct nesting *thishandler = ALLOC_NESTING ();

  /* Make an entry on cond_stack for the cond we are entering.  */

  thishandler->next = except_stack;
  thishandler->all = nesting_stack;
  thishandler->depth = ++nesting_depth;
  thishandler->data.except_stmt.raised = 0;
  thishandler->data.except_stmt.handled = 0;
  thishandler->data.except_stmt.first_insn = get_insns ();
  thishandler->data.except_stmt.except_label = gen_label_rtx ();
  thishandler->data.except_stmt.unhandled_label = 0;
  thishandler->data.except_stmt.after_label = 0;
  thishandler->data.except_stmt.escape_label
    = escapeflag ? thishandler->data.except_stmt.except_label : 0;
  thishandler->exit_label = exitflag ? gen_label_rtx () : 0;
  except_stack = thishandler;
  nesting_stack = thishandler;

  do_jump (try_clause, thishandler->data.except_stmt.except_label, NULL_RTX);
/* End of a TRY block.  Nothing to do for now.  */

  except_stack->data.except_stmt.after_label = gen_label_rtx ();
  expand_goto_internal (NULL_TREE, except_stack->data.except_stmt.after_label,
			NULL_RTX);
/* Start an `except' nesting contour.
   EXITFLAG says whether this contour should be able to `exit' something.
   ESCAPEFLAG says whether this contour should be escapable.  */

expand_start_except (exitflag, escapeflag)

      /* An `exit' from catch clauses goes out to next exit level,
	 if there is one.  Otherwise, it just goes to the end
	 of the construct.  */
      for (n = except_stack->next; n; n = n->next)
	if (n->exit_label != 0)
	  except_stack->exit_label = n->exit_label;

      except_stack->exit_label = except_stack->data.except_stmt.after_label;

      /* An `escape' from catch clauses goes out to next escape level,
	 if there is one.  Otherwise, it just goes to the end
	 of the construct.  */
      for (n = except_stack->next; n; n = n->next)
	if (n->data.except_stmt.escape_label != 0)
	  except_stack->data.except_stmt.escape_label
	    = n->data.except_stmt.escape_label;

      except_stack->data.except_stmt.escape_label
	= except_stack->data.except_stmt.after_label;

  do_pending_stack_adjust ();
  emit_label (except_stack->data.except_stmt.except_label);
/* Generate code to `escape' from an exception contour.  This
   is like `exiting', but does not conflict with constructs which
   use `exit_label'.

   Return nonzero if this contour is escapable, otherwise
   return zero, and language-specific code will emit the
   appropriate error message.  */

expand_escape_except ()

  for (n = except_stack; n; n = n->next)
    if (n->data.except_stmt.escape_label != 0)
      expand_goto_internal (NULL_TREE,
			    n->data.except_stmt.escape_label, NULL_RTX);
/* Finish processing an `except' contour.
   Culls out all exceptions which might be raised but not
   handled, and returns the list to the caller.
   Language-specific code is responsible for dealing with these
   exceptions.  */

expand_end_except ()

  tree raised = NULL_TREE;

  do_pending_stack_adjust ();
  emit_label (except_stack->data.except_stmt.after_label);

  n = except_stack->next;

      /* Propagate exceptions raised but not handled to next
	 highest level.  */
      tree handled = except_stack->data.except_stmt.handled;
      if (handled != void_type_node)
	{
	  tree prev = NULL_TREE;
	  raised = except_stack->data.except_stmt.raised;

	  for (this_raise = raised, prev = 0; this_raise;
	       this_raise = TREE_CHAIN (this_raise))

	      if (value_member (TREE_VALUE (this_raise), handled))

		  TREE_CHAIN (prev) = TREE_CHAIN (this_raise);

		  raised = TREE_CHAIN (raised);
		  if (raised == NULL_TREE)

	  handled = TREE_CHAIN (handled);

	  if (prev == NULL_TREE)

	  TREE_CHAIN (prev) = n->data.except_stmt.raised;

	  n->data.except_stmt.raised = raised;
	}

  POPSTACK (except_stack);
/* Record that exception EX is caught by this exception handler.
   Return nonzero if in exception handling construct, otherwise return 0.  */

  if (except_stack == 0)
    return 0;
  raises_ptr = &except_stack->data.except_stmt.handled;
  if (*raises_ptr != void_type_node
      && ! value_member (ex, *raises_ptr))
    *raises_ptr = tree_cons (NULL_TREE, ex, *raises_ptr);
/* Record that this exception handler catches all exceptions.
   Return nonzero if in exception handling construct, otherwise return 0.  */

expand_catch_default ()

  if (except_stack == 0)
    return 0;
  except_stack->data.except_stmt.handled = void_type_node;

  if (except_stack == 0 || except_stack->data.except_stmt.after_label == 0)
    return 0;
  expand_goto_internal (NULL_TREE, except_stack->data.except_stmt.after_label,
			NULL_RTX);
/* Generate RTL for the start of an if-then.  COND is the expression
   whose truth should be tested.

   If EXITFLAG is nonzero, this conditional is visible to
   `exit_something'.  */

expand_start_cond (cond, exitflag)

  struct nesting *thiscond = ALLOC_NESTING ();

  /* Make an entry on cond_stack for the cond we are entering.  */

  thiscond->next = cond_stack;
  thiscond->all = nesting_stack;
  thiscond->depth = ++nesting_depth;
  thiscond->data.cond.next_label = gen_label_rtx ();
  /* Before we encounter an `else', we don't need a separate exit label
     unless there are supposed to be exit statements
     to exit this conditional.  */
  thiscond->exit_label = exitflag ? gen_label_rtx () : 0;
  thiscond->data.cond.endif_label = thiscond->exit_label;
  cond_stack = thiscond;
  nesting_stack = thiscond;

  do_jump (cond, thiscond->data.cond.next_label, NULL_RTX);
/* Generate RTL between then-clause and the elseif-clause
   of an if-then-elseif-....  */

expand_start_elseif (cond)

  if (cond_stack->data.cond.endif_label == 0)
    cond_stack->data.cond.endif_label = gen_label_rtx ();
  emit_jump (cond_stack->data.cond.endif_label);
  emit_label (cond_stack->data.cond.next_label);
  cond_stack->data.cond.next_label = gen_label_rtx ();
  do_jump (cond, cond_stack->data.cond.next_label, NULL_RTX);
/* Generate RTL between the then-clause and the else-clause
   of an if-then-else.  */

expand_start_else ()

  if (cond_stack->data.cond.endif_label == 0)
    cond_stack->data.cond.endif_label = gen_label_rtx ();
  emit_jump (cond_stack->data.cond.endif_label);
  emit_label (cond_stack->data.cond.next_label);
  cond_stack->data.cond.next_label = 0;	/* No more _else or _elseif calls.  */
/* Generate RTL for the end of an if-then.
   Pop the record for it off of cond_stack.  */

  struct nesting *thiscond = cond_stack;

  do_pending_stack_adjust ();
  if (thiscond->data.cond.next_label)
    emit_label (thiscond->data.cond.next_label);
  if (thiscond->data.cond.endif_label)
    emit_label (thiscond->data.cond.endif_label);

  POPSTACK (cond_stack);
/* Generate RTL for the start of a loop.  EXIT_FLAG is nonzero if this
   loop should be exited by `exit_something'.  This is a loop for which
   `expand_continue' will jump to the top of the loop.

   Make an entry on loop_stack to record the labels associated with
   this loop.  */

expand_start_loop (exit_flag)

  register struct nesting *thisloop = ALLOC_NESTING ();

  /* Make an entry on loop_stack for the loop we are entering.  */

  thisloop->next = loop_stack;
  thisloop->all = nesting_stack;
  thisloop->depth = ++nesting_depth;
  thisloop->data.loop.start_label = gen_label_rtx ();
  thisloop->data.loop.end_label = gen_label_rtx ();
  thisloop->data.loop.continue_label = thisloop->data.loop.start_label;
  thisloop->exit_label = exit_flag ? thisloop->data.loop.end_label : 0;
  loop_stack = thisloop;
  nesting_stack = thisloop;

  do_pending_stack_adjust ();

  emit_note (NULL_PTR, NOTE_INSN_LOOP_BEG);
  emit_label (thisloop->data.loop.start_label);
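/* A front end can therefore expand `while (cond) body;' along these lines
   (a sketch only; COND and BODY stand for the parsed trees):

	expand_start_loop (1);			-- note, start label
	expand_exit_loop_if_false (0, cond);	-- jump to end label if COND false
	...expand BODY...
	expand_end_loop ();			-- jump back, emit end label

   Passing 1 to expand_start_loop makes the loop visible to `exit_something';
   passing 0 as WHICHLOOP means the innermost pending loop.  */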
/* Like expand_start_loop but for a loop where the continuation point
   (for expand_continue_loop) will be specified explicitly.  */

expand_start_loop_continue_elsewhere (exit_flag)

  struct nesting *thisloop = expand_start_loop (exit_flag);
  loop_stack->data.loop.continue_label = gen_label_rtx ();

/* Specify the continuation point for a loop started with
   expand_start_loop_continue_elsewhere.
   Use this at the point in the code to which a continue statement
   should jump.  */

expand_loop_continue_here ()

  do_pending_stack_adjust ();
  emit_note (NULL_PTR, NOTE_INSN_LOOP_CONT);
  emit_label (loop_stack->data.loop.continue_label);
/* Finish a loop.  Generate a jump back to the top and the loop-exit label.
   Pop the block off of loop_stack.  */

  register rtx insn = get_last_insn ();
  register rtx start_label = loop_stack->data.loop.start_label;
  rtx last_test_insn = 0;

  /* Mark the continue-point at the top of the loop if none elsewhere.  */
  if (start_label == loop_stack->data.loop.continue_label)
    emit_note_before (NOTE_INSN_LOOP_CONT, start_label);

  do_pending_stack_adjust ();

  /* If optimizing, perhaps reorder the loop.  If the loop
     starts with a conditional exit, roll that to the end
     where it will optimize together with the jump back.

     We look for the last conditional branch to the exit that we encounter
     before hitting 30 insns or a CALL_INSN.  If we see an unconditional
     branch to the exit first, use it.

     We must also stop at NOTE_INSN_BLOCK_BEG and NOTE_INSN_BLOCK_END notes
     because moving them is not valid.  */

      ! (GET_CODE (insn) == JUMP_INSN
	 && GET_CODE (PATTERN (insn)) == SET
	 && SET_DEST (PATTERN (insn)) == pc_rtx
	 && GET_CODE (SET_SRC (PATTERN (insn))) == IF_THEN_ELSE))
    {
      /* Scan insns from the top of the loop looking for a qualified
	 conditional exit.  */
      for (insn = NEXT_INSN (loop_stack->data.loop.start_label); insn;
	   insn = NEXT_INSN (insn))
	{
	  if (GET_CODE (insn) == CALL_INSN || GET_CODE (insn) == CODE_LABEL)

	  if (GET_CODE (insn) == NOTE
	      && (NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_BEG
		  || NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_END))

	  if (GET_CODE (insn) == JUMP_INSN || GET_CODE (insn) == INSN)

	  if (last_test_insn && num_insns > 30)

	  if (GET_CODE (insn) == JUMP_INSN && GET_CODE (PATTERN (insn)) == SET
	      && SET_DEST (PATTERN (insn)) == pc_rtx
	      && GET_CODE (SET_SRC (PATTERN (insn))) == IF_THEN_ELSE
	      && ((GET_CODE (XEXP (SET_SRC (PATTERN (insn)), 1)) == LABEL_REF
		   && (XEXP (XEXP (SET_SRC (PATTERN (insn)), 1), 0)
		       == loop_stack->data.loop.end_label))
		  || (GET_CODE (XEXP (SET_SRC (PATTERN (insn)), 2)) == LABEL_REF
		      && (XEXP (XEXP (SET_SRC (PATTERN (insn)), 2), 0)
			  == loop_stack->data.loop.end_label))))
	    last_test_insn = insn;

	  if (last_test_insn == 0 && GET_CODE (insn) == JUMP_INSN
	      && GET_CODE (PATTERN (insn)) == SET
	      && SET_DEST (PATTERN (insn)) == pc_rtx
	      && GET_CODE (SET_SRC (PATTERN (insn))) == LABEL_REF
	      && (XEXP (SET_SRC (PATTERN (insn)), 0)
		  == loop_stack->data.loop.end_label))
	    /* Include BARRIER.  */
	    last_test_insn = NEXT_INSN (insn);
	}

      if (last_test_insn != 0 && last_test_insn != get_last_insn ())
	{
	  /* We found one.  Move everything from there up
	     to the end of the loop, and add a jump into the loop
	     to jump to there.  */
	  register rtx newstart_label = gen_label_rtx ();
	  register rtx start_move = start_label;

	  /* If the start label is preceded by a NOTE_INSN_LOOP_CONT note,
	     then we want to move this note also.  */
	  if (GET_CODE (PREV_INSN (start_move)) == NOTE
	      && (NOTE_LINE_NUMBER (PREV_INSN (start_move))
		  == NOTE_INSN_LOOP_CONT))
	    start_move = PREV_INSN (start_move);

	  emit_label_after (newstart_label, PREV_INSN (start_move));
	  reorder_insns (start_move, last_test_insn, get_last_insn ());
	  emit_jump_insn_after (gen_jump (start_label),
				PREV_INSN (newstart_label));
	  emit_barrier_after (PREV_INSN (newstart_label));
	  start_label = newstart_label;
	}
    }

  emit_jump (start_label);
  emit_note (NULL_PTR, NOTE_INSN_LOOP_END);
  emit_label (loop_stack->data.loop.end_label);

  POPSTACK (loop_stack);
/* Generate a jump to the current loop's continue-point.
   This is usually the top of the loop, but may be specified
   explicitly elsewhere.  If not currently inside a loop,
   return 0 and do nothing; caller will print an error message.  */

expand_continue_loop (whichloop)
     struct nesting *whichloop;

    whichloop = loop_stack;

  expand_goto_internal (NULL_TREE, whichloop->data.loop.continue_label,
			NULL_RTX);
/* Generate a jump to exit the current loop.  If not currently inside a loop,
   return 0 and do nothing; caller will print an error message.  */

expand_exit_loop (whichloop)
     struct nesting *whichloop;

  whichloop = loop_stack;

  expand_goto_internal (NULL_TREE, whichloop->data.loop.end_label, NULL_RTX);

/* Generate a conditional jump to exit the current loop if COND
   evaluates to zero.  If not currently inside a loop,
   return 0 and do nothing; caller will print an error message.  */

expand_exit_loop_if_false (whichloop, cond)
     struct nesting *whichloop;

  whichloop = loop_stack;

  do_jump (cond, whichloop->data.loop.end_label, NULL_RTX);
/* Return non-zero if we should preserve sub-expressions as separate
   pseudos.  We never do so if we aren't optimizing.  We always do so
   if -fexpensive-optimizations.

   Otherwise, we only do so if we are in the "early" part of a loop.  I.e.,
   the loop may still be a small one.  */

preserve_subexpressions_p ()

  if (flag_expensive_optimizations)
    return 1;

  if (optimize == 0 || loop_stack == 0)
    return 0;

  insn = get_last_insn_anywhere ();

  return (insn
	  && (INSN_UID (insn) - INSN_UID (loop_stack->data.loop.start_label)
	      < n_non_fixed_regs * 3));
/* Generate a jump to exit the current loop, conditional, binding contour
   or case statement.  Not all such constructs are visible to this function,
   only those started with EXIT_FLAG nonzero.  Individual languages use
   the EXIT_FLAG parameter to control which kinds of constructs you can
   exit this way.

   If not currently inside anything that can be exited,
   return 0 and do nothing; caller will print an error message.  */

expand_exit_something ()

  for (n = nesting_stack; n; n = n->all)
    if (n->exit_label != 0)
      expand_goto_internal (NULL_TREE, n->exit_label, NULL_RTX);
/* Generate RTL to return from the current function, with no value.
   (That is, we do not do anything about returning any value.)  */

expand_null_return ()

  struct nesting *block = block_stack;

  /* Does any pending block have cleanups?  */

  while (block && block->data.block.cleanups == 0)
    block = block->next;

  /* If yes, use a goto to return, since that runs cleanups.  */

  expand_null_return_1 (last_insn, block != 0);

/* Generate RTL to return from the current function, with value VAL.  */

expand_value_return (val)

  struct nesting *block = block_stack;
  rtx last_insn = get_last_insn ();
  rtx return_reg = DECL_RTL (DECL_RESULT (current_function_decl));

  /* Copy the value to the return location
     unless it's already there.  */

  if (return_reg != val)
    emit_move_insn (return_reg, val);
  if (GET_CODE (return_reg) == REG
      && REGNO (return_reg) < FIRST_PSEUDO_REGISTER)
    emit_insn (gen_rtx (USE, VOIDmode, return_reg));

  /* Does any pending block have cleanups?  */

  while (block && block->data.block.cleanups == 0)
    block = block->next;

  /* If yes, use a goto to return, since that runs cleanups.
     Use LAST_INSN to put cleanups *before* the move insn emitted above.  */

  expand_null_return_1 (last_insn, block != 0);
/* Output a return with no value.  If LAST_INSN is nonzero,
   pretend that the return takes place after LAST_INSN.
   If USE_GOTO is nonzero then don't use a return instruction;
   go to the return label instead.  This causes any cleanups
   of pending blocks to be executed normally.  */

expand_null_return_1 (last_insn, use_goto)

  rtx end_label = cleanup_label ? cleanup_label : return_label;

  clear_pending_stack_adjust ();
  do_pending_stack_adjust ();

  /* PCC-struct return always uses an epilogue.  */
  if (current_function_returns_pcc_struct || use_goto)
    {
      end_label = return_label = gen_label_rtx ();
      expand_goto_internal (NULL_TREE, end_label, last_insn);
    }

  /* Otherwise output a simple return-insn if one is available,
     unless it won't do the job.  */
  if (HAVE_return && use_goto == 0 && cleanup_label == 0)
    emit_jump_insn (gen_return ());

  /* Otherwise jump to the epilogue.  */
  expand_goto_internal (NULL_TREE, end_label, last_insn);
/* Generate RTL to evaluate the expression RETVAL and return it
   from the current function.  */

expand_return (retval)

  /* If there are any cleanups to be performed, then they will
     be inserted following LAST_INSN.  It is desirable
     that the last_insn, for such purposes, should be the
     last insn before computing the return value.  Otherwise, cleanups
     which call functions can clobber the return value.  */
  /* ??? rms: I think that is erroneous, because in C++ it would
     run destructors on variables that might be used in the subsequent
     computation of the return value.  */
  register rtx val = 0;
  struct nesting *block;
  /* If function wants no value, give it none.  */
  if (TREE_CODE (TREE_TYPE (TREE_TYPE (current_function_decl))) == VOID_TYPE)
    {
      expand_expr (retval, NULL_RTX, VOIDmode, 0);
      expand_null_return ();
      return;
    }

  /* Are any cleanups needed?  E.g. C++ destructors to be run?  */
  cleanups = any_pending_cleanups (1);

  if (TREE_CODE (retval) == RESULT_DECL)
    retval_rhs = retval;
  else if ((TREE_CODE (retval) == MODIFY_EXPR || TREE_CODE (retval) == INIT_EXPR)
	   && TREE_CODE (TREE_OPERAND (retval, 0)) == RESULT_DECL)
    retval_rhs = TREE_OPERAND (retval, 1);
  else if (TREE_TYPE (retval) == void_type_node)
    /* Recognize tail-recursive call to void function.  */
    retval_rhs = retval;
  else
    retval_rhs = NULL_TREE;

  /* Only use `last_insn' if there are cleanups which must be run.  */
  if (cleanups || cleanup_label != 0)
    last_insn = get_last_insn ();
  /* Distribute return down conditional expr if either of the sides
     may involve tail recursion (see test below).  This enhances the number
     of tail recursions we see.  Don't do this always since it can produce
     sub-optimal code in some cases and we distribute assignments into
     conditional expressions when it would help.  */
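  /* Illustrative sketch, not part of the original code: the distribution
     just described rewrites

	return cond ? f (x) : g (y);

     as if the source had been

	if (cond) return f (x); else return g (y);

     so each arm becomes a plain `return CALL_EXPR' and can be recognized
     by the tail-recursion test below.  */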
  if (optimize && retval_rhs != 0
      && frame_offset == 0
      && TREE_CODE (retval_rhs) == COND_EXPR
      && (TREE_CODE (TREE_OPERAND (retval_rhs, 1)) == CALL_EXPR
	  || TREE_CODE (TREE_OPERAND (retval_rhs, 2)) == CALL_EXPR))
    {
      rtx label = gen_label_rtx ();
      do_jump (TREE_OPERAND (retval_rhs, 0), label, NULL_RTX);
      expand_return (build (MODIFY_EXPR, TREE_TYPE (current_function_decl),
			    DECL_RESULT (current_function_decl),
			    TREE_OPERAND (retval_rhs, 1)));
      emit_label (label);
      expand_return (build (MODIFY_EXPR, TREE_TYPE (current_function_decl),
			    DECL_RESULT (current_function_decl),
			    TREE_OPERAND (retval_rhs, 2)));
      return;
    }
  /* For tail-recursive call to current function,
     just jump back to the beginning.
     It's unsafe if any auto variable in this function
     has its address taken; for simplicity,
     require stack frame to be empty.  */
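  /* Illustrative sketch, not part of the original code: given a self-call
     in tail position such as

	int gcd (int a, int b)
	{ if (b == 0) return a; return gcd (b, a % b); }

     the code below stores the new argument values into the formal parms
     (see tail_recursion_args) and jumps back to tail_recursion_label at
     the top of the function body instead of emitting a real call.  */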
  if (optimize && retval_rhs != 0
      && frame_offset == 0
      && TREE_CODE (retval_rhs) == CALL_EXPR
      && TREE_CODE (TREE_OPERAND (retval_rhs, 0)) == ADDR_EXPR
      && TREE_OPERAND (TREE_OPERAND (retval_rhs, 0), 0) == current_function_decl
      /* Finish checking validity, and if valid emit code
	 to set the argument variables for the new call.  */
      && tail_recursion_args (TREE_OPERAND (retval_rhs, 1),
			      DECL_ARGUMENTS (current_function_decl)))
    {
      if (tail_recursion_label == 0)
	{
	  tail_recursion_label = gen_label_rtx ();
	  emit_label_after (tail_recursion_label,
			    tail_recursion_reentry);
	}
      expand_goto_internal (NULL_TREE, tail_recursion_label, last_insn);
    }

  /* This optimization is safe if there are local cleanups
     because expand_null_return takes care of them.
     ??? I think it should also be safe when there is a cleanup label,
     because expand_null_return takes care of them, too.
     Any reason why not?  */
  if (HAVE_return && cleanup_label == 0
      && ! current_function_returns_pcc_struct)
    {
      /* If this is  return x == y;  then generate
	 if (x == y) return 1; else return 0;
	 if we can do it with explicit return insns.  */

      switch (TREE_CODE (retval_rhs))
	{
	case TRUTH_ANDIF_EXPR:
	case TRUTH_ORIF_EXPR:
	case TRUTH_AND_EXPR:
	case TRUTH_NOT_EXPR:
	  op0 = gen_label_rtx ();
	  jumpifnot (retval_rhs, op0);
	  expand_value_return (const1_rtx);
	  emit_label (op0);
	  expand_value_return (const0_rtx);
	}
    }
#endif /* HAVE_return */
      && TREE_TYPE (retval_rhs) != void_type_node
      && GET_CODE (DECL_RTL (DECL_RESULT (current_function_decl))) == REG)
    {
      /* Calculate the return value into a pseudo reg.  */
      val = expand_expr (retval_rhs, NULL_RTX, VOIDmode, 0);

      /* All temporaries have now been used.  */

      /* Return the calculated value, doing cleanups first.  */
      expand_value_return (val);
    }
  else
    {
      /* No cleanups or no hard reg used;
	 calculate value into hard return reg.  */
      expand_expr (retval, NULL_RTX, VOIDmode, 0);

      expand_value_return (DECL_RTL (DECL_RESULT (current_function_decl)));
    }
/* Return 1 if the end of the generated RTX is not a barrier.
   This means code already compiled can drop through.  */

drop_through_at_end_p ()
{
  rtx insn = get_last_insn ();
  while (insn && GET_CODE (insn) == NOTE)
    insn = PREV_INSN (insn);
  return insn && GET_CODE (insn) != BARRIER;
}
/* Emit code to alter this function's formal parms for a tail-recursive call.
   ACTUALS is a list of actual parameter expressions (chain of TREE_LISTs).
   FORMALS is the chain of decls of formals.
   Return 1 if this can be done;
   otherwise return 0 and do not emit any code.  */
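/* Illustrative sketch, not part of the original code: for a tail call
   such as  return f (b, a);  with formals (a, b), the actual `b' is
   computed into argvec[0] and `a' into argvec[1]; since argvec[1] mentions
   the register of formal `a', it is copied to a fresh pseudo before the
   stores  a = argvec[0]; b = argvec[1];  are emitted, so the swap comes
   out right.  */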
tail_recursion_args (actuals, formals)
     tree actuals, formals;

  register tree a = actuals, f = formals;
  register rtx *argvec;

  /* Check that number and types of actuals are compatible
     with the formals.  This is not always true in valid C code.
     Also check that no formal needs to be addressable
     and that all formals are scalars.  */

  /* Also count the args.  */

  for (a = actuals, f = formals, i = 0; a && f;
       a = TREE_CHAIN (a), f = TREE_CHAIN (f), i++)
    {
      if (TREE_TYPE (TREE_VALUE (a)) != TREE_TYPE (f))
	return 0;
      if (GET_CODE (DECL_RTL (f)) != REG || DECL_MODE (f) == BLKmode)
	return 0;
    }
  if (a != 0 || f != 0)
    return 0;

  /* Compute all the actuals.  */

  argvec = (rtx *) alloca (i * sizeof (rtx));

  for (a = actuals, i = 0; a; a = TREE_CHAIN (a), i++)
    argvec[i] = expand_expr (TREE_VALUE (a), NULL_RTX, VOIDmode, 0);

  /* Find which actual values refer to current values of previous formals.
     Copy each of them now, before any formal is changed.  */

  for (a = actuals, i = 0; a; a = TREE_CHAIN (a), i++)
    {
      for (f = formals, j = 0; j < i; f = TREE_CHAIN (f), j++)
	if (reg_mentioned_p (DECL_RTL (f), argvec[i]))
	  { copy = 1; break; }
      if (copy)
	argvec[i] = copy_to_reg (argvec[i]);
    }

  /* Store the values of the actuals into the formals.  */

  for (f = formals, a = actuals, i = 0; f;
       f = TREE_CHAIN (f), a = TREE_CHAIN (a), i++)
    {
      if (DECL_MODE (f) == GET_MODE (argvec[i]))
	emit_move_insn (DECL_RTL (f), argvec[i]);
      else
	convert_move (DECL_RTL (f), argvec[i],
		      TREE_UNSIGNED (TREE_TYPE (TREE_VALUE (a))));
    }
/* Generate the RTL code for entering a binding contour.
   The variables are declared one by one, by calls to `expand_decl'.

   EXIT_FLAG is nonzero if this construct should be visible to
   `exit_something'.  */
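/* Illustrative sketch, not part of the original code: for a source block
   such as

	{ int v[n]; ... }

   the parser calls expand_start_bindings, then expand_decl for `v' (which,
   since `v' has variable size, saves the stack level for the contour), and
   finally expand_end_bindings, which restores the stack level and runs any
   cleanups recorded for the contour.  */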
expand_start_bindings (exit_flag)

  struct nesting *thisblock = ALLOC_NESTING ();
  rtx note = emit_note (NULL_PTR, NOTE_INSN_BLOCK_BEG);

  /* Make an entry on block_stack for the block we are entering.  */

  thisblock->next = block_stack;
  thisblock->all = nesting_stack;
  thisblock->depth = ++nesting_depth;
  thisblock->data.block.stack_level = 0;
  thisblock->data.block.cleanups = 0;
  thisblock->data.block.function_call_count = 0;

  if (block_stack->data.block.cleanups == NULL_TREE
      && (block_stack->data.block.outer_cleanups == NULL_TREE
	  || block_stack->data.block.outer_cleanups == empty_cleanup_list))
    thisblock->data.block.outer_cleanups = empty_cleanup_list;

    thisblock->data.block.outer_cleanups
      = tree_cons (NULL_TREE, block_stack->data.block.cleanups,
		   block_stack->data.block.outer_cleanups);

    thisblock->data.block.outer_cleanups = 0;

      && !(block_stack->data.block.cleanups == NULL_TREE
	   && block_stack->data.block.outer_cleanups == NULL_TREE))
    thisblock->data.block.outer_cleanups
      = tree_cons (NULL_TREE, block_stack->data.block.cleanups,
		   block_stack->data.block.outer_cleanups);

    thisblock->data.block.outer_cleanups = 0;

  thisblock->data.block.label_chain = 0;
  thisblock->data.block.innermost_stack_block = stack_block_stack;
  thisblock->data.block.first_insn = note;
  thisblock->data.block.block_start_count = ++block_start_count;
  thisblock->exit_label = exit_flag ? gen_label_rtx () : 0;
  block_stack = thisblock;
  nesting_stack = thisblock;

  /* Make a new level for allocating stack slots.  */
/* Generate RTL code to terminate a binding contour.
   VARS is the chain of VAR_DECL nodes
   for the variables bound in this contour.
   MARK_ENDS is nonzero if we should put a note at the beginning
   and end of this binding contour.

   DONT_JUMP_IN is nonzero if it is not valid to jump into this contour.
   (That is true automatically if the contour has a saved stack level.)  */

expand_end_bindings (vars, mark_ends, dont_jump_in)

  register struct nesting *thisblock = block_stack;

  for (decl = vars; decl; decl = TREE_CHAIN (decl))
    if (! TREE_USED (decl) && TREE_CODE (decl) == VAR_DECL)
      warning_with_decl (decl, "unused variable `%s'");
  /* Mark the beginning and end of the scope if requested.  */

  if (mark_ends)
    emit_note (NULL_PTR, NOTE_INSN_BLOCK_END);
  else
    /* Get rid of the beginning-mark if we don't make an end-mark.  */
    NOTE_LINE_NUMBER (thisblock->data.block.first_insn) = NOTE_INSN_DELETED;

  if (thisblock->exit_label)
    {
      do_pending_stack_adjust ();
      emit_label (thisblock->exit_label);
    }
  /* If necessary, make a handler for nonlocal gotos taking
     place in the function calls in this block.  */
  if (function_call_count != thisblock->data.block.function_call_count
      /* Make handler for outermost block
	 if there were any nonlocal gotos to this function.  */
      && (thisblock->next == 0 ? current_function_has_nonlocal_label
	  /* Make handler for inner block if it has something
	     special to do when you jump out of it.  */
	  : (thisblock->data.block.cleanups != 0
	     || thisblock->data.block.stack_level != 0)))
    {
      rtx afterward = gen_label_rtx ();
      rtx handler_label = gen_label_rtx ();
      rtx save_receiver = gen_reg_rtx (Pmode);

      /* Don't let jump_optimize delete the handler.  */
      LABEL_PRESERVE_P (handler_label) = 1;
      /* Record the handler address in the stack slot for that purpose,
	 during this block, saving and restoring the outer value.  */
      if (thisblock->next != 0)
	{
	  emit_move_insn (nonlocal_goto_handler_slot, save_receiver);
	  emit_insn_before (gen_move_insn (save_receiver,
					   nonlocal_goto_handler_slot),
			    thisblock->data.block.first_insn);
	}
      emit_insn_before (gen_move_insn (nonlocal_goto_handler_slot,
				       gen_rtx (LABEL_REF, Pmode,
						handler_label)),
			thisblock->data.block.first_insn);

      /* Jump around the handler; it runs only when specially invoked.  */
      emit_jump (afterward);
      emit_label (handler_label);

#ifdef HAVE_nonlocal_goto
      if (! HAVE_nonlocal_goto)
#endif
	/* First adjust our frame pointer to its actual value.  It was
	   previously set to the start of the virtual area corresponding to
	   the stacked variables when we branched here and now needs to be
	   adjusted to the actual hardware fp value.

	   Assignments to virtual registers are converted by
	   instantiate_virtual_regs into the corresponding assignment
	   to the underlying register (fp in this case) that makes
	   the original assignment true.
	   So the following insn will actually be
	   decrementing fp by STARTING_FRAME_OFFSET.  */
	emit_move_insn (virtual_stack_vars_rtx, frame_pointer_rtx);
#if ARG_POINTER_REGNUM != FRAME_POINTER_REGNUM
      if (fixed_regs[ARG_POINTER_REGNUM])
	{
#ifdef ELIMINABLE_REGS
	  /* If the argument pointer can be eliminated in favor of the
	     frame pointer, we don't need to restore it.  We assume here
	     that if such an elimination is present, it can always be used.
	     This is the case on all known machines; if we don't make this
	     assumption, we do unnecessary saving on many machines.  */
	  static struct elims {int from, to;} elim_regs[] = ELIMINABLE_REGS;

	  for (i = 0; i < sizeof elim_regs / sizeof elim_regs[0]; i++)
	    if (elim_regs[i].from == ARG_POINTER_REGNUM
		&& elim_regs[i].to == FRAME_POINTER_REGNUM)
	      break;

	  if (i == sizeof elim_regs / sizeof elim_regs[0])
#endif
	    {
	      /* Now restore our arg pointer from the address at which it
		 was saved in our stack frame.
		 If there hasn't been space allocated for it yet, make
		 some now.  */
	      if (arg_pointer_save_area == 0)
		arg_pointer_save_area
		  = assign_stack_local (Pmode, GET_MODE_SIZE (Pmode), 0);
	      emit_move_insn (virtual_incoming_args_rtx,
			      /* We need a pseudo here, or else
				 instantiate_virtual_regs_1 complains.  */
			      copy_to_reg (arg_pointer_save_area));
	    }
	}
#endif
      /* The handler expects the desired label address in the static chain
	 register.  It tests the address and does an appropriate jump
	 to whatever label is desired.  */
      for (link = nonlocal_labels; link; link = TREE_CHAIN (link))
	/* Skip any labels we shouldn't be able to jump to from here.  */
	if (! DECL_TOO_LATE (TREE_VALUE (link)))
	  {
	    rtx not_this = gen_label_rtx ();
	    rtx this = gen_label_rtx ();
	    do_jump_if_equal (static_chain_rtx,
			      gen_rtx (LABEL_REF, Pmode, DECL_RTL (TREE_VALUE (link))),
			      this, 0);
	    emit_jump (not_this);
	    emit_label (this);
	    expand_goto (TREE_VALUE (link));
	    emit_label (not_this);
	  }

      /* If label is not recognized, abort.  */
      emit_library_call (gen_rtx (SYMBOL_REF, Pmode, "abort"), 0,
			 VOIDmode, 0);
      emit_label (afterward);
  /* Don't allow jumping into a block that has cleanups or a stack level.  */
  if (dont_jump_in
      || thisblock->data.block.stack_level != 0
      || thisblock->data.block.cleanups != 0)
    {
      struct label_chain *chain;

      /* Any labels in this block are no longer valid to go to.
	 Mark them to cause an error message.  */
      for (chain = thisblock->data.block.label_chain; chain; chain = chain->next)
	{
	  DECL_TOO_LATE (chain->label) = 1;
	  /* If any goto without a fixup came to this label,
	     that must be an error, because gotos without fixups
	     come from outside all saved stack-levels and all cleanups.  */
	  if (TREE_ADDRESSABLE (chain->label))
	    error_with_decl (chain->label,
			     "label `%s' used before containing binding contour");
	}
    }

  /* Restore stack level in effect before the block
     (only if variable-size objects allocated).  */
  /* Perform any cleanups associated with the block.  */

  if (thisblock->data.block.stack_level != 0
      || thisblock->data.block.cleanups != 0)
    {
      /* Don't let cleanups affect ({...}) constructs.  */
      int old_expr_stmts_for_value = expr_stmts_for_value;
      rtx old_last_expr_value = last_expr_value;
      tree old_last_expr_type = last_expr_type;
      expr_stmts_for_value = 0;

      /* Do the cleanups.  */
      expand_cleanups (thisblock->data.block.cleanups, NULL_TREE);
      do_pending_stack_adjust ();

      expr_stmts_for_value = old_expr_stmts_for_value;
      last_expr_value = old_last_expr_value;
      last_expr_type = old_last_expr_type;
      /* Restore the stack level.  */

      if (thisblock->data.block.stack_level != 0)
	{
	  emit_stack_restore (thisblock->next ? SAVE_BLOCK : SAVE_FUNCTION,
			      thisblock->data.block.stack_level, NULL_RTX);
	  if (nonlocal_goto_handler_slot != 0)
	    emit_stack_save (SAVE_NONLOCAL, &nonlocal_goto_stack_level,
			     NULL_RTX);
	}

      /* Any gotos out of this block must also do these things.
	 Also report any gotos with fixups that came to labels in this
	 block.  */
      fixup_gotos (thisblock,
		   thisblock->data.block.stack_level,
		   thisblock->data.block.cleanups,
		   thisblock->data.block.first_insn,
		   dont_jump_in);
    }
  /* If doing stupid register allocation, make sure lives of all
     register variables declared here extend thru end of scope.  */

  for (decl = vars; decl; decl = TREE_CHAIN (decl))
    {
      rtx rtl = DECL_RTL (decl);
      if (TREE_CODE (decl) == VAR_DECL && rtl != 0)
	use_variable (rtl);
    }

  /* Restore block_stack level for containing block.  */

  stack_block_stack = thisblock->data.block.innermost_stack_block;
  POPSTACK (block_stack);

  /* Pop the stack slot nesting and free any slots at this level.  */
/* Generate RTL for the automatic variable declaration DECL.
   (Other kinds of declarations are simply ignored if seen here.)
   CLEANUP is an expression to be executed at exit from this binding contour;
   for example, in C++, it might call the destructor for this variable.

   If CLEANUP contains any SAVE_EXPRs, then you must preevaluate them
   either before or after calling `expand_decl' but before compiling
   any subsequent expressions.  This is because CLEANUP may be expanded
   more than once, on different branches of execution.
   For the same reason, CLEANUP may not contain a CALL_EXPR
   except as its topmost node--else `preexpand_calls' would get confused.

   If CLEANUP is nonzero and DECL is zero, we record a cleanup
   that is not associated with any particular variable.

   There is no special support here for C++ constructors.
   They should be handled by the proper code in DECL_INITIAL.  */
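/* Illustrative sketch, not part of the original code: for a C++ block

	{ String s ("hi"); ... }

   the front end expands the declaration of `s' and records a CLEANUP tree
   that calls the String destructor; expand_end_bindings later expands that
   cleanup on every path that leaves the block, including gotos and
   returns.  */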
  struct nesting *thisblock = block_stack;
  tree type = TREE_TYPE (decl);

  /* Only automatic variables need any expansion done.
     Static and external variables, and external functions,
     will be handled by `assemble_variable' (called from finish_decl).
     TYPE_DECL and CONST_DECL require nothing.
     PARM_DECLs are handled in `assign_parms'.  */

  if (TREE_CODE (decl) != VAR_DECL)
    return;
  if (TREE_STATIC (decl) || DECL_EXTERNAL (decl))
    return;

  /* Create the RTL representation for the variable.  */

  if (type == error_mark_node)
    DECL_RTL (decl) = gen_rtx (MEM, BLKmode, const0_rtx);
  else if (DECL_SIZE (decl) == 0)
    /* Variable with incomplete type.  */
    {
      if (DECL_INITIAL (decl) == 0)
	/* Error message was already done; now avoid a crash.  */
	DECL_RTL (decl) = assign_stack_temp (DECL_MODE (decl), 0, 1);
      else
	/* An initializer is going to decide the size of this array.
	   Until we know the size, represent its address with a reg.  */
	DECL_RTL (decl) = gen_rtx (MEM, BLKmode, gen_reg_rtx (Pmode));
    }
  else if (DECL_MODE (decl) != BLKmode
	   /* If -ffloat-store, don't put explicit float vars
	      into regs.  */
	   && !(flag_float_store
		&& TREE_CODE (type) == REAL_TYPE)
	   && ! TREE_THIS_VOLATILE (decl)
	   && ! TREE_ADDRESSABLE (decl)
	   && (DECL_REGISTER (decl) || ! obey_regdecls))
    {
      /* Automatic variable that can go in a register.  */
      DECL_RTL (decl) = gen_reg_rtx (DECL_MODE (decl));
      if (TREE_CODE (type) == POINTER_TYPE)
	mark_reg_pointer (DECL_RTL (decl));
      REG_USERVAR_P (DECL_RTL (decl)) = 1;
    }
  else if (TREE_CODE (DECL_SIZE (decl)) == INTEGER_CST)
    {
      /* Variable of fixed size that goes on the stack.  */

      /* If we previously made RTL for this decl, it must be an array
	 whose size was determined by the initializer.
	 The old address was a register; set that register now
	 to the proper address.  */
      if (DECL_RTL (decl) != 0)
	{
	  if (GET_CODE (DECL_RTL (decl)) != MEM
	      || GET_CODE (XEXP (DECL_RTL (decl), 0)) != REG)
	    abort ();
	  oldaddr = XEXP (DECL_RTL (decl), 0);
	}

      DECL_RTL (decl)
	= assign_stack_temp (DECL_MODE (decl),
			     ((TREE_INT_CST_LOW (DECL_SIZE (decl))
			       + BITS_PER_UNIT - 1)
			      / BITS_PER_UNIT),
			     1);

      /* Set alignment we actually gave this decl.  */
      DECL_ALIGN (decl) = (DECL_MODE (decl) == BLKmode ? BIGGEST_ALIGNMENT
			   : GET_MODE_BITSIZE (DECL_MODE (decl)));

      addr = force_operand (XEXP (DECL_RTL (decl), 0), oldaddr);
      if (addr != oldaddr)
	emit_move_insn (oldaddr, addr);
      /* If this is a memory ref that contains aggregate components,
	 mark it as such for cse and loop optimize.  */
      MEM_IN_STRUCT_P (DECL_RTL (decl))
	= (TREE_CODE (TREE_TYPE (decl)) == ARRAY_TYPE
	   || TREE_CODE (TREE_TYPE (decl)) == RECORD_TYPE
	   || TREE_CODE (TREE_TYPE (decl)) == UNION_TYPE);

      /* If this is in memory because of -ffloat-store,
	 set the volatile bit, to prevent optimizations from
	 undoing the effects.  */
      if (flag_float_store && TREE_CODE (type) == REAL_TYPE)
	MEM_VOLATILE_P (DECL_RTL (decl)) = 1;
    }
  else
    /* Dynamic-size object: must push space on the stack.  */
    {
      /* Record the stack pointer on entry to block, if have
	 not already done so.  */
      if (thisblock->data.block.stack_level == 0)
	{
	  do_pending_stack_adjust ();
	  emit_stack_save (thisblock->next ? SAVE_BLOCK : SAVE_FUNCTION,
			   &thisblock->data.block.stack_level,
			   thisblock->data.block.first_insn);
	  stack_block_stack = thisblock;
	}
      /* Compute the variable's size, in bytes.  */
      size = expand_expr (size_binop (CEIL_DIV_EXPR,
				      DECL_SIZE (decl),
				      size_int (BITS_PER_UNIT)),
			  NULL_RTX, VOIDmode, 0);

      /* This is equivalent to calling alloca.  */
      current_function_calls_alloca = 1;

      /* Allocate space on the stack for the variable.  */
      address = allocate_dynamic_stack_space (size, NULL_RTX,

      if (nonlocal_goto_handler_slot != 0)
	emit_stack_save (SAVE_NONLOCAL, &nonlocal_goto_stack_level, NULL_RTX);

      /* Reference the variable indirect through that rtx.  */
      DECL_RTL (decl) = gen_rtx (MEM, DECL_MODE (decl), address);

      /* If this is a memory ref that contains aggregate components,
	 mark it as such for cse and loop optimize.  */
      MEM_IN_STRUCT_P (DECL_RTL (decl))
	= (TREE_CODE (TREE_TYPE (decl)) == ARRAY_TYPE
	   || TREE_CODE (TREE_TYPE (decl)) == RECORD_TYPE
	   || TREE_CODE (TREE_TYPE (decl)) == UNION_TYPE);
      /* Indicate the alignment we actually gave this variable.  */
#ifdef STACK_BOUNDARY
      DECL_ALIGN (decl) = STACK_BOUNDARY;
#else
      DECL_ALIGN (decl) = BIGGEST_ALIGNMENT;
#endif
    }

  if (TREE_THIS_VOLATILE (decl))
    MEM_VOLATILE_P (DECL_RTL (decl)) = 1;
  if (TREE_READONLY (decl))
    RTX_UNCHANGING_P (DECL_RTL (decl)) = 1;

  /* If doing stupid register allocation, make sure life of any
     register variable starts here, at the start of its scope.  */

  if (obey_regdecls)
    use_variable (DECL_RTL (decl));
/* Emit code to perform the initialization of a declaration DECL.  */

expand_decl_init (decl)

  int was_used = TREE_USED (decl);

  if (TREE_STATIC (decl))
    return;

  /* Compute and store the initial value now.  */

  if (DECL_INITIAL (decl) == error_mark_node)
    {
      enum tree_code code = TREE_CODE (TREE_TYPE (decl));
      if (code == INTEGER_TYPE || code == REAL_TYPE || code == ENUMERAL_TYPE
	  || code == POINTER_TYPE)
	expand_assignment (decl, convert (TREE_TYPE (decl), integer_zero_node),
			   0, 0);
    }
  else if (DECL_INITIAL (decl) && TREE_CODE (DECL_INITIAL (decl)) != TREE_LIST)
    {
      emit_line_note (DECL_SOURCE_FILE (decl), DECL_SOURCE_LINE (decl));
      expand_assignment (decl, DECL_INITIAL (decl), 0, 0);
    }

  /* Don't let the initialization count as "using" the variable.  */
  TREE_USED (decl) = was_used;

  /* Free any temporaries we made while initializing the decl.  */
/* CLEANUP is an expression to be executed at exit from this binding contour;
   for example, in C++, it might call the destructor for this variable.

   If CLEANUP contains any SAVE_EXPRs, then you must preevaluate them
   either before or after calling `expand_decl' but before compiling
   any subsequent expressions.  This is because CLEANUP may be expanded
   more than once, on different branches of execution.
   For the same reason, CLEANUP may not contain a CALL_EXPR
   except as its topmost node--else `preexpand_calls' would get confused.

   If CLEANUP is nonzero and DECL is zero, we record a cleanup
   that is not associated with any particular variable.  */

expand_decl_cleanup (decl, cleanup)

  struct nesting *thisblock = block_stack;

  /* Error if we are not in any block.  */

  /* Record the cleanup if there is one.  */

      thisblock->data.block.cleanups
	= temp_tree_cons (decl, cleanup, thisblock->data.block.cleanups);
      /* If this block has a cleanup, it belongs in stack_block_stack.  */
      stack_block_stack = thisblock;
/* DECL is an anonymous union.  CLEANUP is a cleanup for DECL.
   DECL_ELTS is the list of elements that belong to DECL's type.
   In each, the TREE_VALUE is a VAR_DECL, and the TREE_PURPOSE a cleanup.  */
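/* Illustrative sketch, not part of the original code: for an anonymous
   union such as

	union { int i; float f; };

   DECL is the union object itself and DECL_ELTS lists `i' and `f'; each
   element is given RTL that aliases the union's storage (the same MEM with
   a different mode, or a SUBREG of the union's register), as the code below
   shows.  */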
expand_anon_union_decl (decl, cleanup, decl_elts)
     tree decl, cleanup, decl_elts;

  struct nesting *thisblock = block_stack;
  rtx x;

  expand_decl (decl, cleanup);
  x = DECL_RTL (decl);

  while (decl_elts)
    {
      tree decl_elt = TREE_VALUE (decl_elts);
      tree cleanup_elt = TREE_PURPOSE (decl_elts);
      enum machine_mode mode = TYPE_MODE (TREE_TYPE (decl_elt));

      /* (SUBREG (MEM ...)) at RTL generation time is invalid, so we
	 instead create a new MEM rtx with the proper mode.  */
      if (GET_CODE (x) == MEM)
	{
	  if (mode == GET_MODE (x))
	    DECL_RTL (decl_elt) = x;
	  else
	    {
	      DECL_RTL (decl_elt) = gen_rtx (MEM, mode, copy_rtx (XEXP (x, 0)));
	      MEM_IN_STRUCT_P (DECL_RTL (decl_elt)) = MEM_IN_STRUCT_P (x);
	      RTX_UNCHANGING_P (DECL_RTL (decl_elt)) = RTX_UNCHANGING_P (x);
	    }
	}
      else if (GET_CODE (x) == REG)
	{
	  if (mode == GET_MODE (x))
	    DECL_RTL (decl_elt) = x;
	  else
	    DECL_RTL (decl_elt) = gen_rtx (SUBREG, mode, x, 0);
	}

      /* Record the cleanup if there is one.  */

	thisblock->data.block.cleanups
	  = temp_tree_cons (decl_elt, cleanup_elt,
			    thisblock->data.block.cleanups);

      decl_elts = TREE_CHAIN (decl_elts);
    }
/* Expand a list of cleanups LIST.
   Elements may be expressions or may be nested lists.

   If DONT_DO is nonnull, then any list-element
   whose TREE_PURPOSE matches DONT_DO is omitted.
   This is sometimes used to avoid a cleanup associated with
   a value that is being returned out of the scope.  */

expand_cleanups (list, dont_do)

  for (tail = list; tail; tail = TREE_CHAIN (tail))
    if (dont_do == 0 || TREE_PURPOSE (tail) != dont_do)
      {
	if (TREE_CODE (TREE_VALUE (tail)) == TREE_LIST)
	  expand_cleanups (TREE_VALUE (tail), dont_do);
	else
	  {
	    /* Cleanups may be run multiple times.  For example,
	       when exiting a binding contour, we expand the
	       cleanups associated with that contour.  When a goto
	       within that binding contour has a target outside that
	       contour, it will expand all cleanups from its scope to
	       the target.  Though the cleanups are expanded multiple
	       times, the control paths are non-overlapping so the
	       cleanups will not be executed twice.  */
	    expand_expr (TREE_VALUE (tail), const0_rtx, VOIDmode, 0);
	  }
      }
/* Expand a list of cleanups for a goto fixup.
   The expansion is put into the insn chain after the insn *BEFORE_JUMP
   and *BEFORE_JUMP is set to the insn that now comes before the jump.  */

fixup_cleanups (list, before_jump)

  rtx beyond_jump = get_last_insn ();
  rtx new_before_jump;

  expand_cleanups (list, NULL_TREE);
  /* Pop any pushes done in the cleanups,
     in case function is about to return.  */
  do_pending_stack_adjust ();

  new_before_jump = get_last_insn ();

  if (beyond_jump != new_before_jump)
    {
      /* If cleanups expand to nothing, don't reorder.  */
      reorder_insns (NEXT_INSN (beyond_jump), new_before_jump, *before_jump);
      *before_jump = new_before_jump;
    }
/* Move all cleanups from the current block_stack
   to the containing block_stack, where they are assumed to
   have been created.  If anything can cause a temporary to
   be created, but not expanded for more than one level of
   block_stacks, then this code will have to change.  */

  struct nesting *block = block_stack;
  struct nesting *outer = block->next;

  outer->data.block.cleanups
    = chainon (block->data.block.cleanups,
	       outer->data.block.cleanups);
  block->data.block.cleanups = 0;
last_cleanup_this_contour ()

  if (block_stack == 0)
    return 0;

  return block_stack->data.block.cleanups;

/* Return 1 if there are any pending cleanups at this point.
   If THIS_CONTOUR is nonzero, check the current contour as well.
   Otherwise, look only at the contours that enclose this one.  */

any_pending_cleanups (this_contour)

  struct nesting *block;

  if (block_stack == 0)
    return 0;

  if (this_contour && block_stack->data.block.cleanups != NULL)
    return 1;

  if (block_stack->data.block.cleanups == 0
      && (block_stack->data.block.outer_cleanups == 0
	  || block_stack->data.block.outer_cleanups == empty_cleanup_list))
    return 0;

  for (block = block_stack->next; block; block = block->next)
    if (block->data.block.cleanups != 0)
      return 1;

  return 0;
/* Enter a case (Pascal) or switch (C) statement.
   Push a block onto case_stack and nesting_stack
   to accumulate the case-labels that are seen
   and to record the labels generated for the statement.

   EXIT_FLAG is nonzero if `exit_something' should exit this case stmt.
   Otherwise, this construct is transparent for `exit_something'.

   EXPR is the index-expression to be dispatched on.
   TYPE is its nominal type.  We could simply convert EXPR to this type,
   but instead we take short cuts.  */
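/* Illustrative sketch, not part of the original code (argument details are
   approximate; see the individual functions): for a C statement

	switch (x) { case 1: ...; case 4: ...; default: ...; }

   the parser roughly performs

	expand_start_case (1, x, type, "switch statement");
	pushcase (build_int_2 (1, 0), label1, &dup);
	pushcase (build_int_2 (4, 0), label4, &dup);
	pushcase (NULL_TREE, deflabel, &dup);	(a null VALUE is the default)
	... expand the statements of the switch body ...
	expand_end_case (x);  */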
expand_start_case (exit_flag, expr, type, printname)

  register struct nesting *thiscase = ALLOC_NESTING ();

  /* Make an entry on case_stack for the case we are entering.  */

  thiscase->next = case_stack;
  thiscase->all = nesting_stack;
  thiscase->depth = ++nesting_depth;
  thiscase->exit_label = exit_flag ? gen_label_rtx () : 0;
  thiscase->data.case_stmt.case_list = 0;
  thiscase->data.case_stmt.index_expr = expr;
  thiscase->data.case_stmt.nominal_type = type;
  thiscase->data.case_stmt.default_label = 0;
  thiscase->data.case_stmt.num_ranges = 0;
  thiscase->data.case_stmt.printname = printname;
  thiscase->data.case_stmt.seenlabel = 0;
  case_stack = thiscase;
  nesting_stack = thiscase;

  do_pending_stack_adjust ();

  /* Make sure case_stmt.start points to something that won't
     need any transformation before expand_end_case.  */
  if (GET_CODE (get_last_insn ()) != NOTE)
    emit_note (NULL_PTR, NOTE_INSN_DELETED);

  thiscase->data.case_stmt.start = get_last_insn ();
/* Start a "dummy case statement" within which case labels are invalid
   and are not connected to any larger real case statement.
   This can be used if you don't want to let a case statement jump
   into the middle of certain kinds of constructs.  */

expand_start_case_dummy ()

  register struct nesting *thiscase = ALLOC_NESTING ();

  /* Make an entry on case_stack for the dummy.  */

  thiscase->next = case_stack;
  thiscase->all = nesting_stack;
  thiscase->depth = ++nesting_depth;
  thiscase->exit_label = 0;
  thiscase->data.case_stmt.case_list = 0;
  thiscase->data.case_stmt.start = 0;
  thiscase->data.case_stmt.nominal_type = 0;
  thiscase->data.case_stmt.default_label = 0;
  thiscase->data.case_stmt.num_ranges = 0;
  case_stack = thiscase;
  nesting_stack = thiscase;

/* End a dummy case statement.  */

expand_end_case_dummy ()

  POPSTACK (case_stack);

/* Return the data type of the index-expression
   of the innermost case statement, or null if none.  */

case_index_expr_type ()

    return TREE_TYPE (case_stack->data.case_stmt.index_expr);
/* Accumulate one case or default label inside a case or switch statement.
   VALUE is the value of the case (a null pointer, for a default label).

   If not currently inside a case or switch statement, return 1 and do
   nothing.  The caller will print a language-specific error message.
   If VALUE is a duplicate or overlaps, return 2 and do nothing
   except store the (first) duplicate node in *DUPLICATE.
   If VALUE is out of range, return 3 and do nothing.
   If we are jumping into the scope of a cleanup or var-sized array, return 5.
   Return 0 on success.

   Extended to handle range statements.  */
pushcase (value, label, duplicate)
     register tree value;
     register tree label;

  register struct case_node **l;
  register struct case_node *n;

  /* Fail if not inside a real case statement.  */
  if (! (case_stack && case_stack->data.case_stmt.start))
    return 1;

  if (stack_block_stack
      && stack_block_stack->depth > case_stack->depth)
    return 5;

  index_type = TREE_TYPE (case_stack->data.case_stmt.index_expr);
  nominal_type = case_stack->data.case_stmt.nominal_type;

  /* If the index is erroneous, avoid more problems: pretend to succeed.  */
  if (index_type == error_mark_node)
    return 0;

  /* Convert VALUE to the type in which the comparisons are nominally done.  */
  value = convert (nominal_type, value);
  /* If this is the first label, warn if any insns have been emitted.  */
  if (case_stack->data.case_stmt.seenlabel == 0)
    {
      for (insn = case_stack->data.case_stmt.start;
	   insn;
	   insn = NEXT_INSN (insn))
	{
	  if (GET_CODE (insn) == CODE_LABEL)
	    break;
	  if (GET_CODE (insn) != NOTE
	      && (GET_CODE (insn) != INSN || GET_CODE (PATTERN (insn)) != USE))
	    {
	      warning ("unreachable code at beginning of %s",
		       case_stack->data.case_stmt.printname);
	      break;
	    }
	}
    }
  case_stack->data.case_stmt.seenlabel = 1;
  /* Fail if this value is out of range for the actual type of the index
     (which may be narrower than NOMINAL_TYPE).  */
  if (value != 0 && ! int_fits_type_p (value, index_type))
    return 3;

  /* Fail if this is a duplicate or overlaps another entry.  */

      if (case_stack->data.case_stmt.default_label != 0)
	{
	  *duplicate = case_stack->data.case_stmt.default_label;
	  return 2;
	}
      case_stack->data.case_stmt.default_label = label;

  /* Find the elt in the chain before which to insert the new value,
     to keep the chain sorted in increasing order.
     But report an error if this element is a duplicate.  */
  for (l = &case_stack->data.case_stmt.case_list;
       /* Keep going past elements distinctly less than VALUE.  */
       *l != 0 && tree_int_cst_lt ((*l)->high, value);

  /* Element we will insert before must be distinctly greater;
     overlap means error.  */
  if (! tree_int_cst_lt (value, (*l)->low))
    {
      *duplicate = (*l)->code_label;
      return 2;
    }

  /* Add this label to the chain, and succeed.
     Copy VALUE so it is on temporary rather than momentary
     obstack and will thus survive till the end of the case statement.  */
  n = (struct case_node *) oballoc (sizeof (struct case_node));
  n->high = n->low = copy_node (value);
  n->code_label = label;

  expand_label (label);
  return 0;
/* Like pushcase but this case applies to all values
   between VALUE1 and VALUE2 (inclusive).
   The return value is the same as that of pushcase
   but there is one additional error code:
   4 means the specified range was empty.  */
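/* Illustrative sketch, not part of the original code: the GNU C case-range
   extension

	case 1 ... 5:

   reaches this function as pushcase_range with VALUE1 = 1 and VALUE2 = 5;
   a null VALUE1 or VALUE2 stands for the minimum or maximum of the index
   type, as the conversions below show.  */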
pushcase_range (value1, value2, label, duplicate)
     register tree value1, value2;
     register tree label;

  register struct case_node **l;
  register struct case_node *n;

  /* Fail if not inside a real case statement.  */
  if (! (case_stack && case_stack->data.case_stmt.start))
    return 1;

  if (stack_block_stack
      && stack_block_stack->depth > case_stack->depth)
    return 5;

  index_type = TREE_TYPE (case_stack->data.case_stmt.index_expr);
  nominal_type = case_stack->data.case_stmt.nominal_type;

  /* If the index is erroneous, avoid more problems: pretend to succeed.  */
  if (index_type == error_mark_node)
    return 0;

  /* If this is the first label, warn if any insns have been emitted.  */
  if (case_stack->data.case_stmt.seenlabel == 0)
    {
      for (insn = case_stack->data.case_stmt.start;
	   insn;
	   insn = NEXT_INSN (insn))
	{
	  if (GET_CODE (insn) == CODE_LABEL)
	    break;
	  if (GET_CODE (insn) != NOTE
	      && (GET_CODE (insn) != INSN || GET_CODE (PATTERN (insn)) != USE))
	    {
	      warning ("unreachable code at beginning of %s",
		       case_stack->data.case_stmt.printname);
	      break;
	    }
	}
    }
  case_stack->data.case_stmt.seenlabel = 1;
  /* Convert VALUEs to type in which the comparisons are nominally done.  */
  if (value1 == 0)  /* Negative infinity.  */
    value1 = TYPE_MIN_VALUE (index_type);
  value1 = convert (nominal_type, value1);

  if (value2 == 0)  /* Positive infinity.  */
    value2 = TYPE_MAX_VALUE (index_type);
  value2 = convert (nominal_type, value2);

  /* Fail if these values are out of range.  */
  if (! int_fits_type_p (value1, index_type))
    return 3;

  if (! int_fits_type_p (value2, index_type))
    return 3;

  /* Fail if the range is empty.  */
  if (tree_int_cst_lt (value2, value1))
    return 4;

  /* If the bounds are equal, turn this into the one-value case.  */
  if (tree_int_cst_equal (value1, value2))
    return pushcase (value1, label, duplicate);

  /* Find the elt in the chain before which to insert the new value,
     to keep the chain sorted in increasing order.
     But report an error if this element is a duplicate.  */
  for (l = &case_stack->data.case_stmt.case_list;
       /* Keep going past elements distinctly less than this range.  */
       *l != 0 && tree_int_cst_lt ((*l)->high, value1);

  /* Element we will insert before must be distinctly greater;
     overlap means error.  */
  if (! tree_int_cst_lt (value2, (*l)->low))
    {
      *duplicate = (*l)->code_label;
      return 2;
    }

  /* Add this label to the chain, and succeed.
     Copy VALUE1, VALUE2 so they are on temporary rather than momentary
     obstack and will thus survive till the end of the case statement.  */

  n = (struct case_node *) oballoc (sizeof (struct case_node));
  n->low = copy_node (value1);
  n->high = copy_node (value2);
  n->code_label = label;

  expand_label (label);

  case_stack->data.case_stmt.num_ranges++;

  return 0;
/* Called when the index of a switch statement is an enumerated type
   and there is no default label.

   Checks that all enumeration literals are covered by the case
   expressions of a switch.  Also, warn if there are any extra
   switch cases that are *not* elements of the enumerated type.

   If all enumeration literals were covered by the case expressions,
   turn one of the expressions into the default expression since it should
   not be possible to fall through such a switch.  */
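/* Illustrative sketch, not part of the original code: given

	enum color { RED, GREEN, BLUE } c;
	switch (c) { case RED: ...; case GREEN: ...; }

   this function warns "enumeration value `BLUE' not handled in switch";
   conversely a `case 42:' here would draw "case value `42' not in
   enumerated type `color'".  */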
check_for_full_enumeration_handling (type)

  register struct case_node *n;
  register struct case_node **l;
  register tree chain;

  /* The time complexity of this loop is currently O(N * M), with
     N being the number of enumerals in the enumerated type, and
     M being the number of case expressions in the switch.  */

  for (chain = TYPE_VALUES (type);
       chain;
       chain = TREE_CHAIN (chain))
    {
      /* Find a match between enumeral and case expression, if possible.
	 Quit looking when we've gone too far (since case expressions
	 are kept sorted in ascending order).  Warn about enumerals not
	 handled in the switch statement case expression list.  */

      for (n = case_stack->data.case_stmt.case_list;
	   n && tree_int_cst_lt (n->high, TREE_VALUE (chain));
	   n = n->right)
	;

      if (!n || tree_int_cst_lt (TREE_VALUE (chain), n->low))
	warning ("enumeration value `%s' not handled in switch",
		 IDENTIFIER_POINTER (TREE_PURPOSE (chain)));
    }
  /* Now we go the other way around; we warn if there are case
     expressions that don't correspond to enumerals.  This can
     occur since C and C++ don't enforce type-checking of
     assignments to enumeration variables.  */

  for (n = case_stack->data.case_stmt.case_list; n; n = n->right)
    {
      for (chain = TYPE_VALUES (type);
	   chain && !tree_int_cst_equal (n->low, TREE_VALUE (chain));
	   chain = TREE_CHAIN (chain))
	;

      if (!chain)
	warning ("case value `%d' not in enumerated type `%s'",
		 TREE_INT_CST_LOW (n->low),
		 IDENTIFIER_POINTER ((TREE_CODE (TYPE_NAME (type))
				      == IDENTIFIER_NODE)
				     ? TYPE_NAME (type)
				     : DECL_NAME (TYPE_NAME (type))));

      if (!tree_int_cst_equal (n->low, n->high))
	{
	  for (chain = TYPE_VALUES (type);
	       chain && !tree_int_cst_equal (n->high, TREE_VALUE (chain));
	       chain = TREE_CHAIN (chain))
	    ;

	  if (!chain)
	    warning ("case value `%d' not in enumerated type `%s'",
		     TREE_INT_CST_LOW (n->high),
		     IDENTIFIER_POINTER ((TREE_CODE (TYPE_NAME (type))
					  == IDENTIFIER_NODE)
					 ? TYPE_NAME (type)
					 : DECL_NAME (TYPE_NAME (type))));
	}
    }

  /* If all values were found as case labels, make one of them the default
     label.  Thus, this switch will never fall through.  We arbitrarily pick
     the last one to make the default since this is likely the most
     efficient choice.  */

    {
      for (l = &case_stack->data.case_stmt.case_list;
	   (*l)->right != 0;
	   l = &(*l)->right)
	;

      case_stack->data.case_stmt.default_label = (*l)->code_label;
    }
/* Terminate a case (Pascal) or switch (C) statement
   in which ORIG_INDEX is the expression to be tested.
   Generate the code to test it and jump to the right place.  */

expand_end_case (orig_index)

  tree minval, maxval, range;
  rtx default_label = 0;
  register struct case_node *n;
  rtx table_label = gen_label_rtx ();
  register struct nesting *thiscase = case_stack;
  tree index_expr = thiscase->data.case_stmt.index_expr;
  int unsignedp = TREE_UNSIGNED (TREE_TYPE (index_expr));

  do_pending_stack_adjust ();

  /* An ERROR_MARK occurs for various reasons including invalid data type.  */
  if (TREE_TYPE (index_expr) != error_mark_node)
    {
      /* If switch expression was an enumerated type, check that all
	 enumeration literals are covered by the cases.
	 No sense trying this if there's a default case, however.  */

      if (!thiscase->data.case_stmt.default_label
	  && TREE_CODE (TREE_TYPE (orig_index)) == ENUMERAL_TYPE
	  && TREE_CODE (index_expr) != INTEGER_CST)
	check_for_full_enumeration_handling (TREE_TYPE (orig_index));

      /* If this is the first label, warn if any insns have been emitted.  */
      if (thiscase->data.case_stmt.seenlabel == 0)
	{
	  for (insn = get_last_insn ();
	       insn != case_stack->data.case_stmt.start;
	       insn = PREV_INSN (insn))
	    if (GET_CODE (insn) != NOTE
		&& (GET_CODE (insn) != INSN || GET_CODE (PATTERN (insn)) != USE))
	      {
		warning ("unreachable code at beginning of %s",
			 case_stack->data.case_stmt.printname);
		break;
	      }
	}
      /* If we don't have a default-label, create one here,
	 after the body of the switch.  */
      if (thiscase->data.case_stmt.default_label == 0)
	{
	  thiscase->data.case_stmt.default_label
	    = build_decl (LABEL_DECL, NULL_TREE, NULL_TREE);
	  expand_label (thiscase->data.case_stmt.default_label);
	}
      default_label = label_rtx (thiscase->data.case_stmt.default_label);

      before_case = get_last_insn ();

      /* Simplify the case-list before we count it.  */
      group_case_nodes (thiscase->data.case_stmt.case_list);

      /* Get upper and lower bounds of case values.
	 Also convert all the case values to the index expr's data type.  */

      for (n = thiscase->data.case_stmt.case_list; n; n = n->right)
	{
	  /* Check low and high label values are integers.  */
	  if (TREE_CODE (n->low) != INTEGER_CST)
	    abort ();
	  if (TREE_CODE (n->high) != INTEGER_CST)
	    abort ();

	  n->low = convert (TREE_TYPE (index_expr), n->low);
	  n->high = convert (TREE_TYPE (index_expr), n->high);

	  /* Count the elements and track the largest and smallest
	     of them (treating them as signed even if they are not).  */

	  if (INT_CST_LT (n->low, minval))
	    minval = n->low;
	  if (INT_CST_LT (maxval, n->high))
	    maxval = n->high;

	  /* A range counts double, since it requires two compares.  */
	  if (! tree_int_cst_equal (n->low, n->high))
	    count++;
	}
      /* Compute span of values.  */

      range = fold (build (MINUS_EXPR, TREE_TYPE (index_expr),
			   maxval, minval));

      if (count == 0 || TREE_CODE (TREE_TYPE (index_expr)) == ERROR_MARK)
	{
	  expand_expr (index_expr, const0_rtx, VOIDmode, 0);
	  emit_jump (default_label);
	}

      /* If range of values is much bigger than number of values,
	 make a sequence of conditional branches instead of a dispatch.
	 If the switch-index is a constant, do it this way
	 because we can optimize it.  */

#ifndef CASE_VALUES_THRESHOLD
#ifdef HAVE_casesi
#define CASE_VALUES_THRESHOLD (HAVE_casesi ? 4 : 5)
#else
      /* If machine does not have a case insn that compares the
	 bounds, this means extra overhead for dispatch tables
	 which raises the threshold for using them.  */
#define CASE_VALUES_THRESHOLD 5
#endif /* HAVE_casesi */
#endif /* CASE_VALUES_THRESHOLD */
      else if (TREE_INT_CST_HIGH (range) != 0
	       || count < CASE_VALUES_THRESHOLD
	       || ((unsigned HOST_WIDE_INT) (TREE_INT_CST_LOW (range))
	       || TREE_CODE (index_expr) == INTEGER_CST
	       /* These will reduce to a constant.  */
	       || (TREE_CODE (index_expr) == CALL_EXPR
		   && TREE_CODE (TREE_OPERAND (index_expr, 0)) == ADDR_EXPR
		   && TREE_CODE (TREE_OPERAND (TREE_OPERAND (index_expr, 0), 0)) == FUNCTION_DECL
		   && DECL_FUNCTION_CODE (TREE_OPERAND (TREE_OPERAND (index_expr, 0), 0)) == BUILT_IN_CLASSIFY_TYPE)
	       || (TREE_CODE (index_expr) == COMPOUND_EXPR
		   && TREE_CODE (TREE_OPERAND (index_expr, 1)) == INTEGER_CST))
	{
	  index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0);

	  /* If the index is a short or char that we do not have
	     an insn to handle comparisons directly, convert it to
	     a full integer now, rather than letting each comparison
	     generate the conversion.  */

	  if (GET_MODE_CLASS (GET_MODE (index)) == MODE_INT
	      && (cmp_optab->handlers[(int) GET_MODE (index)].insn_code
		  == CODE_FOR_nothing))
	    {
	      enum machine_mode wider_mode;
	      for (wider_mode = GET_MODE (index); wider_mode != VOIDmode;
		   wider_mode = GET_MODE_WIDER_MODE (wider_mode))
		if (cmp_optab->handlers[(int) wider_mode].insn_code
		    != CODE_FOR_nothing)
		  {
		    index = convert_to_mode (wider_mode, index, unsignedp);
		    break;
		  }
	    }

	  do_pending_stack_adjust ();

	  index = protect_from_queue (index, 0);
	  if (GET_CODE (index) == MEM)
	    index = copy_to_reg (index);
	  if (GET_CODE (index) == CONST_INT
	      || TREE_CODE (index_expr) == INTEGER_CST)
	    {
	      /* Make a tree node with the proper constant value
		 if we don't already have one.  */
	      if (TREE_CODE (index_expr) != INTEGER_CST)
		{
		  index_expr
		    = build_int_2 (INTVAL (index),
				   !unsignedp && INTVAL (index) >= 0 ? 0 : -1);
		  index_expr = convert (TREE_TYPE (index_expr), index_expr);
		}

	      /* For constant index expressions we need only
		 issue an unconditional branch to the appropriate
		 target code.  The job of removing any unreachable
		 code is left to the optimisation phase if the
		 "-O" option is specified.  */
	      for (n = thiscase->data.case_stmt.case_list; n; n = n->right)
		if (! tree_int_cst_lt (index_expr, n->low)
		    && ! tree_int_cst_lt (n->high, index_expr))
		  break;

	      if (n)
		emit_jump (label_rtx (n->code_label));
	      else
		emit_jump (default_label);
	    }
	  else
	    {
	      /* If the index expression is not constant we generate
		 a binary decision tree to select the appropriate
		 target code.  This is done as follows:

		 The list of cases is rearranged into a binary tree,
		 nearly optimal assuming equal probability for each case.

		 The tree is transformed into RTL, eliminating
		 redundant test conditions at the same time.

		 If program flow could reach the end of the
		 decision tree an unconditional jump to the
		 default code is emitted.  */
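	      /* Illustrative sketch, not part of the original code: for
		 case values 1, 2, 3 and 100 the range (99) is much larger
		 than the count (4), so this path emits a comparison tree
		 roughly like

		    if (index < 3) { if (index == 1) goto L1;
				     if (index == 2) goto L2; goto Ldef; }
		    if (index == 3) goto L3;
		    if (index == 100) goto L100;
		    goto Ldef;

		 whereas densely packed values such as 1..8 would instead
		 take the dispatch-table path further below (gen_casesi or
		 do_tablejump).  */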
		= (TREE_CODE (TREE_TYPE (orig_index)) != ENUMERAL_TYPE
		   && estimate_case_costs (thiscase->data.case_stmt.case_list));
	      balance_case_nodes (&thiscase->data.case_stmt.case_list,
	      emit_case_nodes (index, thiscase->data.case_stmt.case_list,
			       default_label, TREE_TYPE (index_expr));
	      emit_jump_if_reachable (default_label);

	  enum machine_mode index_mode = SImode;
	  int index_bits = GET_MODE_BITSIZE (index_mode);

	  /* Convert the index to SImode.  */
	  if (GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (index_expr)))
	      > GET_MODE_BITSIZE (index_mode))
	    {
	      enum machine_mode omode = TYPE_MODE (TREE_TYPE (index_expr));
	      rtx rangertx = expand_expr (range, NULL_RTX, VOIDmode, 0);

	      /* We must handle the endpoints in the original mode.  */
	      index_expr = build (MINUS_EXPR, TREE_TYPE (index_expr),
				  index_expr, minval);
	      minval = integer_zero_node;
	      index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0);
	      emit_cmp_insn (rangertx, index, LTU, NULL_RTX, omode, 0, 0);
	      emit_jump_insn (gen_bltu (default_label));
	      /* Now we can safely truncate.  */
	      index = convert_to_mode (index_mode, index, 0);
	    }
	  if (TYPE_MODE (TREE_TYPE (index_expr)) != index_mode)
	    index_expr = convert (type_for_size (index_bits, 0),
				  index_expr);
	  index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0);

	  index = protect_from_queue (index, 0);
	  do_pending_stack_adjust ();

	  emit_jump_insn (gen_casesi (index, expand_expr (minval, NULL_RTX,
							  VOIDmode, 0),
				      expand_expr (range, NULL_RTX,
						   VOIDmode, 0),
				      table_label, default_label));
#ifdef HAVE_tablejump
	  if (! win && HAVE_tablejump)
	    {
	      index_expr = convert (thiscase->data.case_stmt.nominal_type,
				    fold (build (MINUS_EXPR,
						 TREE_TYPE (index_expr),
						 index_expr, minval)));
	      index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0);

	      index = protect_from_queue (index, 0);
	      do_pending_stack_adjust ();

	      do_tablejump (index, TYPE_MODE (TREE_TYPE (index_expr)),
			    expand_expr (range, NULL_RTX, VOIDmode, 0),
			    table_label, default_label);
	    }
#endif

	  /* Get table of labels to jump to, in order of case index.  */

	  ncases = TREE_INT_CST_LOW (range) + 1;
	  labelvec = (rtx *) alloca (ncases * sizeof (rtx));
	  bzero (labelvec, ncases * sizeof (rtx));
	  for (n = thiscase->data.case_stmt.case_list; n; n = n->right)
	    {
	      register HOST_WIDE_INT i
		= TREE_INT_CST_LOW (n->low) - TREE_INT_CST_LOW (minval);

	      labelvec[i]
		= gen_rtx (LABEL_REF, Pmode, label_rtx (n->code_label));
	      if (i + TREE_INT_CST_LOW (minval)
		  == TREE_INT_CST_LOW (n->high))
		break;
	    }

	  /* Fill in the gaps with the default.  */
	  for (i = 0; i < ncases; i++)
	    if (labelvec[i] == 0)
	      labelvec[i] = gen_rtx (LABEL_REF, Pmode, default_label);
	  /* Output the table.  */
	  emit_label (table_label);

	  /* This would be a lot nicer if CASE_VECTOR_PC_RELATIVE
	     were an expression, instead of an #ifdef/#ifndef.  */
#ifdef CASE_VECTOR_PC_RELATIVE
	  emit_jump_insn (gen_rtx (ADDR_DIFF_VEC, CASE_VECTOR_MODE,
				   gen_rtx (LABEL_REF, Pmode, table_label),
				   gen_rtvec_v (ncases, labelvec)));
#else
	  emit_jump_insn (gen_rtx (ADDR_VEC, CASE_VECTOR_MODE,
				   gen_rtvec_v (ncases, labelvec)));
#endif

	  /* If the case insn drops through the table,
	     after the table we must jump to the default-label.
	     Otherwise record no drop-through after the table.  */
#ifdef CASE_DROPS_THROUGH
	  emit_jump (default_label);
#endif
= squeeze_notes (NEXT_INSN (before_case
), get_last_insn ());
3883 reorder_insns (before_case
, get_last_insn (),
3884 thiscase
->data
.case_stmt
.start
);
3886 if (thiscase
->exit_label
)
3887 emit_label (thiscase
->exit_label
);
3889 POPSTACK (case_stack
);
/* Generate code to jump to LABEL if OP1 and OP2 are equal.  */

static void
do_jump_if_equal (op1, op2, label, unsignedp)
     rtx op1, op2, label;
     int unsignedp;
{
  if (GET_CODE (op1) == CONST_INT
      && GET_CODE (op2) == CONST_INT)
    {
      if (INTVAL (op1) == INTVAL (op2))
	emit_jump (label);
    }
  else
    {
      enum machine_mode mode = GET_MODE (op1);
      if (mode == VOIDmode)
	mode = GET_MODE (op2);
      emit_cmp_insn (op1, op2, EQ, NULL_RTX, mode, unsignedp, 0);
      emit_jump_insn (gen_beq (label));
    }
}
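/* Note on the constant case above: when both operands are CONST_INT the
   test is decided at compile time, so equal constants produce a single
   unconditional jump to LABEL and unequal constants produce no code at
   all; only the general case costs a compare followed by a beq.  */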
/* Not all case values are encountered equally.  This function
   uses a heuristic to weight case labels, in cases where that
   looks like a reasonable thing to do.

   Right now, all we try to guess is text, and we establish the
   following weights:

	chars above space:	16
	digits:			16
	default:		12
	space, punct:		8
	tab:			4
	newline:		2
	other "\" chars:	1
	remaining chars:	0

   If we find any cases in the switch that are not either -1 or in the range
   of valid ASCII characters, or are control characters other than those
   commonly used with "\", don't treat this switch as scanning text.

   Return 1 if these nodes are suitable for cost estimation, otherwise
   return 0.  */

static int
estimate_case_costs (node)
     case_node_ptr node;
{
  tree min_ascii = build_int_2 (-1, -1);
  tree max_ascii = convert (TREE_TYPE (node->high), build_int_2 (127, 0));
  case_node_ptr n;
  int i;

  /* If we haven't already made the cost table, make it now.  Note that the
     lower bound of the table is -1, not zero.  */

  if (cost_table == NULL)
    {
      cost_table = ((short *) xmalloc (129 * sizeof (short))) + 1;
      bzero (cost_table - 1, 129 * sizeof (short));

      for (i = 0; i < 128; i++)
	{
	  if (isalnum (i))
	    cost_table[i] = 16;
	  else if (ispunct (i))
	    cost_table[i] = 8;
	  else if (iscntrl (i))
	    cost_table[i] = -1;
	}

      cost_table[' '] = 8;
      cost_table['\t'] = 4;
      cost_table['\0'] = 4;
      cost_table['\n'] = 2;
      cost_table['\f'] = 1;
      cost_table['\v'] = 1;
      cost_table['\b'] = 1;
    }

  /* See if all the case expressions look like text.  It is text if the
     constant is >= -1 and the highest constant is <= 127.  Do all comparisons
     as signed arithmetic since we don't want to ever access cost_table with a
     value less than -1.  Also check that none of the constants in a range
     are strange control characters.  */

  for (n = node; n; n = n->right)
    {
      if ((INT_CST_LT (n->low, min_ascii)) || INT_CST_LT (max_ascii, n->high))
	return 0;

      for (i = TREE_INT_CST_LOW (n->low); i <= TREE_INT_CST_LOW (n->high); i++)
	if (cost_table[i] < 0)
	  return 0;
    }

  /* All interesting values are within the range of interesting
     ASCII characters.  */
  return 1;
}
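/* Example of the screening done above (hypothetical switch): cases for
   ' ', '\n' and the letters 'a' through 'z' all lie between -1 and 127
   and none has a negative table entry, so the heuristic applies and those
   nodes get weights 8, 2 and 16 respectively.  A switch containing
   `case 200:' or `case 1:' (a control character that is not one of the
   common "\" escapes) makes this function return 0, and the caller then
   weights every case label equally instead.  */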
/* Scan an ordered list of case nodes
   combining those with consecutive values or ranges.

   Eg. three separate entries 1: 2: 3: become one entry 1..3:  */

static void
group_case_nodes (head)
     case_node_ptr head;
{
  case_node_ptr node = head;

  while (node)
    {
      rtx lb = next_real_insn (label_rtx (node->code_label));
      case_node_ptr np = node;

      /* Try to group the successors of NODE with NODE.  */
      while (((np = np->right) != 0)
	     /* Do they jump to the same place?  */
	     && next_real_insn (label_rtx (np->code_label)) == lb
	     /* Are their ranges consecutive?  */
	     && tree_int_cst_equal (np->low,
				    fold (build (PLUS_EXPR,
						 TREE_TYPE (node->high),
						 node->high,
						 integer_one_node)))
	     /* An overflow is not consecutive.  */
	     && tree_int_cst_lt (node->high,
				 fold (build (PLUS_EXPR,
					      TREE_TYPE (node->high),
					      node->high,
					      integer_one_node))))
	{
	  node->high = np->high;
	}

      /* NP is the first node after NODE which can't be grouped with it.
	 Delete the nodes in between, and move on to that node.  */
      node->right = np;
      node = np;
    }
}
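/* Example of the grouping above: if `case 1:', `case 2:' and `case 3:'
   all label the same statement, the three nodes branch to the same first
   real insn and their values are consecutive, so they collapse into one
   node covering 1..3.  `case 1:' and `case 3:' with a common body stay
   separate (not consecutive), as do consecutive values whose code labels
   lead to different insns; and the overflow test keeps a node ending at
   the type's maximum value from absorbing a successor that wrapped
   around.  */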
/* Take an ordered list of case nodes
   and transform them into a near optimal binary tree,
   on the assumption that any target code selection value is as
   likely as any other.

   The transformation is performed by splitting the ordered
   list into two equal sections plus a pivot.  The parts are
   then attached to the pivot as left and right branches.  Each
   branch is then transformed recursively.  */

static void
balance_case_nodes (head, parent)
     case_node_ptr *head;
     case_node_ptr parent;
{
  register case_node_ptr np;

  np = *head;
  if (np)
    {
      int cost = 0;
      int i = 0;
      int ranges = 0;
      register case_node_ptr *npp;
      case_node_ptr left;

      /* Count the number of entries on branch.  Also count the ranges.  */

      while (np)
	{
	  if (!tree_int_cst_equal (np->low, np->high))
	    {
	      ranges++;
	      if (use_cost_table)
		cost += cost_table[TREE_INT_CST_LOW (np->high)];
	    }

	  if (use_cost_table)
	    cost += cost_table[TREE_INT_CST_LOW (np->low)];

	  i++;
	  np = np->right;
	}

      if (i > 2)
	{
	  /* Split this list if it is long enough for that to help.  */
	  npp = head;
	  left = *npp;

	  if (use_cost_table)
	    {
	      /* Find the place in the list that bisects the list's total
		 cost.  Here `i' gets half the total cost.  */
	      int n_moved = 0;
	      i = (cost + 1) / 2;
	      while (1)
		{
		  /* Skip nodes while their cost does not reach that amount.  */
		  if (!tree_int_cst_equal ((*npp)->low, (*npp)->high))
		    i -= cost_table[TREE_INT_CST_LOW ((*npp)->high)];
		  i -= cost_table[TREE_INT_CST_LOW ((*npp)->low)];
		  if (i <= 0)
		    break;
		  npp = &(*npp)->right;
		  n_moved += 1;
		}
	      if (n_moved == 0)
		{
		  /* Leave this branch lopsided, but optimize left-hand
		     side and fill in `parent' fields for right-hand side.  */
		  np = *head;
		  np->parent = parent;
		  balance_case_nodes (&np->left, np);
		  for (; np->right; np = np->right)
		    np->right->parent = np;
		  return;
		}
	    }
	  /* If there are just three nodes, split at the middle one.  */
	  else if (i == 3)
	    npp = &(*npp)->right;
	  else
	    {
	      /* Find the place in the list that bisects the list's total
		 cost, where ranges count as 2.
		 Here `i' gets half the total cost.  */
	      i = (i + ranges + 1) / 2;
	      while (1)
		{
		  /* Skip nodes while their cost does not reach that amount.  */
		  if (!tree_int_cst_equal ((*npp)->low, (*npp)->high))
		    i--;
		  i--;
		  if (i <= 0)
		    break;
		  npp = &(*npp)->right;
		}
	    }

	  *head = np = *npp;
	  *npp = 0;
	  np->parent = parent;
	  np->left = left;

	  /* Optimize each of the two split parts.  */
	  balance_case_nodes (&np->left, np);
	  balance_case_nodes (&np->right, np);
	}
      else
	{
	  /* Else leave this branch as one level,
	     but fill in `parent' fields.  */
	  np = *head;
	  np->parent = parent;
	  for (; np->right; np = np->right)
	    np->right->parent = np;
	}
    }
}
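/* Example of the split above, assuming equal weights: for seven cases
   1,2,3,4,5,6,7 the midpoint computation makes the fourth node the pivot,
   so 4 becomes the root of this subtree with 1..3 balanced recursively on
   its left and 5..7 on its right, and any value is reached in at most
   three comparisons.  When the cost table is in use, heavily weighted
   values (letters, digits) pull the pivot toward themselves so the common
   characters are reached with fewer tests.  */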
/* Search the parent sections of the case node tree
   to see if a test for the lower bound of NODE would be redundant.
   INDEX_TYPE is the type of the index expression.

   The instructions to generate the case decision tree are
   output in the same order as nodes are processed so it is
   known that if a parent node checks the range of the current
   node minus one that the current node is bounded at its lower
   span.  Thus the test would be redundant.  */

static int
node_has_low_bound (node, index_type)
     case_node_ptr node;
     tree index_type;
{
  tree low_minus_one;
  case_node_ptr pnode;

  /* If the lower bound of this node is the lowest value in the index type,
     we need not test it.  */

  if (tree_int_cst_equal (node->low, TYPE_MIN_VALUE (index_type)))
    return 1;

  /* If this node has a left branch, the value at the left must be less
     than that at this node, so it cannot be bounded at the bottom and
     we need not bother testing any further.  */

  if (node->left)
    return 0;

  low_minus_one = fold (build (MINUS_EXPR, TREE_TYPE (node->low),
			       node->low, integer_one_node));

  /* If the subtraction above overflowed, we can't verify anything.
     Otherwise, look for a parent that tests our value - 1.  */

  if (! tree_int_cst_lt (low_minus_one, node->low))
    return 0;

  for (pnode = node->parent; pnode; pnode = pnode->parent)
    if (tree_int_cst_equal (low_minus_one, pnode->high))
      return 1;

  return 0;
}
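/* Example of the redundancy test above: if this node covers only the
   value 51 and some parent node's range ends at 50, then on the path
   that reaches this node the index is already known to be greater than
   50, so a `less than 51' branch to the default label here would be dead
   code and the caller omits it.  */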
/* Search the parent sections of the case node tree
   to see if a test for the upper bound of NODE would be redundant.
   INDEX_TYPE is the type of the index expression.

   The instructions to generate the case decision tree are
   output in the same order as nodes are processed so it is
   known that if a parent node checks the range of the current
   node plus one that the current node is bounded at its upper
   span.  Thus the test would be redundant.  */

static int
node_has_high_bound (node, index_type)
     case_node_ptr node;
     tree index_type;
{
  tree high_plus_one;
  case_node_ptr pnode;

  /* If the upper bound of this node is the highest value in the type
     of the index expression, we need not test against it.  */

  if (tree_int_cst_equal (node->high, TYPE_MAX_VALUE (index_type)))
    return 1;

  /* If this node has a right branch, the value at the right must be greater
     than that at this node, so it cannot be bounded at the top and
     we need not bother testing any further.  */

  if (node->right)
    return 0;

  high_plus_one = fold (build (PLUS_EXPR, TREE_TYPE (node->high),
			       node->high, integer_one_node));

  /* If the addition above overflowed, we can't verify anything.
     Otherwise, look for a parent that tests our value + 1.  */

  if (! tree_int_cst_lt (node->high, high_plus_one))
    return 0;

  for (pnode = node->parent; pnode; pnode = pnode->parent)
    if (tree_int_cst_equal (high_plus_one, pnode->low))
      return 1;

  return 0;
}
/* Search the parent sections of the
   case node tree to see if both tests for the upper and lower
   bounds of NODE would be redundant.  */

static int
node_is_bounded (node, index_type)
     case_node_ptr node;
     tree index_type;
{
  return (node_has_low_bound (node, index_type)
	  && node_has_high_bound (node, index_type));
}
/* Emit an unconditional jump to LABEL unless it would be dead code.  */

static void
emit_jump_if_reachable (label)
     rtx label;
{
  if (GET_CODE (get_last_insn ()) != BARRIER)
    emit_jump (label);
}
/* Emit step-by-step code to select a case for the value of INDEX.
   The thus generated decision tree follows the form of the
   case-node binary tree NODE, whose nodes represent test conditions.
   INDEX_TYPE is the type of the index of the switch.

   Care is taken to prune redundant tests from the decision tree
   by detecting any boundary conditions already checked by
   emitted rtx.  (See node_has_high_bound, node_has_low_bound
   and node_is_bounded, above.)

   Where the test conditions can be shown to be redundant we emit
   an unconditional jump to the target code.  As a further
   optimization, the subordinates of a tree node are examined to
   check for bounded nodes.  In this case conditional and/or
   unconditional jumps as a result of the boundary check for the
   current node are arranged to target the subordinate's associated
   code for out-of-bound conditions on the current node.

   We can assume that when control reaches the code generated here,
   the index value has already been compared with the parents
   of this node, and determined to be on the same side of each parent
   as this node is.  Thus, if this node tests for the value 51,
   and a parent tested for 52, we don't need to consider
   the possibility of a value greater than 51.  If another parent
   tests for the value 50, then this node need not test anything.  */

static void
emit_case_nodes (index, node, default_label, index_type)
     rtx index;
     case_node_ptr node;
     rtx default_label;
     tree index_type;
{
  /* If INDEX has an unsigned type, we must make unsigned branches.  */
  int unsignedp = TREE_UNSIGNED (index_type);
  typedef rtx rtx_function ();
  rtx_function *gen_bgt_pat = unsignedp ? gen_bgtu : gen_bgt;
  rtx_function *gen_bge_pat = unsignedp ? gen_bgeu : gen_bge;
  rtx_function *gen_blt_pat = unsignedp ? gen_bltu : gen_blt;
  rtx_function *gen_ble_pat = unsignedp ? gen_bleu : gen_ble;
  enum machine_mode mode = GET_MODE (index);
  /* See if our parents have already tested everything for us.
     If they have, emit an unconditional jump for this node.  */
  if (node_is_bounded (node, index_type))
    emit_jump (label_rtx (node->code_label));

  else if (tree_int_cst_equal (node->low, node->high))
    {
      /* Node is single valued.  First see if the index expression matches
	 this node and then check our children, if any.  */

      do_jump_if_equal (index, expand_expr (node->low, NULL_RTX, VOIDmode, 0),
			label_rtx (node->code_label), unsignedp);

      if (node->right != 0 && node->left != 0)
	{
	  /* This node has children on both sides.
	     Dispatch to one side or the other
	     by comparing the index value with this node's value.
	     If one subtree is bounded, check that one first,
	     so we can avoid real branches in the tree.  */

	  if (node_is_bounded (node->right, index_type))
	    {
	      emit_cmp_insn (index, expand_expr (node->high, NULL_RTX,
						 VOIDmode, 0),
			     GT, NULL_RTX, mode, unsignedp, 0);

	      emit_jump_insn ((*gen_bgt_pat) (label_rtx (node->right->code_label)));
	      emit_case_nodes (index, node->left, default_label, index_type);
	    }

	  else if (node_is_bounded (node->left, index_type))
	    {
	      emit_cmp_insn (index, expand_expr (node->high, NULL_RTX,
						 VOIDmode, 0),
			     LT, NULL_RTX, mode, unsignedp, 0);
	      emit_jump_insn ((*gen_blt_pat) (label_rtx (node->left->code_label)));
	      emit_case_nodes (index, node->right, default_label, index_type);
	    }

	  else
	    {
	      /* Neither node is bounded.  First distinguish the two sides;
		 then emit the code for one side at a time.  */

	      tree test_label
		= build_decl (LABEL_DECL, NULL_TREE, NULL_TREE);

	      /* See if the value is on the right.  */
	      emit_cmp_insn (index, expand_expr (node->high, NULL_RTX,
						 VOIDmode, 0),
			     GT, NULL_RTX, mode, unsignedp, 0);
	      emit_jump_insn ((*gen_bgt_pat) (label_rtx (test_label)));

	      /* Value must be on the left.
		 Handle the left-hand subtree.  */
	      emit_case_nodes (index, node->left, default_label, index_type);
	      /* If left-hand subtree does nothing,
		 go to default.  */
	      emit_jump_if_reachable (default_label);

	      /* Code branches here for the right-hand subtree.  */
	      expand_label (test_label);
	      emit_case_nodes (index, node->right, default_label, index_type);
	    }
	}
      else if (node->right != 0 && node->left == 0)
	{
	  /* Here we have a right child but no left so we issue conditional
	     branch to default and process the right child.

	     Omit the conditional branch to default if it would avoid only
	     one right child; it costs too much space to save so little
	     time.  */

	  if (node->right->right || node->right->left
	      || !tree_int_cst_equal (node->right->low, node->right->high))
	    {
	      if (!node_has_low_bound (node, index_type))
		{
		  emit_cmp_insn (index, expand_expr (node->high, NULL_RTX,
						     VOIDmode, 0),
				 LT, NULL_RTX, mode, unsignedp, 0);
		  emit_jump_insn ((*gen_blt_pat) (default_label));
		}

	      emit_case_nodes (index, node->right, default_label, index_type);
	    }
	  else
	    /* We cannot process node->right normally
	       since we haven't ruled out the numbers less than
	       this node's value.  So handle node->right explicitly.  */
	    do_jump_if_equal (index,
			      expand_expr (node->right->low, NULL_RTX,
					   VOIDmode, 0),
			      label_rtx (node->right->code_label), unsignedp);
	}
      else if (node->right == 0 && node->left != 0)
	{
	  /* Just one subtree, on the left.  */

#if 0 /* The following code and comment were formerly part
	 of the condition here, but they didn't work
	 and I don't understand what the idea was.  -- rms.  */

	  /* If our "most probable entry" is less probable
	     than the default label, emit a jump to
	     the default label using condition codes
	     already lying around.  With no right branch,
	     a branch-greater-than will get us to the default
	     label correctly.  */
	  if (use_cost_table
	      && cost_table[TREE_INT_CST_LOW (node->high)] < 12)
	    ;
#endif /* 0 */

	  if (node->left->left || node->left->right
	      || !tree_int_cst_equal (node->left->low, node->left->high))
	    {
	      if (!node_has_high_bound (node, index_type))
		{
		  emit_cmp_insn (index, expand_expr (node->high, NULL_RTX,
						     VOIDmode, 0),
				 GT, NULL_RTX, mode, unsignedp, 0);
		  emit_jump_insn ((*gen_bgt_pat) (default_label));
		}

	      emit_case_nodes (index, node->left, default_label, index_type);
	    }
	  else
	    /* We cannot process node->left normally
	       since we haven't ruled out the numbers less than
	       this node's value.  So handle node->left explicitly.  */
	    do_jump_if_equal (index,
			      expand_expr (node->left->low, NULL_RTX,
					   VOIDmode, 0),
			      label_rtx (node->left->code_label), unsignedp);
	}
    }
  else
    {
      /* Node is a range.  These cases are very similar to those for a single
	 value, except that we do not start by testing whether this node
	 is the one to branch to.  */

      if (node->right != 0 && node->left != 0)
	{
	  /* Node has subtrees on both sides.
	     If the right-hand subtree is bounded,
	     test for it first, since we can go straight there.
	     Otherwise, we need to make a branch in the control structure,
	     then handle the two subtrees.  */
	  tree test_label = 0;

	  emit_cmp_insn (index, expand_expr (node->high, NULL_RTX,
					     VOIDmode, 0),
			 GT, NULL_RTX, mode, unsignedp, 0);

	  if (node_is_bounded (node->right, index_type))
	    /* Right hand node is fully bounded so we can eliminate any
	       testing and branch directly to the target code.  */
	    emit_jump_insn ((*gen_bgt_pat) (label_rtx (node->right->code_label)));
	  else
	    {
	      /* Right hand node requires testing.
		 Branch to a label where we will handle it later.  */

	      test_label = build_decl (LABEL_DECL, NULL_TREE, NULL_TREE);
	      emit_jump_insn ((*gen_bgt_pat) (label_rtx (test_label)));
	    }

	  /* Value belongs to this node or to the left-hand subtree.  */

	  emit_cmp_insn (index, expand_expr (node->low, NULL_RTX, VOIDmode, 0),
			 GE, NULL_RTX, mode, unsignedp, 0);
	  emit_jump_insn ((*gen_bge_pat) (label_rtx (node->code_label)));

	  /* Handle the left-hand subtree.  */
	  emit_case_nodes (index, node->left, default_label, index_type);

	  /* If right node had to be handled later, do that now.  */
	  if (test_label)
	    {
	      /* If the left-hand subtree fell through,
		 don't let it fall into the right-hand subtree.  */
	      emit_jump_if_reachable (default_label);

	      expand_label (test_label);
	      emit_case_nodes (index, node->right, default_label, index_type);
	    }
	}
      else if (node->right != 0 && node->left == 0)
	{
	  /* Deal with values to the left of this node,
	     if they are possible.  */
	  if (!node_has_low_bound (node, index_type))
	    {
	      emit_cmp_insn (index, expand_expr (node->low, NULL_RTX,
						 VOIDmode, 0),
			     LT, NULL_RTX, mode, unsignedp, 0);
	      emit_jump_insn ((*gen_blt_pat) (default_label));
	    }

	  /* Value belongs to this node or to the right-hand subtree.  */

	  emit_cmp_insn (index, expand_expr (node->high, NULL_RTX,
					     VOIDmode, 0),
			 LE, NULL_RTX, mode, unsignedp, 0);
	  emit_jump_insn ((*gen_ble_pat) (label_rtx (node->code_label)));

	  emit_case_nodes (index, node->right, default_label, index_type);
	}
      else if (node->right == 0 && node->left != 0)
	{
	  /* Deal with values to the right of this node,
	     if they are possible.  */
	  if (!node_has_high_bound (node, index_type))
	    {
	      emit_cmp_insn (index, expand_expr (node->high, NULL_RTX,
						 VOIDmode, 0),
			     GT, NULL_RTX, mode, unsignedp, 0);
	      emit_jump_insn ((*gen_bgt_pat) (default_label));
	    }

	  /* Value belongs to this node or to the left-hand subtree.  */

	  emit_cmp_insn (index, expand_expr (node->low, NULL_RTX, VOIDmode, 0),
			 GE, NULL_RTX, mode, unsignedp, 0);
	  emit_jump_insn ((*gen_bge_pat) (label_rtx (node->code_label)));

	  emit_case_nodes (index, node->left, default_label, index_type);
	}
      else
	{
	  /* Node has no children so we check low and high bounds to remove
	     redundant tests.  Only one of the bounds can exist,
	     since otherwise this node is bounded--a case tested already.  */

	  if (!node_has_high_bound (node, index_type))
	    {
	      emit_cmp_insn (index, expand_expr (node->high, NULL_RTX,
						 VOIDmode, 0),
			     GT, NULL_RTX, mode, unsignedp, 0);
	      emit_jump_insn ((*gen_bgt_pat) (default_label));
	    }

	  if (!node_has_low_bound (node, index_type))
	    {
	      emit_cmp_insn (index, expand_expr (node->low, NULL_RTX,
						 VOIDmode, 0),
			     LT, NULL_RTX, mode, unsignedp, 0);
	      emit_jump_insn ((*gen_blt_pat) (default_label));
	    }

	  emit_jump (label_rtx (node->code_label));
	}
    }
}
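/* Putting the pieces together: for a hypothetical switch with cases
   1, 2, 3, 5 and 8 (balanced so that 3 is the root, 1-2 the left chain
   and 5-8 the right chain) and with no bounds known, the code emitted by
   this function amounts to roughly the following pseudo-assembly, where
   the L* names are invented for the example:

	cmp index,3   ; beq L3
	cmp index,3   ; bgt Lright
	cmp index,1   ; beq L1
	cmp index,2   ; beq L2
	jmp Ldefault
     Lright:
	cmp index,5   ; beq L5
	cmp index,8   ; beq L8
	;; falls through to the default code emitted by the caller

   Any branch that a parent test or a type bound has already made
   impossible is pruned, and a bounded subtree is branched to directly
   instead of through a test label.  The real output is of course rtl
   produced by the gen_* patterns used above, not assembly.  */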
/* These routines are used by the loop unrolling code.  They copy BLOCK trees
   so that the debugging info will be correct for the unrolled loop.  */

/* Indexed by block number, contains a pointer to the N'th block node.  */

static tree *block_vector;

void
find_loop_tree_blocks ()
{
  tree block = DECL_INITIAL (current_function_decl);

  /* The first block is for the function body, and does not have
     corresponding block notes.  Don't include it in the block vector.  */
  block = BLOCK_SUBBLOCKS (block);

  block_vector = identify_blocks (block, get_insns ());
}

void
unroll_block_trees ()
{
  tree block = DECL_INITIAL (current_function_decl);

  reorder_blocks (block_vector, block, get_insns ());
}
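/* Presumed calling sequence for the two routines above (they are meant
   for the loop unroller, per the comment at the start of this section):
   find_loop_tree_blocks is called before the loop body is duplicated, so
   that block_vector records the BLOCK node belonging to each block note
   then in the insn stream; unroll_block_trees is called after unrolling
   so that reorder_blocks can rebuild the BLOCK tree under DECL_INITIAL
   to match the duplicated notes.  */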