1 /* Expands front end tree to back end RTL for GNU C-Compiler
2 Copyright (C) 1987, 88, 89, 92-98, 1999 Free Software Foundation, Inc.
3
4 This file is part of GNU CC.
5
6 GNU CC is free software; you can redistribute it and/or modify
7 it under the terms of the GNU General Public License as published by
8 the Free Software Foundation; either version 2, or (at your option)
9 any later version.
10
11 GNU CC is distributed in the hope that it will be useful,
12 but WITHOUT ANY WARRANTY; without even the implied warranty of
13 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
14 GNU General Public License for more details.
15
16 You should have received a copy of the GNU General Public License
17 along with GNU CC; see the file COPYING. If not, write to
18 the Free Software Foundation, 59 Temple Place - Suite 330,
19 Boston, MA 02111-1307, USA. */
20
21
22 /* This file handles the generation of rtl code from tree structure
23 above the level of expressions, using subroutines in exp*.c and emit-rtl.c.
24 It also creates the rtl expressions for parameters and auto variables
25 and has full responsibility for allocating stack slots.
26
27 The functions whose names start with `expand_' are called by the
28 parser to generate RTL instructions for various kinds of constructs.
29
30 Some control and binding constructs require calling several such
31 functions at different times. For example, a simple if-then
32 is expanded by calling `expand_start_cond' (with the condition-expression
33 as argument) before parsing the then-clause and calling `expand_end_cond'
34 after parsing the then-clause. */
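
/* As a rough illustration (exact signatures are as declared in this
   file), a front end expanding the C statement

       if (cond)
         then_body;

   would make calls along the lines of

       expand_start_cond (cond, 0);    with 0 meaning no EXITFLAG, see below
       ... expand THEN_BODY ...
       expand_end_cond ();

   with the then-clause parsed and expanded between the two calls.  */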
35
36 #include "config.h"
37 #include "system.h"
38
39 #include "rtl.h"
40 #include "tree.h"
41 #include "flags.h"
42 #include "except.h"
43 #include "function.h"
44 #include "insn-flags.h"
45 #include "insn-config.h"
46 #include "insn-codes.h"
47 #include "expr.h"
48 #include "hard-reg-set.h"
49 #include "obstack.h"
50 #include "loop.h"
51 #include "recog.h"
52 #include "machmode.h"
53 #include "toplev.h"
54 #include "output.h"
55 #include "ggc.h"
56
57 #define obstack_chunk_alloc xmalloc
58 #define obstack_chunk_free free
59 struct obstack stmt_obstack;
60
61 /* Assume that case vectors are not pc-relative. */
62 #ifndef CASE_VECTOR_PC_RELATIVE
63 #define CASE_VECTOR_PC_RELATIVE 0
64 #endif
65
66 /* Each time we expand the end of a binding contour (in `expand_end_bindings')
67 and we emit a new NOTE_INSN_BLOCK_END note, we save a pointer to it here.
68 This is used by the `remember_end_note' function to record the endpoint
69 of each generated block in its associated BLOCK node. */
70
71 static rtx last_block_end_note;
72 \f
73 /* Functions and data structures for expanding case statements. */
74
75 /* Case label structure, used to hold info on labels within case
76 statements. We handle "range" labels; for a single-value label
77 as in C, the high and low limits are the same.
78
79 An AVL tree of case nodes is initially created, and later transformed
80 to a list linked via the RIGHT fields in the nodes. Nodes with
81 higher case values are later in the list.
82
83 Switch statements can be output in one of two forms. A branch table
84 is used if there are more than a few labels and the labels are dense
85 within the range between the smallest and largest case value. If a
86 branch table is used, no further manipulations are done with the case
87 node chain.
88
89 The alternative to the use of a branch table is to generate a series
90 of compare and jump insns. When that is done, we use the LEFT, RIGHT,
91 and PARENT fields to hold a binary tree. Initially the tree is
92 totally unbalanced, with everything on the right. We balance the tree
93 with nodes on the left having lower case values than the parent
94 and nodes on the right having higher values. We then output the tree
95 in order. */
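
/* For illustration, a dense switch such as

       switch (c)
         {
         case 1: case 2: case 3: case 4: case 5: f (c); break;
         default: g (); break;
         }

   is normally emitted as a branch table, while a sparse one, say with
   case values 1, 1000 and 1000000, is emitted as a balanced tree of
   compare-and-jump insns.  A range label (for instance `case 1 ... 8:'
   in GNU C) becomes a single case_node whose LOW and HIGH differ.  */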
96
97 struct case_node
98 {
99 struct case_node *left; /* Left son in binary tree */
100 struct case_node *right; /* Right son in binary tree; also node chain */
101 struct case_node *parent; /* Parent of node in binary tree */
102 tree low; /* Lowest index value for this label */
103 tree high; /* Highest index value for this label */
104 tree code_label; /* Label to jump to when node matches */
105   int balance;                    /* Balance factor of this node in the AVL tree */
106 };
107
108 typedef struct case_node case_node;
109 typedef struct case_node *case_node_ptr;
110
111 /* These are used by estimate_case_costs and balance_case_nodes. */
112
113 /* This must be a signed type, and non-ANSI compilers lack signed char. */
114 static short *cost_table;
115 static int use_cost_table;
116 \f
117 /* Stack of control and binding constructs we are currently inside.
118
119 These constructs begin when you call `expand_start_WHATEVER'
120 and end when you call `expand_end_WHATEVER'. This stack records
121 info about how the construct began that tells the end-function
122 what to do. It also may provide information about the construct
123 to alter the behavior of other constructs within the body.
124 For example, they may affect the behavior of C `break' and `continue'.
125
126 Each construct gets one `struct nesting' object.
127 All of these objects are chained through the `all' field.
128 `nesting_stack' points to the first object (innermost construct).
129 The position of an entry on `nesting_stack' is in its `depth' field.
130
131 Each type of construct has its own individual stack.
132 For example, loops have `loop_stack'. Each object points to the
133 next object of the same type through the `next' field.
134
135 Some constructs are visible to `break' exit-statements and others
136 are not. Which constructs are visible depends on the language.
137 Therefore, the data structure allows each construct to be visible
138 or not, according to the args given when the construct is started.
139 The construct is visible if the `exit_label' field is non-null.
140 In that case, the value should be a CODE_LABEL rtx. */
141
142 struct nesting
143 {
144 struct nesting *all;
145 struct nesting *next;
146 int depth;
147 rtx exit_label;
148 union
149 {
150 /* For conds (if-then and if-then-else statements). */
151 struct
152 {
153 /* Label for the end of the if construct.
154 There is none if EXITFLAG was not set
155 and no `else' has been seen yet. */
156 rtx endif_label;
157 /* Label for the end of this alternative.
158 This may be the end of the if or the next else/elseif. */
159 rtx next_label;
160 } cond;
161 /* For loops. */
162 struct
163 {
164 /* Label at the top of the loop; place to loop back to. */
165 rtx start_label;
166 /* Label at the end of the whole construct. */
167 rtx end_label;
168 /* Label before a jump that branches to the end of the whole
169 construct. This is where destructors go if any. */
170 rtx alt_end_label;
171 /* Label for `continue' statement to jump to;
172 this is in front of the stepper of the loop. */
173 rtx continue_label;
174 } loop;
175 /* For variable binding contours. */
176 struct
177 {
178 /* Sequence number of this binding contour within the function,
179 in order of entry. */
180 int block_start_count;
181 /* Nonzero => value to restore stack to on exit. */
182 rtx stack_level;
183 /* The NOTE that starts this contour.
184 Used by expand_goto to check whether the destination
185 is within each contour or not. */
186 rtx first_insn;
187 /* Innermost containing binding contour that has a stack level. */
188 struct nesting *innermost_stack_block;
189 /* List of cleanups to be run on exit from this contour.
190 This is a list of expressions to be evaluated.
191 The TREE_PURPOSE of each link is the ..._DECL node
192 which the cleanup pertains to. */
193 tree cleanups;
194 /* List of cleanup-lists of blocks containing this block,
195 as they were at the locus where this block appears.
196 There is an element for each containing block,
197 ordered innermost containing block first.
198 The tail of this list can be 0,
199 if all remaining elements would be empty lists.
200 The element's TREE_VALUE is the cleanup-list of that block,
201 which may be null. */
202 tree outer_cleanups;
203 /* Chain of labels defined inside this binding contour.
204 For contours that have stack levels or cleanups. */
205 struct label_chain *label_chain;
206 /* Number of function calls seen, as of start of this block. */
207 int n_function_calls;
208 /* Nonzero if this is associated with a EH region. */
209 int exception_region;
210 /* The saved target_temp_slot_level from our outer block.
211 We may reset target_temp_slot_level to be the level of
212 this block, if that is done, target_temp_slot_level
213 reverts to the saved target_temp_slot_level at the very
214 end of the block. */
215 int block_target_temp_slot_level;
216 /* True if we are currently emitting insns in an area of
217 output code that is controlled by a conditional
218 expression. This is used by the cleanup handling code to
219 generate conditional cleanup actions. */
220 int conditional_code;
221 /* A place to move the start of the exception region for any
222 of the conditional cleanups, must be at the end or after
223 the start of the last unconditional cleanup, and before any
224 conditional branch points. */
225 rtx last_unconditional_cleanup;
226 /* When in a conditional context, this is the specific
227 cleanup list associated with last_unconditional_cleanup,
228 where we place the conditionalized cleanups. */
229 tree *cleanup_ptr;
230 } block;
231 /* For switch (C) or case (Pascal) statements,
232 and also for dummies (see `expand_start_case_dummy'). */
233 struct
234 {
235 /* The insn after which the case dispatch should finally
236 be emitted. Zero for a dummy. */
237 rtx start;
238 /* A list of case labels; it is first built as an AVL tree.
239 During expand_end_case, this is converted to a list, and may be
240 rearranged into a nearly balanced binary tree. */
241 struct case_node *case_list;
242 /* Label to jump to if no case matches. */
243 tree default_label;
244 /* The expression to be dispatched on. */
245 tree index_expr;
246 /* Type that INDEX_EXPR should be converted to. */
247 tree nominal_type;
248 /* Number of range exprs in case statement. */
249 int num_ranges;
250 /* Name of this kind of statement, for warnings. */
251 const char *printname;
252 /* Used to save no_line_numbers till we see the first case label.
253 We set this to -1 when we see the first case label in this
254 case statement. */
255 int line_number_status;
256 } case_stmt;
257 } data;
258 };
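
/* For illustration, while expanding

       while (p)
         switch (*p)
           {
           case 'a': { char buf[n]; use (buf); } break;
           }

   there is an entry on loop_stack for the `while', one on case_stack
   for the `switch', and entries on block_stack for the enclosing
   binding contours; the contour declaring BUF also appears on
   stack_block_stack, since it has a stack level to restore on exit.
   nesting_stack chains all of these through the `all' fields,
   innermost construct first.  */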
259
260 /* Allocate and return a new `struct nesting'. */
261
262 #define ALLOC_NESTING() \
263 (struct nesting *) obstack_alloc (&stmt_obstack, sizeof (struct nesting))
264
265 /* Pop the nesting stack element by element until we pop off
266 the element which is at the top of STACK.
267 Update all the other stacks, popping off elements from them
268 as we pop them from nesting_stack. */
269
270 #define POPSTACK(STACK) \
271 do { struct nesting *target = STACK; \
272 struct nesting *this; \
273 do { this = nesting_stack; \
274 if (loop_stack == this) \
275 loop_stack = loop_stack->next; \
276 if (cond_stack == this) \
277 cond_stack = cond_stack->next; \
278 if (block_stack == this) \
279 block_stack = block_stack->next; \
280 if (stack_block_stack == this) \
281 stack_block_stack = stack_block_stack->next; \
282 if (case_stack == this) \
283 case_stack = case_stack->next; \
284 nesting_depth = nesting_stack->depth - 1; \
285 nesting_stack = this->all; \
286 obstack_free (&stmt_obstack, this); } \
287 while (this != target); } while (0)
288 \f
289 /* In some cases it is impossible to generate code for a forward goto
290 until the label definition is seen. This happens when it may be necessary
291 for the goto to reset the stack pointer: we don't yet know how to do that.
292 So expand_goto puts an entry on this fixup list.
293 Each time a binding contour that resets the stack is exited,
294 we check each fixup.
295 If the target label has now been defined, we can insert the proper code. */
296
297 struct goto_fixup
298 {
299 /* Points to following fixup. */
300 struct goto_fixup *next;
301 /* Points to the insn before the jump insn.
302 If more code must be inserted, it goes after this insn. */
303 rtx before_jump;
304 /* The LABEL_DECL that this jump is jumping to, or 0
305 for break, continue or return. */
306 tree target;
307 /* The BLOCK for the place where this goto was found. */
308 tree context;
309 /* The CODE_LABEL rtx that this is jumping to. */
310 rtx target_rtl;
311 /* Number of binding contours started in current function
312 before the label reference. */
313 int block_start_count;
314 /* The outermost stack level that should be restored for this jump.
315 Each time a binding contour that resets the stack is exited,
316 if the target label is *not* yet defined, this slot is updated. */
317 rtx stack_level;
318 /* List of lists of cleanup expressions to be run by this goto.
319 There is one element for each block that this goto is within.
320 The tail of this list can be 0,
321 if all remaining elements would be empty.
322 The TREE_VALUE contains the cleanup list of that block as of the
323 time this goto was seen.
324 The TREE_ADDRESSABLE flag is 1 for a block that has been exited. */
325 tree cleanup_list_list;
326 };
327
328 /* Within any binding contour that must restore a stack level,
329 all labels are recorded with a chain of these structures. */
330
331 struct label_chain
332 {
333 /* Points to following fixup. */
334 struct label_chain *next;
335 tree label;
336 };
337
338 struct stmt_status
339 {
340 /* Chain of all pending binding contours. */
341 struct nesting *x_block_stack;
342
343 /* If any new stacks are added here, add them to POPSTACKS too. */
344
345 /* Chain of all pending binding contours that restore stack levels
346 or have cleanups. */
347 struct nesting *x_stack_block_stack;
348
349 /* Chain of all pending conditional statements. */
350 struct nesting *x_cond_stack;
351
352 /* Chain of all pending loops. */
353 struct nesting *x_loop_stack;
354
355 /* Chain of all pending case or switch statements. */
356 struct nesting *x_case_stack;
357
358 /* Separate chain including all of the above,
359 chained through the `all' field. */
360 struct nesting *x_nesting_stack;
361
362 /* Number of entries on nesting_stack now. */
363 int x_nesting_depth;
364
365 /* Number of binding contours started so far in this function. */
366 int x_block_start_count;
367
368 /* Each time we expand an expression-statement,
369 record the expr's type and its RTL value here. */
370 tree x_last_expr_type;
371 rtx x_last_expr_value;
372
373 /* Nonzero if within a ({...}) grouping, in which case we must
374 always compute a value for each expr-stmt in case it is the last one. */
375 int x_expr_stmts_for_value;
376
377 /* Filename and line number of last line-number note,
378 whether we actually emitted it or not. */
379 char *x_emit_filename;
380 int x_emit_lineno;
381
382 struct goto_fixup *x_goto_fixup_chain;
383 };
384
385 #define block_stack (current_function->stmt->x_block_stack)
386 #define stack_block_stack (current_function->stmt->x_stack_block_stack)
387 #define cond_stack (current_function->stmt->x_cond_stack)
388 #define loop_stack (current_function->stmt->x_loop_stack)
389 #define case_stack (current_function->stmt->x_case_stack)
390 #define nesting_stack (current_function->stmt->x_nesting_stack)
391 #define nesting_depth (current_function->stmt->x_nesting_depth)
392 #define current_block_start_count (current_function->stmt->x_block_start_count)
393 #define last_expr_type (current_function->stmt->x_last_expr_type)
394 #define last_expr_value (current_function->stmt->x_last_expr_value)
395 #define expr_stmts_for_value (current_function->stmt->x_expr_stmts_for_value)
396 #define emit_filename (current_function->stmt->x_emit_filename)
397 #define emit_lineno (current_function->stmt->x_emit_lineno)
398 #define goto_fixup_chain (current_function->stmt->x_goto_fixup_chain)
399
400 /* Non-zero if we are using EH to handle cleanups. */
401 static int using_eh_for_cleanups_p = 0;
402
403
404 static int n_occurrences PROTO((int, const char *));
405 static void expand_goto_internal PROTO((tree, rtx, rtx));
406 static int expand_fixup PROTO((tree, rtx, rtx));
407 static rtx expand_nl_handler_label PROTO((rtx, rtx));
408 static void expand_nl_goto_receiver PROTO((void));
409 static void expand_nl_goto_receivers PROTO((struct nesting *));
410 static void fixup_gotos PROTO((struct nesting *, rtx, tree,
411 rtx, int));
412 static void expand_null_return_1 PROTO((rtx, int));
413 static void expand_value_return PROTO((rtx));
414 static int tail_recursion_args PROTO((tree, tree));
415 static void expand_cleanups PROTO((tree, tree, int, int));
416 static void check_seenlabel PROTO((void));
417 static void do_jump_if_equal PROTO((rtx, rtx, rtx, int));
418 static int estimate_case_costs PROTO((case_node_ptr));
419 static void group_case_nodes PROTO((case_node_ptr));
420 static void balance_case_nodes PROTO((case_node_ptr *,
421 case_node_ptr));
422 static int node_has_low_bound PROTO((case_node_ptr, tree));
423 static int node_has_high_bound PROTO((case_node_ptr, tree));
424 static int node_is_bounded PROTO((case_node_ptr, tree));
425 static void emit_jump_if_reachable PROTO((rtx));
426 static void emit_case_nodes PROTO((rtx, case_node_ptr, rtx, tree));
427 static int add_case_node PROTO((tree, tree, tree, tree *));
428 static struct case_node *case_tree2list PROTO((case_node *, case_node *));
429 static void mark_cond_nesting PROTO((struct nesting *));
430 static void mark_loop_nesting PROTO((struct nesting *));
431 static void mark_block_nesting PROTO((struct nesting *));
432 static void mark_case_nesting PROTO((struct nesting *));
433 static void mark_goto_fixup PROTO((struct goto_fixup *));
434
435 \f
436 void
437 using_eh_for_cleanups ()
438 {
439 using_eh_for_cleanups_p = 1;
440 }
441
442 /* Mark N (known to be a cond-nesting) for GC. */
443
444 static void
445 mark_cond_nesting (n)
446 struct nesting *n;
447 {
448 while (n)
449 {
450 ggc_mark_rtx (n->exit_label);
451 ggc_mark_rtx (n->data.cond.endif_label);
452 ggc_mark_rtx (n->data.cond.next_label);
453
454 n = n->next;
455 }
456 }
457
458 /* Mark N (known to be a loop-nesting) for GC. */
459
460 static void
461 mark_loop_nesting (n)
462 struct nesting *n;
463 {
464
465 while (n)
466 {
467 ggc_mark_rtx (n->exit_label);
468 ggc_mark_rtx (n->data.loop.start_label);
469 ggc_mark_rtx (n->data.loop.end_label);
470 ggc_mark_rtx (n->data.loop.alt_end_label);
471 ggc_mark_rtx (n->data.loop.continue_label);
472
473 n = n->next;
474 }
475 }
476
477 /* Mark N (known to be a block-nesting) for GC. */
478
479 static void
480 mark_block_nesting (n)
481 struct nesting *n;
482 {
483 while (n)
484 {
485 struct label_chain *l;
486
487 ggc_mark_rtx (n->exit_label);
488 ggc_mark_rtx (n->data.block.stack_level);
489 ggc_mark_rtx (n->data.block.first_insn);
490 ggc_mark_tree (n->data.block.cleanups);
491 ggc_mark_tree (n->data.block.outer_cleanups);
492
493 for (l = n->data.block.label_chain; l != NULL; l = l->next)
494 ggc_mark_tree (l->label);
495
496 ggc_mark_rtx (n->data.block.last_unconditional_cleanup);
497
498 /* ??? cleanup_ptr never points outside the stack, does it? */
499
500 n = n->next;
501 }
502 }
503
504 /* Mark N (known to be a case-nesting) for GC. */
505
506 static void
507 mark_case_nesting (n)
508 struct nesting *n;
509 {
510 while (n)
511 {
512 struct case_node *node;
513
514 ggc_mark_rtx (n->exit_label);
515 ggc_mark_rtx (n->data.case_stmt.start);
516
517 node = n->data.case_stmt.case_list;
518 while (node)
519 {
520 ggc_mark_tree (node->low);
521 ggc_mark_tree (node->high);
522 ggc_mark_tree (node->code_label);
523 node = node->right;
524 }
525
526 ggc_mark_tree (n->data.case_stmt.default_label);
527 ggc_mark_tree (n->data.case_stmt.index_expr);
528 ggc_mark_tree (n->data.case_stmt.nominal_type);
529
530 n = n->next;
531 }
532 }
533
534 /* Mark G for GC. */
535
536 static void
537 mark_goto_fixup (g)
538 struct goto_fixup *g;
539 {
540 while (g)
541 {
542 ggc_mark_rtx (g->before_jump);
543 ggc_mark_tree (g->target);
544 ggc_mark_tree (g->context);
545 ggc_mark_rtx (g->target_rtl);
546 ggc_mark_rtx (g->stack_level);
547 ggc_mark_tree (g->cleanup_list_list);
548
549 g = g->next;
550 }
551 }
552
553 /* Clear out all parts of the state in F that can safely be discarded
554 after the function has been compiled, to let garbage collection
555 reclaim the memory. */
556
557 void
558 free_stmt_status (f)
559 struct function *f;
560 {
561 /* We're about to free the function obstack. If we hold pointers to
562 things allocated there, then we'll try to mark them when we do
563 GC. So, we clear them out here explicitly. */
564 f->stmt->x_goto_fixup_chain = 0;
565 }
566
567 /* Mark P for GC. */
568
569 void
570 mark_stmt_state (p)
571 struct stmt_status *p;
572 {
573 if (p == 0)
574 return;
575
576 mark_block_nesting (p->x_block_stack);
577 mark_cond_nesting (p->x_cond_stack);
578 mark_loop_nesting (p->x_loop_stack);
579 mark_case_nesting (p->x_case_stack);
580
581 ggc_mark_tree (p->x_last_expr_type);
582 /* last_expr_value is only valid if last_expr_type is nonzero. */
583 if (p->x_last_expr_type)
584 ggc_mark_rtx (p->x_last_expr_value);
585
586 mark_goto_fixup (p->x_goto_fixup_chain);
587 }
588
589 void
590 init_stmt ()
591 {
592 gcc_obstack_init (&stmt_obstack);
593 init_eh ();
594 }
595
596 void
597 init_stmt_for_function ()
598 {
599 current_function->stmt
600 = (struct stmt_status *) xmalloc (sizeof (struct stmt_status));
601
602 /* We are not currently within any block, conditional, loop or case. */
603 block_stack = 0;
604 stack_block_stack = 0;
605 loop_stack = 0;
606 case_stack = 0;
607 cond_stack = 0;
608 nesting_stack = 0;
609 nesting_depth = 0;
610
611 current_block_start_count = 0;
612
613 /* No gotos have been expanded yet. */
614 goto_fixup_chain = 0;
615
616 /* We are not processing a ({...}) grouping. */
617 expr_stmts_for_value = 0;
618 last_expr_type = 0;
619 last_expr_value = NULL_RTX;
620
621 init_eh_for_function ();
622 }
623 \f
624 /* Return nonzero if anything is pushed on the loop, condition, or case
625 stack. */
626 int
627 in_control_zone_p ()
628 {
629 return cond_stack || loop_stack || case_stack;
630 }
631
632 /* Record the current file and line. Called from emit_line_note. */
633 void
634 set_file_and_line_for_stmt (file, line)
635 char *file;
636 int line;
637 {
638 emit_filename = file;
639 emit_lineno = line;
640 }
641
642 /* Emit a no-op instruction. */
643
644 void
645 emit_nop ()
646 {
647 rtx last_insn;
648
649 last_insn = get_last_insn ();
650 if (!optimize
651 && (GET_CODE (last_insn) == CODE_LABEL
652 || (GET_CODE (last_insn) == NOTE
653 && prev_real_insn (last_insn) == 0)))
654 emit_insn (gen_nop ());
655 }
656 \f
657 /* Return the rtx-label that corresponds to a LABEL_DECL,
658 creating it if necessary. */
659
660 rtx
661 label_rtx (label)
662 tree label;
663 {
664 if (TREE_CODE (label) != LABEL_DECL)
665 abort ();
666
667 if (DECL_RTL (label))
668 return DECL_RTL (label);
669
670 return DECL_RTL (label) = gen_label_rtx ();
671 }
672
673 /* Add an unconditional jump to LABEL as the next sequential instruction. */
674
675 void
676 emit_jump (label)
677 rtx label;
678 {
679 do_pending_stack_adjust ();
680 emit_jump_insn (gen_jump (label));
681 emit_barrier ();
682 }
683
684 /* Emit code to jump to the address
685 specified by the pointer expression EXP. */
686
687 void
688 expand_computed_goto (exp)
689 tree exp;
690 {
691 rtx x = expand_expr (exp, NULL_RTX, VOIDmode, 0);
692
693 #ifdef POINTERS_EXTEND_UNSIGNED
694 x = convert_memory_address (Pmode, x);
695 #endif
696
697 emit_queue ();
698 /* Be sure the function is executable. */
699 if (current_function_check_memory_usage)
700 emit_library_call (chkr_check_exec_libfunc, 1,
701 VOIDmode, 1, x, ptr_mode);
702
703 do_pending_stack_adjust ();
704 emit_indirect_jump (x);
705
706 current_function_has_computed_jump = 1;
707 }
708 \f
709 /* Handle goto statements and the labels that they can go to. */
710
711 /* Specify the location in the RTL code of a label LABEL,
712 which is a LABEL_DECL tree node.
713
714 This is used for the kind of label that the user can jump to with a
715 goto statement, and for alternatives of a switch or case statement.
716 RTL labels generated for loops and conditionals don't go through here;
717 they are generated directly at the RTL level, by other functions below.
718
719 Note that this has nothing to do with defining label *names*.
720 Languages vary in how they do that and what that even means. */
721
722 void
723 expand_label (label)
724 tree label;
725 {
726 struct label_chain *p;
727
728 do_pending_stack_adjust ();
729 emit_label (label_rtx (label));
730 if (DECL_NAME (label))
731 LABEL_NAME (DECL_RTL (label)) = IDENTIFIER_POINTER (DECL_NAME (label));
732
733 if (stack_block_stack != 0)
734 {
735 p = (struct label_chain *) oballoc (sizeof (struct label_chain));
736 p->next = stack_block_stack->data.block.label_chain;
737 stack_block_stack->data.block.label_chain = p;
738 p->label = label;
739 }
740 }
741
742 /* Declare that LABEL (a LABEL_DECL) may be used for nonlocal gotos
743 from nested functions. */
744
745 void
746 declare_nonlocal_label (label)
747 tree label;
748 {
749 rtx slot = assign_stack_local (Pmode, GET_MODE_SIZE (Pmode), 0);
750
751 nonlocal_labels = tree_cons (NULL_TREE, label, nonlocal_labels);
752 LABEL_PRESERVE_P (label_rtx (label)) = 1;
753 if (nonlocal_goto_handler_slots == 0)
754 {
755 emit_stack_save (SAVE_NONLOCAL,
756 &nonlocal_goto_stack_level,
757 PREV_INSN (tail_recursion_reentry));
758 }
759 nonlocal_goto_handler_slots
760 = gen_rtx_EXPR_LIST (VOIDmode, slot, nonlocal_goto_handler_slots);
761 }
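
/* For illustration, in GNU C a nested function may jump to a label of
   its containing function, provided the label is declared there as a
   local label:

       void
       outer ()
       {
         __label__ failure;
         void inner (int bad) { if (bad) goto failure; }

         inner (check ());
         return;
       failure:
         fatal ();
       }

   A label used this way is recorded with declare_nonlocal_label, and
   the `goto failure' in INNER is then expanded as a nonlocal goto by
   expand_goto below.  */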
762
763 /* Generate RTL code for a `goto' statement with target label LABEL.
764 LABEL should be a LABEL_DECL tree node that was or will later be
765 defined with `expand_label'. */
766
767 void
768 expand_goto (label)
769 tree label;
770 {
771 tree context;
772
773 /* Check for a nonlocal goto to a containing function. */
774 context = decl_function_context (label);
775 if (context != 0 && context != current_function_decl)
776 {
777 struct function *p = find_function_data (context);
778 rtx label_ref = gen_rtx_LABEL_REF (Pmode, label_rtx (label));
779 rtx temp, handler_slot;
780 tree link;
781
782 /* Find the corresponding handler slot for this label. */
783 handler_slot = p->x_nonlocal_goto_handler_slots;
784 for (link = p->x_nonlocal_labels; TREE_VALUE (link) != label;
785 link = TREE_CHAIN (link))
786 handler_slot = XEXP (handler_slot, 1);
787 handler_slot = XEXP (handler_slot, 0);
788
789 p->has_nonlocal_label = 1;
790 current_function_has_nonlocal_goto = 1;
791 LABEL_REF_NONLOCAL_P (label_ref) = 1;
792
793 /* Copy the rtl for the slots so that they won't be shared in
794 case the virtual stack vars register gets instantiated differently
795 in the parent than in the child. */
796
797 #if HAVE_nonlocal_goto
798 if (HAVE_nonlocal_goto)
799 emit_insn (gen_nonlocal_goto (lookup_static_chain (label),
800 copy_rtx (handler_slot),
801 copy_rtx (p->x_nonlocal_goto_stack_level),
802 label_ref));
803 else
804 #endif
805 {
806 rtx addr;
807
808 /* Restore frame pointer for containing function.
809 This sets the actual hard register used for the frame pointer
810 to the location of the function's incoming static chain info.
811 The non-local goto handler will then adjust it to contain the
812 proper value and reload the argument pointer, if needed. */
813 emit_move_insn (hard_frame_pointer_rtx, lookup_static_chain (label));
814
815 /* We have now loaded the frame pointer hardware register with
816 the address that corresponds to the start of the virtual
817 stack vars.  So replace virtual_stack_vars_rtx in all
818 addresses we use with hard_frame_pointer_rtx. */
819
820 /* Get addr of containing function's current nonlocal goto handler,
821 which will do any cleanups and then jump to the label. */
822 addr = copy_rtx (handler_slot);
823 temp = copy_to_reg (replace_rtx (addr, virtual_stack_vars_rtx,
824 hard_frame_pointer_rtx));
825
826 /* Restore the stack pointer. Note this uses fp just restored. */
827 addr = p->x_nonlocal_goto_stack_level;
828 if (addr)
829 addr = replace_rtx (copy_rtx (addr),
830 virtual_stack_vars_rtx,
831 hard_frame_pointer_rtx);
832
833 emit_stack_restore (SAVE_NONLOCAL, addr, NULL_RTX);
834
835 /* USE of hard_frame_pointer_rtx added for consistency; not clear if
836 really needed. */
837 emit_insn (gen_rtx_USE (VOIDmode, hard_frame_pointer_rtx));
838 emit_insn (gen_rtx_USE (VOIDmode, stack_pointer_rtx));
839 emit_indirect_jump (temp);
840 }
841 }
842 else
843 expand_goto_internal (label, label_rtx (label), NULL_RTX);
844 }
845
846 /* Generate RTL code for a `goto' statement with target label BODY,
847 a LABEL_DECL tree node or 0.  LABEL should be the corresponding CODE_LABEL rtx.
848 LAST_INSN, if non-0, is the rtx we should consider as the last
849 insn emitted (for the purposes of cleaning up a return). */
850
851 static void
852 expand_goto_internal (body, label, last_insn)
853 tree body;
854 rtx label;
855 rtx last_insn;
856 {
857 struct nesting *block;
858 rtx stack_level = 0;
859
860 if (GET_CODE (label) != CODE_LABEL)
861 abort ();
862
863 /* If label has already been defined, we can tell now
864 whether and how we must alter the stack level. */
865
866 if (PREV_INSN (label) != 0)
867 {
868 /* Find the innermost pending block that contains the label.
869 (Check containment by comparing insn-uids.)
870 Then restore the outermost stack level within that block,
871 and do cleanups of all blocks contained in it. */
872 for (block = block_stack; block; block = block->next)
873 {
874 if (INSN_UID (block->data.block.first_insn) < INSN_UID (label))
875 break;
876 if (block->data.block.stack_level != 0)
877 stack_level = block->data.block.stack_level;
878 /* Execute the cleanups for blocks we are exiting. */
879 if (block->data.block.cleanups != 0)
880 {
881 expand_cleanups (block->data.block.cleanups, NULL_TREE, 1, 1);
882 do_pending_stack_adjust ();
883 }
884 }
885
886 if (stack_level)
887 {
888 /* Ensure stack adjust isn't done by emit_jump, as this
889 would clobber the stack pointer. This one should be
890 deleted as dead by flow. */
891 clear_pending_stack_adjust ();
892 do_pending_stack_adjust ();
893 emit_stack_restore (SAVE_BLOCK, stack_level, NULL_RTX);
894 }
895
896 if (body != 0 && DECL_TOO_LATE (body))
897 error ("jump to `%s' invalidly jumps into binding contour",
898 IDENTIFIER_POINTER (DECL_NAME (body)));
899 }
900 /* Label not yet defined: may need to put this goto
901 on the fixup list. */
902 else if (! expand_fixup (body, label, last_insn))
903 {
904 /* No fixup needed. Record that the label is the target
905 of at least one goto that has no fixup. */
906 if (body != 0)
907 TREE_ADDRESSABLE (body) = 1;
908 }
909
910 emit_jump (label);
911 }
912 \f
913 /* Generate if necessary a fixup for a goto
914 whose target label in tree structure (if any) is TREE_LABEL
915 and whose target in rtl is RTL_LABEL.
916
917 If LAST_INSN is nonzero, we pretend that the jump appears
918 after insn LAST_INSN instead of at the current point in the insn stream.
919
920 The fixup will be used later to insert insns just before the goto.
921 Those insns will restore the stack level as appropriate for the
922 target label, and will (in the case of C++) also invoke any object
923 destructors which have to be invoked when we exit the scopes which
924 are exited by the goto.
925
926 Value is nonzero if a fixup is made. */
927
928 static int
929 expand_fixup (tree_label, rtl_label, last_insn)
930 tree tree_label;
931 rtx rtl_label;
932 rtx last_insn;
933 {
934 struct nesting *block, *end_block;
935
936 /* See if we can recognize which block the label will be output in.
937 This is possible in some very common cases.
938 If we succeed, set END_BLOCK to that block.
939 Otherwise, set it to 0. */
940
941 if (cond_stack
942 && (rtl_label == cond_stack->data.cond.endif_label
943 || rtl_label == cond_stack->data.cond.next_label))
944 end_block = cond_stack;
945 /* If we are in a loop, recognize certain labels which
946 are likely targets. This reduces the number of fixups
947 we need to create. */
948 else if (loop_stack
949 && (rtl_label == loop_stack->data.loop.start_label
950 || rtl_label == loop_stack->data.loop.end_label
951 || rtl_label == loop_stack->data.loop.continue_label))
952 end_block = loop_stack;
953 else
954 end_block = 0;
955
956 /* Now set END_BLOCK to the binding level to which we will return. */
957
958 if (end_block)
959 {
960 struct nesting *next_block = end_block->all;
961 block = block_stack;
962
963 /* First see if the END_BLOCK is inside the innermost binding level.
964 If so, then no cleanups or stack levels are relevant. */
965 while (next_block && next_block != block)
966 next_block = next_block->all;
967
968 if (next_block)
969 return 0;
970
971 /* Otherwise, set END_BLOCK to the innermost binding level
972 which is outside the relevant control-structure nesting. */
973 next_block = block_stack->next;
974 for (block = block_stack; block != end_block; block = block->all)
975 if (block == next_block)
976 next_block = next_block->next;
977 end_block = next_block;
978 }
979
980 /* Does any containing block have a stack level or cleanups?
981 If not, no fixup is needed, and that is the normal case
982 (the only case, for standard C). */
983 for (block = block_stack; block != end_block; block = block->next)
984 if (block->data.block.stack_level != 0
985 || block->data.block.cleanups != 0)
986 break;
987
988 if (block != end_block)
989 {
990 /* Ok, a fixup is needed. Add a fixup to the list of such. */
991 struct goto_fixup *fixup
992 = (struct goto_fixup *) oballoc (sizeof (struct goto_fixup));
993 /* In case an old stack level is restored, make sure that comes
994 after any pending stack adjust. */
995 /* ?? If the fixup isn't to come at the present position,
996 doing the stack adjust here isn't useful. Doing it with our
997 settings at that location isn't useful either. Let's hope
998 someone does it! */
999 if (last_insn == 0)
1000 do_pending_stack_adjust ();
1001 fixup->target = tree_label;
1002 fixup->target_rtl = rtl_label;
1003
1004 /* Create a BLOCK node and a corresponding matched set of
1005 NOTE_INSN_BEGIN_BLOCK and NOTE_INSN_END_BLOCK notes at
1006 this point. The notes will encapsulate any and all fixup
1007 code which we might later insert at this point in the insn
1008 stream. Also, the BLOCK node will be the parent (i.e. the
1009 `SUPERBLOCK') of any other BLOCK nodes which we might create
1010 later on when we are expanding the fixup code.
1011
1012 Note that optimization passes (including expand_end_loop)
1013 might move the *_BLOCK notes away, so we use a NOTE_INSN_DELETED
1014 as a placeholder. */
1015
1016 {
1017 register rtx original_before_jump
1018 = last_insn ? last_insn : get_last_insn ();
1019 rtx start;
1020
1021 start_sequence ();
1022 pushlevel (0);
1023 start = emit_note (NULL_PTR, NOTE_INSN_BLOCK_BEG);
1024 fixup->before_jump = emit_note (NULL_PTR, NOTE_INSN_DELETED);
1025 last_block_end_note = emit_note (NULL_PTR, NOTE_INSN_BLOCK_END);
1026 fixup->context = poplevel (1, 0, 0); /* Create the BLOCK node now! */
1027 end_sequence ();
1028 emit_insns_after (start, original_before_jump);
1029 }
1030
1031 fixup->block_start_count = current_block_start_count;
1032 fixup->stack_level = 0;
1033 fixup->cleanup_list_list
1034 = ((block->data.block.outer_cleanups
1035 || block->data.block.cleanups)
1036 ? tree_cons (NULL_TREE, block->data.block.cleanups,
1037 block->data.block.outer_cleanups)
1038 : 0);
1039 fixup->next = goto_fixup_chain;
1040 goto_fixup_chain = fixup;
1041 }
1042
1043 return block != 0;
1044 }
1045
1046
1047 \f
1048 /* Expand any needed fixups in the outermost binding level of the
1049 function. FIRST_INSN is the first insn in the function. */
1050
1051 void
1052 expand_fixups (first_insn)
1053 rtx first_insn;
1054 {
1055 fixup_gotos (NULL_PTR, NULL_RTX, NULL_TREE, first_insn, 0);
1056 }
1057
1058 /* When exiting a binding contour, process all pending gotos requiring fixups.
1059 THISBLOCK is the structure that describes the block being exited.
1060 STACK_LEVEL is the rtx for the stack level to restore exiting this contour.
1061 CLEANUP_LIST is a list of expressions to evaluate on exiting this contour.
1062 FIRST_INSN is the insn that began this contour.
1063
1064 Gotos that jump out of this contour must restore the
1065 stack level and do the cleanups before actually jumping.
1066
1067 DONT_JUMP_IN nonzero means report an error if there is a jump into this
1068 contour from before the beginning of the contour.
1069 This is also done if STACK_LEVEL is nonzero. */
1070
1071 static void
1072 fixup_gotos (thisblock, stack_level, cleanup_list, first_insn, dont_jump_in)
1073 struct nesting *thisblock;
1074 rtx stack_level;
1075 tree cleanup_list;
1076 rtx first_insn;
1077 int dont_jump_in;
1078 {
1079 register struct goto_fixup *f, *prev;
1080
1081 /* F is the fixup we are considering; PREV is the previous one. */
1082 /* We run this loop in two passes so that cleanups of exited blocks
1083 are run first, and blocks that are exited are marked so
1084 afterwards. */
1085
1086 for (prev = 0, f = goto_fixup_chain; f; prev = f, f = f->next)
1087 {
1088 /* Test for a fixup that is inactive because it is already handled. */
1089 if (f->before_jump == 0)
1090 {
1091 /* Delete inactive fixup from the chain, if that is easy to do. */
1092 if (prev != 0)
1093 prev->next = f->next;
1094 }
1095 /* Has this fixup's target label been defined?
1096 If so, we can finalize it. */
1097 else if (PREV_INSN (f->target_rtl) != 0)
1098 {
1099 register rtx cleanup_insns;
1100
1101 /* Get the first non-label after the label
1102 this goto jumps to. If that's before this scope begins,
1103 we don't have a jump into the scope. */
1104 rtx after_label = f->target_rtl;
1105 while (after_label != 0 && GET_CODE (after_label) == CODE_LABEL)
1106 after_label = NEXT_INSN (after_label);
1107
1108 /* If this fixup jumped into this contour from before the beginning
1109 of this contour, report an error. */
1110 /* ??? Bug: this does not detect jumping in through intermediate
1111 blocks that have stack levels or cleanups.
1112 It detects only a problem with the innermost block
1113 around the label. */
1114 if (f->target != 0
1115 && (dont_jump_in || stack_level || cleanup_list)
1116 /* If AFTER_LABEL is 0, it means the jump goes to the end
1117 of the rtl, which means it jumps into this scope. */
1118 && (after_label == 0
1119 || INSN_UID (first_insn) < INSN_UID (after_label))
1120 && INSN_UID (first_insn) > INSN_UID (f->before_jump)
1121 && ! DECL_ERROR_ISSUED (f->target))
1122 {
1123 error_with_decl (f->target,
1124 "label `%s' used before containing binding contour");
1125 /* Prevent multiple errors for one label. */
1126 DECL_ERROR_ISSUED (f->target) = 1;
1127 }
1128
1129 /* We will expand the cleanups into a sequence of their own and
1130 then later on we will attach this new sequence to the insn
1131 stream just ahead of the actual jump insn. */
1132
1133 start_sequence ();
1134
1135 /* Temporarily restore the lexical context where we will
1136 logically be inserting the fixup code. We do this for the
1137 sake of getting the debugging information right. */
1138
1139 pushlevel (0);
1140 set_block (f->context);
1141
1142 /* Expand the cleanups for blocks this jump exits. */
1143 if (f->cleanup_list_list)
1144 {
1145 tree lists;
1146 for (lists = f->cleanup_list_list; lists; lists = TREE_CHAIN (lists))
1147 /* Marked elements correspond to blocks that have been closed.
1148 Do their cleanups. */
1149 if (TREE_ADDRESSABLE (lists)
1150 && TREE_VALUE (lists) != 0)
1151 {
1152 expand_cleanups (TREE_VALUE (lists), NULL_TREE, 1, 1);
1153 /* Pop any pushes done in the cleanups,
1154 in case function is about to return. */
1155 do_pending_stack_adjust ();
1156 }
1157 }
1158
1159 /* Restore stack level for the biggest contour that this
1160 jump jumps out of. */
1161 if (f->stack_level)
1162 emit_stack_restore (SAVE_BLOCK, f->stack_level, f->before_jump);
1163
1164 /* Finish up the sequence containing the insns which implement the
1165 necessary cleanups, and then attach that whole sequence to the
1166 insn stream just ahead of the actual jump insn. Attaching it
1167 at that point insures that any cleanups which are in fact
1168 implicit C++ object destructions (which must be executed upon
1169 leaving the block) appear (to the debugger) to be taking place
1170 in an area of the generated code where the object(s) being
1171 destructed are still "in scope". */
1172
1173 cleanup_insns = get_insns ();
1174 poplevel (1, 0, 0);
1175
1176 end_sequence ();
1177 emit_insns_after (cleanup_insns, f->before_jump);
1178
1179
1180 f->before_jump = 0;
1181 }
1182 }
1183
1184 /* For any still-undefined labels, do the cleanups for this block now.
1185 We must do this now since items in the cleanup list may go out
1186 of scope when the block ends. */
1187 for (prev = 0, f = goto_fixup_chain; f; prev = f, f = f->next)
1188 if (f->before_jump != 0
1189 && PREV_INSN (f->target_rtl) == 0
1190 /* Label has still not appeared. If we are exiting a block with
1191 a stack level to restore, that started before the fixup,
1192 mark this stack level as needing restoration
1193 when the fixup is later finalized. */
1194 && thisblock != 0
1195 /* Note: if THISBLOCK == 0 and we have a label that hasn't appeared, it
1196 means the label is undefined. That's erroneous, but possible. */
1197 && (thisblock->data.block.block_start_count
1198 <= f->block_start_count))
1199 {
1200 tree lists = f->cleanup_list_list;
1201 rtx cleanup_insns;
1202
1203 for (; lists; lists = TREE_CHAIN (lists))
1204 /* If the following elt. corresponds to our containing block
1205 then the elt. must be for this block. */
1206 if (TREE_CHAIN (lists) == thisblock->data.block.outer_cleanups)
1207 {
1208 start_sequence ();
1209 pushlevel (0);
1210 set_block (f->context);
1211 expand_cleanups (TREE_VALUE (lists), NULL_TREE, 1, 1);
1212 do_pending_stack_adjust ();
1213 cleanup_insns = get_insns ();
1214 poplevel (1, 0, 0);
1215 end_sequence ();
1216 if (cleanup_insns != 0)
1217 f->before_jump
1218 = emit_insns_after (cleanup_insns, f->before_jump);
1219
1220 f->cleanup_list_list = TREE_CHAIN (lists);
1221 }
1222
1223 if (stack_level)
1224 f->stack_level = stack_level;
1225 }
1226 }
1227 \f
1228 /* Return the number of times character C occurs in string S. */
1229 static int
1230 n_occurrences (c, s)
1231 int c;
1232 const char *s;
1233 {
1234 int n = 0;
1235 while (*s)
1236 n += (*s++ == c);
1237 return n;
1238 }
1239 \f
1240 /* Generate RTL for an asm statement (explicit assembler code).
1241 BODY is a STRING_CST node containing the assembler code text,
1242 or an ADDR_EXPR containing a STRING_CST. */
1243
1244 void
1245 expand_asm (body)
1246 tree body;
1247 {
1248 if (current_function_check_memory_usage)
1249 {
1250 error ("`asm' cannot be used in function where memory usage is checked");
1251 return;
1252 }
1253
1254 if (TREE_CODE (body) == ADDR_EXPR)
1255 body = TREE_OPERAND (body, 0);
1256
1257 emit_insn (gen_rtx_ASM_INPUT (VOIDmode,
1258 TREE_STRING_POINTER (body)));
1259 last_expr_type = 0;
1260 }
1261
1262 /* Generate RTL for an asm statement with arguments.
1263 STRING is the instruction template.
1264 OUTPUTS is a list of output arguments (lvalues); INPUTS a list of inputs.
1265 Each output or input has an expression in the TREE_VALUE and
1266 a constraint-string in the TREE_PURPOSE.
1267 CLOBBERS is a list of STRING_CST nodes each naming a hard register
1268 that is clobbered by this insn.
1269
1270 Not all kinds of lvalue that may appear in OUTPUTS can be stored directly.
1271 Some elements of OUTPUTS may be replaced with trees representing temporary
1272 values. The caller should copy those temporary values to the originally
1273 specified lvalues.
1274
1275 VOL nonzero means the insn is volatile; don't optimize it. */
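
/* For illustration, a GNU C statement such as (on the x86, say)

       asm volatile ("addl %2,%0"
                     : "=r" (sum)
                     : "0" (a), "g" (b)
                     : "cc");

   reaches this point with STRING holding the template, OUTPUTS and
   INPUTS holding the operand lists (expression in TREE_VALUE,
   constraint string in TREE_PURPOSE), CLOBBERS holding ("cc"), and
   VOL nonzero.  */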
1276
1277 void
1278 expand_asm_operands (string, outputs, inputs, clobbers, vol, filename, line)
1279 tree string, outputs, inputs, clobbers;
1280 int vol;
1281 char *filename;
1282 int line;
1283 {
1284 rtvec argvec, constraints;
1285 rtx body;
1286 int ninputs = list_length (inputs);
1287 int noutputs = list_length (outputs);
1288 int ninout = 0;
1289 int nclobbers;
1290 tree tail;
1291 register int i;
1292 /* Vector of RTX's of evaluated output operands. */
1293 rtx *output_rtx = (rtx *) alloca (noutputs * sizeof (rtx));
1294 int *inout_opnum = (int *) alloca (noutputs * sizeof (int));
1295 rtx *real_output_rtx = (rtx *) alloca (noutputs * sizeof (rtx));
1296 enum machine_mode *inout_mode
1297 = (enum machine_mode *) alloca (noutputs * sizeof (enum machine_mode));
1298 /* The insn we have emitted. */
1299 rtx insn;
1300
1301 /* An ASM with no outputs needs to be treated as volatile, for now. */
1302 if (noutputs == 0)
1303 vol = 1;
1304
1305 if (current_function_check_memory_usage)
1306 {
1307 error ("`asm' cannot be used with `-fcheck-memory-usage'");
1308 return;
1309 }
1310
1311 #ifdef MD_ASM_CLOBBERS
1312 /* Sometimes we wish to automatically clobber registers across an asm.
1313 Case in point is when the i386 backend moved from cc0 to a hard reg --
1314 maintaining source-level compatibility means automatically clobbering
1315 the flags register. */
1316 MD_ASM_CLOBBERS (clobbers);
1317 #endif
1318
1319 if (current_function_check_memory_usage)
1320 {
1321 error ("`asm' cannot be used in function where memory usage is checked");
1322 return;
1323 }
1324
1325 /* Count the number of meaningful clobbered registers, ignoring what
1326 we would ignore later. */
1327 nclobbers = 0;
1328 for (tail = clobbers; tail; tail = TREE_CHAIN (tail))
1329 {
1330 char *regname = TREE_STRING_POINTER (TREE_VALUE (tail));
1331 i = decode_reg_name (regname);
1332 if (i >= 0 || i == -4)
1333 ++nclobbers;
1334 else if (i == -2)
1335 error ("unknown register name `%s' in `asm'", regname);
1336 }
1337
1338 last_expr_type = 0;
1339
1340 /* Check that the number of alternatives is constant across all
1341 operands. */
1342 if (outputs || inputs)
1343 {
1344 tree tmp = TREE_PURPOSE (outputs ? outputs : inputs);
1345 int nalternatives = n_occurrences (',', TREE_STRING_POINTER (tmp));
1346 tree next = inputs;
1347
1348 if (nalternatives + 1 > MAX_RECOG_ALTERNATIVES)
1349 {
1350 error ("too many alternatives in `asm'");
1351 return;
1352 }
1353
1354 tmp = outputs;
1355 while (tmp)
1356 {
1357 char *constraint = TREE_STRING_POINTER (TREE_PURPOSE (tmp));
1358 if (n_occurrences (',', constraint) != nalternatives)
1359 {
1360 error ("operand constraints for `asm' differ in number of alternatives");
1361 return;
1362 }
1363 if (TREE_CHAIN (tmp))
1364 tmp = TREE_CHAIN (tmp);
1365 else
1366 tmp = next, next = 0;
1367 }
1368 }
1369
1370 for (i = 0, tail = outputs; tail; tail = TREE_CHAIN (tail), i++)
1371 {
1372 tree val = TREE_VALUE (tail);
1373 tree type = TREE_TYPE (val);
1374 char *constraint;
1375 char *p;
1376 int c_len;
1377 int j;
1378 int is_inout = 0;
1379 int allows_reg = 0;
1380 int allows_mem = 0;
1381
1382 /* If there's an erroneous arg, emit no insn. */
1383 if (TREE_TYPE (val) == error_mark_node)
1384 return;
1385
1386 /* Make sure constraint has `=' and does not have `+'. Also, see
1387 if it allows any register. Be liberal on the latter test, since
1388 the worst that happens if we get it wrong is we issue an error
1389 message. */
1390
1391 c_len = TREE_STRING_LENGTH (TREE_PURPOSE (tail)) - 1;
1392 constraint = TREE_STRING_POINTER (TREE_PURPOSE (tail));
1393
1394 /* Allow the `=' or `+' to not be at the beginning of the string,
1395 since it wasn't explicitly documented that way, and there is a
1396 large body of code that puts it last. Swap the character to
1397 the front, so as not to uglify any place else. */
1398 switch (c_len)
1399 {
1400 default:
1401 if ((p = strchr (constraint, '=')) != NULL)
1402 break;
1403 if ((p = strchr (constraint, '+')) != NULL)
1404 break;
1405 case 0:
1406 error ("output operand constraint lacks `='");
1407 return;
1408 }
1409
1410 if (p != constraint)
1411 {
1412 j = *p;
1413 bcopy (constraint, constraint+1, p-constraint);
1414 *constraint = j;
1415
1416 warning ("output constraint `%c' for operand %d is not at the beginning", j, i);
1417 }
1418
1419 is_inout = constraint[0] == '+';
1420 /* Replace '+' with '='. */
1421 constraint[0] = '=';
1422 /* Make sure we can specify the matching operand. */
1423 if (is_inout && i > 9)
1424 {
1425 error ("output operand constraint %d contains `+'", i);
1426 return;
1427 }
1428
1429 for (j = 1; j < c_len; j++)
1430 switch (constraint[j])
1431 {
1432 case '+':
1433 case '=':
1434 error ("operand constraint contains '+' or '=' at illegal position.");
1435 return;
1436
1437 case '%':
1438 if (i + 1 == ninputs + noutputs)
1439 {
1440 error ("`%%' constraint used with last operand");
1441 return;
1442 }
1443 break;
1444
1445 case '?': case '!': case '*': case '&':
1446 case 'E': case 'F': case 'G': case 'H':
1447 case 's': case 'i': case 'n':
1448 case 'I': case 'J': case 'K': case 'L': case 'M':
1449 case 'N': case 'O': case 'P': case ',':
1450 #ifdef EXTRA_CONSTRAINT
1451 case 'Q': case 'R': case 'S': case 'T': case 'U':
1452 #endif
1453 break;
1454
1455 case '0': case '1': case '2': case '3': case '4':
1456 case '5': case '6': case '7': case '8': case '9':
1457 error ("matching constraint not valid in output operand");
1458 break;
1459
1460 case 'V': case 'm': case 'o':
1461 allows_mem = 1;
1462 break;
1463
1464 case '<': case '>':
1465 /* ??? Before flow, auto inc/dec insns are not supposed to exist,
1466 excepting those that expand_call created. So match memory
1467 and hope. */
1468 allows_mem = 1;
1469 break;
1470
1471 case 'g': case 'X':
1472 allows_reg = 1;
1473 allows_mem = 1;
1474 break;
1475
1476 case 'p': case 'r':
1477 default:
1478 allows_reg = 1;
1479 break;
1480 }
1481
1482 /* If an output operand is not a decl or indirect ref and our constraint
1483 allows a register, make a temporary to act as an intermediate.
1484 Make the asm insn write into that, then our caller will copy it to
1485 the real output operand. Likewise for promoted variables. */
1486
1487 real_output_rtx[i] = NULL_RTX;
1488 if ((TREE_CODE (val) == INDIRECT_REF
1489 && allows_mem)
1490 || (TREE_CODE_CLASS (TREE_CODE (val)) == 'd'
1491 && (allows_mem || GET_CODE (DECL_RTL (val)) == REG)
1492 && ! (GET_CODE (DECL_RTL (val)) == REG
1493 && GET_MODE (DECL_RTL (val)) != TYPE_MODE (type)))
1494 || ! allows_reg
1495 || is_inout)
1496 {
1497 if (! allows_reg)
1498 mark_addressable (TREE_VALUE (tail));
1499
1500 output_rtx[i]
1501 = expand_expr (TREE_VALUE (tail), NULL_RTX, VOIDmode,
1502 EXPAND_MEMORY_USE_WO);
1503
1504 if (! allows_reg && GET_CODE (output_rtx[i]) != MEM)
1505 error ("output number %d not directly addressable", i);
1506 if (! allows_mem && GET_CODE (output_rtx[i]) == MEM)
1507 {
1508 real_output_rtx[i] = protect_from_queue (output_rtx[i], 1);
1509 output_rtx[i] = gen_reg_rtx (GET_MODE (output_rtx[i]));
1510 if (is_inout)
1511 emit_move_insn (output_rtx[i], real_output_rtx[i]);
1512 }
1513 }
1514 else
1515 {
1516 output_rtx[i] = assign_temp (type, 0, 0, 0);
1517 TREE_VALUE (tail) = make_tree (type, output_rtx[i]);
1518 }
1519
1520 if (is_inout)
1521 {
1522 inout_mode[ninout] = TYPE_MODE (TREE_TYPE (TREE_VALUE (tail)));
1523 inout_opnum[ninout++] = i;
1524 }
1525 }
1526
1527 ninputs += ninout;
1528 if (ninputs + noutputs > MAX_RECOG_OPERANDS)
1529 {
1530 error ("more than %d operands in `asm'", MAX_RECOG_OPERANDS);
1531 return;
1532 }
1533
1534 /* Make vectors for the expression-rtx and constraint strings. */
1535
1536 argvec = rtvec_alloc (ninputs);
1537 constraints = rtvec_alloc (ninputs);
1538
1539 body = gen_rtx_ASM_OPERANDS (VOIDmode,
1540 TREE_STRING_POINTER (string), "", 0, argvec,
1541 constraints, filename, line);
1542
1543 MEM_VOLATILE_P (body) = vol;
1544
1545 /* Eval the inputs and put them into ARGVEC.
1546 Put their constraints into ASM_INPUTs and store in CONSTRAINTS. */
1547
1548 i = 0;
1549 for (tail = inputs; tail; tail = TREE_CHAIN (tail))
1550 {
1551 int j;
1552 int allows_reg = 0, allows_mem = 0;
1553 char *constraint, *orig_constraint;
1554 int c_len;
1555 rtx op;
1556
1557 /* If there's an erroneous arg, emit no insn,
1558 because the ASM_INPUT would get VOIDmode
1559 and that could cause a crash in reload. */
1560 if (TREE_TYPE (TREE_VALUE (tail)) == error_mark_node)
1561 return;
1562
1563 /* ??? Can this happen, and does the error message make any sense? */
1564 if (TREE_PURPOSE (tail) == NULL_TREE)
1565 {
1566 error ("hard register `%s' listed as input operand to `asm'",
1567 TREE_STRING_POINTER (TREE_VALUE (tail)) );
1568 return;
1569 }
1570
1571 c_len = TREE_STRING_LENGTH (TREE_PURPOSE (tail)) - 1;
1572 constraint = TREE_STRING_POINTER (TREE_PURPOSE (tail));
1573 orig_constraint = constraint;
1574
1575 /* Make sure constraint has neither `=', `+', nor '&'. */
1576
1577 for (j = 0; j < c_len; j++)
1578 switch (constraint[j])
1579 {
1580 case '+': case '=': case '&':
1581 if (constraint == orig_constraint)
1582 {
1583 error ("input operand constraint contains `%c'", constraint[j]);
1584 return;
1585 }
1586 break;
1587
1588 case '%':
1589 if (constraint == orig_constraint
1590 && i + 1 == ninputs - ninout)
1591 {
1592 error ("`%%' constraint used with last operand");
1593 return;
1594 }
1595 break;
1596
1597 case 'V': case 'm': case 'o':
1598 allows_mem = 1;
1599 break;
1600
1601 case '<': case '>':
1602 case '?': case '!': case '*':
1603 case 'E': case 'F': case 'G': case 'H': case 'X':
1604 case 's': case 'i': case 'n':
1605 case 'I': case 'J': case 'K': case 'L': case 'M':
1606 case 'N': case 'O': case 'P': case ',':
1607 #ifdef EXTRA_CONSTRAINT
1608 case 'Q': case 'R': case 'S': case 'T': case 'U':
1609 #endif
1610 break;
1611
1612 /* Whether or not a numeric constraint allows a register is
1613 decided by the matching constraint, and so there is no need
1614 to do anything special with them. We must handle them in
1615 the default case, so that we don't unnecessarily force
1616 operands to memory. */
1617 case '0': case '1': case '2': case '3': case '4':
1618 case '5': case '6': case '7': case '8': case '9':
1619 if (constraint[j] >= '0' + noutputs)
1620 {
1621 error
1622 ("matching constraint references invalid operand number");
1623 return;
1624 }
1625
1626 /* Try and find the real constraint for this dup. */
1627 if ((j == 0 && c_len == 1)
1628 || (j == 1 && c_len == 2 && constraint[0] == '%'))
1629 {
1630 tree o = outputs;
1631 for (j = constraint[j] - '0'; j > 0; --j)
1632 o = TREE_CHAIN (o);
1633
1634 c_len = TREE_STRING_LENGTH (TREE_PURPOSE (o)) - 1;
1635 constraint = TREE_STRING_POINTER (TREE_PURPOSE (o));
1636 j = 0;
1637 break;
1638 }
1639
1640 /* ... fall through ... */
1641
1642 case 'p': case 'r':
1643 default:
1644 allows_reg = 1;
1645 break;
1646
1647 case 'g':
1648 allows_reg = 1;
1649 allows_mem = 1;
1650 break;
1651 }
1652
1653 if (! allows_reg && allows_mem)
1654 mark_addressable (TREE_VALUE (tail));
1655
1656 op = expand_expr (TREE_VALUE (tail), NULL_RTX, VOIDmode, 0);
1657
1658 if (asm_operand_ok (op, constraint) <= 0)
1659 {
1660 if (allows_reg)
1661 op = force_reg (TYPE_MODE (TREE_TYPE (TREE_VALUE (tail))), op);
1662 else if (!allows_mem)
1663 warning ("asm operand %d probably doesn't match constraints", i);
1664 else if (CONSTANT_P (op))
1665 op = force_const_mem (TYPE_MODE (TREE_TYPE (TREE_VALUE (tail))),
1666 op);
1667 else if (GET_CODE (op) == REG
1668 || GET_CODE (op) == SUBREG
1669 || GET_CODE (op) == CONCAT)
1670 {
1671 tree type = TREE_TYPE (TREE_VALUE (tail));
1672 rtx memloc = assign_temp (type, 1, 1, 1);
1673
1674 emit_move_insn (memloc, op);
1675 op = memloc;
1676 }
1677 else if (GET_CODE (op) == MEM && MEM_VOLATILE_P (op))
1678 /* We won't recognize volatile memory as a valid
1679 memory_operand at this point.  Ignore it. */
1680 ;
1681 else if (queued_subexp_p (op))
1682 ;
1683 else
1684 /* ??? Leave this only until we have experience with what
1685 happens in combine and elsewhere when constraints are
1686 not satisfied. */
1687 warning ("asm operand %d probably doesn't match constraints", i);
1688 }
1689 XVECEXP (body, 3, i) = op;
1690
1691 XVECEXP (body, 4, i) /* constraints */
1692 = gen_rtx_ASM_INPUT (TYPE_MODE (TREE_TYPE (TREE_VALUE (tail))),
1693 orig_constraint);
1694 i++;
1695 }
1696
1697 /* Protect all the operands from the queue,
1698 now that they have all been evaluated. */
1699
1700 for (i = 0; i < ninputs - ninout; i++)
1701 XVECEXP (body, 3, i) = protect_from_queue (XVECEXP (body, 3, i), 0);
1702
1703 for (i = 0; i < noutputs; i++)
1704 output_rtx[i] = protect_from_queue (output_rtx[i], 1);
1705
1706 /* For in-out operands, copy output rtx to input rtx. */
1707 for (i = 0; i < ninout; i++)
1708 {
1709 static char match[9+1][2]
1710 = {"0", "1", "2", "3", "4", "5", "6", "7", "8", "9"};
1711 int j = inout_opnum[i];
1712
1713 XVECEXP (body, 3, ninputs - ninout + i) /* argvec */
1714 = output_rtx[j];
1715 XVECEXP (body, 4, ninputs - ninout + i) /* constraints */
1716 = gen_rtx_ASM_INPUT (inout_mode[i], match[j]);
1717 }
1718
1719 /* Now, for each output, construct an rtx
1720 (set OUTPUT (asm_operands INSN OUTPUTNUMBER OUTPUTCONSTRAINT
1721 ARGVEC CONSTRAINTS))
1722 If there is more than one, put them inside a PARALLEL. */
1723
1724 if (noutputs == 1 && nclobbers == 0)
1725 {
1726 XSTR (body, 1) = TREE_STRING_POINTER (TREE_PURPOSE (outputs));
1727 insn = emit_insn (gen_rtx_SET (VOIDmode, output_rtx[0], body));
1728 }
1729 else if (noutputs == 0 && nclobbers == 0)
1730 {
1731 /* No output operands: put in a raw ASM_OPERANDS rtx. */
1732 insn = emit_insn (body);
1733 }
1734 else
1735 {
1736 rtx obody = body;
1737 int num = noutputs;
1738 if (num == 0) num = 1;
1739 body = gen_rtx_PARALLEL (VOIDmode, rtvec_alloc (num + nclobbers));
1740
1741 /* For each output operand, store a SET. */
1742
1743 for (i = 0, tail = outputs; tail; tail = TREE_CHAIN (tail), i++)
1744 {
1745 XVECEXP (body, 0, i)
1746 = gen_rtx_SET (VOIDmode,
1747 output_rtx[i],
1748 gen_rtx_ASM_OPERANDS
1749 (VOIDmode,
1750 TREE_STRING_POINTER (string),
1751 TREE_STRING_POINTER (TREE_PURPOSE (tail)),
1752 i, argvec, constraints,
1753 filename, line));
1754
1755 MEM_VOLATILE_P (SET_SRC (XVECEXP (body, 0, i))) = vol;
1756 }
1757
1758 /* If there are no outputs (but there are some clobbers)
1759 store the bare ASM_OPERANDS into the PARALLEL. */
1760
1761 if (i == 0)
1762 XVECEXP (body, 0, i++) = obody;
1763
1764 /* Store (clobber REG) for each clobbered register specified. */
1765
1766 for (tail = clobbers; tail; tail = TREE_CHAIN (tail))
1767 {
1768 char *regname = TREE_STRING_POINTER (TREE_VALUE (tail));
1769 int j = decode_reg_name (regname);
1770
1771 if (j < 0)
1772 {
1773 if (j == -3) /* `cc', which is not a register */
1774 continue;
1775
1776 if (j == -4) /* `memory', don't cache memory across asm */
1777 {
1778 XVECEXP (body, 0, i++)
1779 = gen_rtx_CLOBBER (VOIDmode,
1780 gen_rtx_MEM
1781 (BLKmode,
1782 gen_rtx_SCRATCH (VOIDmode)));
1783 continue;
1784 }
1785
1786 /* Ignore unknown register, error already signaled. */
1787 continue;
1788 }
1789
1790 /* Use QImode since that's guaranteed to clobber just one reg. */
1791 XVECEXP (body, 0, i++)
1792 = gen_rtx_CLOBBER (VOIDmode, gen_rtx_REG (QImode, j));
1793 }
1794
1795 insn = emit_insn (body);
1796 }
1797
1798 /* For any outputs that needed reloading into registers, spill them
1799 back to where they belong. */
1800 for (i = 0; i < noutputs; ++i)
1801 if (real_output_rtx[i])
1802 emit_move_insn (real_output_rtx[i], output_rtx[i]);
1803
1804 free_temp_slots ();
1805 }
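/* As a hedged illustration (not part of the compiler itself), a typical
   extended asm that exercises the paths above on an x86-style target is:

       asm volatile ("rep; stosb"
                     : "=D" (p), "=c" (n)
                     : "0" (p), "1" (n), "a" (ch)
                     : "memory", "cc");

   The "0" and "1" inputs take the matching-digit case and pick up the
   constraints of the corresponding outputs, the "memory" clobber becomes
   the (clobber (mem:BLK (scratch))) built above, and "cc" is skipped
   because decode_reg_name returns -3 for it.  The names p, n and ch are
   illustrative only.  */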
1806 \f
1807 /* Generate RTL to evaluate the expression EXP
1808 and remember it in case this is the VALUE in a ({... VALUE; }) construct. */
1809
1810 void
1811 expand_expr_stmt (exp)
1812 tree exp;
1813 {
1814 /* If -W, warn about statements with no side effects,
1815 except for an explicit cast to void (e.g. for assert()), and
1816 except inside a ({...}) where they may be useful. */
1817 if (expr_stmts_for_value == 0 && exp != error_mark_node)
1818 {
1819 if (! TREE_SIDE_EFFECTS (exp) && (extra_warnings || warn_unused)
1820 && !(TREE_CODE (exp) == CONVERT_EXPR
1821 && TREE_TYPE (exp) == void_type_node))
1822 warning_with_file_and_line (emit_filename, emit_lineno,
1823 "statement with no effect");
1824 else if (warn_unused)
1825 warn_if_unused_value (exp);
1826 }
1827
1828 /* If EXP is of function type and we are expanding statements for
1829 value, convert it to pointer-to-function. */
1830 if (expr_stmts_for_value && TREE_CODE (TREE_TYPE (exp)) == FUNCTION_TYPE)
1831 exp = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (exp)), exp);
1832
1833 last_expr_type = TREE_TYPE (exp);
1834 last_expr_value = expand_expr (exp,
1835 (expr_stmts_for_value
1836 ? NULL_RTX : const0_rtx),
1837 VOIDmode, 0);
1838
1839 /* If all we do is reference a volatile value in memory,
1840 copy it to a register to be sure it is actually touched. */
1841 if (last_expr_value != 0 && GET_CODE (last_expr_value) == MEM
1842 && TREE_THIS_VOLATILE (exp))
1843 {
1844 if (TYPE_MODE (TREE_TYPE (exp)) == VOIDmode)
1845 ;
1846 else if (TYPE_MODE (TREE_TYPE (exp)) != BLKmode)
1847 copy_to_reg (last_expr_value);
1848 else
1849 {
1850 rtx lab = gen_label_rtx ();
1851
1852 /* Compare the value with itself to reference it. */
1853 emit_cmp_and_jump_insns (last_expr_value, last_expr_value, EQ,
1854 expand_expr (TYPE_SIZE (last_expr_type),
1855 NULL_RTX, VOIDmode, 0),
1856 BLKmode, 0,
1857 TYPE_ALIGN (last_expr_type) / BITS_PER_UNIT,
1858 lab);
1859 emit_label (lab);
1860 }
1861 }
1862
1863 /* If this expression is part of a ({...}) and is in memory, we may have
1864 to preserve temporaries. */
1865 preserve_temp_slots (last_expr_value);
1866
1867 /* Free any temporaries used to evaluate this expression. Any temporary
1868 used as a result of this expression will already have been preserved
1869 above. */
1870 free_temp_slots ();
1871
1872 emit_queue ();
1873 }
1874
1875 /* Warn if EXP contains any computations whose results are not used.
1876 Return 1 if a warning is printed; 0 otherwise. */
1877
1878 int
1879 warn_if_unused_value (exp)
1880 tree exp;
1881 {
1882 if (TREE_USED (exp))
1883 return 0;
1884
1885 switch (TREE_CODE (exp))
1886 {
1887 case PREINCREMENT_EXPR:
1888 case POSTINCREMENT_EXPR:
1889 case PREDECREMENT_EXPR:
1890 case POSTDECREMENT_EXPR:
1891 case MODIFY_EXPR:
1892 case INIT_EXPR:
1893 case TARGET_EXPR:
1894 case CALL_EXPR:
1895 case METHOD_CALL_EXPR:
1896 case RTL_EXPR:
1897 case TRY_CATCH_EXPR:
1898 case WITH_CLEANUP_EXPR:
1899 case EXIT_EXPR:
1900 /* We don't warn about COND_EXPR because it may be a useful
1901 construct if either arm contains a side effect. */
1902 case COND_EXPR:
1903 return 0;
1904
1905 case BIND_EXPR:
1906 /* For a binding, warn if no side effect within it. */
1907 return warn_if_unused_value (TREE_OPERAND (exp, 1));
1908
1909 case SAVE_EXPR:
1910 return warn_if_unused_value (TREE_OPERAND (exp, 1));
1911
1912 case TRUTH_ORIF_EXPR:
1913 case TRUTH_ANDIF_EXPR:
1914 /* In && or ||, warn if 2nd operand has no side effect. */
1915 return warn_if_unused_value (TREE_OPERAND (exp, 1));
1916
1917 case COMPOUND_EXPR:
1918 if (TREE_NO_UNUSED_WARNING (exp))
1919 return 0;
1920 if (warn_if_unused_value (TREE_OPERAND (exp, 0)))
1921 return 1;
1922 /* Let people do `(foo (), 0)' without a warning. */
1923 if (TREE_CONSTANT (TREE_OPERAND (exp, 1)))
1924 return 0;
1925 return warn_if_unused_value (TREE_OPERAND (exp, 1));
1926
1927 case NOP_EXPR:
1928 case CONVERT_EXPR:
1929 case NON_LVALUE_EXPR:
1930 /* Don't warn about values cast to void. */
1931 if (TREE_TYPE (exp) == void_type_node)
1932 return 0;
1933 /* Don't warn about conversions not explicit in the user's program. */
1934 if (TREE_NO_UNUSED_WARNING (exp))
1935 return 0;
1936 /* Assignment to a cast usually results in a cast of a modify.
1937 Don't complain about that. There can be an arbitrary number of
1938 casts before the modify, so we must loop until we find the first
1939 non-cast expression and then test to see if that is a modify. */
1940 {
1941 tree tem = TREE_OPERAND (exp, 0);
1942
1943 while (TREE_CODE (tem) == CONVERT_EXPR || TREE_CODE (tem) == NOP_EXPR)
1944 tem = TREE_OPERAND (tem, 0);
1945
1946 if (TREE_CODE (tem) == MODIFY_EXPR || TREE_CODE (tem) == INIT_EXPR
1947 || TREE_CODE (tem) == CALL_EXPR)
1948 return 0;
1949 }
1950 goto warn;
1951
1952 case INDIRECT_REF:
1953 /* Don't warn about automatic dereferencing of references, since
1954 the user cannot control it. */
1955 if (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == REFERENCE_TYPE)
1956 return warn_if_unused_value (TREE_OPERAND (exp, 0));
1957 /* ... fall through ... */
1958
1959 default:
1960 /* Referencing a volatile value is a side effect, so don't warn. */
1961 if ((TREE_CODE_CLASS (TREE_CODE (exp)) == 'd'
1962 || TREE_CODE_CLASS (TREE_CODE (exp)) == 'r')
1963 && TREE_THIS_VOLATILE (exp))
1964 return 0;
1965 warn:
1966 warning_with_file_and_line (emit_filename, emit_lineno,
1967 "value computed is not used");
1968 return 1;
1969 }
1970 }
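/* A few illustrative cases of the rules above, assuming -Wunused (these
   snippets are a sketch of front-end behavior, not code from this file):

       f () + 1;           warns: value computed is not used
       (void) (f () + 1);  no warning: explicit cast to void
       f (), 0;            no warning: constant second operand of a comma
       *vp;                no warning when *vp is a volatile reference  */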
1971
1972 /* Clear out the memory of the last expression evaluated. */
1973
1974 void
1975 clear_last_expr ()
1976 {
1977 last_expr_type = 0;
1978 }
1979
1980 /* Begin a statement which will return a value.
1981 Return the RTL_EXPR for this statement expr.
1982 The caller must save that value and pass it to expand_end_stmt_expr. */
1983
1984 tree
1985 expand_start_stmt_expr ()
1986 {
1987 int momentary;
1988 tree t;
1989
1990 /* Make the RTL_EXPR node temporary, not momentary,
1991 so that rtl_expr_chain doesn't become garbage. */
1992 momentary = suspend_momentary ();
1993 t = make_node (RTL_EXPR);
1994 resume_momentary (momentary);
1995 do_pending_stack_adjust ();
1996 start_sequence_for_rtl_expr (t);
1997 NO_DEFER_POP;
1998 expr_stmts_for_value++;
1999 return t;
2000 }
2001
2002 /* Restore the previous state at the end of a statement that returns a value.
2003 Returns a tree node representing the statement's value and the
2004 insns to compute the value.
2005
2006 The nodes of that expression have been freed by now, so we cannot use them.
2007 But we don't want to do that anyway; the expression has already been
2008 evaluated and now we just want to use the value. So generate a RTL_EXPR
2009 with the proper type and RTL value.
2010
2011 If the last substatement was not an expression,
2012 return something with type `void'. */
2013
2014 tree
2015 expand_end_stmt_expr (t)
2016 tree t;
2017 {
2018 OK_DEFER_POP;
2019
2020 if (last_expr_type == 0)
2021 {
2022 last_expr_type = void_type_node;
2023 last_expr_value = const0_rtx;
2024 }
2025 else if (last_expr_value == 0)
2026 /* There are some cases where this can happen, such as when the
2027 statement has void type. */
2028 last_expr_value = const0_rtx;
2029 else if (GET_CODE (last_expr_value) != REG && ! CONSTANT_P (last_expr_value))
2030 /* Remove any possible QUEUED. */
2031 last_expr_value = protect_from_queue (last_expr_value, 0);
2032
2033 emit_queue ();
2034
2035 TREE_TYPE (t) = last_expr_type;
2036 RTL_EXPR_RTL (t) = last_expr_value;
2037 RTL_EXPR_SEQUENCE (t) = get_insns ();
2038
2039 rtl_expr_chain = tree_cons (NULL_TREE, t, rtl_expr_chain);
2040
2041 end_sequence ();
2042
2043 /* Don't consider deleting this expr or containing exprs at tree level. */
2044 TREE_SIDE_EFFECTS (t) = 1;
2045 /* Propagate volatility of the actual RTL expr. */
2046 TREE_THIS_VOLATILE (t) = volatile_refs_p (last_expr_value);
2047
2048 last_expr_type = 0;
2049 expr_stmts_for_value--;
2050
2051 return t;
2052 }
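/* A sketch (assumed usage, not taken verbatim from any front end) of how
   the two functions above pair up for a statement expression
   ({ stmt; VALUE; }):

       tree t = expand_start_stmt_expr ();
       ... expand the inner statements, ending with
           expand_expr_stmt (VALUE) ...
       t = expand_end_stmt_expr (t);

   The RTL_EXPR returned carries the type of VALUE, the rtx for its value
   and the insn sequence that computes it.  */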
2053 \f
2054 /* Generate RTL for the start of an if-then. COND is the expression
2055 whose truth should be tested.
2056
2057 If EXITFLAG is nonzero, this conditional is visible to
2058 `exit_something'. */
2059
2060 void
2061 expand_start_cond (cond, exitflag)
2062 tree cond;
2063 int exitflag;
2064 {
2065 struct nesting *thiscond = ALLOC_NESTING ();
2066
2067 /* Make an entry on cond_stack for the cond we are entering. */
2068
2069 thiscond->next = cond_stack;
2070 thiscond->all = nesting_stack;
2071 thiscond->depth = ++nesting_depth;
2072 thiscond->data.cond.next_label = gen_label_rtx ();
2073 /* Before we encounter an `else', we don't need a separate exit label
2074 unless there are supposed to be exit statements
2075 to exit this conditional. */
2076 thiscond->exit_label = exitflag ? gen_label_rtx () : 0;
2077 thiscond->data.cond.endif_label = thiscond->exit_label;
2078 cond_stack = thiscond;
2079 nesting_stack = thiscond;
2080
2081 do_jump (cond, thiscond->data.cond.next_label, NULL_RTX);
2082 }
2083
2084 /* Generate RTL between then-clause and the elseif-clause
2085 of an if-then-elseif-.... */
2086
2087 void
2088 expand_start_elseif (cond)
2089 tree cond;
2090 {
2091 if (cond_stack->data.cond.endif_label == 0)
2092 cond_stack->data.cond.endif_label = gen_label_rtx ();
2093 emit_jump (cond_stack->data.cond.endif_label);
2094 emit_label (cond_stack->data.cond.next_label);
2095 cond_stack->data.cond.next_label = gen_label_rtx ();
2096 do_jump (cond, cond_stack->data.cond.next_label, NULL_RTX);
2097 }
2098
2099 /* Generate RTL between the then-clause and the else-clause
2100 of an if-then-else. */
2101
2102 void
2103 expand_start_else ()
2104 {
2105 if (cond_stack->data.cond.endif_label == 0)
2106 cond_stack->data.cond.endif_label = gen_label_rtx ();
2107
2108 emit_jump (cond_stack->data.cond.endif_label);
2109 emit_label (cond_stack->data.cond.next_label);
2110 cond_stack->data.cond.next_label = 0; /* No more _else or _elseif calls. */
2111 }
2112
2113 /* After calling expand_start_else, turn this "else" into an "else if"
2114 by providing another condition. */
2115
2116 void
2117 expand_elseif (cond)
2118 tree cond;
2119 {
2120 cond_stack->data.cond.next_label = gen_label_rtx ();
2121 do_jump (cond, cond_stack->data.cond.next_label, NULL_RTX);
2122 }
2123
2124 /* Generate RTL for the end of an if-then.
2125 Pop the record for it off of cond_stack. */
2126
2127 void
2128 expand_end_cond ()
2129 {
2130 struct nesting *thiscond = cond_stack;
2131
2132 do_pending_stack_adjust ();
2133 if (thiscond->data.cond.next_label)
2134 emit_label (thiscond->data.cond.next_label);
2135 if (thiscond->data.cond.endif_label)
2136 emit_label (thiscond->data.cond.endif_label);
2137
2138 POPSTACK (cond_stack);
2139 last_expr_type = 0;
2140 }
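/* As a hedged example of how the conditional functions above nest, a front
   end expanding `if (a) s1; else if (b) s2; else s3;' would call, in
   order (sketch only):

       expand_start_cond (a, 0);
       ... expand s1 ...
       expand_start_elseif (b);
       ... expand s2 ...
       expand_start_else ();
       ... expand s3 ...
       expand_end_cond ();  */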
2141
2142
2143 \f
2144 /* Generate RTL for the start of a loop. EXIT_FLAG is nonzero if this
2145 loop should be exited by `exit_something'. This is a loop for which
2146 `expand_continue' will jump to the top of the loop.
2147
2148 Make an entry on loop_stack to record the labels associated with
2149 this loop. */
2150
2151 struct nesting *
2152 expand_start_loop (exit_flag)
2153 int exit_flag;
2154 {
2155 register struct nesting *thisloop = ALLOC_NESTING ();
2156
2157 /* Make an entry on loop_stack for the loop we are entering. */
2158
2159 thisloop->next = loop_stack;
2160 thisloop->all = nesting_stack;
2161 thisloop->depth = ++nesting_depth;
2162 thisloop->data.loop.start_label = gen_label_rtx ();
2163 thisloop->data.loop.end_label = gen_label_rtx ();
2164 thisloop->data.loop.alt_end_label = 0;
2165 thisloop->data.loop.continue_label = thisloop->data.loop.start_label;
2166 thisloop->exit_label = exit_flag ? thisloop->data.loop.end_label : 0;
2167 loop_stack = thisloop;
2168 nesting_stack = thisloop;
2169
2170 do_pending_stack_adjust ();
2171 emit_queue ();
2172 emit_note (NULL_PTR, NOTE_INSN_LOOP_BEG);
2173 emit_label (thisloop->data.loop.start_label);
2174
2175 return thisloop;
2176 }
2177
2178 /* Like expand_start_loop but for a loop where the continuation point
2179 (for expand_continue_loop) will be specified explicitly. */
2180
2181 struct nesting *
2182 expand_start_loop_continue_elsewhere (exit_flag)
2183 int exit_flag;
2184 {
2185 struct nesting *thisloop = expand_start_loop (exit_flag);
2186 loop_stack->data.loop.continue_label = gen_label_rtx ();
2187 return thisloop;
2188 }
2189
2190 /* Specify the continuation point for a loop started with
2191 expand_start_loop_continue_elsewhere.
2192 Use this at the point in the code to which a continue statement
2193 should jump. */
2194
2195 void
2196 expand_loop_continue_here ()
2197 {
2198 do_pending_stack_adjust ();
2199 emit_note (NULL_PTR, NOTE_INSN_LOOP_CONT);
2200 emit_label (loop_stack->data.loop.continue_label);
2201 }
2202
2203 /* Finish a loop. Generate a jump back to the top and the loop-exit label.
2204 Pop the block off of loop_stack. */
2205
2206 void
2207 expand_end_loop ()
2208 {
2209 rtx start_label = loop_stack->data.loop.start_label;
2210 rtx insn = get_last_insn ();
2211 int needs_end_jump = 1;
2212
2213 /* Mark the continue-point at the top of the loop if none elsewhere. */
2214 if (start_label == loop_stack->data.loop.continue_label)
2215 emit_note_before (NOTE_INSN_LOOP_CONT, start_label);
2216
2217 do_pending_stack_adjust ();
2218
2219 /* If optimizing, perhaps reorder the loop.
2220 First, try to use a condjump near the end.
2221 expand_exit_loop_if_false ends loops with unconditional jumps,
2222 like this:
2223
2224 if (test) goto label;
2225 optional: cleanup
2226 goto loop_stack->data.loop.end_label
2227 barrier
2228 label:
2229
2230 If we find such a pattern, we can end the loop earlier. */
2231
2232 if (optimize
2233 && GET_CODE (insn) == CODE_LABEL
2234 && LABEL_NAME (insn) == NULL
2235 && GET_CODE (PREV_INSN (insn)) == BARRIER)
2236 {
2237 rtx label = insn;
2238 rtx jump = PREV_INSN (PREV_INSN (label));
2239
2240 if (GET_CODE (jump) == JUMP_INSN
2241 && GET_CODE (PATTERN (jump)) == SET
2242 && SET_DEST (PATTERN (jump)) == pc_rtx
2243 && GET_CODE (SET_SRC (PATTERN (jump))) == LABEL_REF
2244 && (XEXP (SET_SRC (PATTERN (jump)), 0)
2245 == loop_stack->data.loop.end_label))
2246 {
2247 rtx prev;
2248
2249 /* The test might be complex and reference LABEL multiple times,
2250 like the loop in loop_iterations to set vtop. To handle this,
2251 we move LABEL. */
2252 insn = PREV_INSN (label);
2253 reorder_insns (label, label, start_label);
2254
2255 for (prev = PREV_INSN (jump); ; prev = PREV_INSN (prev))
2256 {
2257 /* We ignore line number notes, but if we see any other note,
2258 in particular NOTE_INSN_BLOCK_*, NOTE_INSN_EH_REGION_*,
2259 NOTE_INSN_LOOP_*, we disable this optimization. */
2260 if (GET_CODE (prev) == NOTE)
2261 {
2262 if (NOTE_LINE_NUMBER (prev) < 0)
2263 break;
2264 continue;
2265 }
2266 if (GET_CODE (prev) == CODE_LABEL)
2267 break;
2268 if (GET_CODE (prev) == JUMP_INSN)
2269 {
2270 if (GET_CODE (PATTERN (prev)) == SET
2271 && SET_DEST (PATTERN (prev)) == pc_rtx
2272 && GET_CODE (SET_SRC (PATTERN (prev))) == IF_THEN_ELSE
2273 && (GET_CODE (XEXP (SET_SRC (PATTERN (prev)), 1))
2274 == LABEL_REF)
2275 && XEXP (XEXP (SET_SRC (PATTERN (prev)), 1), 0) == label)
2276 {
2277 XEXP (XEXP (SET_SRC (PATTERN (prev)), 1), 0)
2278 = start_label;
2279 emit_note_after (NOTE_INSN_LOOP_END, prev);
2280 needs_end_jump = 0;
2281 }
2282 break;
2283 }
2284 }
2285 }
2286 }
2287
2288 /* If the loop starts with a loop exit, roll that to the end where
2289 it will optimize together with the jump back.
2290
2291 We look for the conditional branch to the exit, except that once
2292 we find such a branch, we don't look past 30 instructions.
2293
2294 In more detail, if the loop presently looks like this (in pseudo-C):
2295
2296 start_label:
2297 if (test) goto end_label;
2298 body;
2299 goto start_label;
2300 end_label:
2301
2302 transform it to look like:
2303
2304 goto start_label;
2305 newstart_label:
2306 body;
2307 start_label:
2308 if (test) goto end_label;
2309 goto newstart_label;
2310 end_label:
2311
2312 Here, the `test' may actually consist of some reasonably complex
2313 code, terminating in a test. */
2314
2315 if (optimize
2316 && needs_end_jump
2317 &&
2318 ! (GET_CODE (insn) == JUMP_INSN
2319 && GET_CODE (PATTERN (insn)) == SET
2320 && SET_DEST (PATTERN (insn)) == pc_rtx
2321 && GET_CODE (SET_SRC (PATTERN (insn))) == IF_THEN_ELSE))
2322 {
2323 int eh_regions = 0;
2324 int num_insns = 0;
2325 rtx last_test_insn = NULL_RTX;
2326
2327 /* Scan insns from the top of the loop looking for a qualified
2328 conditional exit. */
2329 for (insn = NEXT_INSN (loop_stack->data.loop.start_label); insn;
2330 insn = NEXT_INSN (insn))
2331 {
2332 if (GET_CODE (insn) == NOTE)
2333 {
2334 if (optimize < 2
2335 && (NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_BEG
2336 || NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_END))
2337 /* The code that actually moves the exit test will
2338 carefully leave BLOCK notes in their original
2339 location. That means, however, that we can't debug
2340 the exit test itself. So, we refuse to move code
2341 containing BLOCK notes at low optimization levels. */
2342 break;
2343
2344 if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_EH_REGION_BEG)
2345 ++eh_regions;
2346 else if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_EH_REGION_END)
2347 {
2348 --eh_regions;
2349 if (eh_regions < 0)
2350 /* We've come to the end of an EH region, but
2351 never saw the beginning of that region. That
2352 means that an EH region begins before the top
2353 of the loop, and ends in the middle of it. The
2354 existence of such a situation violates a basic
2355 assumption in this code, since that would imply
2356 that even when EH_REGIONS is zero, we might
2357 move code out of an exception region. */
2358 abort ();
2359 }
2360
2361 /* We must not walk into a nested loop. */
2362 if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_LOOP_BEG)
2363 break;
2364
2365 /* We already know this INSN is a NOTE, so there's no
2366 point in looking at it to see if it's a JUMP. */
2367 continue;
2368 }
2369
2370 if (GET_CODE (insn) == JUMP_INSN || GET_CODE (insn) == INSN)
2371 num_insns++;
2372
2373 if (last_test_insn && num_insns > 30)
2374 break;
2375
2376 if (eh_regions > 0)
2377 /* We don't want to move a partial EH region. Consider:
2378
2379 while ( ( { try {
2380 if (cond ()) 0;
2381 else {
2382 bar();
2383 1;
2384 }
2385 } catch (...) {
2386 1;
2387 } )) {
2388 body;
2389 }
2390
2391 This isn't legal C++, but here's what it's supposed to
2392 mean: if cond() is true, stop looping. Otherwise,
2393 call bar, and keep looping. In addition, if cond
2394 throws an exception, catch it and keep looping. Such
2395 constructs are certainly legal in LISP.
2396
2397 We should not move the `if (cond()) 0' test since then
2398 the EH-region for the try-block would be broken up.
2399 (In this case we would move the EH_BEG note for the `try'
2400 and `if cond()' but not the call to bar() or the
2401 EH_END note.)
2402
2403 So we don't look for tests within an EH region. */
2404 continue;
2405
2406 if (GET_CODE (insn) == JUMP_INSN
2407 && GET_CODE (PATTERN (insn)) == SET
2408 && SET_DEST (PATTERN (insn)) == pc_rtx)
2409 {
2410 /* This is indeed a jump. */
2411 rtx dest1 = NULL_RTX;
2412 rtx dest2 = NULL_RTX;
2413 rtx potential_last_test;
2414 if (GET_CODE (SET_SRC (PATTERN (insn))) == IF_THEN_ELSE)
2415 {
2416 /* A conditional jump. */
2417 dest1 = XEXP (SET_SRC (PATTERN (insn)), 1);
2418 dest2 = XEXP (SET_SRC (PATTERN (insn)), 2);
2419 potential_last_test = insn;
2420 }
2421 else
2422 {
2423 /* An unconditional jump. */
2424 dest1 = SET_SRC (PATTERN (insn));
2425 /* Include the BARRIER after the JUMP. */
2426 potential_last_test = NEXT_INSN (insn);
2427 }
2428
2429 do {
2430 if (dest1 && GET_CODE (dest1) == LABEL_REF
2431 && ((XEXP (dest1, 0)
2432 == loop_stack->data.loop.alt_end_label)
2433 || (XEXP (dest1, 0)
2434 == loop_stack->data.loop.end_label)))
2435 {
2436 last_test_insn = potential_last_test;
2437 break;
2438 }
2439
2440 /* If this was a conditional jump, there may be
2441 another label at which we should look. */
2442 dest1 = dest2;
2443 dest2 = NULL_RTX;
2444 } while (dest1);
2445 }
2446 }
2447
2448 if (last_test_insn != 0 && last_test_insn != get_last_insn ())
2449 {
2450 /* We found one. Move everything from there up
2451 to the end of the loop, and add a jump into the loop
2452 to jump to there. */
2453 register rtx newstart_label = gen_label_rtx ();
2454 register rtx start_move = start_label;
2455 rtx next_insn;
2456
2457 /* If the start label is preceded by a NOTE_INSN_LOOP_CONT note,
2458 then we want to move this note also. */
2459 if (GET_CODE (PREV_INSN (start_move)) == NOTE
2460 && (NOTE_LINE_NUMBER (PREV_INSN (start_move))
2461 == NOTE_INSN_LOOP_CONT))
2462 start_move = PREV_INSN (start_move);
2463
2464 emit_label_after (newstart_label, PREV_INSN (start_move));
2465
2466 /* Actually move the insns. Start at the beginning, and
2467 keep moving insns until we've moved the
2468 last_test_insn. */
2469 for (insn = start_move; insn; insn = next_insn)
2470 {
2471 /* Figure out which insn comes after this one. We have
2472 to do this before we move INSN. */
2473 if (insn == last_test_insn)
2474 /* We've moved all the insns. */
2475 next_insn = NULL_RTX;
2476 else
2477 next_insn = NEXT_INSN (insn);
2478
2479 if (GET_CODE (insn) == NOTE
2480 && (NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_BEG
2481 || NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_END))
2482 /* We don't want to move NOTE_INSN_BLOCK_BEGs or
2483 NOTE_INSN_BLOCK_ENDs because the correct generation
2484 of debugging information depends on these appearing
2485 in the same order in the RTL and in the tree
2486 structure, where they are represented as BLOCKs.
2487 So, we don't move block notes. Of course, moving
2488 the code inside the block is likely to make it
2489 impossible to debug the instructions in the exit
2490 test, but such is the price of optimization. */
2491 continue;
2492
2493 /* Move the INSN. */
2494 reorder_insns (insn, insn, get_last_insn ());
2495 }
2496
2497 emit_jump_insn_after (gen_jump (start_label),
2498 PREV_INSN (newstart_label));
2499 emit_barrier_after (PREV_INSN (newstart_label));
2500 start_label = newstart_label;
2501 }
2502 }
2503
2504 if (needs_end_jump)
2505 {
2506 emit_jump (start_label);
2507 emit_note (NULL_PTR, NOTE_INSN_LOOP_END);
2508 }
2509 emit_label (loop_stack->data.loop.end_label);
2510
2511 POPSTACK (loop_stack);
2512
2513 last_expr_type = 0;
2514 }
2515
2516 /* Generate a jump to the current loop's continue-point.
2517 This is usually the top of the loop, but may be specified
2518 explicitly elsewhere. If not currently inside a loop,
2519 return 0 and do nothing; caller will print an error message. */
2520
2521 int
2522 expand_continue_loop (whichloop)
2523 struct nesting *whichloop;
2524 {
2525 last_expr_type = 0;
2526 if (whichloop == 0)
2527 whichloop = loop_stack;
2528 if (whichloop == 0)
2529 return 0;
2530 expand_goto_internal (NULL_TREE, whichloop->data.loop.continue_label,
2531 NULL_RTX);
2532 return 1;
2533 }
2534
2535 /* Generate a jump to exit the current loop. If not currently inside a loop,
2536 return 0 and do nothing; caller will print an error message. */
2537
2538 int
2539 expand_exit_loop (whichloop)
2540 struct nesting *whichloop;
2541 {
2542 last_expr_type = 0;
2543 if (whichloop == 0)
2544 whichloop = loop_stack;
2545 if (whichloop == 0)
2546 return 0;
2547 expand_goto_internal (NULL_TREE, whichloop->data.loop.end_label, NULL_RTX);
2548 return 1;
2549 }
2550
2551 /* Generate a conditional jump to exit the current loop if COND
2552 evaluates to zero. If not currently inside a loop,
2553 return 0 and do nothing; caller will print an error message. */
2554
2555 int
2556 expand_exit_loop_if_false (whichloop, cond)
2557 struct nesting *whichloop;
2558 tree cond;
2559 {
2560 rtx label = gen_label_rtx ();
2561 rtx last_insn;
2562 last_expr_type = 0;
2563
2564 if (whichloop == 0)
2565 whichloop = loop_stack;
2566 if (whichloop == 0)
2567 return 0;
2568 /* In order to handle fixups, we actually create a conditional jump
2569 around an unconditional branch to exit the loop. If fixups are
2570 necessary, they go before the unconditional branch. */
2571
2572
2573 do_jump (cond, NULL_RTX, label);
2574 last_insn = get_last_insn ();
2575 if (GET_CODE (last_insn) == CODE_LABEL)
2576 whichloop->data.loop.alt_end_label = last_insn;
2577 expand_goto_internal (NULL_TREE, whichloop->data.loop.end_label,
2578 NULL_RTX);
2579 emit_label (label);
2580
2581 return 1;
2582 }
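/* A sketch (assumed usage) of how the loop functions above combine to
   expand `while (cond) body;':

       struct nesting *loop = expand_start_loop (1);
       expand_exit_loop_if_false (loop, cond);
       ... expand body; a `continue' maps to expand_continue_loop (loop)
           and a `break' to expand_exit_loop (loop) ...
       expand_end_loop ();

   When the continue point is not the loop top (as for a `for' increment),
   expand_start_loop_continue_elsewhere and expand_loop_continue_here are
   used instead.  */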
2583
2584 /* Return nonzero if the loop nest is empty. Else return zero. */
2585
2586 int
2587 stmt_loop_nest_empty ()
2588 {
2589 return (loop_stack == NULL);
2590 }
2591
2592 /* Return non-zero if we should preserve sub-expressions as separate
2593 pseudos. We never do so if we aren't optimizing. We always do so
2594 if -fexpensive-optimizations.
2595
2596 Otherwise, we only do so if we are in the "early" part of a loop. I.e.,
2597 the loop may still be a small one. */
2598
2599 int
2600 preserve_subexpressions_p ()
2601 {
2602 rtx insn;
2603
2604 if (flag_expensive_optimizations)
2605 return 1;
2606
2607 if (optimize == 0 || current_function == 0 || loop_stack == 0)
2608 return 0;
2609
2610 insn = get_last_insn_anywhere ();
2611
2612 return (insn
2613 && (INSN_UID (insn) - INSN_UID (loop_stack->data.loop.start_label)
2614 < n_non_fixed_regs * 3));
2615
2616 }
2617
2618 /* Generate a jump to exit the current loop, conditional, binding contour
2619 or case statement. Not all such constructs are visible to this function,
2620 only those started with EXIT_FLAG nonzero. Individual languages use
2621 the EXIT_FLAG parameter to control which kinds of constructs you can
2622 exit this way.
2623
2624 If not currently inside anything that can be exited,
2625 return 0 and do nothing; caller will print an error message. */
2626
2627 int
2628 expand_exit_something ()
2629 {
2630 struct nesting *n;
2631 last_expr_type = 0;
2632 for (n = nesting_stack; n; n = n->all)
2633 if (n->exit_label != 0)
2634 {
2635 expand_goto_internal (NULL_TREE, n->exit_label, NULL_RTX);
2636 return 1;
2637 }
2638
2639 return 0;
2640 }
2641 \f
2642 /* Generate RTL to return from the current function, with no value.
2643 (That is, we do not do anything about returning any value.) */
2644
2645 void
2646 expand_null_return ()
2647 {
2648 struct nesting *block = block_stack;
2649 rtx last_insn = 0;
2650
2651 /* Does any pending block have cleanups? */
2652
2653 while (block && block->data.block.cleanups == 0)
2654 block = block->next;
2655
2656 /* If yes, use a goto to return, since that runs cleanups. */
2657
2658 expand_null_return_1 (last_insn, block != 0);
2659 }
2660
2661 /* Generate RTL to return from the current function, with value VAL. */
2662
2663 static void
2664 expand_value_return (val)
2665 rtx val;
2666 {
2667 struct nesting *block = block_stack;
2668 rtx last_insn = get_last_insn ();
2669 rtx return_reg = DECL_RTL (DECL_RESULT (current_function_decl));
2670
2671 /* Copy the value to the return location
2672 unless it's already there. */
2673
2674 if (return_reg != val)
2675 {
2676 #ifdef PROMOTE_FUNCTION_RETURN
2677 tree type = TREE_TYPE (DECL_RESULT (current_function_decl));
2678 int unsignedp = TREE_UNSIGNED (type);
2679 enum machine_mode mode
2680 = promote_mode (type, DECL_MODE (DECL_RESULT (current_function_decl)),
2681 &unsignedp, 1);
2682
2683 if (GET_MODE (val) != VOIDmode && GET_MODE (val) != mode)
2684 convert_move (return_reg, val, unsignedp);
2685 else
2686 #endif
2687 emit_move_insn (return_reg, val);
2688 }
2689 if (GET_CODE (return_reg) == REG
2690 && REGNO (return_reg) < FIRST_PSEUDO_REGISTER)
2691 emit_insn (gen_rtx_USE (VOIDmode, return_reg));
2692 /* Handle calls that return values in multiple non-contiguous locations.
2693 The Irix 6 ABI has examples of this. */
2694 else if (GET_CODE (return_reg) == PARALLEL)
2695 {
2696 int i;
2697
2698 for (i = 0; i < XVECLEN (return_reg, 0); i++)
2699 {
2700 rtx x = XEXP (XVECEXP (return_reg, 0, i), 0);
2701
2702 if (GET_CODE (x) == REG
2703 && REGNO (x) < FIRST_PSEUDO_REGISTER)
2704 emit_insn (gen_rtx_USE (VOIDmode, x));
2705 }
2706 }
2707
2708 /* Does any pending block have cleanups? */
2709
2710 while (block && block->data.block.cleanups == 0)
2711 block = block->next;
2712
2713 /* If yes, use a goto to return, since that runs cleanups.
2714 Use LAST_INSN to put cleanups *before* the move insn emitted above. */
2715
2716 expand_null_return_1 (last_insn, block != 0);
2717 }
2718
2719 /* Output a return with no value. If LAST_INSN is nonzero,
2720 pretend that the return takes place after LAST_INSN.
2721 If USE_GOTO is nonzero then don't use a return instruction;
2722 go to the return label instead. This causes any cleanups
2723 of pending blocks to be executed normally. */
2724
2725 static void
2726 expand_null_return_1 (last_insn, use_goto)
2727 rtx last_insn;
2728 int use_goto;
2729 {
2730 rtx end_label = cleanup_label ? cleanup_label : return_label;
2731
2732 clear_pending_stack_adjust ();
2733 do_pending_stack_adjust ();
2734 last_expr_type = 0;
2735
2736 /* PCC-struct return always uses an epilogue. */
2737 if (current_function_returns_pcc_struct || use_goto)
2738 {
2739 if (end_label == 0)
2740 end_label = return_label = gen_label_rtx ();
2741 expand_goto_internal (NULL_TREE, end_label, last_insn);
2742 return;
2743 }
2744
2745 /* Otherwise output a simple return-insn if one is available,
2746 unless it won't do the job. */
2747 #ifdef HAVE_return
2748 if (HAVE_return && use_goto == 0 && cleanup_label == 0)
2749 {
2750 emit_jump_insn (gen_return ());
2751 emit_barrier ();
2752 return;
2753 }
2754 #endif
2755
2756 /* Otherwise jump to the epilogue. */
2757 expand_goto_internal (NULL_TREE, end_label, last_insn);
2758 }
2759 \f
2760 /* Generate RTL to evaluate the expression RETVAL and return it
2761 from the current function. */
2762
2763 void
2764 expand_return (retval)
2765 tree retval;
2766 {
2767 /* If there are any cleanups to be performed, then they will
2768 be inserted following LAST_INSN. It is desirable
2769 that the last_insn, for such purposes, should be the
2770 last insn before computing the return value. Otherwise, cleanups
2771 which call functions can clobber the return value. */
2772 /* ??? rms: I think that is erroneous, because in C++ it would
2773 run destructors on variables that might be used in the subsequent
2774 computation of the return value. */
2775 rtx last_insn = 0;
2776 register rtx val = 0;
2777 register rtx op0;
2778 tree retval_rhs;
2779 int cleanups;
2780
2781 /* If function wants no value, give it none. */
2782 if (TREE_CODE (TREE_TYPE (TREE_TYPE (current_function_decl))) == VOID_TYPE)
2783 {
2784 expand_expr (retval, NULL_RTX, VOIDmode, 0);
2785 emit_queue ();
2786 expand_null_return ();
2787 return;
2788 }
2789
2790 /* Are any cleanups needed? E.g. C++ destructors to be run? */
2791 /* This is not sufficient. We also need to watch for cleanups of the
2792 expression we are about to expand. Unfortunately, we cannot know
2793 if it has cleanups until we expand it, and we want to change how we
2794 expand it depending upon if we need cleanups. We can't win. */
2795 #if 0
2796 cleanups = any_pending_cleanups (1);
2797 #else
2798 cleanups = 1;
2799 #endif
2800
2801 if (TREE_CODE (retval) == RESULT_DECL)
2802 retval_rhs = retval;
2803 else if ((TREE_CODE (retval) == MODIFY_EXPR || TREE_CODE (retval) == INIT_EXPR)
2804 && TREE_CODE (TREE_OPERAND (retval, 0)) == RESULT_DECL)
2805 retval_rhs = TREE_OPERAND (retval, 1);
2806 else if (TREE_TYPE (retval) == void_type_node)
2807 /* Recognize tail-recursive call to void function. */
2808 retval_rhs = retval;
2809 else
2810 retval_rhs = NULL_TREE;
2811
2812 /* Only use `last_insn' if there are cleanups which must be run. */
2813 if (cleanups || cleanup_label != 0)
2814 last_insn = get_last_insn ();
2815
2816 /* Distribute return down conditional expr if either of the sides
2817 may involve tail recursion (see test below). This enhances the number
2818 of tail recursions we see. Don't do this always since it can produce
2819 sub-optimal code in some cases and we distribute assignments into
2820 conditional expressions when it would help. */
2821
2822 if (optimize && retval_rhs != 0
2823 && frame_offset == 0
2824 && TREE_CODE (retval_rhs) == COND_EXPR
2825 && (TREE_CODE (TREE_OPERAND (retval_rhs, 1)) == CALL_EXPR
2826 || TREE_CODE (TREE_OPERAND (retval_rhs, 2)) == CALL_EXPR))
2827 {
2828 rtx label = gen_label_rtx ();
2829 tree expr;
2830
2831 do_jump (TREE_OPERAND (retval_rhs, 0), label, NULL_RTX);
2832 start_cleanup_deferral ();
2833 expr = build (MODIFY_EXPR, TREE_TYPE (TREE_TYPE (current_function_decl)),
2834 DECL_RESULT (current_function_decl),
2835 TREE_OPERAND (retval_rhs, 1));
2836 TREE_SIDE_EFFECTS (expr) = 1;
2837 expand_return (expr);
2838 emit_label (label);
2839
2840 expr = build (MODIFY_EXPR, TREE_TYPE (TREE_TYPE (current_function_decl)),
2841 DECL_RESULT (current_function_decl),
2842 TREE_OPERAND (retval_rhs, 2));
2843 TREE_SIDE_EFFECTS (expr) = 1;
2844 expand_return (expr);
2845 end_cleanup_deferral ();
2846 return;
2847 }
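  /* For instance (sketch): `return cond ? f (x) : g (x);' is handled above
     as if it had been written `if (cond) return f (x); else return g (x);',
     giving each arm its own shot at the tail-recursion optimization
     below.  */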
2848
2849 /* Attempt to optimize the call if it is tail recursive. */
2850 if (optimize_tail_recursion (retval_rhs, last_insn))
2851 return;
2852
2853 #ifdef HAVE_return
2854 /* This optimization is safe if there are local cleanups
2855 because expand_null_return takes care of them.
2856 ??? I think it should also be safe when there is a cleanup label,
2857 because expand_null_return takes care of them, too.
2858 Any reason why not? */
2859 if (HAVE_return && cleanup_label == 0
2860 && ! current_function_returns_pcc_struct
2861 && BRANCH_COST <= 1)
2862 {
2863 /* If this is return x == y; then generate
2864 if (x == y) return 1; else return 0;
2865 if we can do it with explicit return insns and branches are cheap,
2866 but not if we have the corresponding scc insn. */
2867 int has_scc = 0;
2868 if (retval_rhs)
2869 switch (TREE_CODE (retval_rhs))
2870 {
2871 case EQ_EXPR:
2872 #ifdef HAVE_seq
2873 has_scc = HAVE_seq;
2874 #endif
2875 case NE_EXPR:
2876 #ifdef HAVE_sne
2877 has_scc = HAVE_sne;
2878 #endif
2879 case GT_EXPR:
2880 #ifdef HAVE_sgt
2881 has_scc = HAVE_sgt;
2882 #endif
2883 case GE_EXPR:
2884 #ifdef HAVE_sge
2885 has_scc = HAVE_sge;
2886 #endif
2887 case LT_EXPR:
2888 #ifdef HAVE_slt
2889 has_scc = HAVE_slt;
2890 #endif
2891 case LE_EXPR:
2892 #ifdef HAVE_sle
2893 has_scc = HAVE_sle;
2894 #endif
2895 case TRUTH_ANDIF_EXPR:
2896 case TRUTH_ORIF_EXPR:
2897 case TRUTH_AND_EXPR:
2898 case TRUTH_OR_EXPR:
2899 case TRUTH_NOT_EXPR:
2900 case TRUTH_XOR_EXPR:
2901 if (! has_scc)
2902 {
2903 op0 = gen_label_rtx ();
2904 jumpifnot (retval_rhs, op0);
2905 expand_value_return (const1_rtx);
2906 emit_label (op0);
2907 expand_value_return (const0_rtx);
2908 return;
2909 }
2910 break;
2911
2912 default:
2913 break;
2914 }
2915 }
2916 #endif /* HAVE_return */
2917
2918 /* If the result is an aggregate that is being returned in one (or more)
2919 registers, load the registers here. The compiler currently can't handle
2920 copying a BLKmode value into registers. We could put this code in a
2921 more general area (for use by everyone instead of just function
2922 call/return), but until this feature is generally usable it is kept here
2923 (and in expand_call). The value must go into a pseudo in case there
2924 are cleanups that will clobber the real return register. */
2925
2926 if (retval_rhs != 0
2927 && TYPE_MODE (TREE_TYPE (retval_rhs)) == BLKmode
2928 && GET_CODE (DECL_RTL (DECL_RESULT (current_function_decl))) == REG)
2929 {
2930 int i, bitpos, xbitpos;
2931 int big_endian_correction = 0;
2932 int bytes = int_size_in_bytes (TREE_TYPE (retval_rhs));
2933 int n_regs = (bytes + UNITS_PER_WORD - 1) / UNITS_PER_WORD;
2934 int bitsize = MIN (TYPE_ALIGN (TREE_TYPE (retval_rhs)),
2935 (unsigned int)BITS_PER_WORD);
2936 rtx *result_pseudos = (rtx *) alloca (sizeof (rtx) * n_regs);
2937 rtx result_reg, src = NULL_RTX, dst = NULL_RTX;
2938 rtx result_val = expand_expr (retval_rhs, NULL_RTX, VOIDmode, 0);
2939 enum machine_mode tmpmode, result_reg_mode;
2940
2941 /* Structures whose size is not a multiple of a word are aligned
2942 to the least significant byte (to the right). On a BYTES_BIG_ENDIAN
2943 machine, this means we must skip the empty high order bytes when
2944 calculating the bit offset. */
2945 if (BYTES_BIG_ENDIAN && bytes % UNITS_PER_WORD)
2946 big_endian_correction = (BITS_PER_WORD - ((bytes % UNITS_PER_WORD)
2947 * BITS_PER_UNIT));
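      /* A worked example with assumed 32-bit words: a 6-byte structure on
	 a big-endian target gives big_endian_correction
	 = 32 - (6 % 4) * 8 = 16, so the first destination word is filled
	 starting at bit 16 rather than bit 0.  */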
2948
2949 /* Copy the structure BITSIZE bits at a time. */
2950 for (bitpos = 0, xbitpos = big_endian_correction;
2951 bitpos < bytes * BITS_PER_UNIT;
2952 bitpos += bitsize, xbitpos += bitsize)
2953 {
2954 /* We need a new destination pseudo each time xbitpos is
2955 on a word boundary and when xbitpos == big_endian_correction
2956 (the first time through). */
2957 if (xbitpos % BITS_PER_WORD == 0
2958 || xbitpos == big_endian_correction)
2959 {
2960 /* Generate an appropriate register. */
2961 dst = gen_reg_rtx (word_mode);
2962 result_pseudos[xbitpos / BITS_PER_WORD] = dst;
2963
2964 /* Clobber the destination before we move anything into it. */
2965 emit_insn (gen_rtx_CLOBBER (VOIDmode, dst));
2966 }
2967
2968 /* We need a new source operand each time bitpos is on a word
2969 boundary. */
2970 if (bitpos % BITS_PER_WORD == 0)
2971 src = operand_subword_force (result_val,
2972 bitpos / BITS_PER_WORD,
2973 BLKmode);
2974
2975 /* Use bitpos for the source extraction (left justified) and
2976 xbitpos for the destination store (right justified). */
2977 store_bit_field (dst, bitsize, xbitpos % BITS_PER_WORD, word_mode,
2978 extract_bit_field (src, bitsize,
2979 bitpos % BITS_PER_WORD, 1,
2980 NULL_RTX, word_mode,
2981 word_mode,
2982 bitsize / BITS_PER_UNIT,
2983 BITS_PER_WORD),
2984 bitsize / BITS_PER_UNIT, BITS_PER_WORD);
2985 }
2986
2987 /* Find the smallest integer mode large enough to hold the
2988 entire structure and use that mode instead of BLKmode
2989 on the USE insn for the return register. */
2990 bytes = int_size_in_bytes (TREE_TYPE (retval_rhs));
2991 for (tmpmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2992 tmpmode != VOIDmode;
2993 tmpmode = GET_MODE_WIDER_MODE (tmpmode))
2994 {
2995 /* Have we found a large enough mode? */
2996 if (GET_MODE_SIZE (tmpmode) >= bytes)
2997 break;
2998 }
2999
3000 /* No suitable mode found. */
3001 if (tmpmode == VOIDmode)
3002 abort ();
3003
3004 PUT_MODE (DECL_RTL (DECL_RESULT (current_function_decl)), tmpmode);
3005
3006 if (GET_MODE_SIZE (tmpmode) < GET_MODE_SIZE (word_mode))
3007 result_reg_mode = word_mode;
3008 else
3009 result_reg_mode = tmpmode;
3010 result_reg = gen_reg_rtx (result_reg_mode);
3011
3012 emit_queue ();
3013 for (i = 0; i < n_regs; i++)
3014 emit_move_insn (operand_subword (result_reg, i, 0, result_reg_mode),
3015 result_pseudos[i]);
3016
3017 if (tmpmode != result_reg_mode)
3018 result_reg = gen_lowpart (tmpmode, result_reg);
3019
3020 expand_value_return (result_reg);
3021 }
3022 else if (cleanups
3023 && retval_rhs != 0
3024 && TREE_TYPE (retval_rhs) != void_type_node
3025 && GET_CODE (DECL_RTL (DECL_RESULT (current_function_decl))) == REG)
3026 {
3027 /* Calculate the return value into a pseudo reg. */
3028 val = gen_reg_rtx (DECL_MODE (DECL_RESULT (current_function_decl)));
3029 val = expand_expr (retval_rhs, val, GET_MODE (val), 0);
3030 val = force_not_mem (val);
3031 emit_queue ();
3032 /* Return the calculated value, doing cleanups first. */
3033 expand_value_return (val);
3034 }
3035 else
3036 {
3037 /* No cleanups or no hard reg used;
3038 calculate value into hard return reg. */
3039 expand_expr (retval, const0_rtx, VOIDmode, 0);
3040 emit_queue ();
3041 expand_value_return (DECL_RTL (DECL_RESULT (current_function_decl)));
3042 }
3043 }
3044
3045 /* Return 1 if the end of the generated RTX is not a barrier.
3046 This means code already compiled can drop through. */
3047
3048 int
3049 drop_through_at_end_p ()
3050 {
3051 rtx insn = get_last_insn ();
3052 while (insn && GET_CODE (insn) == NOTE)
3053 insn = PREV_INSN (insn);
3054 return insn && GET_CODE (insn) != BARRIER;
3055 }
3056 \f
3057 /* Test CALL_EXPR to determine if it is a potential tail recursion call
3058 and emit code to optimize the tail recursion. LAST_INSN indicates where
3059 to place the jump to the tail recursion label. Return TRUE if the
3060 call was optimized into a goto.
3061
3062 This is only used by expand_return, but expand_call is expected to
3063 use it soon. */
3064
3065 int
3066 optimize_tail_recursion (call_expr, last_insn)
3067 tree call_expr;
3068 rtx last_insn;
3069 {
3070 /* For tail-recursive call to current function,
3071 just jump back to the beginning.
3072 It's unsafe if any auto variable in this function
3073 has its address taken; for simplicity,
3074 require stack frame to be empty. */
3075 if (optimize && call_expr != 0
3076 && frame_offset == 0
3077 && TREE_CODE (call_expr) == CALL_EXPR
3078 && TREE_CODE (TREE_OPERAND (call_expr, 0)) == ADDR_EXPR
3079 && TREE_OPERAND (TREE_OPERAND (call_expr, 0), 0) == current_function_decl
3080 /* Finish checking validity, and if valid emit code
3081 to set the argument variables for the new call. */
3082 && tail_recursion_args (TREE_OPERAND (call_expr, 1),
3083 DECL_ARGUMENTS (current_function_decl)))
3084 {
3085 if (tail_recursion_label == 0)
3086 {
3087 tail_recursion_label = gen_label_rtx ();
3088 emit_label_after (tail_recursion_label,
3089 tail_recursion_reentry);
3090 }
3091 emit_queue ();
3092 expand_goto_internal (NULL_TREE, tail_recursion_label, last_insn);
3093 emit_barrier ();
3094 return 1;
3095 }
3096
3097 return 0;
3098 }
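/* As a hedged illustration, a self-call in return position such as

       int sum (int n, int acc)
       {
         if (n == 0)
           return acc;
         return sum (n - 1, acc + n);
       }

   is a candidate here (provided the frame is empty and no formal needs to
   be addressable): the new argument values are computed from the old
   formals, stored back into the formals by tail_recursion_args, and
   control jumps to tail_recursion_label instead of making a real call.  */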
3099
3100 /* Emit code to alter this function's formal parms for a tail-recursive call.
3101 ACTUALS is a list of actual parameter expressions (chain of TREE_LISTs).
3102 FORMALS is the chain of decls of formals.
3103 Return 1 if this can be done;
3104 otherwise return 0 and do not emit any code. */
3105
3106 static int
3107 tail_recursion_args (actuals, formals)
3108 tree actuals, formals;
3109 {
3110 register tree a = actuals, f = formals;
3111 register int i;
3112 register rtx *argvec;
3113
3114 /* Check that number and types of actuals are compatible
3115 with the formals. This is not always true in valid C code.
3116 Also check that no formal needs to be addressable
3117 and that all formals are scalars. */
3118
3119 /* Also count the args. */
3120
3121 for (a = actuals, f = formals, i = 0; a && f; a = TREE_CHAIN (a), f = TREE_CHAIN (f), i++)
3122 {
3123 if (TYPE_MAIN_VARIANT (TREE_TYPE (TREE_VALUE (a)))
3124 != TYPE_MAIN_VARIANT (TREE_TYPE (f)))
3125 return 0;
3126 if (GET_CODE (DECL_RTL (f)) != REG || DECL_MODE (f) == BLKmode)
3127 return 0;
3128 }
3129 if (a != 0 || f != 0)
3130 return 0;
3131
3132 /* Compute all the actuals. */
3133
3134 argvec = (rtx *) alloca (i * sizeof (rtx));
3135
3136 for (a = actuals, i = 0; a; a = TREE_CHAIN (a), i++)
3137 argvec[i] = expand_expr (TREE_VALUE (a), NULL_RTX, VOIDmode, 0);
3138
3139 /* Find which actual values refer to current values of previous formals.
3140 Copy each of them now, before any formal is changed. */
3141
3142 for (a = actuals, i = 0; a; a = TREE_CHAIN (a), i++)
3143 {
3144 int copy = 0;
3145 register int j;
3146 for (f = formals, j = 0; j < i; f = TREE_CHAIN (f), j++)
3147 if (reg_mentioned_p (DECL_RTL (f), argvec[i]))
3148 { copy = 1; break; }
3149 if (copy)
3150 argvec[i] = copy_to_reg (argvec[i]);
3151 }
3152
3153 /* Store the values of the actuals into the formals. */
3154
3155 for (f = formals, a = actuals, i = 0; f;
3156 f = TREE_CHAIN (f), a = TREE_CHAIN (a), i++)
3157 {
3158 if (GET_MODE (DECL_RTL (f)) == GET_MODE (argvec[i]))
3159 emit_move_insn (DECL_RTL (f), argvec[i]);
3160 else
3161 convert_move (DECL_RTL (f), argvec[i],
3162 TREE_UNSIGNED (TREE_TYPE (TREE_VALUE (a))));
3163 }
3164
3165 free_temp_slots ();
3166 return 1;
3167 }
3168 \f
3169 /* Generate the RTL code for entering a binding contour.
3170 The variables are declared one by one, by calls to `expand_decl'.
3171
3172 EXIT_FLAG is nonzero if this construct should be visible to
3173 `exit_something'. */
3174
3175 void
3176 expand_start_bindings (exit_flag)
3177 int exit_flag;
3178 {
3179 struct nesting *thisblock = ALLOC_NESTING ();
3180 rtx note = emit_note (NULL_PTR, NOTE_INSN_BLOCK_BEG);
3181
3182 /* Make an entry on block_stack for the block we are entering. */
3183
3184 thisblock->next = block_stack;
3185 thisblock->all = nesting_stack;
3186 thisblock->depth = ++nesting_depth;
3187 thisblock->data.block.stack_level = 0;
3188 thisblock->data.block.cleanups = 0;
3189 thisblock->data.block.n_function_calls = 0;
3190 thisblock->data.block.exception_region = 0;
3191 thisblock->data.block.block_target_temp_slot_level = target_temp_slot_level;
3192
3193 thisblock->data.block.conditional_code = 0;
3194 thisblock->data.block.last_unconditional_cleanup = note;
3195 thisblock->data.block.cleanup_ptr = &thisblock->data.block.cleanups;
3196
3197 if (block_stack
3198 && !(block_stack->data.block.cleanups == NULL_TREE
3199 && block_stack->data.block.outer_cleanups == NULL_TREE))
3200 thisblock->data.block.outer_cleanups
3201 = tree_cons (NULL_TREE, block_stack->data.block.cleanups,
3202 block_stack->data.block.outer_cleanups);
3203 else
3204 thisblock->data.block.outer_cleanups = 0;
3205 thisblock->data.block.label_chain = 0;
3206 thisblock->data.block.innermost_stack_block = stack_block_stack;
3207 thisblock->data.block.first_insn = note;
3208 thisblock->data.block.block_start_count = ++current_block_start_count;
3209 thisblock->exit_label = exit_flag ? gen_label_rtx () : 0;
3210 block_stack = thisblock;
3211 nesting_stack = thisblock;
3212
3213 /* Make a new level for allocating stack slots. */
3214 push_temp_slots ();
3215 }
3216
3217 /* Specify the scope of temporaries created by TARGET_EXPRs. Similar
3218 to CLEANUP_POINT_EXPR, but handles cases when a series of calls to
3219 expand_expr are made. After we end the region, we know that all
3220 space for all temporaries that were created by TARGET_EXPRs will be
3221 destroyed and their space freed for reuse. */
3222
3223 void
3224 expand_start_target_temps ()
3225 {
3226 /* This is so that even if the result is preserved, the space
3227 allocated will be freed, as we know that it is no longer in use. */
3228 push_temp_slots ();
3229
3230 /* Start a new binding layer that will keep track of all cleanup
3231 actions to be performed. */
3232 expand_start_bindings (0);
3233
3234 target_temp_slot_level = temp_slot_level;
3235 }
3236
3237 void
3238 expand_end_target_temps ()
3239 {
3240 expand_end_bindings (NULL_TREE, 0, 0);
3241
3242 /* This is so that even if the result is preserved, the space
3243 allocated will be freed, as we know that it is no longer in use. */
3244 pop_temp_slots ();
3245 }
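/* A sketch of the intended pairing (assumed usage): a caller brackets the
   expansion of a full expression with

       expand_start_target_temps ();
       expand_expr_stmt (expr);
       expand_end_target_temps ();

   so that any temporaries created for TARGET_EXPRs while expanding EXPR
   are cleaned up and their stack slots released when the statement
   ends.  */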
3246
3247 /* Mark top block of block_stack as an implicit binding for an
3248 exception region. This is used to prevent infinite recursion when
3249 ending a binding with expand_end_bindings. It is only ever called
3250 by expand_eh_region_start, as that it the only way to create a
3251 block stack for a exception region. */
3252
3253 void
3254 mark_block_as_eh_region ()
3255 {
3256 block_stack->data.block.exception_region = 1;
3257 if (block_stack->next
3258 && block_stack->next->data.block.conditional_code)
3259 {
3260 block_stack->data.block.conditional_code
3261 = block_stack->next->data.block.conditional_code;
3262 block_stack->data.block.last_unconditional_cleanup
3263 = block_stack->next->data.block.last_unconditional_cleanup;
3264 block_stack->data.block.cleanup_ptr
3265 = block_stack->next->data.block.cleanup_ptr;
3266 }
3267 }
3268
3269 /* True if we are currently emitting insns in an area of output code
3270 that is controlled by a conditional expression. This is used by
3271 the cleanup handling code to generate conditional cleanup actions. */
3272
3273 int
3274 conditional_context ()
3275 {
3276 return block_stack && block_stack->data.block.conditional_code;
3277 }
3278
3279 /* Mark top block of block_stack as not for an implicit binding for an
3280 exception region. This is only ever done by expand_eh_region_end
3281 to let expand_end_bindings know that it is being called explicitly
3282 to end the binding layer for just the binding layer associated with
3283 the exception region; otherwise expand_end_bindings would try to
3284 end all implicit binding layers for exception regions, and then
3285 one normal binding layer. */
3286
3287 void
3288 mark_block_as_not_eh_region ()
3289 {
3290 block_stack->data.block.exception_region = 0;
3291 }
3292
3293 /* True if the top block of block_stack was marked as for an exception
3294 region by mark_block_as_eh_region. */
3295
3296 int
3297 is_eh_region ()
3298 {
3299 return (current_function && block_stack
3300 && block_stack->data.block.exception_region);
3301 }
3302
3303 /* Given a pointer to a BLOCK node, save a pointer to the most recently
3304 generated NOTE_INSN_BLOCK_END in the BLOCK_END_NOTE field of the given
3305 BLOCK node. */
3306
3307 void
3308 remember_end_note (block)
3309 register tree block;
3310 {
3311 BLOCK_END_NOTE (block) = last_block_end_note;
3312 last_block_end_note = NULL_RTX;
3313 }
3314
3315 /* Emit a handler label for a nonlocal goto handler.
3316 Also emit code to store the handler label in SLOT before BEFORE_INSN. */
3317
3318 static rtx
3319 expand_nl_handler_label (slot, before_insn)
3320 rtx slot, before_insn;
3321 {
3322 rtx insns;
3323 rtx handler_label = gen_label_rtx ();
3324
3325 /* Don't let jump_optimize delete the handler. */
3326 LABEL_PRESERVE_P (handler_label) = 1;
3327
3328 start_sequence ();
3329 emit_move_insn (slot, gen_rtx_LABEL_REF (Pmode, handler_label));
3330 insns = get_insns ();
3331 end_sequence ();
3332 emit_insns_before (insns, before_insn);
3333
3334 emit_label (handler_label);
3335
3336 return handler_label;
3337 }
3338
3339 /* Emit code to restore vital registers at the beginning of a nonlocal goto
3340 handler. */
3341 static void
3342 expand_nl_goto_receiver ()
3343 {
3344 #ifdef HAVE_nonlocal_goto
3345 if (! HAVE_nonlocal_goto)
3346 #endif
3347 /* First adjust our frame pointer to its actual value. It was
3348 previously set to the start of the virtual area corresponding to
3349 the stacked variables when we branched here and now needs to be
3350 adjusted to the actual hardware fp value.
3351
3352 Assignments to virtual registers are converted by
3353 instantiate_virtual_regs into the corresponding assignment
3354 to the underlying register (fp in this case) that makes
3355 the original assignment true.
3356 So the following insn will actually be
3357 decrementing fp by STARTING_FRAME_OFFSET. */
3358 emit_move_insn (virtual_stack_vars_rtx, hard_frame_pointer_rtx);
3359
3360 #if ARG_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
3361 if (fixed_regs[ARG_POINTER_REGNUM])
3362 {
3363 #ifdef ELIMINABLE_REGS
3364 /* If the argument pointer can be eliminated in favor of the
3365 frame pointer, we don't need to restore it. We assume here
3366 that if such an elimination is present, it can always be used.
3367 This is the case on all known machines; if we don't make this
3368 assumption, we do unnecessary saving on many machines. */
3369 static struct elims {int from, to;} elim_regs[] = ELIMINABLE_REGS;
3370 size_t i;
3371
3372 for (i = 0; i < sizeof elim_regs / sizeof elim_regs[0]; i++)
3373 if (elim_regs[i].from == ARG_POINTER_REGNUM
3374 && elim_regs[i].to == HARD_FRAME_POINTER_REGNUM)
3375 break;
3376
3377 if (i == sizeof elim_regs / sizeof elim_regs [0])
3378 #endif
3379 {
3380 /* Now restore our arg pointer from the address at which it
3381 was saved in our stack frame.
3382 If there hasn't been space allocated for it yet, make
3383 some now. */
3384 if (arg_pointer_save_area == 0)
3385 arg_pointer_save_area
3386 = assign_stack_local (Pmode, GET_MODE_SIZE (Pmode), 0);
3387 emit_move_insn (virtual_incoming_args_rtx,
3388 /* We need a pseudo here, or else
3389 instantiate_virtual_regs_1 complains. */
3390 copy_to_reg (arg_pointer_save_area));
3391 }
3392 }
3393 #endif
3394
3395 #ifdef HAVE_nonlocal_goto_receiver
3396 if (HAVE_nonlocal_goto_receiver)
3397 emit_insn (gen_nonlocal_goto_receiver ());
3398 #endif
3399 }
3400
3401 /* Make handlers for nonlocal gotos taking place in the function calls in
3402 block THISBLOCK. */
3403
3404 static void
3405 expand_nl_goto_receivers (thisblock)
3406 struct nesting *thisblock;
3407 {
3408 tree link;
3409 rtx afterward = gen_label_rtx ();
3410 rtx insns, slot;
3411 rtx label_list;
3412 int any_invalid;
3413
3414 /* Record the handler address in the stack slot for that purpose,
3415 during this block, saving and restoring the outer value. */
3416 if (thisblock->next != 0)
3417 for (slot = nonlocal_goto_handler_slots; slot; slot = XEXP (slot, 1))
3418 {
3419 rtx save_receiver = gen_reg_rtx (Pmode);
3420 emit_move_insn (XEXP (slot, 0), save_receiver);
3421
3422 start_sequence ();
3423 emit_move_insn (save_receiver, XEXP (slot, 0));
3424 insns = get_insns ();
3425 end_sequence ();
3426 emit_insns_before (insns, thisblock->data.block.first_insn);
3427 }
3428
3429 /* Jump around the handlers; they run only when specially invoked. */
3430 emit_jump (afterward);
3431
3432 /* Make a separate handler for each label. */
3433 link = nonlocal_labels;
3434 slot = nonlocal_goto_handler_slots;
3435 label_list = NULL_RTX;
3436 for (; link; link = TREE_CHAIN (link), slot = XEXP (slot, 1))
3437 /* Skip any labels we shouldn't be able to jump to from here;
3438 we generate one special handler for all of them below which just calls
3439 abort. */
3440 if (! DECL_TOO_LATE (TREE_VALUE (link)))
3441 {
3442 rtx lab;
3443 lab = expand_nl_handler_label (XEXP (slot, 0),
3444 thisblock->data.block.first_insn);
3445 label_list = gen_rtx_EXPR_LIST (VOIDmode, lab, label_list);
3446
3447 expand_nl_goto_receiver ();
3448
3449 /* Jump to the "real" nonlocal label. */
3450 expand_goto (TREE_VALUE (link));
3451 }
3452
3453 /* A second pass over all nonlocal labels; this time we handle those
3454 we should not be able to jump to at this point. */
3455 link = nonlocal_labels;
3456 slot = nonlocal_goto_handler_slots;
3457 any_invalid = 0;
3458 for (; link; link = TREE_CHAIN (link), slot = XEXP (slot, 1))
3459 if (DECL_TOO_LATE (TREE_VALUE (link)))
3460 {
3461 rtx lab;
3462 lab = expand_nl_handler_label (XEXP (slot, 0),
3463 thisblock->data.block.first_insn);
3464 label_list = gen_rtx_EXPR_LIST (VOIDmode, lab, label_list);
3465 any_invalid = 1;
3466 }
3467
3468 if (any_invalid)
3469 {
3470 expand_nl_goto_receiver ();
3471 emit_library_call (gen_rtx_SYMBOL_REF (Pmode, "abort"), 0,
3472 VOIDmode, 0);
3473 emit_barrier ();
3474 }
3475
3476 nonlocal_goto_handler_labels = label_list;
3477 emit_label (afterward);
3478 }
3479
3480 /* Generate RTL code to terminate a binding contour.
3481
3482 VARS is the chain of VAR_DECL nodes for the variables bound in this
3483 contour. There may actually be other nodes in this chain, but any
3484 nodes other than VAR_DECLS are ignored.
3485
3486 MARK_ENDS is nonzero if we should put a note at the beginning
3487 and end of this binding contour.
3488
3489 DONT_JUMP_IN is nonzero if it is not valid to jump into this contour.
3490 (That is true automatically if the contour has a saved stack level.) */
3491
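/* Illustrative sketch (not from the original source): a front end
   typically brackets each brace-enclosed block with a matching pair of
   calls, roughly

       expand_start_bindings (0);
       ... expand the block's declarations and statements ...
       expand_end_bindings (getdecls (), 1, 0);

   where the exact arguments depend on the language front end.  */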
3492 void
3493 expand_end_bindings (vars, mark_ends, dont_jump_in)
3494 tree vars;
3495 int mark_ends;
3496 int dont_jump_in;
3497 {
3498 register struct nesting *thisblock;
3499 register tree decl;
3500
3501 while (block_stack->data.block.exception_region)
3502 {
3503 /* Because we don't need or want a new temporary level and
3504 because we didn't create one in expand_eh_region_start,
3505 create a fake one now to avoid removing one in
3506 expand_end_bindings. */
3507 push_temp_slots ();
3508
3509 block_stack->data.block.exception_region = 0;
3510
3511 expand_end_bindings (NULL_TREE, 0, 0);
3512 }
3513
3514 /* Since expand_eh_region_start does an expand_start_bindings, we
3515 have to first end all the bindings that were created by
3516 expand_eh_region_start. */
3517
3518 thisblock = block_stack;
3519
3520 if (warn_unused)
3521 for (decl = vars; decl; decl = TREE_CHAIN (decl))
3522 if (TREE_CODE (decl) == VAR_DECL
3523 && ! TREE_USED (decl)
3524 && ! DECL_IN_SYSTEM_HEADER (decl)
3525 && DECL_NAME (decl) && ! DECL_ARTIFICIAL (decl))
3526 warning_with_decl (decl, "unused variable `%s'");
3527
3528 if (thisblock->exit_label)
3529 {
3530 do_pending_stack_adjust ();
3531 emit_label (thisblock->exit_label);
3532 }
3533
3534 /* If necessary, make handlers for nonlocal gotos taking
3535 place in the function calls in this block. */
3536 if (function_call_count != thisblock->data.block.n_function_calls
3537 && nonlocal_labels
3538 /* Make handler for outermost block
3539 if there were any nonlocal gotos to this function. */
3540 && (thisblock->next == 0 ? current_function_has_nonlocal_label
3541 /* Make handler for inner block if it has something
3542 special to do when you jump out of it. */
3543 : (thisblock->data.block.cleanups != 0
3544 || thisblock->data.block.stack_level != 0)))
3545 expand_nl_goto_receivers (thisblock);
3546
3547 /* Don't allow jumping into a block that has a stack level.
3548 Cleanups are allowed, though. */
3549 if (dont_jump_in
3550 || thisblock->data.block.stack_level != 0)
3551 {
3552 struct label_chain *chain;
3553
3554 /* Any labels in this block are no longer valid to go to.
3555 Mark them to cause an error message. */
3556 for (chain = thisblock->data.block.label_chain; chain; chain = chain->next)
3557 {
3558 DECL_TOO_LATE (chain->label) = 1;
3559 /* If any goto without a fixup came to this label,
3560 that must be an error, because gotos without fixups
3561 come from outside all saved stack-levels. */
3562 if (TREE_ADDRESSABLE (chain->label))
3563 error_with_decl (chain->label,
3564 "label `%s' used before containing binding contour");
3565 }
3566 }
3567
3568 /* Restore stack level in effect before the block
3569 (only if variable-size objects allocated). */
3570 /* Perform any cleanups associated with the block. */
3571
3572 if (thisblock->data.block.stack_level != 0
3573 || thisblock->data.block.cleanups != 0)
3574 {
3575 /* Only clean up here if this point can actually be reached. */
3576 int reachable = GET_CODE (get_last_insn ()) != BARRIER;
3577
3578 /* Don't let cleanups affect ({...}) constructs. */
3579 int old_expr_stmts_for_value = expr_stmts_for_value;
3580 rtx old_last_expr_value = last_expr_value;
3581 tree old_last_expr_type = last_expr_type;
3582 expr_stmts_for_value = 0;
3583
3584 /* Do the cleanups. */
3585 expand_cleanups (thisblock->data.block.cleanups, NULL_TREE, 0, reachable);
3586 if (reachable)
3587 do_pending_stack_adjust ();
3588
3589 expr_stmts_for_value = old_expr_stmts_for_value;
3590 last_expr_value = old_last_expr_value;
3591 last_expr_type = old_last_expr_type;
3592
3593 /* Restore the stack level. */
3594
3595 if (reachable && thisblock->data.block.stack_level != 0)
3596 {
3597 emit_stack_restore (thisblock->next ? SAVE_BLOCK : SAVE_FUNCTION,
3598 thisblock->data.block.stack_level, NULL_RTX);
3599 if (nonlocal_goto_handler_slots != 0)
3600 emit_stack_save (SAVE_NONLOCAL, &nonlocal_goto_stack_level,
3601 NULL_RTX);
3602 }
3603
3604 /* Any gotos out of this block must also do these things.
3605 Also report any gotos with fixups that came to labels in this
3606 level. */
3607 fixup_gotos (thisblock,
3608 thisblock->data.block.stack_level,
3609 thisblock->data.block.cleanups,
3610 thisblock->data.block.first_insn,
3611 dont_jump_in);
3612 }
3613
3614 /* Mark the beginning and end of the scope if requested.
3615 We do this now, after running cleanups on the variables
3616 just going out of scope, so they are in scope for their cleanups. */
3617
3618 if (mark_ends)
3619 last_block_end_note = emit_note (NULL_PTR, NOTE_INSN_BLOCK_END);
3620 else
3621 /* Get rid of the beginning-mark if we don't make an end-mark. */
3622 NOTE_LINE_NUMBER (thisblock->data.block.first_insn) = NOTE_INSN_DELETED;
3623
3624 /* If doing stupid register allocation, make sure lives of all
3625 register variables declared here extend through the end of the scope. */
3626
3627 if (obey_regdecls)
3628 for (decl = vars; decl; decl = TREE_CHAIN (decl))
3629 if (TREE_CODE (decl) == VAR_DECL && DECL_RTL (decl))
3630 use_variable (DECL_RTL (decl));
3631
3632 /* Restore the temporary level of TARGET_EXPRs. */
3633 target_temp_slot_level = thisblock->data.block.block_target_temp_slot_level;
3634
3635 /* Restore block_stack level for containing block. */
3636
3637 stack_block_stack = thisblock->data.block.innermost_stack_block;
3638 POPSTACK (block_stack);
3639
3640 /* Pop the stack slot nesting and free any slots at this level. */
3641 pop_temp_slots ();
3642 }
3643 \f
3644 /* Generate RTL for the automatic variable declaration DECL.
3645 (Other kinds of declarations are simply ignored if seen here.) */
3646
3647 void
3648 expand_decl (decl)
3649 register tree decl;
3650 {
3651 struct nesting *thisblock;
3652 tree type;
3653
3654 type = TREE_TYPE (decl);
3655
3656 /* Only automatic variables need any expansion done.
3657 Static and external variables, and external functions,
3658 will be handled by `assemble_variable' (called from finish_decl).
3659 TYPE_DECL and CONST_DECL require nothing.
3660 PARM_DECLs are handled in `assign_parms'. */
3661
3662 if (TREE_CODE (decl) != VAR_DECL)
3663 return;
3664 if (TREE_STATIC (decl) || DECL_EXTERNAL (decl))
3665 return;
3666
3667 thisblock = block_stack;
3668
3669 /* Create the RTL representation for the variable. */
3670
3671 if (type == error_mark_node)
3672 DECL_RTL (decl) = gen_rtx_MEM (BLKmode, const0_rtx);
3673 else if (DECL_SIZE (decl) == 0)
3674 /* Variable with incomplete type. */
3675 {
3676 if (DECL_INITIAL (decl) == 0)
3677 /* Error message was already done; now avoid a crash. */
3678 DECL_RTL (decl) = assign_stack_temp (DECL_MODE (decl), 0, 1);
3679 else
3680 /* An initializer is going to decide the size of this array.
3681 Until we know the size, represent its address with a reg. */
3682 DECL_RTL (decl) = gen_rtx_MEM (BLKmode, gen_reg_rtx (Pmode));
3683 MEM_SET_IN_STRUCT_P (DECL_RTL (decl), AGGREGATE_TYPE_P (type));
3684 }
3685 else if (DECL_MODE (decl) != BLKmode
3686 /* If -ffloat-store, don't put explicit float vars
3687 into regs. */
3688 && !(flag_float_store
3689 && TREE_CODE (type) == REAL_TYPE)
3690 && ! TREE_THIS_VOLATILE (decl)
3691 && ! TREE_ADDRESSABLE (decl)
3692 && (DECL_REGISTER (decl) || ! obey_regdecls)
3693 /* If -fcheck-memory-usage, check all variables. */
3694 && ! current_function_check_memory_usage)
3695 {
3696 /* Automatic variable that can go in a register. */
3697 int unsignedp = TREE_UNSIGNED (type);
3698 enum machine_mode reg_mode
3699 = promote_mode (type, DECL_MODE (decl), &unsignedp, 0);
3700
3701 DECL_RTL (decl) = gen_reg_rtx (reg_mode);
3702 mark_user_reg (DECL_RTL (decl));
3703
3704 if (POINTER_TYPE_P (type))
3705 mark_reg_pointer (DECL_RTL (decl),
3706 (TYPE_ALIGN (TREE_TYPE (TREE_TYPE (decl)))
3707 / BITS_PER_UNIT));
3708 }
3709
3710 else if (TREE_CODE (DECL_SIZE (decl)) == INTEGER_CST
3711 && ! (flag_stack_check && ! STACK_CHECK_BUILTIN
3712 && (TREE_INT_CST_HIGH (DECL_SIZE (decl)) != 0
3713 || (TREE_INT_CST_LOW (DECL_SIZE (decl))
3714 > STACK_CHECK_MAX_VAR_SIZE * BITS_PER_UNIT))))
3715 {
3716 /* Variable of fixed size that goes on the stack. */
3717 rtx oldaddr = 0;
3718 rtx addr;
3719
3720 /* If we previously made RTL for this decl, it must be an array
3721 whose size was determined by the initializer.
3722 The old address was a register; set that register now
3723 to the proper address. */
3724 if (DECL_RTL (decl) != 0)
3725 {
3726 if (GET_CODE (DECL_RTL (decl)) != MEM
3727 || GET_CODE (XEXP (DECL_RTL (decl), 0)) != REG)
3728 abort ();
3729 oldaddr = XEXP (DECL_RTL (decl), 0);
3730 }
3731
3732 DECL_RTL (decl) = assign_temp (TREE_TYPE (decl), 1, 1, 1);
3733 MEM_SET_IN_STRUCT_P (DECL_RTL (decl),
3734 AGGREGATE_TYPE_P (TREE_TYPE (decl)));
3735
3736 /* Set alignment we actually gave this decl. */
3737 DECL_ALIGN (decl) = (DECL_MODE (decl) == BLKmode ? BIGGEST_ALIGNMENT
3738 : GET_MODE_BITSIZE (DECL_MODE (decl)));
3739
3740 if (oldaddr)
3741 {
3742 addr = force_operand (XEXP (DECL_RTL (decl), 0), oldaddr);
3743 if (addr != oldaddr)
3744 emit_move_insn (oldaddr, addr);
3745 }
3746
3747 /* If this is a memory ref that contains aggregate components,
3748 mark it as such for cse and loop optimize. */
3749 MEM_SET_IN_STRUCT_P (DECL_RTL (decl),
3750 AGGREGATE_TYPE_P (TREE_TYPE (decl)));
3751 #if 0
3752 /* If this is in memory because of -ffloat-store,
3753 set the volatile bit, to prevent optimizations from
3754 undoing the effects. */
3755 if (flag_float_store && TREE_CODE (type) == REAL_TYPE)
3756 MEM_VOLATILE_P (DECL_RTL (decl)) = 1;
3757 #endif
3758
3759 MEM_ALIAS_SET (DECL_RTL (decl)) = get_alias_set (decl);
3760 }
3761 else
3762 /* Dynamic-size object: must push space on the stack. */
3763 {
3764 rtx address, size;
3765
3766 /* Record the stack pointer on entry to the block, if we have
3767 not already done so. */
3768 if (thisblock->data.block.stack_level == 0)
3769 {
3770 do_pending_stack_adjust ();
3771 emit_stack_save (thisblock->next ? SAVE_BLOCK : SAVE_FUNCTION,
3772 &thisblock->data.block.stack_level,
3773 thisblock->data.block.first_insn);
3774 stack_block_stack = thisblock;
3775 }
3776
3777 /* Compute the variable's size, in bytes. */
3778 size = expand_expr (size_binop (CEIL_DIV_EXPR,
3779 DECL_SIZE (decl),
3780 size_int (BITS_PER_UNIT)),
3781 NULL_RTX, VOIDmode, 0);
3782 free_temp_slots ();
3783
3784 /* Allocate space on the stack for the variable. Note that
3785 DECL_ALIGN says how the variable is to be aligned and we
3786 cannot use it to conclude anything about the alignment of
3787 the size. */
3788 address = allocate_dynamic_stack_space (size, NULL_RTX,
3789 TYPE_ALIGN (TREE_TYPE (decl)));
3790
3791 /* Reference the variable indirectly through that rtx. */
3792 DECL_RTL (decl) = gen_rtx_MEM (DECL_MODE (decl), address);
3793
3794 /* If this is a memory ref that contains aggregate components,
3795 mark it as such for cse and loop optimize. */
3796 MEM_SET_IN_STRUCT_P (DECL_RTL (decl),
3797 AGGREGATE_TYPE_P (TREE_TYPE (decl)));
3798
3799 /* Indicate the alignment we actually gave this variable. */
3800 #ifdef STACK_BOUNDARY
3801 DECL_ALIGN (decl) = STACK_BOUNDARY;
3802 #else
3803 DECL_ALIGN (decl) = BIGGEST_ALIGNMENT;
3804 #endif
3805 }
3806
3807 if (TREE_THIS_VOLATILE (decl))
3808 MEM_VOLATILE_P (DECL_RTL (decl)) = 1;
3809 #if 0 /* A variable is not necessarily unchanging
3810 just because it is const. RTX_UNCHANGING_P
3811 means no change in the function,
3812 not merely no change in the variable's scope.
3813 It is correct to set RTX_UNCHANGING_P if the variable's scope
3814 is the whole function. There's no convenient way to test that. */
3815 if (TREE_READONLY (decl))
3816 RTX_UNCHANGING_P (DECL_RTL (decl)) = 1;
3817 #endif
3818
3819 /* If doing stupid register allocation, make sure life of any
3820 register variable starts here, at the start of its scope. */
3821
3822 if (obey_regdecls)
3823 use_variable (DECL_RTL (decl));
3824 }
3825
3826
3827 \f
3828 /* Emit code to perform the initialization of a declaration DECL. */
3829
3830 void
3831 expand_decl_init (decl)
3832 tree decl;
3833 {
3834 int was_used = TREE_USED (decl);
3835
3836 /* If this is a CONST_DECL, we don't have to generate any code, but
3837 if DECL_INITIAL is a constant, call expand_expr to force TREE_CST_RTL
3838 to be set while in the obstack containing the constant. If we don't
3839 do this, we can lose if we have functions nested three deep and the middle
3840 function makes a CONST_DECL whose DECL_INITIAL is a STRING_CST while
3841 the innermost function is the first to expand that STRING_CST. */
3842 if (TREE_CODE (decl) == CONST_DECL)
3843 {
3844 if (DECL_INITIAL (decl) && TREE_CONSTANT (DECL_INITIAL (decl)))
3845 expand_expr (DECL_INITIAL (decl), NULL_RTX, VOIDmode,
3846 EXPAND_INITIALIZER);
3847 return;
3848 }
3849
3850 if (TREE_STATIC (decl))
3851 return;
3852
3853 /* Compute and store the initial value now. */
3854
3855 if (DECL_INITIAL (decl) == error_mark_node)
3856 {
3857 enum tree_code code = TREE_CODE (TREE_TYPE (decl));
3858
3859 if (code == INTEGER_TYPE || code == REAL_TYPE || code == ENUMERAL_TYPE
3860 || code == POINTER_TYPE || code == REFERENCE_TYPE)
3861 expand_assignment (decl, convert (TREE_TYPE (decl), integer_zero_node),
3862 0, 0);
3863 emit_queue ();
3864 }
3865 else if (DECL_INITIAL (decl) && TREE_CODE (DECL_INITIAL (decl)) != TREE_LIST)
3866 {
3867 emit_line_note (DECL_SOURCE_FILE (decl), DECL_SOURCE_LINE (decl));
3868 expand_assignment (decl, DECL_INITIAL (decl), 0, 0);
3869 emit_queue ();
3870 }
3871
3872 /* Don't let the initialization count as "using" the variable. */
3873 TREE_USED (decl) = was_used;
3874
3875 /* Free any temporaries we made while initializing the decl. */
3876 preserve_temp_slots (NULL_RTX);
3877 free_temp_slots ();
3878 }
3879
3880 /* CLEANUP is an expression to be executed at exit from this binding contour;
3881 for example, in C++, it might call the destructor for this variable.
3882
3883 We wrap CLEANUP in an UNSAVE_EXPR node, so that we can expand the
3884 CLEANUP multiple times, and have the correct semantics. This
3885 happens in exception handling, and for gotos, returns, and breaks that
3886 leave the current scope.
3887
3888 If CLEANUP is nonzero and DECL is zero, we record a cleanup
3889 that is not associated with any particular variable. */
3890
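/* Illustrative sketch (not part of the original source): for a
   block-scope object with a destructor, a C++-like front end would
   call roughly

       expand_decl (decl);
       expand_decl_init (decl);
       expand_decl_cleanup (decl, destructor_call);

   where DESTRUCTOR_CALL is a hypothetical tree expression invoking the
   destructor; a return value of 1 means the cleanup was recorded.  */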
3891 int
3892 expand_decl_cleanup (decl, cleanup)
3893 tree decl, cleanup;
3894 {
3895 struct nesting *thisblock;
3896
3897 /* Error if we are not in any block. */
3898 if (current_function == 0 || block_stack == 0)
3899 return 0;
3900
3901 thisblock = block_stack;
3902
3903 /* Record the cleanup if there is one. */
3904
3905 if (cleanup != 0)
3906 {
3907 tree t;
3908 rtx seq;
3909 tree *cleanups = &thisblock->data.block.cleanups;
3910 int cond_context = conditional_context ();
3911
3912 if (cond_context)
3913 {
3914 rtx flag = gen_reg_rtx (word_mode);
3915 rtx set_flag_0;
3916 tree cond;
3917
3918 start_sequence ();
3919 emit_move_insn (flag, const0_rtx);
3920 set_flag_0 = get_insns ();
3921 end_sequence ();
3922
3923 thisblock->data.block.last_unconditional_cleanup
3924 = emit_insns_after (set_flag_0,
3925 thisblock->data.block.last_unconditional_cleanup);
3926
3927 emit_move_insn (flag, const1_rtx);
3928
3929 /* All cleanups must be on the function_obstack. */
3930 push_obstacks_nochange ();
3931 resume_temporary_allocation ();
3932
3933 cond = build_decl (VAR_DECL, NULL_TREE, type_for_mode (word_mode, 1));
3934 DECL_RTL (cond) = flag;
3935
3936 /* Conditionalize the cleanup. */
3937 cleanup = build (COND_EXPR, void_type_node,
3938 truthvalue_conversion (cond),
3939 cleanup, integer_zero_node);
3940 cleanup = fold (cleanup);
3941
3942 pop_obstacks ();
3943
3944 cleanups = thisblock->data.block.cleanup_ptr;
3945 }
3946
3947 /* All cleanups must be on the function_obstack. */
3948 push_obstacks_nochange ();
3949 resume_temporary_allocation ();
3950 cleanup = unsave_expr (cleanup);
3951 pop_obstacks ();
3952
3953 t = *cleanups = temp_tree_cons (decl, cleanup, *cleanups);
3954
3955 if (! cond_context)
3956 /* If this block has a cleanup, it belongs in stack_block_stack. */
3957 stack_block_stack = thisblock;
3958
3959 if (cond_context)
3960 {
3961 start_sequence ();
3962 }
3963
3964 /* If this was optimized so that there is no exception region for the
3965 cleanup, then mark the TREE_LIST node, so that we can later tell
3966 if we need to call expand_eh_region_end. */
3967 if (! using_eh_for_cleanups_p
3968 || expand_eh_region_start_tree (decl, cleanup))
3969 TREE_ADDRESSABLE (t) = 1;
3970 /* If that started a new EH region, we're in a new block. */
3971 thisblock = block_stack;
3972
3973 if (cond_context)
3974 {
3975 seq = get_insns ();
3976 end_sequence ();
3977 if (seq)
3978 thisblock->data.block.last_unconditional_cleanup
3979 = emit_insns_after (seq,
3980 thisblock->data.block.last_unconditional_cleanup);
3981 }
3982 else
3983 {
3984 thisblock->data.block.last_unconditional_cleanup
3985 = get_last_insn ();
3986 thisblock->data.block.cleanup_ptr = &thisblock->data.block.cleanups;
3987 }
3988 }
3989 return 1;
3990 }
3991
3992 /* Like expand_decl_cleanup, but suppress generating an exception handler
3993 to perform the cleanup. */
3994
3995 int
3996 expand_decl_cleanup_no_eh (decl, cleanup)
3997 tree decl, cleanup;
3998 {
3999 int save_eh = using_eh_for_cleanups_p;
4000 int result;
4001
4002 using_eh_for_cleanups_p = 0;
4003 result = expand_decl_cleanup (decl, cleanup);
4004 using_eh_for_cleanups_p = save_eh;
4005
4006 return result;
4007 }
4008
4009 /* Arrange for the top element of the dynamic cleanup chain to be
4010 popped if we exit the current binding contour. DECL is the
4011 associated declaration, if any, otherwise NULL_TREE. If the
4012 current contour is left via an exception, then __sjthrow will pop
4013 the top element off the dynamic cleanup chain. The code that
4014 avoids doing the action we push into the cleanup chain in the
4015 exceptional case is contained in expand_cleanups.
4016
4017 This routine is only used by expand_eh_region_start, and that is
4018 the only way in which an exception region should be started. This
4019 routine is only used when using the setjmp/longjmp codegen method
4020 for exception handling. */
4021
4022 int
4023 expand_dcc_cleanup (decl)
4024 tree decl;
4025 {
4026 struct nesting *thisblock;
4027 tree cleanup;
4028
4029 /* Error if we are not in any block. */
4030 if (current_function == 0 || block_stack == 0)
4031 return 0;
4032 thisblock = block_stack;
4033
4034 /* Record the cleanup for the dynamic handler chain. */
4035
4036 /* All cleanups must be on the function_obstack. */
4037 push_obstacks_nochange ();
4038 resume_temporary_allocation ();
4039 cleanup = make_node (POPDCC_EXPR);
4040 pop_obstacks ();
4041
4042 /* Add the cleanup in a manner similar to expand_decl_cleanup. */
4043 thisblock->data.block.cleanups
4044 = temp_tree_cons (decl, cleanup, thisblock->data.block.cleanups);
4045
4046 /* If this block has a cleanup, it belongs in stack_block_stack. */
4047 stack_block_stack = thisblock;
4048 return 1;
4049 }
4050
4051 /* Arrange for the top element of the dynamic handler chain to be
4052 popped if we exit the current binding contour. DECL is the
4053 associated declaration, if any, otherwise NULL_TREE. If the current
4054 contour is left via an exception, then __sjthrow will pop the top
4055 element off the dynamic handler chain. The code that avoids doing
4056 the action we push into the handler chain in the exceptional case
4057 is contained in expand_cleanups.
4058
4059 This routine is only used by expand_eh_region_start, and that is
4060 the only way in which an exception region should be started. This
4061 routine is only used when using the setjmp/longjmp codegen method
4062 for exception handling. */
4063
4064 int
4065 expand_dhc_cleanup (decl)
4066 tree decl;
4067 {
4068 struct nesting *thisblock;
4069 tree cleanup;
4070
4071 /* Error if we are not in any block. */
4072 if (current_function == 0 || block_stack == 0)
4073 return 0;
4074 thisblock = block_stack;
4075
4076 /* Record the cleanup for the dynamic handler chain. */
4077
4078 /* All cleanups must be on the function_obstack. */
4079 push_obstacks_nochange ();
4080 resume_temporary_allocation ();
4081 cleanup = make_node (POPDHC_EXPR);
4082 pop_obstacks ();
4083
4084 /* Add the cleanup in a manner similar to expand_decl_cleanup. */
4085 thisblock->data.block.cleanups
4086 = temp_tree_cons (decl, cleanup, thisblock->data.block.cleanups);
4087
4088 /* If this block has a cleanup, it belongs in stack_block_stack. */
4089 stack_block_stack = thisblock;
4090 return 1;
4091 }
4092 \f
4093 /* DECL is an anonymous union. CLEANUP is a cleanup for DECL.
4094 DECL_ELTS is the list of elements that belong to DECL's type.
4095 In each, the TREE_VALUE is a VAR_DECL, and the TREE_PURPOSE a cleanup. */
4096
4097 void
4098 expand_anon_union_decl (decl, cleanup, decl_elts)
4099 tree decl, cleanup, decl_elts;
4100 {
4101 struct nesting *thisblock = current_function == 0 ? 0 : block_stack;
4102 rtx x;
4103
4104 expand_decl (decl);
4105 expand_decl_cleanup (decl, cleanup);
4106 x = DECL_RTL (decl);
4107
4108 while (decl_elts)
4109 {
4110 tree decl_elt = TREE_VALUE (decl_elts);
4111 tree cleanup_elt = TREE_PURPOSE (decl_elts);
4112 enum machine_mode mode = TYPE_MODE (TREE_TYPE (decl_elt));
4113
4114 /* Propagate the union's alignment to the elements. */
4115 DECL_ALIGN (decl_elt) = DECL_ALIGN (decl);
4116
4117 /* If the element has BLKmode and the union doesn't, the union is
4118 aligned such that the element doesn't need to have BLKmode, so
4119 change the element's mode to the appropriate one for its size. */
4120 if (mode == BLKmode && DECL_MODE (decl) != BLKmode)
4121 DECL_MODE (decl_elt) = mode
4122 = mode_for_size (TREE_INT_CST_LOW (DECL_SIZE (decl_elt)),
4123 MODE_INT, 1);
4124
4125 /* (SUBREG (MEM ...)) at RTL generation time is invalid, so we
4126 instead create a new MEM rtx with the proper mode. */
4127 if (GET_CODE (x) == MEM)
4128 {
4129 if (mode == GET_MODE (x))
4130 DECL_RTL (decl_elt) = x;
4131 else
4132 {
4133 DECL_RTL (decl_elt) = gen_rtx_MEM (mode, copy_rtx (XEXP (x, 0)));
4134 MEM_COPY_ATTRIBUTES (DECL_RTL (decl_elt), x);
4135 RTX_UNCHANGING_P (DECL_RTL (decl_elt)) = RTX_UNCHANGING_P (x);
4136 }
4137 }
4138 else if (GET_CODE (x) == REG)
4139 {
4140 if (mode == GET_MODE (x))
4141 DECL_RTL (decl_elt) = x;
4142 else
4143 DECL_RTL (decl_elt) = gen_rtx_SUBREG (mode, x, 0);
4144 }
4145 else
4146 abort ();
4147
4148 /* Record the cleanup if there is one. */
4149
4150 if (cleanup != 0)
4151 thisblock->data.block.cleanups
4152 = temp_tree_cons (decl_elt, cleanup_elt,
4153 thisblock->data.block.cleanups);
4154
4155 decl_elts = TREE_CHAIN (decl_elts);
4156 }
4157 }
4158 \f
4159 /* Expand a list of cleanups LIST.
4160 Elements may be expressions or may be nested lists.
4161
4162 If DONT_DO is nonnull, then any list-element
4163 whose TREE_PURPOSE matches DONT_DO is omitted.
4164 This is sometimes used to avoid a cleanup associated with
4165 a value that is being returned out of the scope.
4166
4167 If IN_FIXUP is non-zero, we are generating this cleanup for a fixup
4168 goto and handle protection regions specially in that case.
4169
4170 If REACHABLE, we emit code, otherwise just inform the exception handling
4171 code about this finalization. */
4172
4173 static void
4174 expand_cleanups (list, dont_do, in_fixup, reachable)
4175 tree list;
4176 tree dont_do;
4177 int in_fixup;
4178 int reachable;
4179 {
4180 tree tail;
4181 for (tail = list; tail; tail = TREE_CHAIN (tail))
4182 if (dont_do == 0 || TREE_PURPOSE (tail) != dont_do)
4183 {
4184 if (TREE_CODE (TREE_VALUE (tail)) == TREE_LIST)
4185 expand_cleanups (TREE_VALUE (tail), dont_do, in_fixup, reachable);
4186 else
4187 {
4188 if (! in_fixup)
4189 {
4190 tree cleanup = TREE_VALUE (tail);
4191
4192 /* See expand_d{h,c}c_cleanup for why we avoid this. */
4193 if (TREE_CODE (cleanup) != POPDHC_EXPR
4194 && TREE_CODE (cleanup) != POPDCC_EXPR
4195 /* See expand_eh_region_start_tree for this case. */
4196 && ! TREE_ADDRESSABLE (tail))
4197 {
4198 cleanup = protect_with_terminate (cleanup);
4199 expand_eh_region_end (cleanup);
4200 }
4201 }
4202
4203 if (reachable)
4204 {
4205 /* Cleanups may be run multiple times. For example,
4206 when exiting a binding contour, we expand the
4207 cleanups associated with that contour. When a goto
4208 within that binding contour has a target outside that
4209 contour, it will expand all cleanups from its scope to
4210 the target. Though the cleanups are expanded multiple
4211 times, the control paths are non-overlapping so the
4212 cleanups will not be executed twice. */
4213
4214 /* We may need to protect fixups with rethrow regions. */
4215 int protect = (in_fixup && ! TREE_ADDRESSABLE (tail));
4216
4217 if (protect)
4218 expand_fixup_region_start ();
4219
4220 expand_expr (TREE_VALUE (tail), const0_rtx, VOIDmode, 0);
4221 if (protect)
4222 expand_fixup_region_end (TREE_VALUE (tail));
4223 free_temp_slots ();
4224 }
4225 }
4226 }
4227 }
4228
4229 /* Mark the context we are emitting RTL for as a conditional
4230 context, so that any cleanup actions we register with
4231 expand_decl_init will be properly conditionalized when those
4232 cleanup actions are later performed. Must be called before any
4233 expression (tree) is expanded that is within a conditional context. */
4234
4235 void
4236 start_cleanup_deferral ()
4237 {
4238 /* block_stack can be NULL if we are inside the parameter list. It is
4239 OK to do nothing, because cleanups aren't possible here. */
4240 if (block_stack)
4241 ++block_stack->data.block.conditional_code;
4242 }
4243
4244 /* Mark the end of a conditional region of code. Because cleanup
4245 deferrals may be nested, we may still be in a conditional region
4246 after we end the currently deferred cleanups; only after we end all
4247 deferred cleanups are we back in unconditional code. */
4248
4249 void
4250 end_cleanup_deferral ()
4251 {
4252 /* block_stack can be NULL if we are inside the parameter list. It is
4253 OK to do nothing, because cleanups aren't possible here. */
4254 if (block_stack)
4255 --block_stack->data.block.conditional_code;
4256 }
4257
4258 /* Move all cleanups from the current block_stack
4259 to the containing block_stack, where they are assumed to
4260 have been created. If anything can cause a temporary to
4261 be created, but not expanded for more than one level of
4262 block_stacks, then this code will have to change. */
4263
4264 void
4265 move_cleanups_up ()
4266 {
4267 struct nesting *block = block_stack;
4268 struct nesting *outer = block->next;
4269
4270 outer->data.block.cleanups
4271 = chainon (block->data.block.cleanups,
4272 outer->data.block.cleanups);
4273 block->data.block.cleanups = 0;
4274 }
4275
4276 tree
4277 last_cleanup_this_contour ()
4278 {
4279 if (block_stack == 0)
4280 return 0;
4281
4282 return block_stack->data.block.cleanups;
4283 }
4284
4285 /* Return 1 if there are any pending cleanups at this point.
4286 If THIS_CONTOUR is nonzero, check the current contour as well.
4287 Otherwise, look only at the contours that enclose this one. */
4288
4289 int
4290 any_pending_cleanups (this_contour)
4291 int this_contour;
4292 {
4293 struct nesting *block;
4294
4295 if (block_stack == 0)
4296 return 0;
4297
4298 if (this_contour && block_stack->data.block.cleanups != NULL)
4299 return 1;
4300 if (block_stack->data.block.cleanups == 0
4301 && block_stack->data.block.outer_cleanups == 0)
4302 return 0;
4303
4304 for (block = block_stack->next; block; block = block->next)
4305 if (block->data.block.cleanups != 0)
4306 return 1;
4307
4308 return 0;
4309 }
4310 \f
4311 /* Enter a case (Pascal) or switch (C) statement.
4312 Push a block onto case_stack and nesting_stack
4313 to accumulate the case-labels that are seen
4314 and to record the labels generated for the statement.
4315
4316 EXIT_FLAG is nonzero if `exit_something' should exit this case stmt.
4317 Otherwise, this construct is transparent for `exit_something'.
4318
4319 EXPR is the index-expression to be dispatched on.
4320 TYPE is its nominal type. We could simply convert EXPR to this type,
4321 but instead we take short cuts. */
4322
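/* Illustrative sketch (not from the original source): for a C switch
   such as `switch (x) { case 1: ...; default: ...; }' a front end
   would make roughly this sequence of calls:

       expand_start_case (1, x, TREE_TYPE (x), "switch statement");
       pushcase (build_int_2 (1, 0), convert, case1_label, &duplicate);
       ...
       pushcase (NULL_TREE, 0, default_label, &duplicate);
       expand_end_case (x);

   where CASE1_LABEL and DEFAULT_LABEL are hypothetical LABEL_DECLs and
   CONVERT is the front end's conversion routine.  */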
4323 void
4324 expand_start_case (exit_flag, expr, type, printname)
4325 int exit_flag;
4326 tree expr;
4327 tree type;
4328 const char *printname;
4329 {
4330 register struct nesting *thiscase = ALLOC_NESTING ();
4331
4332 /* Make an entry on case_stack for the case we are entering. */
4333
4334 thiscase->next = case_stack;
4335 thiscase->all = nesting_stack;
4336 thiscase->depth = ++nesting_depth;
4337 thiscase->exit_label = exit_flag ? gen_label_rtx () : 0;
4338 thiscase->data.case_stmt.case_list = 0;
4339 thiscase->data.case_stmt.index_expr = expr;
4340 thiscase->data.case_stmt.nominal_type = type;
4341 thiscase->data.case_stmt.default_label = 0;
4342 thiscase->data.case_stmt.num_ranges = 0;
4343 thiscase->data.case_stmt.printname = printname;
4344 thiscase->data.case_stmt.line_number_status = force_line_numbers ();
4345 case_stack = thiscase;
4346 nesting_stack = thiscase;
4347
4348 do_pending_stack_adjust ();
4349
4350 /* Make sure case_stmt.start points to something that won't
4351 need any transformation before expand_end_case. */
4352 if (GET_CODE (get_last_insn ()) != NOTE)
4353 emit_note (NULL_PTR, NOTE_INSN_DELETED);
4354
4355 thiscase->data.case_stmt.start = get_last_insn ();
4356
4357 start_cleanup_deferral ();
4358 }
4359
4360
4361 /* Start a "dummy case statement" within which case labels are invalid
4362 and are not connected to any larger real case statement.
4363 This can be used if you don't want to let a case statement jump
4364 into the middle of certain kinds of constructs. */
4365
4366 void
4367 expand_start_case_dummy ()
4368 {
4369 register struct nesting *thiscase = ALLOC_NESTING ();
4370
4371 /* Make an entry on case_stack for the dummy. */
4372
4373 thiscase->next = case_stack;
4374 thiscase->all = nesting_stack;
4375 thiscase->depth = ++nesting_depth;
4376 thiscase->exit_label = 0;
4377 thiscase->data.case_stmt.case_list = 0;
4378 thiscase->data.case_stmt.start = 0;
4379 thiscase->data.case_stmt.nominal_type = 0;
4380 thiscase->data.case_stmt.default_label = 0;
4381 thiscase->data.case_stmt.num_ranges = 0;
4382 case_stack = thiscase;
4383 nesting_stack = thiscase;
4384 start_cleanup_deferral ();
4385 }
4386
4387 /* End a dummy case statement. */
4388
4389 void
4390 expand_end_case_dummy ()
4391 {
4392 end_cleanup_deferral ();
4393 POPSTACK (case_stack);
4394 }
4395
4396 /* Return the data type of the index-expression
4397 of the innermost case statement, or null if none. */
4398
4399 tree
4400 case_index_expr_type ()
4401 {
4402 if (case_stack)
4403 return TREE_TYPE (case_stack->data.case_stmt.index_expr);
4404 return 0;
4405 }
4406 \f
4407 static void
4408 check_seenlabel ()
4409 {
4410 /* If this is the first label, warn if any insns have been emitted. */
4411 if (case_stack->data.case_stmt.line_number_status >= 0)
4412 {
4413 rtx insn;
4414
4415 restore_line_number_status
4416 (case_stack->data.case_stmt.line_number_status);
4417 case_stack->data.case_stmt.line_number_status = -1;
4418
4419 for (insn = case_stack->data.case_stmt.start;
4420 insn;
4421 insn = NEXT_INSN (insn))
4422 {
4423 if (GET_CODE (insn) == CODE_LABEL)
4424 break;
4425 if (GET_CODE (insn) != NOTE
4426 && (GET_CODE (insn) != INSN || GET_CODE (PATTERN (insn)) != USE))
4427 {
4428 do
4429 insn = PREV_INSN (insn);
4430 while (insn && (GET_CODE (insn) != NOTE || NOTE_LINE_NUMBER (insn) < 0));
4431
4432 /* If insn is zero, then there must have been a syntax error. */
4433 if (insn)
4434 warning_with_file_and_line (NOTE_SOURCE_FILE (insn),
4435 NOTE_LINE_NUMBER (insn),
4436 "unreachable code at beginning of %s",
4437 case_stack->data.case_stmt.printname);
4438 break;
4439 }
4440 }
4441 }
4442 }
4443
4444 /* Accumulate one case or default label inside a case or switch statement.
4445 VALUE is the value of the case (a null pointer, for a default label).
4446 The function CONVERTER, when applied to arguments T and V,
4447 converts the value V to the type T.
4448
4449 If not currently inside a case or switch statement, return 1 and do
4450 nothing. The caller will print a language-specific error message.
4451 If VALUE is a duplicate or overlaps, return 2 and do nothing
4452 except store the (first) duplicate node in *DUPLICATE.
4453 If VALUE is out of range, return 3 and do nothing.
4454 If we are jumping into the scope of a cleanup or var-sized array, return 5.
4455 Return 0 on success.
4456
4457 Extended to handle range statements. */
4458
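/* Illustrative sketch (not from the original source): a caller in a
   C-like front end would typically map each nonzero return code to a
   language-specific diagnostic, e.g.

       if (pushcase (value, convert, label, &duplicate) == 2)
         error_with_decl (duplicate, "duplicate case value");

   where the message text is hypothetical.  */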
4459 int
4460 pushcase (value, converter, label, duplicate)
4461 register tree value;
4462 tree (*converter) PROTO((tree, tree));
4463 register tree label;
4464 tree *duplicate;
4465 {
4466 tree index_type;
4467 tree nominal_type;
4468
4469 /* Fail if not inside a real case statement. */
4470 if (! (case_stack && case_stack->data.case_stmt.start))
4471 return 1;
4472
4473 if (stack_block_stack
4474 && stack_block_stack->depth > case_stack->depth)
4475 return 5;
4476
4477 index_type = TREE_TYPE (case_stack->data.case_stmt.index_expr);
4478 nominal_type = case_stack->data.case_stmt.nominal_type;
4479
4480 /* If the index is erroneous, avoid more problems: pretend to succeed. */
4481 if (index_type == error_mark_node)
4482 return 0;
4483
4484 /* Convert VALUE to the type in which the comparisons are nominally done. */
4485 if (value != 0)
4486 value = (*converter) (nominal_type, value);
4487
4488 check_seenlabel ();
4489
4490 /* Fail if this value is out of range for the actual type of the index
4491 (which may be narrower than NOMINAL_TYPE). */
4492 if (value != 0 && ! int_fits_type_p (value, index_type))
4493 return 3;
4494
4495 /* Fail if this is a duplicate or overlaps another entry. */
4496 if (value == 0)
4497 {
4498 if (case_stack->data.case_stmt.default_label != 0)
4499 {
4500 *duplicate = case_stack->data.case_stmt.default_label;
4501 return 2;
4502 }
4503 case_stack->data.case_stmt.default_label = label;
4504 }
4505 else
4506 return add_case_node (value, value, label, duplicate);
4507
4508 expand_label (label);
4509 return 0;
4510 }
4511
4512 /* Like pushcase but this case applies to all values between VALUE1 and
4513 VALUE2 (inclusive). If VALUE1 is NULL, the range starts at the lowest
4514 value of the index type and ends at VALUE2. If VALUE2 is NULL, the range
4515 starts at VALUE1 and ends at the highest value of the index type.
4516 If both are NULL, this case applies to all values.
4517
4518 The return value is the same as that of pushcase but there is one
4519 additional error code: 4 means the specified range was empty. */
4520
4521 int
4522 pushcase_range (value1, value2, converter, label, duplicate)
4523 register tree value1, value2;
4524 tree (*converter) PROTO((tree, tree));
4525 register tree label;
4526 tree *duplicate;
4527 {
4528 tree index_type;
4529 tree nominal_type;
4530
4531 /* Fail if not inside a real case statement. */
4532 if (! (case_stack && case_stack->data.case_stmt.start))
4533 return 1;
4534
4535 if (stack_block_stack
4536 && stack_block_stack->depth > case_stack->depth)
4537 return 5;
4538
4539 index_type = TREE_TYPE (case_stack->data.case_stmt.index_expr);
4540 nominal_type = case_stack->data.case_stmt.nominal_type;
4541
4542 /* If the index is erroneous, avoid more problems: pretend to succeed. */
4543 if (index_type == error_mark_node)
4544 return 0;
4545
4546 check_seenlabel ();
4547
4548 /* Convert VALUEs to type in which the comparisons are nominally done
4549 and replace any unspecified value with the corresponding bound. */
4550 if (value1 == 0)
4551 value1 = TYPE_MIN_VALUE (index_type);
4552 if (value2 == 0)
4553 value2 = TYPE_MAX_VALUE (index_type);
4554
4555 /* Fail if the range is empty. Do this before any conversion since
4556 we want to allow out-of-range empty ranges. */
4557 if (value2 && tree_int_cst_lt (value2, value1))
4558 return 4;
4559
4560 value1 = (*converter) (nominal_type, value1);
4561
4562 /* If the max was unbounded, use the max of the nominal_type we are
4563 converting to. Do this after the < check above to suppress false
4564 positives. */
4565 if (!value2)
4566 value2 = TYPE_MAX_VALUE (nominal_type);
4567 value2 = (*converter) (nominal_type, value2);
4568
4569 /* Fail if these values are out of range. */
4570 if (TREE_CONSTANT_OVERFLOW (value1)
4571 || ! int_fits_type_p (value1, index_type))
4572 return 3;
4573
4574 if (TREE_CONSTANT_OVERFLOW (value2)
4575 || ! int_fits_type_p (value2, index_type))
4576 return 3;
4577
4578 return add_case_node (value1, value2, label, duplicate);
4579 }
4580
4581 /* Do the actual insertion of a case label for pushcase and pushcase_range
4582 into case_stack->data.case_stmt.case_list. Use an AVL tree to avoid
4583 slowdown for large switch statements. */
4584
4585 static int
4586 add_case_node (low, high, label, duplicate)
4587 tree low, high;
4588 tree label;
4589 tree *duplicate;
4590 {
4591 struct case_node *p, **q, *r;
4592
4593 q = &case_stack->data.case_stmt.case_list;
4594 p = *q;
4595
4596 while ((r = *q))
4597 {
4598 p = r;
4599
4600 /* Keep going past elements distinctly greater than HIGH. */
4601 if (tree_int_cst_lt (high, p->low))
4602 q = &p->left;
4603
4604 /* or distinctly less than LOW. */
4605 else if (tree_int_cst_lt (p->high, low))
4606 q = &p->right;
4607
4608 else
4609 {
4610 /* We have an overlap; this is an error. */
4611 *duplicate = p->code_label;
4612 return 2;
4613 }
4614 }
4615
4616 /* Add this label to the chain, and succeed.
4617 Copy LOW, HIGH so they are on temporary rather than momentary
4618 obstack and will thus survive till the end of the case statement. */
4619
4620 r = (struct case_node *) oballoc (sizeof (struct case_node));
4621 r->low = copy_node (low);
4622
4623 /* If the bounds are equal, turn this into the one-value case. */
4624
4625 if (tree_int_cst_equal (low, high))
4626 r->high = r->low;
4627 else
4628 {
4629 r->high = copy_node (high);
4630 case_stack->data.case_stmt.num_ranges++;
4631 }
4632
4633 r->code_label = label;
4634 expand_label (label);
4635
4636 *q = r;
4637 r->parent = p;
4638 r->left = 0;
4639 r->right = 0;
4640 r->balance = 0;
4641
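/* Rebalance the AVL tree: walk from the new node's parent P up toward
   the root, updating balance factors.  When a subtree becomes too
   heavy, one single or double rotation (the R/LR/L/RL cases below)
   restores balance and the walk stops; it also stops as soon as a
   node's balance factor returns to zero.  */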
4642 while (p)
4643 {
4644 struct case_node *s;
4645
4646 if (r == p->left)
4647 {
4648 int b;
4649
4650 if (! (b = p->balance))
4651 /* Growth propagation from left side. */
4652 p->balance = -1;
4653 else if (b < 0)
4654 {
4655 if (r->balance < 0)
4656 {
4657 /* R-Rotation */
4658 if ((p->left = s = r->right))
4659 s->parent = p;
4660
4661 r->right = p;
4662 p->balance = 0;
4663 r->balance = 0;
4664 s = p->parent;
4665 p->parent = r;
4666
4667 if ((r->parent = s))
4668 {
4669 if (s->left == p)
4670 s->left = r;
4671 else
4672 s->right = r;
4673 }
4674 else
4675 case_stack->data.case_stmt.case_list = r;
4676 }
4677 else
4678 /* r->balance == +1 */
4679 {
4680 /* LR-Rotation */
4681
4682 int b2;
4683 struct case_node *t = r->right;
4684
4685 if ((p->left = s = t->right))
4686 s->parent = p;
4687
4688 t->right = p;
4689 if ((r->right = s = t->left))
4690 s->parent = r;
4691
4692 t->left = r;
4693 b = t->balance;
4694 b2 = b < 0;
4695 p->balance = b2;
4696 b2 = -b2 - b;
4697 r->balance = b2;
4698 t->balance = 0;
4699 s = p->parent;
4700 p->parent = t;
4701 r->parent = t;
4702
4703 if ((t->parent = s))
4704 {
4705 if (s->left == p)
4706 s->left = t;
4707 else
4708 s->right = t;
4709 }
4710 else
4711 case_stack->data.case_stmt.case_list = t;
4712 }
4713 break;
4714 }
4715
4716 else
4717 {
4718 /* p->balance == +1; growth of left side balances the node. */
4719 p->balance = 0;
4720 break;
4721 }
4722 }
4723 else
4724 /* r == p->right */
4725 {
4726 int b;
4727
4728 if (! (b = p->balance))
4729 /* Growth propagation from right side. */
4730 p->balance++;
4731 else if (b > 0)
4732 {
4733 if (r->balance > 0)
4734 {
4735 /* L-Rotation */
4736
4737 if ((p->right = s = r->left))
4738 s->parent = p;
4739
4740 r->left = p;
4741 p->balance = 0;
4742 r->balance = 0;
4743 s = p->parent;
4744 p->parent = r;
4745 if ((r->parent = s))
4746 {
4747 if (s->left == p)
4748 s->left = r;
4749 else
4750 s->right = r;
4751 }
4752
4753 else
4754 case_stack->data.case_stmt.case_list = r;
4755 }
4756
4757 else
4758 /* r->balance == -1 */
4759 {
4760 /* RL-Rotation */
4761 int b2;
4762 struct case_node *t = r->left;
4763
4764 if ((p->right = s = t->left))
4765 s->parent = p;
4766
4767 t->left = p;
4768
4769 if ((r->left = s = t->right))
4770 s->parent = r;
4771
4772 t->right = r;
4773 b = t->balance;
4774 b2 = b < 0;
4775 r->balance = b2;
4776 b2 = -b2 - b;
4777 p->balance = b2;
4778 t->balance = 0;
4779 s = p->parent;
4780 p->parent = t;
4781 r->parent = t;
4782
4783 if ((t->parent = s))
4784 {
4785 if (s->left == p)
4786 s->left = t;
4787 else
4788 s->right = t;
4789 }
4790
4791 else
4792 case_stack->data.case_stmt.case_list = t;
4793 }
4794 break;
4795 }
4796 else
4797 {
4798 /* p->balance == -1; growth of right side balances the node. */
4799 p->balance = 0;
4800 break;
4801 }
4802 }
4803
4804 r = p;
4805 p = p->parent;
4806 }
4807
4808 return 0;
4809 }
4810
4811 \f
4812 /* Returns the number of possible values of TYPE.
4813 Returns -1 if the number is unknown or variable.
4814 Returns -2 if the number does not fit in a HOST_WIDE_INT.
4815 Sets *SPARENESS to 2 if TYPE is an ENUMERAL_TYPE whose values
4816 do not increase monotonically (there may be duplicates);
4817 to 1 if the values increase monotonically, but not always by 1;
4818 otherwise sets it to 0. */
4819
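/* Illustrative examples (not from the original source): for
   `enum e { A, B, C }' this returns 3 with *SPARENESS 0; for
   `enum e { A = 0, B = 4 }' it returns 2 with *SPARENESS 1, because
   the values increase monotonically but not always by 1.  */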
4820 HOST_WIDE_INT
4821 all_cases_count (type, spareness)
4822 tree type;
4823 int *spareness;
4824 {
4825 HOST_WIDE_INT count;
4826 *spareness = 0;
4827
4828 switch (TREE_CODE (type))
4829 {
4830 tree t;
4831 case BOOLEAN_TYPE:
4832 count = 2;
4833 break;
4834 case CHAR_TYPE:
4835 count = 1 << BITS_PER_UNIT;
4836 break;
4837 default:
4838 case INTEGER_TYPE:
4839 if (TREE_CODE (TYPE_MIN_VALUE (type)) != INTEGER_CST
4840 || TYPE_MAX_VALUE (type) == NULL
4841 || TREE_CODE (TYPE_MAX_VALUE (type)) != INTEGER_CST)
4842 return -1;
4843 else
4844 {
4845 /* count
4846 = TREE_INT_CST_LOW (TYPE_MAX_VALUE (type))
4847 - TREE_INT_CST_LOW (TYPE_MIN_VALUE (type)) + 1
4848 but with overflow checking. */
4849 tree mint = TYPE_MIN_VALUE (type);
4850 tree maxt = TYPE_MAX_VALUE (type);
4851 HOST_WIDE_INT lo, hi;
4852 neg_double (TREE_INT_CST_LOW (mint), TREE_INT_CST_HIGH (mint),
4853 &lo, &hi);
4854 add_double (TREE_INT_CST_LOW (maxt), TREE_INT_CST_HIGH (maxt),
4855 lo, hi, &lo, &hi);
4856 add_double (lo, hi, 1, 0, &lo, &hi);
4857 if (hi != 0 || lo < 0)
4858 return -2;
4859 count = lo;
4860 }
4861 break;
4862 case ENUMERAL_TYPE:
4863 count = 0;
4864 for (t = TYPE_VALUES (type); t != NULL_TREE; t = TREE_CHAIN (t))
4865 {
4866 if (TREE_CODE (TYPE_MIN_VALUE (type)) != INTEGER_CST
4867 || TREE_CODE (TREE_VALUE (t)) != INTEGER_CST
4868 || TREE_INT_CST_LOW (TYPE_MIN_VALUE (type)) + count
4869 != TREE_INT_CST_LOW (TREE_VALUE (t)))
4870 *spareness = 1;
4871 count++;
4872 }
4873 if (*spareness == 1)
4874 {
4875 tree prev = TREE_VALUE (TYPE_VALUES (type));
4876 for (t = TYPE_VALUES (type); t = TREE_CHAIN (t), t != NULL_TREE; )
4877 {
4878 if (! tree_int_cst_lt (prev, TREE_VALUE (t)))
4879 {
4880 *spareness = 2;
4881 break;
4882 }
4883 prev = TREE_VALUE (t);
4884 }
4885
4886 }
4887 }
4888 return count;
4889 }
4890
4891
4892 #define BITARRAY_TEST(ARRAY, INDEX) \
4893 ((ARRAY)[(unsigned) (INDEX) / HOST_BITS_PER_CHAR]\
4894 & (1 << ((unsigned) (INDEX) % HOST_BITS_PER_CHAR)))
4895 #define BITARRAY_SET(ARRAY, INDEX) \
4896 ((ARRAY)[(unsigned) (INDEX) / HOST_BITS_PER_CHAR]\
4897 |= 1 << ((unsigned) (INDEX) % HOST_BITS_PER_CHAR))
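/* These macros treat ARRAY as a bitmap packed into chars, indexed by a
   case value's offset from the type's minimum; e.g. (illustratively)
   BITARRAY_SET (cases_seen, 3) records that offset 3 was matched and
   BITARRAY_TEST (cases_seen, 3) later reads that bit back.  */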
4898
4899 /* Set the elements of the bitstring CASES_SEEN (which has length COUNT)
4900 with the case values we have seen, assuming the case expression
4901 has the given TYPE.
4902 SPARSENESS is as determined by all_cases_count.
4903
4904 The time needed is proportional to COUNT, unless
4905 SPARSENESS is 2, in which case quadratic time is needed. */
4906
4907 void
4908 mark_seen_cases (type, cases_seen, count, sparseness)
4909 tree type;
4910 unsigned char *cases_seen;
4911 long count;
4912 int sparseness;
4913 {
4914 tree next_node_to_try = NULL_TREE;
4915 long next_node_offset = 0;
4916
4917 register struct case_node *n, *root = case_stack->data.case_stmt.case_list;
4918 tree val = make_node (INTEGER_CST);
4919 TREE_TYPE (val) = type;
4920 if (! root)
4921 ; /* Do nothing */
4922 else if (sparseness == 2)
4923 {
4924 tree t;
4925 HOST_WIDE_INT xlo;
4926
4927 /* This less efficient loop is only needed to handle
4928 duplicate case values (multiple enum constants
4929 with the same value). */
4930 TREE_TYPE (val) = TREE_TYPE (root->low);
4931 for (t = TYPE_VALUES (type), xlo = 0; t != NULL_TREE;
4932 t = TREE_CHAIN (t), xlo++)
4933 {
4934 TREE_INT_CST_LOW (val) = TREE_INT_CST_LOW (TREE_VALUE (t));
4935 TREE_INT_CST_HIGH (val) = TREE_INT_CST_HIGH (TREE_VALUE (t));
4936 n = root;
4937 do
4938 {
4939 /* Keep going past elements distinctly greater than VAL. */
4940 if (tree_int_cst_lt (val, n->low))
4941 n = n->left;
4942
4943 /* or distinctly less than VAL. */
4944 else if (tree_int_cst_lt (n->high, val))
4945 n = n->right;
4946
4947 else
4948 {
4949 /* We have found a matching range. */
4950 BITARRAY_SET (cases_seen, xlo);
4951 break;
4952 }
4953 }
4954 while (n);
4955 }
4956 }
4957 else
4958 {
4959 if (root->left)
4960 case_stack->data.case_stmt.case_list = root = case_tree2list (root, 0);
4961 for (n = root; n; n = n->right)
4962 {
4963 TREE_INT_CST_LOW (val) = TREE_INT_CST_LOW (n->low);
4964 TREE_INT_CST_HIGH (val) = TREE_INT_CST_HIGH (n->low);
4965 while ( ! tree_int_cst_lt (n->high, val))
4966 {
4967 /* Calculate (into xlo) the "offset" of the integer (val).
4968 The element with lowest value has offset 0, the next smallest
4969 element has offset 1, etc. */
4970
4971 HOST_WIDE_INT xlo, xhi;
4972 tree t;
4973 if (sparseness && TYPE_VALUES (type) != NULL_TREE)
4974 {
4975 /* The TYPE_VALUES will be in increasing order, so
4976 start searching where we last ended. */
4977 t = next_node_to_try;
4978 xlo = next_node_offset;
4979 xhi = 0;
4980 for (;;)
4981 {
4982 if (t == NULL_TREE)
4983 {
4984 t = TYPE_VALUES (type);
4985 xlo = 0;
4986 }
4987 if (tree_int_cst_equal (val, TREE_VALUE (t)))
4988 {
4989 next_node_to_try = TREE_CHAIN (t);
4990 next_node_offset = xlo + 1;
4991 break;
4992 }
4993 xlo++;
4994 t = TREE_CHAIN (t);
4995 if (t == next_node_to_try)
4996 {
4997 xlo = -1;
4998 break;
4999 }
5000 }
5001 }
5002 else
5003 {
5004 t = TYPE_MIN_VALUE (type);
5005 if (t)
5006 neg_double (TREE_INT_CST_LOW (t), TREE_INT_CST_HIGH (t),
5007 &xlo, &xhi);
5008 else
5009 xlo = xhi = 0;
5010 add_double (xlo, xhi,
5011 TREE_INT_CST_LOW (val), TREE_INT_CST_HIGH (val),
5012 &xlo, &xhi);
5013 }
5014
5015 if (xhi == 0 && xlo >= 0 && xlo < count)
5016 BITARRAY_SET (cases_seen, xlo);
5017 add_double (TREE_INT_CST_LOW (val), TREE_INT_CST_HIGH (val),
5018 1, 0,
5019 &TREE_INT_CST_LOW (val), &TREE_INT_CST_HIGH (val));
5020 }
5021 }
5022 }
5023 }
5024
5025 /* Called when the index of a switch statement is an enumerated type
5026 and there is no default label.
5027
5028 Checks that all enumeration literals are covered by the case
5029 expressions of a switch. Also, warns if there are any extra
5030 switch cases that are *not* elements of the enumerated type.
5031
5032 If all enumeration literals were covered by the case expressions,
5033 turn one of the expressions into the default expression since it should
5034 not be possible to fall through such a switch. */
5035
5036 void
5037 check_for_full_enumeration_handling (type)
5038 tree type;
5039 {
5040 register struct case_node *n;
5041 register tree chain;
5042 #if 0 /* variable used by 'if 0'ed code below. */
5043 register struct case_node **l;
5044 int all_values = 1;
5045 #endif
5046
5047 /* True iff the selector type is a numbered set mode. */
5048 int sparseness = 0;
5049
5050 /* The number of possible selector values. */
5051 HOST_WIDE_INT size;
5052
5053 /* For each possible selector value, a one iff it has been matched
5054 by a case value alternative. */
5055 unsigned char *cases_seen;
5056
5057 /* The allocated size of cases_seen, in chars. */
5058 long bytes_needed;
5059
5060 if (! warn_switch)
5061 return;
5062
5063 size = all_cases_count (type, &sparseness);
5064 bytes_needed = (size + HOST_BITS_PER_CHAR) / HOST_BITS_PER_CHAR;
5065
5066 if (size > 0 && size < 600000
5067 /* We deliberately use calloc here, not xcalloc, so that we can suppress
5068 this optimization if we don't have enough memory rather than
5069 aborting, as xmalloc would do. */
5070 && (cases_seen = (unsigned char *) calloc (bytes_needed, 1)) != NULL)
5071 {
5072 long i;
5073 tree v = TYPE_VALUES (type);
5074
5075 /* The time complexity of this code is normally O(N), where
5076 N is the number of members in the enumerated type.
5077 However, if type is an ENUMERAL_TYPE whose values do not
5078 increase monotonically, O(N*log(N)) time may be needed. */
5079
5080 mark_seen_cases (type, cases_seen, size, sparseness);
5081
5082 for (i = 0; v != NULL_TREE && i < size; i++, v = TREE_CHAIN (v))
5083 {
5084 if (BITARRAY_TEST (cases_seen, i) == 0)
5085 warning ("enumeration value `%s' not handled in switch",
5086 IDENTIFIER_POINTER (TREE_PURPOSE (v)));
5087 }
5088
5089 free (cases_seen);
5090 }
5091
5092 /* Now we go the other way around; we warn if there are case
5093 expressions that don't correspond to enumerators. This can
5094 occur since C and C++ don't enforce type-checking of
5095 assignments to enumeration variables. */
5096
5097 if (case_stack->data.case_stmt.case_list
5098 && case_stack->data.case_stmt.case_list->left)
5099 case_stack->data.case_stmt.case_list
5100 = case_tree2list (case_stack->data.case_stmt.case_list, 0);
5101 if (warn_switch)
5102 for (n = case_stack->data.case_stmt.case_list; n; n = n->right)
5103 {
5104 for (chain = TYPE_VALUES (type);
5105 chain && !tree_int_cst_equal (n->low, TREE_VALUE (chain));
5106 chain = TREE_CHAIN (chain))
5107 ;
5108
5109 if (!chain)
5110 {
5111 if (TYPE_NAME (type) == 0)
5112 warning ("case value `%ld' not in enumerated type",
5113 (long) TREE_INT_CST_LOW (n->low));
5114 else
5115 warning ("case value `%ld' not in enumerated type `%s'",
5116 (long) TREE_INT_CST_LOW (n->low),
5117 IDENTIFIER_POINTER ((TREE_CODE (TYPE_NAME (type))
5118 == IDENTIFIER_NODE)
5119 ? TYPE_NAME (type)
5120 : DECL_NAME (TYPE_NAME (type))));
5121 }
5122 if (!tree_int_cst_equal (n->low, n->high))
5123 {
5124 for (chain = TYPE_VALUES (type);
5125 chain && !tree_int_cst_equal (n->high, TREE_VALUE (chain));
5126 chain = TREE_CHAIN (chain))
5127 ;
5128
5129 if (!chain)
5130 {
5131 if (TYPE_NAME (type) == 0)
5132 warning ("case value `%ld' not in enumerated type",
5133 (long) TREE_INT_CST_LOW (n->high));
5134 else
5135 warning ("case value `%ld' not in enumerated type `%s'",
5136 (long) TREE_INT_CST_LOW (n->high),
5137 IDENTIFIER_POINTER ((TREE_CODE (TYPE_NAME (type))
5138 == IDENTIFIER_NODE)
5139 ? TYPE_NAME (type)
5140 : DECL_NAME (TYPE_NAME (type))));
5141 }
5142 }
5143 }
5144
5145 #if 0
5146 /* ??? This optimization is disabled because it causes valid programs to
5147 fail. ANSI C does not guarantee that an expression with enum type
5148 will have a value that is the same as one of the enumeration literals. */
5149
5150 /* If all values were found as case labels, make one of them the default
5151 label. Thus, this switch will never fall through. We arbitrarily pick
5152 the last one to make the default since this is likely the most
5153 efficient choice. */
5154
5155 if (all_values)
5156 {
5157 for (l = &case_stack->data.case_stmt.case_list;
5158 (*l)->right != 0;
5159 l = &(*l)->right)
5160 ;
5161
5162 case_stack->data.case_stmt.default_label = (*l)->code_label;
5163 *l = 0;
5164 }
5165 #endif /* 0 */
5166 }
5167
5168 \f
5169 /* Terminate a case (Pascal) or switch (C) statement
5170 in which ORIG_INDEX is the expression to be tested.
5171 Generate the code to test it and jump to the right place. */
5172
5173 void
5174 expand_end_case (orig_index)
5175 tree orig_index;
5176 {
5177 tree minval = NULL_TREE, maxval = NULL_TREE, range, orig_minval;
5178 rtx default_label = 0;
5179 register struct case_node *n;
5180 unsigned int count;
5181 rtx index;
5182 rtx table_label;
5183 int ncases;
5184 rtx *labelvec;
5185 register int i;
5186 rtx before_case;
5187 register struct nesting *thiscase = case_stack;
5188 tree index_expr, index_type;
5189 int unsignedp;
5190
5191 table_label = gen_label_rtx ();
5192 index_expr = thiscase->data.case_stmt.index_expr;
5193 index_type = TREE_TYPE (index_expr);
5194 unsignedp = TREE_UNSIGNED (index_type);
5195
5196 do_pending_stack_adjust ();
5197
5198 /* This might get a spurious warning in the presence of a syntax error;
5199 it could be fixed by moving the call to check_seenlabel after the
5200 check for error_mark_node, and copying the code of check_seenlabel that
5201 deals with case_stack->data.case_stmt.line_number_status /
5202 restore_line_number_status in front of the call to end_cleanup_deferral.
5203 However, this might miss some useful warnings in the presence of
5204 non-syntax errors. */
5205 check_seenlabel ();
5206
5207 /* An ERROR_MARK occurs for various reasons including invalid data type. */
5208 if (index_type != error_mark_node)
5209 {
5210 /* If the switch expression was an enumerated type, check that all
5211 enumeration literals are covered by the cases.
5212 No sense trying this if there's a default case, however. */
5213
5214 if (!thiscase->data.case_stmt.default_label
5215 && TREE_CODE (TREE_TYPE (orig_index)) == ENUMERAL_TYPE
5216 && TREE_CODE (index_expr) != INTEGER_CST)
5217 check_for_full_enumeration_handling (TREE_TYPE (orig_index));
5218
5219 /* If we don't have a default-label, create one here,
5220 after the body of the switch. */
5221 if (thiscase->data.case_stmt.default_label == 0)
5222 {
5223 thiscase->data.case_stmt.default_label
5224 = build_decl (LABEL_DECL, NULL_TREE, NULL_TREE);
5225 expand_label (thiscase->data.case_stmt.default_label);
5226 }
5227 default_label = label_rtx (thiscase->data.case_stmt.default_label);
5228
5229 before_case = get_last_insn ();
5230
5231 if (thiscase->data.case_stmt.case_list
5232 && thiscase->data.case_stmt.case_list->left)
5233 thiscase->data.case_stmt.case_list
5234 = case_tree2list (thiscase->data.case_stmt.case_list, 0);
5235
5236 /* Simplify the case-list before we count it. */
5237 group_case_nodes (thiscase->data.case_stmt.case_list);
5238
5239 /* Get upper and lower bounds of case values.
5240 Also convert all the case values to the index expr's data type. */
5241
5242 count = 0;
5243 for (n = thiscase->data.case_stmt.case_list; n; n = n->right)
5244 {
5245 /* Check low and high label values are integers. */
5246 if (TREE_CODE (n->low) != INTEGER_CST)
5247 abort ();
5248 if (TREE_CODE (n->high) != INTEGER_CST)
5249 abort ();
5250
5251 n->low = convert (index_type, n->low);
5252 n->high = convert (index_type, n->high);
5253
5254 /* Count the elements and track the largest and smallest
5255 of them (treating them as signed even if they are not). */
5256 if (count++ == 0)
5257 {
5258 minval = n->low;
5259 maxval = n->high;
5260 }
5261 else
5262 {
5263 if (INT_CST_LT (n->low, minval))
5264 minval = n->low;
5265 if (INT_CST_LT (maxval, n->high))
5266 maxval = n->high;
5267 }
5268 /* A range counts double, since it requires two compares. */
5269 if (! tree_int_cst_equal (n->low, n->high))
5270 count++;
5271 }
5272
5273 orig_minval = minval;
5274
5275 /* Compute span of values. */
5276 if (count != 0)
5277 range = fold (build (MINUS_EXPR, index_type, maxval, minval));
5278
5279 end_cleanup_deferral ();
5280
5281 if (count == 0)
5282 {
5283 expand_expr (index_expr, const0_rtx, VOIDmode, 0);
5284 emit_queue ();
5285 emit_jump (default_label);
5286 }
5287
5288 /* If the range of values is much bigger than the number of values,
5289 make a sequence of conditional branches instead of a dispatch table.
5290 If the switch-index is a constant, do it this way
5291 because we can optimize it. */
5292
5293 #ifndef CASE_VALUES_THRESHOLD
5294 #ifdef HAVE_casesi
5295 #define CASE_VALUES_THRESHOLD (HAVE_casesi ? 4 : 5)
5296 #else
5297 /* If the machine does not have a case insn that compares the
5298 bounds, this means extra overhead for dispatch tables,
5299 which raises the threshold for using them. */
5300 #define CASE_VALUES_THRESHOLD 5
5301 #endif /* HAVE_casesi */
5302 #endif /* CASE_VALUES_THRESHOLD */
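/* For illustration: with HAVE_casesi defined and nonzero, the default
   CASE_VALUES_THRESHOLD is 4, so a switch whose 6 labels span the values
   0..199 has range > 10 * count (199 > 60) and, for a non-constant index,
   is expanded as a comparison/decision tree by the branch below, while the
   same 6 labels packed into 0..40 fall through to the casesi/tablejump
   dispatch code further down.  */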
5303
5304 else if (TREE_INT_CST_HIGH (range) != 0
5305 || count < (unsigned int) CASE_VALUES_THRESHOLD
5306 || ((unsigned HOST_WIDE_INT) (TREE_INT_CST_LOW (range))
5307 > 10 * count)
5308 #ifndef ASM_OUTPUT_ADDR_DIFF_ELT
5309 || flag_pic
5310 #endif
5311 || TREE_CODE (index_expr) == INTEGER_CST
5312 /* These will reduce to a constant. */
5313 || (TREE_CODE (index_expr) == CALL_EXPR
5314 && TREE_CODE (TREE_OPERAND (index_expr, 0)) == ADDR_EXPR
5315 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (index_expr, 0), 0)) == FUNCTION_DECL
5316 && DECL_FUNCTION_CODE (TREE_OPERAND (TREE_OPERAND (index_expr, 0), 0)) == BUILT_IN_CLASSIFY_TYPE)
5317 || (TREE_CODE (index_expr) == COMPOUND_EXPR
5318 && TREE_CODE (TREE_OPERAND (index_expr, 1)) == INTEGER_CST))
5319 {
5320 index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0);
5321
5322 /* If the index is a short or char for which we do not have
5323 an insn to handle comparisons directly, convert it to
5324 a full integer now, rather than letting each comparison
5325 generate the conversion. */
5326
5327 if (GET_MODE_CLASS (GET_MODE (index)) == MODE_INT
5328 && (cmp_optab->handlers[(int) GET_MODE (index)].insn_code
5329 == CODE_FOR_nothing))
5330 {
5331 enum machine_mode wider_mode;
5332 for (wider_mode = GET_MODE (index); wider_mode != VOIDmode;
5333 wider_mode = GET_MODE_WIDER_MODE (wider_mode))
5334 if (cmp_optab->handlers[(int) wider_mode].insn_code
5335 != CODE_FOR_nothing)
5336 {
5337 index = convert_to_mode (wider_mode, index, unsignedp);
5338 break;
5339 }
5340 }
5341
5342 emit_queue ();
5343 do_pending_stack_adjust ();
5344
5345 index = protect_from_queue (index, 0);
5346 if (GET_CODE (index) == MEM)
5347 index = copy_to_reg (index);
5348 if (GET_CODE (index) == CONST_INT
5349 || TREE_CODE (index_expr) == INTEGER_CST)
5350 {
5351 /* Make a tree node with the proper constant value
5352 if we don't already have one. */
5353 if (TREE_CODE (index_expr) != INTEGER_CST)
5354 {
5355 index_expr
5356 = build_int_2 (INTVAL (index),
5357 unsignedp || INTVAL (index) >= 0 ? 0 : -1);
5358 index_expr = convert (index_type, index_expr);
5359 }
5360
5361 /* For constant index expressions we need only
5362 issue an unconditional branch to the appropriate
5363 target code. The job of removing any unreachable
5364 code is left to the optimization phase if the
5365 "-O" option is specified. */
5366 for (n = thiscase->data.case_stmt.case_list; n; n = n->right)
5367 if (! tree_int_cst_lt (index_expr, n->low)
5368 && ! tree_int_cst_lt (n->high, index_expr))
5369 break;
5370
5371 if (n)
5372 emit_jump (label_rtx (n->code_label));
5373 else
5374 emit_jump (default_label);
5375 }
5376 else
5377 {
5378 /* If the index expression is not constant we generate
5379 a binary decision tree to select the appropriate
5380 target code. This is done as follows:
5381
5382 The list of cases is rearranged into a binary tree,
5383 nearly optimal assuming equal probability for each case.
5384
5385 The tree is transformed into RTL, eliminating
5386 redundant test conditions at the same time.
5387
5388 If program flow could reach the end of the
5389 decision tree an unconditional jump to the
5390 default code is emitted. */
5391
5392 use_cost_table
5393 = (TREE_CODE (TREE_TYPE (orig_index)) != ENUMERAL_TYPE
5394 && estimate_case_costs (thiscase->data.case_stmt.case_list));
5395 balance_case_nodes (&thiscase->data.case_stmt.case_list,
5396 NULL_PTR);
5397 emit_case_nodes (index, thiscase->data.case_stmt.case_list,
5398 default_label, index_type);
5399 emit_jump_if_reachable (default_label);
5400 }
5401 }
5402 else
5403 {
5404 int win = 0;
5405 #ifdef HAVE_casesi
5406 if (HAVE_casesi)
5407 {
5408 enum machine_mode index_mode = SImode;
5409 int index_bits = GET_MODE_BITSIZE (index_mode);
5410 rtx op1, op2;
5411 enum machine_mode op_mode;
5412
5413 /* Convert the index to SImode. */
5414 if (GET_MODE_BITSIZE (TYPE_MODE (index_type))
5415 > GET_MODE_BITSIZE (index_mode))
5416 {
5417 enum machine_mode omode = TYPE_MODE (index_type);
5418 rtx rangertx = expand_expr (range, NULL_RTX, VOIDmode, 0);
5419
5420 /* We must handle the endpoints in the original mode. */
5421 index_expr = build (MINUS_EXPR, index_type,
5422 index_expr, minval);
5423 minval = integer_zero_node;
5424 index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0);
5425 emit_cmp_and_jump_insns (rangertx, index, LTU, NULL_RTX,
5426 omode, 1, 0, default_label);
5427 /* Now we can safely truncate. */
5428 index = convert_to_mode (index_mode, index, 0);
5429 }
5430 else
5431 {
5432 if (TYPE_MODE (index_type) != index_mode)
5433 {
5434 index_expr = convert (type_for_size (index_bits, 0),
5435 index_expr);
5436 index_type = TREE_TYPE (index_expr);
5437 }
5438
5439 index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0);
5440 }
5441 emit_queue ();
5442 index = protect_from_queue (index, 0);
5443 do_pending_stack_adjust ();
5444
5445 op_mode = insn_data[(int) CODE_FOR_casesi].operand[0].mode;
5446 if (! (*insn_data[(int) CODE_FOR_casesi].operand[0].predicate)
5447 (index, op_mode))
5448 index = copy_to_mode_reg (op_mode, index);
5449
5450 op1 = expand_expr (minval, NULL_RTX, VOIDmode, 0);
5451
5452 op_mode = insn_data[(int) CODE_FOR_casesi].operand[1].mode;
5453 if (! (*insn_data[(int) CODE_FOR_casesi].operand[1].predicate)
5454 (op1, op_mode))
5455 op1 = copy_to_mode_reg (op_mode, op1);
5456
5457 op2 = expand_expr (range, NULL_RTX, VOIDmode, 0);
5458
5459 op_mode = insn_data[(int) CODE_FOR_casesi].operand[2].mode;
5460 if (! (*insn_data[(int) CODE_FOR_casesi].operand[2].predicate)
5461 (op2, op_mode))
5462 op2 = copy_to_mode_reg (op_mode, op2);
5463
5464 emit_jump_insn (gen_casesi (index, op1, op2,
5465 table_label, default_label));
5466 win = 1;
5467 }
5468 #endif
5469 #ifdef HAVE_tablejump
5470 if (! win && HAVE_tablejump)
5471 {
5472 index_expr = convert (thiscase->data.case_stmt.nominal_type,
5473 fold (build (MINUS_EXPR, index_type,
5474 index_expr, minval)));
5475 index_type = TREE_TYPE (index_expr);
5476 index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0);
5477 emit_queue ();
5478 index = protect_from_queue (index, 0);
5479 do_pending_stack_adjust ();
5480
5481 do_tablejump (index, TYPE_MODE (index_type),
5482 expand_expr (range, NULL_RTX, VOIDmode, 0),
5483 table_label, default_label);
5484 win = 1;
5485 }
5486 #endif
5487 if (! win)
5488 abort ();
5489
5490 /* Get table of labels to jump to, in order of case index. */
5491
5492 ncases = TREE_INT_CST_LOW (range) + 1;
5493 labelvec = (rtx *) alloca (ncases * sizeof (rtx));
5494 bzero ((char *) labelvec, ncases * sizeof (rtx));
5495
5496 for (n = thiscase->data.case_stmt.case_list; n; n = n->right)
5497 {
5498 register HOST_WIDE_INT i
5499 = TREE_INT_CST_LOW (n->low) - TREE_INT_CST_LOW (orig_minval);
5500
5501 while (1)
5502 {
5503 labelvec[i]
5504 = gen_rtx_LABEL_REF (Pmode, label_rtx (n->code_label));
5505 if (i + TREE_INT_CST_LOW (orig_minval)
5506 == TREE_INT_CST_LOW (n->high))
5507 break;
5508 i++;
5509 }
5510 }
5511
5512 /* Fill in the gaps with the default. */
5513 for (i = 0; i < ncases; i++)
5514 if (labelvec[i] == 0)
5515 labelvec[i] = gen_rtx_LABEL_REF (Pmode, default_label);
5516
5517 /* Output the table.  */
5518 emit_label (table_label);
5519
5520 if (CASE_VECTOR_PC_RELATIVE || flag_pic)
5521 emit_jump_insn (gen_rtx_ADDR_DIFF_VEC (CASE_VECTOR_MODE,
5522 gen_rtx_LABEL_REF (Pmode, table_label),
5523 gen_rtvec_v (ncases, labelvec),
5524 const0_rtx, const0_rtx));
5525 else
5526 emit_jump_insn (gen_rtx_ADDR_VEC (CASE_VECTOR_MODE,
5527 gen_rtvec_v (ncases, labelvec)));
5528
5529 /* If the case insn drops through the table,
5530 after the table we must jump to the default-label.
5531 Otherwise record no drop-through after the table. */
5532 #ifdef CASE_DROPS_THROUGH
5533 emit_jump (default_label);
5534 #else
5535 emit_barrier ();
5536 #endif
5537 }
5538
5539 before_case = squeeze_notes (NEXT_INSN (before_case), get_last_insn ());
5540 reorder_insns (before_case, get_last_insn (),
5541 thiscase->data.case_stmt.start);
5542 }
5543 else
5544 end_cleanup_deferral ();
5545
5546 if (thiscase->exit_label)
5547 emit_label (thiscase->exit_label);
5548
5549 POPSTACK (case_stack);
5550
5551 free_temp_slots ();
5552 }
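/* A rough sketch of how a front end drives these routines for
   `switch (x) { case 1: ... default: ... }' (argument lists abbreviated;
   see the definitions earlier in this file for the exact signatures):

	expand_start_case (...);	 record X as the index expression
	  pushcase (...);		 once per `case' label in the body
	  ... expand the statements of the switch body ...
	expand_end_case (x);		 emit the dispatch code above

   expand_end_case is where the choice among the decision tree, casesi
   and tablejump strategies is made.  */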
5553
5554 /* Convert the tree NODE into a list linked by the right field, with the left
5555 field zeroed. RIGHT is used for recursion; it is a list to be placed
5556 rightmost in the resulting list. */
5557
5558 static struct case_node *
5559 case_tree2list (node, right)
5560 struct case_node *node, *right;
5561 {
5562 struct case_node *left;
5563
5564 if (node->right)
5565 right = case_tree2list (node->right, right);
5566
5567 node->right = right;
5568 if ((left = node->left))
5569 {
5570 node->left = 0;
5571 return case_tree2list (left, node);
5572 }
5573
5574 return node;
5575 }
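/* For example, an AVL tree built from the case values 1 through 5,

		3
	       / \
	      2   5
	     /   /
	    1   4

   is flattened by case_tree2list into the ascending chain
   1 -> 2 -> 3 -> 4 -> 5, linked through the `right' fields with every
   `left' field zeroed.  */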
5576
5577 /* Generate code to jump to LABEL if OP1 and OP2 are equal. */
5578
5579 static void
5580 do_jump_if_equal (op1, op2, label, unsignedp)
5581 rtx op1, op2, label;
5582 int unsignedp;
5583 {
5584 if (GET_CODE (op1) == CONST_INT
5585 && GET_CODE (op2) == CONST_INT)
5586 {
5587 if (INTVAL (op1) == INTVAL (op2))
5588 emit_jump (label);
5589 }
5590 else
5591 {
5592 enum machine_mode mode = GET_MODE (op1);
5593 if (mode == VOIDmode)
5594 mode = GET_MODE (op2);
5595 emit_cmp_and_jump_insns (op1, op2, EQ, NULL_RTX, mode, unsignedp,
5596 0, label);
5597 }
5598 }
5599 \f
5600 /* Not all case values are encountered equally. This function
5601 uses a heuristic to weight case labels, in cases where that
5602 looks like a reasonable thing to do.
5603
5604 Right now, all we try to guess is text, and we establish the
5605 following weights:
5606
5607 chars above space: 16
5608 digits: 16
5609 default: 12
5610 space, punct: 8
5611 tab: 4
5612 newline: 2
5613 other "\" chars: 1
5614 remaining chars: 0
5615
5616 If we find any cases in the switch that are not either -1 or in the range
5617 of valid ASCII characters, or are control characters other than those
5618 commonly used with "\", don't treat this switch as scanning text.
5619
5620 Return 1 if these nodes are suitable for cost estimation, otherwise
5621 return 0. */
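/* For instance, a switch over the labels 'a', '0' and ' ' gets cost_table
   entries of 16, 16 and 8 and is treated as text, whereas a switch that
   also contains the label 1 (a control character other than the common
   `\' escapes) hits a negative cost_table entry and makes this function
   return 0.  */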
5622
5623 static int
5624 estimate_case_costs (node)
5625 case_node_ptr node;
5626 {
5627 tree min_ascii = build_int_2 (-1, -1);
5628 tree max_ascii = convert (TREE_TYPE (node->high), build_int_2 (127, 0));
5629 case_node_ptr n;
5630 int i;
5631
5632 /* If we haven't already made the cost table, make it now. Note that the
5633 lower bound of the table is -1, not zero. */
5634
5635 if (cost_table == NULL)
5636 {
5637 cost_table = ((short *) xcalloc (129, sizeof (short))) + 1;
5638
5639 for (i = 0; i < 128; i++)
5640 {
5641 if (ISALNUM (i))
5642 cost_table[i] = 16;
5643 else if (ISPUNCT (i))
5644 cost_table[i] = 8;
5645 else if (ISCNTRL (i))
5646 cost_table[i] = -1;
5647 }
5648
5649 cost_table[' '] = 8;
5650 cost_table['\t'] = 4;
5651 cost_table['\0'] = 4;
5652 cost_table['\n'] = 2;
5653 cost_table['\f'] = 1;
5654 cost_table['\v'] = 1;
5655 cost_table['\b'] = 1;
5656 }
5657
5658 /* See if all the case expressions look like text. It is text if the
5659 constant is >= -1 and the highest constant is <= 127. Do all comparisons
5660 as signed arithmetic since we don't want to ever access cost_table with a
5661 value less than -1. Also check that none of the constants in a range
5662 are strange control characters. */
5663
5664 for (n = node; n; n = n->right)
5665 {
5666 if ((INT_CST_LT (n->low, min_ascii)) || INT_CST_LT (max_ascii, n->high))
5667 return 0;
5668
5669 for (i = TREE_INT_CST_LOW (n->low); i <= TREE_INT_CST_LOW (n->high); i++)
5670 if (cost_table[i] < 0)
5671 return 0;
5672 }
5673
5674 /* All interesting values are within the range of interesting
5675 ASCII characters. */
5676 return 1;
5677 }
5678
5679 /* Scan an ordered list of case nodes
5680 combining those with consecutive values or ranges.
5681
5682 E.g. three separate entries 1: 2: 3: become one entry 1..3: */
5683
5684 static void
5685 group_case_nodes (head)
5686 case_node_ptr head;
5687 {
5688 case_node_ptr node = head;
5689
5690 while (node)
5691 {
5692 rtx lb = next_real_insn (label_rtx (node->code_label));
5693 rtx lb2;
5694 case_node_ptr np = node;
5695
5696 /* Try to group the successors of NODE with NODE. */
5697 while (((np = np->right) != 0)
5698 /* Do they jump to the same place? */
5699 && ((lb2 = next_real_insn (label_rtx (np->code_label))) == lb
5700 || (lb != 0 && lb2 != 0
5701 && simplejump_p (lb)
5702 && simplejump_p (lb2)
5703 && rtx_equal_p (SET_SRC (PATTERN (lb)),
5704 SET_SRC (PATTERN (lb2)))))
5705 /* Are their ranges consecutive? */
5706 && tree_int_cst_equal (np->low,
5707 fold (build (PLUS_EXPR,
5708 TREE_TYPE (node->high),
5709 node->high,
5710 integer_one_node)))
5711 /* An overflow is not consecutive. */
5712 && tree_int_cst_lt (node->high,
5713 fold (build (PLUS_EXPR,
5714 TREE_TYPE (node->high),
5715 node->high,
5716 integer_one_node))))
5717 {
5718 node->high = np->high;
5719 }
5720 /* NP is the first node after NODE which can't be grouped with it.
5721 Delete the nodes in between, and move on to that node. */
5722 node->right = np;
5723 node = np;
5724 }
5725 }
5726
5727 /* Take an ordered list of case nodes
5728 and transform them into a near optimal binary tree,
5729 on the assumption that any target code selection value is as
5730 likely as any other.
5731
5732 The transformation is performed by splitting the ordered
5733 list into two equal sections plus a pivot. The parts are
5734 then attached to the pivot as left and right branches. Each
5735 branch is then transformed recursively. */
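/* For example, seven single-valued case nodes with values 1 through 7 and
   no cost table in use are split at the fourth node: 4 becomes the root,
   1..3 its left branch and 5..7 its right branch, and each branch is then
   split the same way, yielding a balanced tree of depth three.  */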
5736
5737 static void
5738 balance_case_nodes (head, parent)
5739 case_node_ptr *head;
5740 case_node_ptr parent;
5741 {
5742 register case_node_ptr np;
5743
5744 np = *head;
5745 if (np)
5746 {
5747 int cost = 0;
5748 int i = 0;
5749 int ranges = 0;
5750 register case_node_ptr *npp;
5751 case_node_ptr left;
5752
5753 /* Count the number of entries on branch. Also count the ranges. */
5754
5755 while (np)
5756 {
5757 if (!tree_int_cst_equal (np->low, np->high))
5758 {
5759 ranges++;
5760 if (use_cost_table)
5761 cost += cost_table[TREE_INT_CST_LOW (np->high)];
5762 }
5763
5764 if (use_cost_table)
5765 cost += cost_table[TREE_INT_CST_LOW (np->low)];
5766
5767 i++;
5768 np = np->right;
5769 }
5770
5771 if (i > 2)
5772 {
5773 /* Split this list if it is long enough for that to help. */
5774 npp = head;
5775 left = *npp;
5776 if (use_cost_table)
5777 {
5778 /* Find the place in the list that bisects the list's total cost.
5779 Here I gets half the total cost. */
5780 int n_moved = 0;
5781 i = (cost + 1) / 2;
5782 while (1)
5783 {
5784 /* Skip nodes while their cost does not reach that amount. */
5785 if (!tree_int_cst_equal ((*npp)->low, (*npp)->high))
5786 i -= cost_table[TREE_INT_CST_LOW ((*npp)->high)];
5787 i -= cost_table[TREE_INT_CST_LOW ((*npp)->low)];
5788 if (i <= 0)
5789 break;
5790 npp = &(*npp)->right;
5791 n_moved += 1;
5792 }
5793 if (n_moved == 0)
5794 {
5795 /* Leave this branch lopsided, but optimize left-hand
5796 side and fill in `parent' fields for right-hand side. */
5797 np = *head;
5798 np->parent = parent;
5799 balance_case_nodes (&np->left, np);
5800 for (; np->right; np = np->right)
5801 np->right->parent = np;
5802 return;
5803 }
5804 }
5805 /* If there are just three nodes, split at the middle one. */
5806 else if (i == 3)
5807 npp = &(*npp)->right;
5808 else
5809 {
5810 /* Find the place in the list that bisects the list's total cost,
5811 where ranges count as 2.
5812 Here I gets half the total cost. */
5813 i = (i + ranges + 1) / 2;
5814 while (1)
5815 {
5816 /* Skip nodes while their cost does not reach that amount. */
5817 if (!tree_int_cst_equal ((*npp)->low, (*npp)->high))
5818 i--;
5819 i--;
5820 if (i <= 0)
5821 break;
5822 npp = &(*npp)->right;
5823 }
5824 }
5825 *head = np = *npp;
5826 *npp = 0;
5827 np->parent = parent;
5828 np->left = left;
5829
5830 /* Optimize each of the two split parts. */
5831 balance_case_nodes (&np->left, np);
5832 balance_case_nodes (&np->right, np);
5833 }
5834 else
5835 {
5836 /* Else leave this branch as one level,
5837 but fill in `parent' fields. */
5838 np = *head;
5839 np->parent = parent;
5840 for (; np->right; np = np->right)
5841 np->right->parent = np;
5842 }
5843 }
5844 }
5845 \f
5846 /* Search the parent sections of the case node tree
5847 to see if a test for the lower bound of NODE would be redundant.
5848 INDEX_TYPE is the type of the index expression.
5849
5850 The instructions to generate the case decision tree are
5851 output in the same order as nodes are processed, so it is
5852 known that if a parent node checks the range of the current
5853 node minus one, the current node is bounded at its lower
5854 span. Thus the test would be redundant. */
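/* For example, if some ancestor of NODE covers the range 5..9 and NODE's
   low bound is 10, control can only reach the code emitted for NODE with
   an index value of at least 10, so the explicit low-bound test can be
   omitted.  */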
5855
5856 static int
5857 node_has_low_bound (node, index_type)
5858 case_node_ptr node;
5859 tree index_type;
5860 {
5861 tree low_minus_one;
5862 case_node_ptr pnode;
5863
5864 /* If the lower bound of this node is the lowest value in the index type,
5865 we need not test it. */
5866
5867 if (tree_int_cst_equal (node->low, TYPE_MIN_VALUE (index_type)))
5868 return 1;
5869
5870 /* If this node has a left branch, the value at the left must be less
5871 than that at this node, so it cannot be bounded at the bottom and
5872 we need not bother testing any further. */
5873
5874 if (node->left)
5875 return 0;
5876
5877 low_minus_one = fold (build (MINUS_EXPR, TREE_TYPE (node->low),
5878 node->low, integer_one_node));
5879
5880 /* If the subtraction above overflowed, we can't verify anything.
5881 Otherwise, look for a parent that tests our value - 1. */
5882
5883 if (! tree_int_cst_lt (low_minus_one, node->low))
5884 return 0;
5885
5886 for (pnode = node->parent; pnode; pnode = pnode->parent)
5887 if (tree_int_cst_equal (low_minus_one, pnode->high))
5888 return 1;
5889
5890 return 0;
5891 }
5892
5893 /* Search the parent sections of the case node tree
5894 to see if a test for the upper bound of NODE would be redundant.
5895 INDEX_TYPE is the type of the index expression.
5896
5897 The instructions to generate the case decision tree are
5898 output in the same order as nodes are processed, so it is
5899 known that if a parent node checks the range of the current
5900 node plus one, the current node is bounded at its upper
5901 span. Thus the test would be redundant. */
5902
5903 static int
5904 node_has_high_bound (node, index_type)
5905 case_node_ptr node;
5906 tree index_type;
5907 {
5908 tree high_plus_one;
5909 case_node_ptr pnode;
5910
5911 /* If there is no upper bound, obviously no test is needed. */
5912
5913 if (TYPE_MAX_VALUE (index_type) == NULL)
5914 return 1;
5915
5916 /* If the upper bound of this node is the highest value in the type
5917 of the index expression, we need not test against it. */
5918
5919 if (tree_int_cst_equal (node->high, TYPE_MAX_VALUE (index_type)))
5920 return 1;
5921
5922 /* If this node has a right branch, the value at the right must be greater
5923 than that at this node, so it cannot be bounded at the top and
5924 we need not bother testing any further. */
5925
5926 if (node->right)
5927 return 0;
5928
5929 high_plus_one = fold (build (PLUS_EXPR, TREE_TYPE (node->high),
5930 node->high, integer_one_node));
5931
5932 /* If the addition above overflowed, we can't verify anything.
5933 Otherwise, look for a parent that tests our value + 1. */
5934
5935 if (! tree_int_cst_lt (node->high, high_plus_one))
5936 return 0;
5937
5938 for (pnode = node->parent; pnode; pnode = pnode->parent)
5939 if (tree_int_cst_equal (high_plus_one, pnode->low))
5940 return 1;
5941
5942 return 0;
5943 }
5944
5945 /* Search the parent sections of the
5946 case node tree to see if both tests for the upper and lower
5947 bounds of NODE would be redundant. */
5948
5949 static int
5950 node_is_bounded (node, index_type)
5951 case_node_ptr node;
5952 tree index_type;
5953 {
5954 return (node_has_low_bound (node, index_type)
5955 && node_has_high_bound (node, index_type));
5956 }
5957
5958 /* Emit an unconditional jump to LABEL unless it would be dead code. */
5959
5960 static void
5961 emit_jump_if_reachable (label)
5962 rtx label;
5963 {
5964 if (GET_CODE (get_last_insn ()) != BARRIER)
5965 emit_jump (label);
5966 }
5967 \f
5968 /* Emit step-by-step code to select a case for the value of INDEX.
5969 The thus generated decision tree follows the form of the
5970 case-node binary tree NODE, whose nodes represent test conditions.
5971 INDEX_TYPE is the type of the index of the switch.
5972
5973 Care is taken to prune redundant tests from the decision tree
5974 by detecting any boundary conditions already checked by
5975 emitted rtx. (See node_has_high_bound, node_has_low_bound
5976 and node_is_bounded, above.)
5977
5978 Where the test conditions can be shown to be redundant we emit
5979 an unconditional jump to the target code. As a further
5980 optimization, the subordinates of a tree node are examined to
5981 check for bounded nodes. In this case conditional and/or
5982 unconditional jumps as a result of the boundary check for the
5983 current node are arranged to target the subordinates' associated
5984 code for out-of-bound conditions on the current node.
5985
5986 We can assume that when control reaches the code generated here,
5987 the index value has already been compared with the parents
5988 of this node, and determined to be on the same side of each parent
5989 as this node is. Thus, if this node tests for the value 51,
5990 and a parent tested for 52, we don't need to consider
5991 the possibility of a value greater than 51. If another parent
5992 tests for the value 50, then this node need not test anything. */
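/* As a small example of the shape of the output: for the single values
   {1, 2, 3} balanced with 2 at the root (and an index type much wider
   than the case range), the code emitted here is roughly

	if (index == 2) goto L2;
	if (index > 2)  goto T;
	if (index == 1) goto L1;
	goto default;
     T: if (index == 3) goto L3;

   with the trailing jump to the default label supplied by the caller's
   emit_jump_if_reachable.  */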
5993
5994 static void
5995 emit_case_nodes (index, node, default_label, index_type)
5996 rtx index;
5997 case_node_ptr node;
5998 rtx default_label;
5999 tree index_type;
6000 {
6001 /* If INDEX has an unsigned type, we must make unsigned branches. */
6002 int unsignedp = TREE_UNSIGNED (index_type);
6003 typedef rtx rtx_fn ();
6004 enum machine_mode mode = GET_MODE (index);
6005
6006 /* See if our parents have already tested everything for us.
6007 If they have, emit an unconditional jump for this node. */
6008 if (node_is_bounded (node, index_type))
6009 emit_jump (label_rtx (node->code_label));
6010
6011 else if (tree_int_cst_equal (node->low, node->high))
6012 {
6013 /* Node is single valued. First see if the index expression matches
6014 this node and then check our children, if any. */
6015
6016 do_jump_if_equal (index, expand_expr (node->low, NULL_RTX, VOIDmode, 0),
6017 label_rtx (node->code_label), unsignedp);
6018
6019 if (node->right != 0 && node->left != 0)
6020 {
6021 /* This node has children on both sides.
6022 Dispatch to one side or the other
6023 by comparing the index value with this node's value.
6024 If one subtree is bounded, check that one first,
6025 so we can avoid real branches in the tree. */
6026
6027 if (node_is_bounded (node->right, index_type))
6028 {
6029 emit_cmp_and_jump_insns (index, expand_expr (node->high, NULL_RTX,
6030 VOIDmode, 0),
6031 GT, NULL_RTX, mode, unsignedp, 0,
6032 label_rtx (node->right->code_label));
6033 emit_case_nodes (index, node->left, default_label, index_type);
6034 }
6035
6036 else if (node_is_bounded (node->left, index_type))
6037 {
6038 emit_cmp_and_jump_insns (index, expand_expr (node->high, NULL_RTX,
6039 VOIDmode, 0),
6040 LT, NULL_RTX, mode, unsignedp, 0,
6041 label_rtx (node->left->code_label));
6042 emit_case_nodes (index, node->right, default_label, index_type);
6043 }
6044
6045 else
6046 {
6047 /* Neither node is bounded. First distinguish the two sides;
6048 then emit the code for one side at a time. */
6049
6050 tree test_label
6051 = build_decl (LABEL_DECL, NULL_TREE, NULL_TREE);
6052
6053 /* See if the value is on the right. */
6054 emit_cmp_and_jump_insns (index, expand_expr (node->high, NULL_RTX,
6055 VOIDmode, 0),
6056 GT, NULL_RTX, mode, unsignedp, 0,
6057 label_rtx (test_label));
6058
6059 /* Value must be on the left.
6060 Handle the left-hand subtree. */
6061 emit_case_nodes (index, node->left, default_label, index_type);
6062 /* If left-hand subtree does nothing,
6063 go to default. */
6064 emit_jump_if_reachable (default_label);
6065
6066 /* Code branches here for the right-hand subtree. */
6067 expand_label (test_label);
6068 emit_case_nodes (index, node->right, default_label, index_type);
6069 }
6070 }
6071
6072 else if (node->right != 0 && node->left == 0)
6073 {
6074 /* Here we have a right child but no left, so we issue a conditional
6075 branch to default and process the right child.
6076
6077 Omit the conditional branch to default if it would only guard a single
6078 right child; it costs too much space to save so little time. */
6079
6080 if (node->right->right || node->right->left
6081 || !tree_int_cst_equal (node->right->low, node->right->high))
6082 {
6083 if (!node_has_low_bound (node, index_type))
6084 {
6085 emit_cmp_and_jump_insns (index, expand_expr (node->high,
6086 NULL_RTX,
6087 VOIDmode, 0),
6088 LT, NULL_RTX, mode, unsignedp, 0,
6089 default_label);
6090 }
6091
6092 emit_case_nodes (index, node->right, default_label, index_type);
6093 }
6094 else
6095 /* We cannot process node->right normally
6096 since we haven't ruled out the numbers less than
6097 this node's value. So handle node->right explicitly. */
6098 do_jump_if_equal (index,
6099 expand_expr (node->right->low, NULL_RTX,
6100 VOIDmode, 0),
6101 label_rtx (node->right->code_label), unsignedp);
6102 }
6103
6104 else if (node->right == 0 && node->left != 0)
6105 {
6106 /* Just one subtree, on the left. */
6107
6108 #if 0 /* The following code and comment were formerly part
6109 of the condition here, but they didn't work
6110 and I don't understand what the idea was. -- rms. */
6111 /* If our "most probable entry" is less probable
6112 than the default label, emit a jump to
6113 the default label using condition codes
6114 already lying around. With no right branch,
6115 a branch-greater-than will get us to the default
6116 label correctly. */
6117 if (use_cost_table
6118 && cost_table[TREE_INT_CST_LOW (node->high)] < 12)
6119 ;
6120 #endif /* 0 */
6121 if (node->left->left || node->left->right
6122 || !tree_int_cst_equal (node->left->low, node->left->high))
6123 {
6124 if (!node_has_high_bound (node, index_type))
6125 {
6126 emit_cmp_and_jump_insns (index, expand_expr (node->high,
6127 NULL_RTX,
6128 VOIDmode, 0),
6129 GT, NULL_RTX, mode, unsignedp, 0,
6130 default_label);
6131 }
6132
6133 emit_case_nodes (index, node->left, default_label, index_type);
6134 }
6135 else
6136 /* We cannot process node->left normally
6137 since we haven't ruled out the numbers greater than
6138 this node's value. So handle node->left explicitly. */
6139 do_jump_if_equal (index,
6140 expand_expr (node->left->low, NULL_RTX,
6141 VOIDmode, 0),
6142 label_rtx (node->left->code_label), unsignedp);
6143 }
6144 }
6145 else
6146 {
6147 /* Node is a range. These cases are very similar to those for a single
6148 value, except that we do not start by testing whether this node
6149 is the one to branch to. */
6150
6151 if (node->right != 0 && node->left != 0)
6152 {
6153 /* Node has subtrees on both sides.
6154 If the right-hand subtree is bounded,
6155 test for it first, since we can go straight there.
6156 Otherwise, we need to make a branch in the control structure,
6157 then handle the two subtrees. */
6158 tree test_label = 0;
6159
6160
6161 if (node_is_bounded (node->right, index_type))
6162 /* Right hand node is fully bounded so we can eliminate any
6163 testing and branch directly to the target code. */
6164 emit_cmp_and_jump_insns (index, expand_expr (node->high, NULL_RTX,
6165 VOIDmode, 0),
6166 GT, NULL_RTX, mode, unsignedp, 0,
6167 label_rtx (node->right->code_label));
6168 else
6169 {
6170 /* Right hand node requires testing.
6171 Branch to a label where we will handle it later. */
6172
6173 test_label = build_decl (LABEL_DECL, NULL_TREE, NULL_TREE);
6174 emit_cmp_and_jump_insns (index, expand_expr (node->high, NULL_RTX,
6175 VOIDmode, 0),
6176 GT, NULL_RTX, mode, unsignedp, 0,
6177 label_rtx (test_label));
6178 }
6179
6180 /* Value belongs to this node or to the left-hand subtree. */
6181
6182 emit_cmp_and_jump_insns (index, expand_expr (node->low, NULL_RTX,
6183 VOIDmode, 0),
6184 GE, NULL_RTX, mode, unsignedp, 0,
6185 label_rtx (node->code_label));
6186
6187 /* Handle the left-hand subtree. */
6188 emit_case_nodes (index, node->left, default_label, index_type);
6189
6190 /* If right node had to be handled later, do that now. */
6191
6192 if (test_label)
6193 {
6194 /* If the left-hand subtree fell through,
6195 don't let it fall into the right-hand subtree. */
6196 emit_jump_if_reachable (default_label);
6197
6198 expand_label (test_label);
6199 emit_case_nodes (index, node->right, default_label, index_type);
6200 }
6201 }
6202
6203 else if (node->right != 0 && node->left == 0)
6204 {
6205 /* Deal with values to the left of this node,
6206 if they are possible. */
6207 if (!node_has_low_bound (node, index_type))
6208 {
6209 emit_cmp_and_jump_insns (index, expand_expr (node->low, NULL_RTX,
6210 VOIDmode, 0),
6211 LT, NULL_RTX, mode, unsignedp, 0,
6212 default_label);
6213 }
6214
6215 /* Value belongs to this node or to the right-hand subtree. */
6216
6217 emit_cmp_and_jump_insns (index, expand_expr (node->high, NULL_RTX,
6218 VOIDmode, 0),
6219 LE, NULL_RTX, mode, unsignedp, 0,
6220 label_rtx (node->code_label));
6221
6222 emit_case_nodes (index, node->right, default_label, index_type);
6223 }
6224
6225 else if (node->right == 0 && node->left != 0)
6226 {
6227 /* Deal with values to the right of this node,
6228 if they are possible. */
6229 if (!node_has_high_bound (node, index_type))
6230 {
6231 emit_cmp_and_jump_insns (index, expand_expr (node->high, NULL_RTX,
6232 VOIDmode, 0),
6233 GT, NULL_RTX, mode, unsignedp, 0,
6234 default_label);
6235 }
6236
6237 /* Value belongs to this node or to the left-hand subtree. */
6238
6239 emit_cmp_and_jump_insns (index, expand_expr (node->low, NULL_RTX,
6240 VOIDmode, 0),
6241 GE, NULL_RTX, mode, unsignedp, 0,
6242 label_rtx (node->code_label));
6243
6244 emit_case_nodes (index, node->left, default_label, index_type);
6245 }
6246
6247 else
6248 {
6249 /* Node has no children so we check low and high bounds to remove
6250 redundant tests. Only one of the bounds can exist,
6251 since otherwise this node is bounded--a case tested already. */
6252
6253 if (!node_has_high_bound (node, index_type))
6254 {
6255 emit_cmp_and_jump_insns (index, expand_expr (node->high, NULL_RTX,
6256 VOIDmode, 0),
6257 GT, NULL_RTX, mode, unsignedp, 0,
6258 default_label);
6259 }
6260
6261 if (!node_has_low_bound (node, index_type))
6262 {
6263 emit_cmp_and_jump_insns (index, expand_expr (node->low, NULL_RTX,
6264 VOIDmode, 0),
6265 LT, NULL_RTX, mode, unsignedp, 0,
6266 default_label);
6267 }
6268
6269 emit_jump (label_rtx (node->code_label));
6270 }
6271 }
6272 }
6273 \f
6274 /* These routines are used by the loop unrolling code. They copy BLOCK trees
6275 so that the debugging info will be correct for the unrolled loop. */
6276
6277 /* Indexed by block number, contains a pointer to the N'th block node.
6278
6279 Allocated by the call to identify_blocks, then released after the call
6280 to reorder_blocks in the function unroll_block_trees. */
6281
6282 static tree *block_vector;
6283
6284 void
6285 find_loop_tree_blocks ()
6286 {
6287 tree block = DECL_INITIAL (current_function_decl);
6288
6289 block_vector = identify_blocks (block, get_insns ());
6290 }
6291
6292 void
6293 unroll_block_trees ()
6294 {
6295 tree block = DECL_INITIAL (current_function_decl);
6296
6297 reorder_blocks (block_vector, block, get_insns ());
6298
6299 /* Release any memory allocated by identify_blocks. */
6300 if (block_vector)
6301 free (block_vector);
6302 }
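/* A sketch of the intended calling sequence around loop unrolling
   (call sites abbreviated; see the comment above these two functions):

	find_loop_tree_blocks ();	build block_vector from the insns
	... unroll the loop, copying its BLOCK trees ...
	unroll_block_trees ();		reorder the BLOCKs and free block_vector

   so that the debugging information still matches the unrolled RTL.  */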