/* Expands front end tree to back end RTL for GNU C-Compiler
   Copyright (C) 1987, 1988, 1989, 1992, 1993, 1994, 1995, 1996, 1997,
   1998, 1999, 2000, 2001, 2002 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 2, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING.  If not, write to the Free
Software Foundation, 59 Temple Place - Suite 330, Boston, MA
02111-1307, USA.  */

/* This file handles the generation of rtl code from tree structure
   above the level of expressions, using subroutines in exp*.c and emit-rtl.c.
   It also creates the rtl expressions for parameters and auto variables
   and has full responsibility for allocating stack slots.

   The functions whose names start with `expand_' are called by the
   parser to generate RTL instructions for various kinds of constructs.

   Some control and binding constructs require calling several such
   functions at different times.  For example, a simple if-then
   is expanded by calling `expand_start_cond' (with the condition-expression
   as argument) before parsing the then-clause and calling `expand_end_cond'
   after parsing the then-clause.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"

#include "rtl.h"
#include "tree.h"
#include "tm_p.h"
#include "flags.h"
#include "except.h"
#include "function.h"
#include "insn-config.h"
#include "expr.h"
#include "libfuncs.h"
#include "hard-reg-set.h"
#include "loop.h"
#include "recog.h"
#include "machmode.h"
#include "toplev.h"
#include "output.h"
#include "ggc.h"
#include "langhooks.h"
#include "predict.h"

/* Assume that case vectors are not pc-relative.  */
#ifndef CASE_VECTOR_PC_RELATIVE
#define CASE_VECTOR_PC_RELATIVE 0
#endif
\f
/* Functions and data structures for expanding case statements.  */

/* Case label structure, used to hold info on labels within case
   statements.  We handle "range" labels; for a single-value label
   as in C, the high and low limits are the same.

   An AVL tree of case nodes is initially created, and later transformed
   to a list linked via the RIGHT fields in the nodes.  Nodes with
   higher case values are later in the list.

   Switch statements can be output in one of two forms.  A branch table
   is used if there are more than a few labels and the labels are dense
   within the range between the smallest and largest case value.  If a
   branch table is used, no further manipulations are done with the case
   node chain.

   The alternative to the use of a branch table is to generate a series
   of compare and jump insns.  When that is done, we use the LEFT, RIGHT,
   and PARENT fields to hold a binary tree.  Initially the tree is
   totally unbalanced, with everything on the right.  We balance the tree
   with nodes on the left having lower case values than the parent
   and nodes on the right having higher values.  We then output the tree
   in order.  */

struct case_node GTY(())
{
  struct case_node *left;   /* Left son in binary tree */
  struct case_node *right;  /* Right son in binary tree; also node chain */
  struct case_node *parent; /* Parent of node in binary tree */
  tree low;                 /* Lowest index value for this label */
  tree high;                /* Highest index value for this label */
  tree code_label;          /* Label to jump to when node matches */
  int balance;
};

typedef struct case_node case_node;
typedef struct case_node *case_node_ptr;

/* These are used by estimate_case_costs and balance_case_nodes.  */

/* This must be a signed type, and non-ANSI compilers lack signed char.  */
static short cost_table_[129];
static int use_cost_table;
static int cost_table_initialized;

/* Special care is needed because we allow -1, but TREE_INT_CST_LOW
   is unsigned.  */
#define COST_TABLE(I)  cost_table_[(unsigned HOST_WIDE_INT) ((I) + 1)]
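
/* For example, COST_TABLE (-1) refers to cost_table_[0] and
   COST_TABLE (127) to cost_table_[128]; the single array thus covers
   case values -1 through 127 without a negative subscript.  */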
\f
/* Stack of control and binding constructs we are currently inside.

   These constructs begin when you call `expand_start_WHATEVER'
   and end when you call `expand_end_WHATEVER'.  This stack records
   info about how the construct began that tells the end-function
   what to do.  It also may provide information about the construct
   to alter the behavior of other constructs within the body.
   For example, they may affect the behavior of C `break' and `continue'.

   Each construct gets one `struct nesting' object.
   All of these objects are chained through the `all' field.
   `nesting_stack' points to the first object (innermost construct).
   The position of an entry on `nesting_stack' is in its `depth' field.

   Each type of construct has its own individual stack.
   For example, loops have `loop_stack'.  Each object points to the
   next object of the same type through the `next' field.

   Some constructs are visible to `break' exit-statements and others
   are not.  Which constructs are visible depends on the language.
   Therefore, the data structure allows each construct to be visible
   or not, according to the args given when the construct is started.
   The construct is visible if the `exit_label' field is non-null.
   In that case, the value should be a CODE_LABEL rtx.  */

struct nesting GTY(())
{
  struct nesting *all;
  struct nesting *next;
  int depth;
  rtx exit_label;
  enum nesting_desc {
    COND_NESTING,
    LOOP_NESTING,
    BLOCK_NESTING,
    CASE_NESTING
  } desc;
  union nesting_u
    {
      /* For conds (if-then and if-then-else statements).  */
      struct nesting_cond
        {
          /* Label for the end of the if construct.
             There is none if EXITFLAG was not set
             and no `else' has been seen yet.  */
          rtx endif_label;
          /* Label for the end of this alternative.
             This may be the end of the if or the next else/elseif.  */
          rtx next_label;
        } GTY ((tag ("COND_NESTING"))) cond;
      /* For loops.  */
      struct nesting_loop
        {
          /* Label at the top of the loop; place to loop back to.  */
          rtx start_label;
          /* Label at the end of the whole construct.  */
          rtx end_label;
          /* Label for `continue' statement to jump to;
             this is in front of the stepper of the loop.  */
          rtx continue_label;
        } GTY ((tag ("LOOP_NESTING"))) loop;
      /* For variable binding contours.  */
      struct nesting_block
        {
          /* Sequence number of this binding contour within the function,
             in order of entry.  */
          int block_start_count;
          /* Nonzero => value to restore stack to on exit.  */
          rtx stack_level;
          /* The NOTE that starts this contour.
             Used by expand_goto to check whether the destination
             is within each contour or not.  */
          rtx first_insn;
          /* Innermost containing binding contour that has a stack level.  */
          struct nesting *innermost_stack_block;
          /* List of cleanups to be run on exit from this contour.
             This is a list of expressions to be evaluated.
             The TREE_PURPOSE of each link is the ..._DECL node
             which the cleanup pertains to.  */
          tree cleanups;
          /* List of cleanup-lists of blocks containing this block,
             as they were at the locus where this block appears.
             There is an element for each containing block,
             ordered innermost containing block first.
             The tail of this list can be 0,
             if all remaining elements would be empty lists.
             The element's TREE_VALUE is the cleanup-list of that block,
             which may be null.  */
          tree outer_cleanups;
          /* Chain of labels defined inside this binding contour.
             For contours that have stack levels or cleanups.  */
          struct label_chain *label_chain;
          /* Number of function calls seen, as of start of this block.  */
          int n_function_calls;
          /* Nonzero if this is associated with an EH region.  */
          int exception_region;
          /* The saved target_temp_slot_level from our outer block.
             We may reset target_temp_slot_level to be the level of
             this block; if that is done, target_temp_slot_level
             reverts to the saved target_temp_slot_level at the very
             end of the block.  */
          int block_target_temp_slot_level;
          /* True if we are currently emitting insns in an area of
             output code that is controlled by a conditional
             expression.  This is used by the cleanup handling code to
             generate conditional cleanup actions.  */
          int conditional_code;
          /* A place to move the start of the exception region for any
             of the conditional cleanups, must be at the end or after
             the start of the last unconditional cleanup, and before any
             conditional branch points.  */
          rtx last_unconditional_cleanup;
        } GTY ((tag ("BLOCK_NESTING"))) block;
      /* For switch (C) or case (Pascal) statements,
         and also for dummies (see `expand_start_case_dummy').  */
      struct nesting_case
        {
          /* The insn after which the case dispatch should finally
             be emitted.  Zero for a dummy.  */
          rtx start;
          /* A list of case labels; it is first built as an AVL tree.
             During expand_end_case, this is converted to a list, and may be
             rearranged into a nearly balanced binary tree.  */
          struct case_node *case_list;
          /* Label to jump to if no case matches.  */
          tree default_label;
          /* The expression to be dispatched on.  */
          tree index_expr;
          /* Type that INDEX_EXPR should be converted to.  */
          tree nominal_type;
          /* Name of this kind of statement, for warnings.  */
          const char *printname;
          /* Used to save no_line_numbers till we see the first case label.
             We set this to -1 when we see the first case label in this
             case statement.  */
          int line_number_status;
        } GTY ((tag ("CASE_NESTING"))) case_stmt;
    } GTY ((desc ("%1.desc"))) data;
};
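
/* For example, while expanding the body of

       if (cond) { int v[n]; while (e) ... ; }

   there is one entry on cond_stack, one on block_stack and one on
   loop_stack, and nesting_stack chains all three through their `all'
   fields, innermost construct first.  */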

/* Allocate and return a new `struct nesting'.  */

#define ALLOC_NESTING() \
  (struct nesting *) ggc_alloc (sizeof (struct nesting))

/* Pop the nesting stack element by element until we pop off
   the element which is at the top of STACK.
   Update all the other stacks, popping off elements from them
   as we pop them from nesting_stack.  */

#define POPSTACK(STACK)                                  \
do { struct nesting *target = STACK;                     \
     struct nesting *this;                               \
     do { this = nesting_stack;                          \
          if (loop_stack == this)                        \
            loop_stack = loop_stack->next;               \
          if (cond_stack == this)                        \
            cond_stack = cond_stack->next;               \
          if (block_stack == this)                       \
            block_stack = block_stack->next;             \
          if (stack_block_stack == this)                 \
            stack_block_stack = stack_block_stack->next; \
          if (case_stack == this)                        \
            case_stack = case_stack->next;               \
          nesting_depth = nesting_stack->depth - 1;      \
          nesting_stack = this->all; }                   \
     while (this != target); } while (0)
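
/* Typical use (a sketch; the callers appear later in this file):
   expand_end_loop finishes a loop with

       POPSTACK (loop_stack);

   unwinding nesting_stack down to and including the loop's entry while
   keeping every per-construct stack consistent along the way.  */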
\f
/* In some cases it is impossible to generate code for a forward goto
   until the label definition is seen.  This happens when it may be necessary
   for the goto to reset the stack pointer: we don't yet know how to do that.
   So expand_goto puts an entry on this fixup list.
   Each time a binding contour that resets the stack is exited,
   we check each fixup.
   If the target label has now been defined, we can insert the proper code.  */

struct goto_fixup GTY(())
{
  /* Points to following fixup.  */
  struct goto_fixup *next;
  /* Points to the insn before the jump insn.
     If more code must be inserted, it goes after this insn.  */
  rtx before_jump;
  /* The LABEL_DECL that this jump is jumping to, or 0
     for break, continue or return.  */
  tree target;
  /* The BLOCK for the place where this goto was found.  */
  tree context;
  /* The CODE_LABEL rtx that this is jumping to.  */
  rtx target_rtl;
  /* Number of binding contours started in current function
     before the label reference.  */
  int block_start_count;
  /* The outermost stack level that should be restored for this jump.
     Each time a binding contour that resets the stack is exited,
     if the target label is *not* yet defined, this slot is updated.  */
  rtx stack_level;
  /* List of lists of cleanup expressions to be run by this goto.
     There is one element for each block that this goto is within.
     The tail of this list can be 0,
     if all remaining elements would be empty.
     The TREE_VALUE contains the cleanup list of that block as of the
     time this goto was seen.
     The TREE_ADDRESSABLE flag is 1 for a block that has been exited.  */
  tree cleanup_list_list;
};

/* Within any binding contour that must restore a stack level,
   all labels are recorded with a chain of these structures.  */

struct label_chain GTY(())
{
  /* Points to the following label in the chain.  */
  struct label_chain *next;
  tree label;
};

struct stmt_status GTY(())
{
  /* Chain of all pending binding contours.  */
  struct nesting * x_block_stack;

  /* If any new stacks are added here, add them to POPSTACK too.  */

  /* Chain of all pending binding contours that restore stack levels
     or have cleanups.  */
  struct nesting * x_stack_block_stack;

  /* Chain of all pending conditional statements.  */
  struct nesting * x_cond_stack;

  /* Chain of all pending loops.  */
  struct nesting * x_loop_stack;

  /* Chain of all pending case or switch statements.  */
  struct nesting * x_case_stack;

  /* Separate chain including all of the above,
     chained through the `all' field.  */
  struct nesting * x_nesting_stack;

  /* Number of entries on nesting_stack now.  */
  int x_nesting_depth;

  /* Number of binding contours started so far in this function.  */
  int x_block_start_count;

  /* Each time we expand an expression-statement,
     record the expr's type and its RTL value here.  */
  tree x_last_expr_type;
  rtx x_last_expr_value;

  /* Nonzero if within a ({...}) grouping, in which case we must
     always compute a value for each expr-stmt in case it is the last one.  */
  int x_expr_stmts_for_value;

  /* Filename and line number of last line-number note,
     whether we actually emitted it or not.  */
  const char *x_emit_filename;
  int x_emit_lineno;

  struct goto_fixup *x_goto_fixup_chain;
};

#define block_stack (cfun->stmt->x_block_stack)
#define stack_block_stack (cfun->stmt->x_stack_block_stack)
#define cond_stack (cfun->stmt->x_cond_stack)
#define loop_stack (cfun->stmt->x_loop_stack)
#define case_stack (cfun->stmt->x_case_stack)
#define nesting_stack (cfun->stmt->x_nesting_stack)
#define nesting_depth (cfun->stmt->x_nesting_depth)
#define current_block_start_count (cfun->stmt->x_block_start_count)
#define last_expr_type (cfun->stmt->x_last_expr_type)
#define last_expr_value (cfun->stmt->x_last_expr_value)
#define expr_stmts_for_value (cfun->stmt->x_expr_stmts_for_value)
#define emit_filename (cfun->stmt->x_emit_filename)
#define emit_lineno (cfun->stmt->x_emit_lineno)
#define goto_fixup_chain (cfun->stmt->x_goto_fixup_chain)

/* Non-zero if we are using EH to handle cleanups.  */
static int using_eh_for_cleanups_p = 0;

static int n_occurrences                 PARAMS ((int, const char *));
static bool parse_input_constraint       PARAMS ((const char **, int, int, int,
                                                  int, const char * const *,
                                                  bool *, bool *));
static bool decl_conflicts_with_clobbers_p PARAMS ((tree, const HARD_REG_SET));
static void expand_goto_internal         PARAMS ((tree, rtx, rtx));
static int expand_fixup                  PARAMS ((tree, rtx, rtx));
static rtx expand_nl_handler_label       PARAMS ((rtx, rtx));
static void expand_nl_goto_receiver      PARAMS ((void));
static void expand_nl_goto_receivers     PARAMS ((struct nesting *));
static void fixup_gotos                  PARAMS ((struct nesting *, rtx, tree,
                                                  rtx, int));
static bool check_operand_nalternatives  PARAMS ((tree, tree));
static bool check_unique_operand_names   PARAMS ((tree, tree));
static tree resolve_operand_names        PARAMS ((tree, tree, tree,
                                                  const char **));
static char *resolve_operand_name_1      PARAMS ((char *, tree, tree));
static void expand_null_return_1         PARAMS ((rtx));
static enum br_predictor return_prediction PARAMS ((rtx));
static void expand_value_return          PARAMS ((rtx));
static int tail_recursion_args           PARAMS ((tree, tree));
static void expand_cleanups              PARAMS ((tree, tree, int, int));
static void check_seenlabel              PARAMS ((void));
static void do_jump_if_equal             PARAMS ((rtx, rtx, rtx, int));
static int estimate_case_costs           PARAMS ((case_node_ptr));
static void group_case_nodes             PARAMS ((case_node_ptr));
static void balance_case_nodes           PARAMS ((case_node_ptr *,
                                                  case_node_ptr));
static int node_has_low_bound            PARAMS ((case_node_ptr, tree));
static int node_has_high_bound           PARAMS ((case_node_ptr, tree));
static int node_is_bounded               PARAMS ((case_node_ptr, tree));
static void emit_jump_if_reachable       PARAMS ((rtx));
static void emit_case_nodes              PARAMS ((rtx, case_node_ptr, rtx, tree));
static struct case_node *case_tree2list  PARAMS ((case_node *, case_node *));
\f
void
using_eh_for_cleanups ()
{
  using_eh_for_cleanups_p = 1;
}

void
init_stmt_for_function ()
{
  cfun->stmt = ((struct stmt_status *) ggc_alloc (sizeof (struct stmt_status)));

  /* We are not currently within any block, conditional, loop or case.  */
  block_stack = 0;
  stack_block_stack = 0;
  loop_stack = 0;
  case_stack = 0;
  cond_stack = 0;
  nesting_stack = 0;
  nesting_depth = 0;

  current_block_start_count = 0;

  /* No gotos have been expanded yet.  */
  goto_fixup_chain = 0;

  /* We are not processing a ({...}) grouping.  */
  expr_stmts_for_value = 0;
  clear_last_expr ();
}
\f
/* Record the current file and line.  Called from emit_line_note.  */
void
set_file_and_line_for_stmt (file, line)
     const char *file;
     int line;
{
  /* If we're outputting an inline function, and we add a line note,
     there may be no CFUN->STMT information.  So, there's no need to
     update it.  */
  if (cfun->stmt)
    {
      emit_filename = file;
      emit_lineno = line;
    }
}

/* Emit a no-op instruction.  */

void
emit_nop ()
{
  rtx last_insn;

  last_insn = get_last_insn ();
  if (!optimize
      && (GET_CODE (last_insn) == CODE_LABEL
          || (GET_CODE (last_insn) == NOTE
              && prev_real_insn (last_insn) == 0)))
    emit_insn (gen_nop ());
}
\f
/* Return the rtx-label that corresponds to a LABEL_DECL,
   creating it if necessary.  */

rtx
label_rtx (label)
     tree label;
{
  if (TREE_CODE (label) != LABEL_DECL)
    abort ();

  if (!DECL_RTL_SET_P (label))
    SET_DECL_RTL (label, gen_label_rtx ());

  return DECL_RTL (label);
}


/* Add an unconditional jump to LABEL as the next sequential instruction.  */

void
emit_jump (label)
     rtx label;
{
  do_pending_stack_adjust ();
  emit_jump_insn (gen_jump (label));
  emit_barrier ();
}

/* Emit code to jump to the address
   specified by the pointer expression EXP.  */

void
expand_computed_goto (exp)
     tree exp;
{
  rtx x = expand_expr (exp, NULL_RTX, VOIDmode, 0);

#ifdef POINTERS_EXTEND_UNSIGNED
  if (GET_MODE (x) != Pmode)
    x = convert_memory_address (Pmode, x);
#endif

  emit_queue ();
  do_pending_stack_adjust ();
  emit_indirect_jump (x);

  current_function_has_computed_jump = 1;
}
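
/* For example, the GNU C computed-goto extension

       void *p = &&lab;
       goto *p;

   reaches this function with EXP being the pointer expression `p'.  */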
\f
/* Handle goto statements and the labels that they can go to.  */

/* Specify the location in the RTL code of a label LABEL,
   which is a LABEL_DECL tree node.

   This is used for the kind of label that the user can jump to with a
   goto statement, and for alternatives of a switch or case statement.
   RTL labels generated for loops and conditionals don't go through here;
   they are generated directly at the RTL level, by other functions below.

   Note that this has nothing to do with defining label *names*.
   Languages vary in how they do that and what that even means.  */

void
expand_label (label)
     tree label;
{
  struct label_chain *p;

  do_pending_stack_adjust ();
  emit_label (label_rtx (label));
  if (DECL_NAME (label))
    LABEL_NAME (DECL_RTL (label)) = IDENTIFIER_POINTER (DECL_NAME (label));

  if (stack_block_stack != 0)
    {
      p = (struct label_chain *) ggc_alloc (sizeof (struct label_chain));
      p->next = stack_block_stack->data.block.label_chain;
      stack_block_stack->data.block.label_chain = p;
      p->label = label;
    }
}

/* Declare that LABEL (a LABEL_DECL) may be used for nonlocal gotos
   from nested functions.  */

void
declare_nonlocal_label (label)
     tree label;
{
  rtx slot = assign_stack_local (Pmode, GET_MODE_SIZE (Pmode), 0);

  nonlocal_labels = tree_cons (NULL_TREE, label, nonlocal_labels);
  LABEL_PRESERVE_P (label_rtx (label)) = 1;
  if (nonlocal_goto_handler_slots == 0)
    {
      emit_stack_save (SAVE_NONLOCAL,
                       &nonlocal_goto_stack_level,
                       PREV_INSN (tail_recursion_reentry));
    }
  nonlocal_goto_handler_slots
    = gen_rtx_EXPR_LIST (VOIDmode, slot, nonlocal_goto_handler_slots);
}
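
/* Illustrative sketch: with GNU C nested functions,

       void f (void)
       {
         __label__ lab;
         void g (void) { goto lab; }
         g ();
       lab:;
       }

   `lab' is a nonlocal label.  declare_nonlocal_label reserves one
   pointer-sized handler slot for it, and the goto in `g' is expanded
   by expand_goto below using that slot via the static chain.  */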

/* Generate RTL code for a `goto' statement with target label LABEL.
   LABEL should be a LABEL_DECL tree node that was or will later be
   defined with `expand_label'.  */

void
expand_goto (label)
     tree label;
{
  tree context;

  /* Check for a nonlocal goto to a containing function.  */
  context = decl_function_context (label);
  if (context != 0 && context != current_function_decl)
    {
      struct function *p = find_function_data (context);
      rtx label_ref = gen_rtx_LABEL_REF (Pmode, label_rtx (label));
      rtx handler_slot, static_chain, save_area, insn;
      tree link;

      /* Find the corresponding handler slot for this label.  */
      handler_slot = p->x_nonlocal_goto_handler_slots;
      for (link = p->x_nonlocal_labels; TREE_VALUE (link) != label;
           link = TREE_CHAIN (link))
        handler_slot = XEXP (handler_slot, 1);
      handler_slot = XEXP (handler_slot, 0);

      p->has_nonlocal_label = 1;
      current_function_has_nonlocal_goto = 1;
      LABEL_REF_NONLOCAL_P (label_ref) = 1;

      /* Copy the rtl for the slots so that they won't be shared in
         case the virtual stack vars register gets instantiated differently
         in the parent than in the child.  */

      static_chain = copy_to_reg (lookup_static_chain (label));

      /* Get addr of containing function's current nonlocal goto handler,
         which will do any cleanups and then jump to the label.  */
      handler_slot = copy_to_reg (replace_rtx (copy_rtx (handler_slot),
                                               virtual_stack_vars_rtx,
                                               static_chain));

      /* Get addr of containing function's nonlocal save area.  */
      save_area = p->x_nonlocal_goto_stack_level;
      if (save_area)
        save_area = replace_rtx (copy_rtx (save_area),
                                 virtual_stack_vars_rtx, static_chain);

#if HAVE_nonlocal_goto
      if (HAVE_nonlocal_goto)
        emit_insn (gen_nonlocal_goto (static_chain, handler_slot,
                                      save_area, label_ref));
      else
#endif
        {
          /* Restore frame pointer for containing function.
             This sets the actual hard register used for the frame pointer
             to the location of the function's incoming static chain info.
             The non-local goto handler will then adjust it to contain the
             proper value and reload the argument pointer, if needed.  */
          emit_move_insn (hard_frame_pointer_rtx, static_chain);
          emit_stack_restore (SAVE_NONLOCAL, save_area, NULL_RTX);

          /* USE of hard_frame_pointer_rtx added for consistency;
             not clear if really needed.  */
          emit_insn (gen_rtx_USE (VOIDmode, hard_frame_pointer_rtx));
          emit_insn (gen_rtx_USE (VOIDmode, stack_pointer_rtx));
          emit_indirect_jump (handler_slot);
        }

      /* Search backwards to the jump insn and mark it as a
         non-local goto.  */
      for (insn = get_last_insn (); insn; insn = PREV_INSN (insn))
        {
          if (GET_CODE (insn) == JUMP_INSN)
            {
              REG_NOTES (insn) = alloc_EXPR_LIST (REG_NON_LOCAL_GOTO,
                                                  const0_rtx, REG_NOTES (insn));
              break;
            }
          else if (GET_CODE (insn) == CALL_INSN)
            break;
        }
    }
  else
    expand_goto_internal (label, label_rtx (label), NULL_RTX);
}

/* Generate RTL code for a `goto' statement with target label BODY.
   LABEL should be the CODE_LABEL rtx for that label.
   LAST_INSN, if non-0, is the rtx we should consider as the last
   insn emitted (for the purposes of cleaning up a return).  */

static void
expand_goto_internal (body, label, last_insn)
     tree body;
     rtx label;
     rtx last_insn;
{
  struct nesting *block;
  rtx stack_level = 0;

  if (GET_CODE (label) != CODE_LABEL)
    abort ();

  /* If label has already been defined, we can tell now
     whether and how we must alter the stack level.  */

  if (PREV_INSN (label) != 0)
    {
      /* Find the innermost pending block that contains the label.
         (Check containment by comparing insn-uids.)
         Then restore the outermost stack level within that block,
         and do cleanups of all blocks contained in it.  */
      for (block = block_stack; block; block = block->next)
        {
          if (INSN_UID (block->data.block.first_insn) < INSN_UID (label))
            break;
          if (block->data.block.stack_level != 0)
            stack_level = block->data.block.stack_level;
          /* Execute the cleanups for blocks we are exiting.  */
          if (block->data.block.cleanups != 0)
            {
              expand_cleanups (block->data.block.cleanups, NULL_TREE, 1, 1);
              do_pending_stack_adjust ();
            }
        }

      if (stack_level)
        {
          /* Ensure stack adjust isn't done by emit_jump, as this
             would clobber the stack pointer.  This one should be
             deleted as dead by flow.  */
          clear_pending_stack_adjust ();
          do_pending_stack_adjust ();

          /* Don't do this adjust if it's to the end label and this function
             is to return with a depressed stack pointer.  */
          if (label == return_label
              && (((TREE_CODE (TREE_TYPE (current_function_decl))
                    == FUNCTION_TYPE)
                   && (TYPE_RETURNS_STACK_DEPRESSED
                       (TREE_TYPE (current_function_decl))))))
            ;
          else
            emit_stack_restore (SAVE_BLOCK, stack_level, NULL_RTX);
        }

      if (body != 0 && DECL_TOO_LATE (body))
        error ("jump to `%s' invalidly jumps into binding contour",
               IDENTIFIER_POINTER (DECL_NAME (body)));
    }
  /* Label not yet defined: may need to put this goto
     on the fixup list.  */
  else if (! expand_fixup (body, label, last_insn))
    {
      /* No fixup needed.  Record that the label is the target
         of at least one goto that has no fixup.  */
      if (body != 0)
        TREE_ADDRESSABLE (body) = 1;
    }

  emit_jump (label);
}
\f
/* Generate if necessary a fixup for a goto
   whose target label in tree structure (if any) is TREE_LABEL
   and whose target in rtl is RTL_LABEL.

   If LAST_INSN is nonzero, we pretend that the jump appears
   after insn LAST_INSN instead of at the current point in the insn stream.

   The fixup will be used later to insert insns just before the goto.
   Those insns will restore the stack level as appropriate for the
   target label, and will (in the case of C++) also invoke any object
   destructors which have to be invoked when we exit the scopes which
   are exited by the goto.

   Value is nonzero if a fixup is made.  */

static int
expand_fixup (tree_label, rtl_label, last_insn)
     tree tree_label;
     rtx rtl_label;
     rtx last_insn;
{
  struct nesting *block, *end_block;

  /* See if we can recognize which block the label will be output in.
     This is possible in some very common cases.
     If we succeed, set END_BLOCK to that block.
     Otherwise, set it to 0.  */

  if (cond_stack
      && (rtl_label == cond_stack->data.cond.endif_label
          || rtl_label == cond_stack->data.cond.next_label))
    end_block = cond_stack;
  /* If we are in a loop, recognize certain labels which
     are likely targets.  This reduces the number of fixups
     we need to create.  */
  else if (loop_stack
           && (rtl_label == loop_stack->data.loop.start_label
               || rtl_label == loop_stack->data.loop.end_label
               || rtl_label == loop_stack->data.loop.continue_label))
    end_block = loop_stack;
  else
    end_block = 0;

  /* Now set END_BLOCK to the binding level to which we will return.  */

  if (end_block)
    {
      struct nesting *next_block = end_block->all;
      block = block_stack;

      /* First see if the END_BLOCK is inside the innermost binding level.
         If so, then no cleanups or stack levels are relevant.  */
      while (next_block && next_block != block)
        next_block = next_block->all;

      if (next_block)
        return 0;

      /* Otherwise, set END_BLOCK to the innermost binding level
         which is outside the relevant control-structure nesting.  */
      next_block = block_stack->next;
      for (block = block_stack; block != end_block; block = block->all)
        if (block == next_block)
          next_block = next_block->next;
      end_block = next_block;
    }

  /* Does any containing block have a stack level or cleanups?
     If not, no fixup is needed, and that is the normal case
     (the only case, for standard C).  */
  for (block = block_stack; block != end_block; block = block->next)
    if (block->data.block.stack_level != 0
        || block->data.block.cleanups != 0)
      break;

  if (block != end_block)
    {
      /* Ok, a fixup is needed.  Add a fixup to the list of such.  */
      struct goto_fixup *fixup
        = (struct goto_fixup *) ggc_alloc (sizeof (struct goto_fixup));
      /* In case an old stack level is restored, make sure that comes
         after any pending stack adjust.  */
      /* ?? If the fixup isn't to come at the present position,
         doing the stack adjust here isn't useful.  Doing it with our
         settings at that location isn't useful either.  Let's hope
         someone does it!  */
      if (last_insn == 0)
        do_pending_stack_adjust ();
      fixup->target = tree_label;
      fixup->target_rtl = rtl_label;

      /* Create a BLOCK node and a corresponding matched set of
         NOTE_INSN_BLOCK_BEG and NOTE_INSN_BLOCK_END notes at
         this point.  The notes will encapsulate any and all fixup
         code which we might later insert at this point in the insn
         stream.  Also, the BLOCK node will be the parent (i.e. the
         `SUPERBLOCK') of any other BLOCK nodes which we might create
         later on when we are expanding the fixup code.

         Note that optimization passes (including expand_end_loop)
         might move the *_BLOCK notes away, so we use a NOTE_INSN_DELETED
         as a placeholder.  */

      {
        rtx original_before_jump
          = last_insn ? last_insn : get_last_insn ();
        rtx start;
        rtx end;
        tree block;

        block = make_node (BLOCK);
        TREE_USED (block) = 1;

        if (!cfun->x_whole_function_mode_p)
          (*lang_hooks.decls.insert_block) (block);
        else
          {
            BLOCK_CHAIN (block)
              = BLOCK_CHAIN (DECL_INITIAL (current_function_decl));
            BLOCK_CHAIN (DECL_INITIAL (current_function_decl))
              = block;
          }

        start_sequence ();
        start = emit_note (NULL, NOTE_INSN_BLOCK_BEG);
        if (cfun->x_whole_function_mode_p)
          NOTE_BLOCK (start) = block;
        fixup->before_jump = emit_note (NULL, NOTE_INSN_DELETED);
        end = emit_note (NULL, NOTE_INSN_BLOCK_END);
        if (cfun->x_whole_function_mode_p)
          NOTE_BLOCK (end) = block;
        fixup->context = block;
        end_sequence ();
        emit_insn_after (start, original_before_jump);
      }

      fixup->block_start_count = current_block_start_count;
      fixup->stack_level = 0;
      fixup->cleanup_list_list
        = ((block->data.block.outer_cleanups
            || block->data.block.cleanups)
           ? tree_cons (NULL_TREE, block->data.block.cleanups,
                        block->data.block.outer_cleanups)
           : 0);
      fixup->next = goto_fixup_chain;
      goto_fixup_chain = fixup;
    }

  return block != 0;
}
\f
/* Expand any needed fixups in the outermost binding level of the
   function.  FIRST_INSN is the first insn in the function.  */

void
expand_fixups (first_insn)
     rtx first_insn;
{
  fixup_gotos (NULL, NULL_RTX, NULL_TREE, first_insn, 0);
}

/* When exiting a binding contour, process all pending gotos requiring fixups.
   THISBLOCK is the structure that describes the block being exited.
   STACK_LEVEL is the rtx for the stack level to restore exiting this contour.
   CLEANUP_LIST is a list of expressions to evaluate on exiting this contour.
   FIRST_INSN is the insn that began this contour.

   Gotos that jump out of this contour must restore the
   stack level and do the cleanups before actually jumping.

   DONT_JUMP_IN nonzero means report an error if there is a jump into this
   contour from before the beginning of the contour.
   This is also done if STACK_LEVEL is nonzero.  */

static void
fixup_gotos (thisblock, stack_level, cleanup_list, first_insn, dont_jump_in)
     struct nesting *thisblock;
     rtx stack_level;
     tree cleanup_list;
     rtx first_insn;
     int dont_jump_in;
{
  struct goto_fixup *f, *prev;

  /* F is the fixup we are considering; PREV is the previous one.  */
  /* We run this loop in two passes so that cleanups of exited blocks
     are run first, and blocks that are exited are marked so
     afterwards.  */

  for (prev = 0, f = goto_fixup_chain; f; prev = f, f = f->next)
    {
      /* Test for a fixup that is inactive because it is already handled.  */
      if (f->before_jump == 0)
        {
          /* Delete inactive fixup from the chain, if that is easy to do.  */
          if (prev != 0)
            prev->next = f->next;
        }
      /* Has this fixup's target label been defined?
         If so, we can finalize it.  */
      else if (PREV_INSN (f->target_rtl) != 0)
        {
          rtx cleanup_insns;

          /* If this fixup jumped into this contour from before the beginning
             of this contour, report an error.   This code used to use
             the first non-label insn after f->target_rtl, but that's
             wrong since such can be added, by things like put_var_into_stack
             and have INSN_UIDs that are out of the range of the block.  */
          /* ??? Bug: this does not detect jumping in through intermediate
             blocks that have stack levels or cleanups.
             It detects only a problem with the innermost block
             around the label.  */
          if (f->target != 0
              && (dont_jump_in || stack_level || cleanup_list)
              && INSN_UID (first_insn) < INSN_UID (f->target_rtl)
              && INSN_UID (first_insn) > INSN_UID (f->before_jump)
              && ! DECL_ERROR_ISSUED (f->target))
            {
              error_with_decl (f->target,
                               "label `%s' used before containing binding contour");
              /* Prevent multiple errors for one label.  */
              DECL_ERROR_ISSUED (f->target) = 1;
            }

          /* We will expand the cleanups into a sequence of their own and
             then later on we will attach this new sequence to the insn
             stream just ahead of the actual jump insn.  */

          start_sequence ();

          /* Temporarily restore the lexical context where we will
             logically be inserting the fixup code.  We do this for the
             sake of getting the debugging information right.  */

          (*lang_hooks.decls.pushlevel) (0);
          (*lang_hooks.decls.set_block) (f->context);

          /* Expand the cleanups for blocks this jump exits.  */
          if (f->cleanup_list_list)
            {
              tree lists;
              for (lists = f->cleanup_list_list; lists; lists = TREE_CHAIN (lists))
                /* Marked elements correspond to blocks that have been closed.
                   Do their cleanups.  */
                if (TREE_ADDRESSABLE (lists)
                    && TREE_VALUE (lists) != 0)
                  {
                    expand_cleanups (TREE_VALUE (lists), NULL_TREE, 1, 1);
                    /* Pop any pushes done in the cleanups,
                       in case function is about to return.  */
                    do_pending_stack_adjust ();
                  }
            }

          /* Restore stack level for the biggest contour that this
             jump jumps out of.  */
          if (f->stack_level
              && ! (f->target_rtl == return_label
                    && ((TREE_CODE (TREE_TYPE (current_function_decl))
                         == FUNCTION_TYPE)
                        && (TYPE_RETURNS_STACK_DEPRESSED
                            (TREE_TYPE (current_function_decl))))))
            emit_stack_restore (SAVE_BLOCK, f->stack_level, f->before_jump);

          /* Finish up the sequence containing the insns which implement the
             necessary cleanups, and then attach that whole sequence to the
             insn stream just ahead of the actual jump insn.  Attaching it
             at that point insures that any cleanups which are in fact
             implicit C++ object destructions (which must be executed upon
             leaving the block) appear (to the debugger) to be taking place
             in an area of the generated code where the object(s) being
             destructed are still "in scope".  */

          cleanup_insns = get_insns ();
          (*lang_hooks.decls.poplevel) (1, 0, 0);

          end_sequence ();
          emit_insn_after (cleanup_insns, f->before_jump);

          f->before_jump = 0;
        }
    }

  /* For any still-undefined labels, do the cleanups for this block now.
     We must do this now since items in the cleanup list may go out
     of scope when the block ends.  */
  for (prev = 0, f = goto_fixup_chain; f; prev = f, f = f->next)
    if (f->before_jump != 0
        && PREV_INSN (f->target_rtl) == 0
        /* Label has still not appeared.  If we are exiting a block with
           a stack level to restore, that started before the fixup,
           mark this stack level as needing restoration
           when the fixup is later finalized.  */
        && thisblock != 0
        /* Note: if THISBLOCK == 0 and we have a label that hasn't appeared, it
           means the label is undefined.  That's erroneous, but possible.  */
        && (thisblock->data.block.block_start_count
            <= f->block_start_count))
      {
        tree lists = f->cleanup_list_list;
        rtx cleanup_insns;

        for (; lists; lists = TREE_CHAIN (lists))
          /* If the following elt. corresponds to our containing block
             then the elt. must be for this block.  */
          if (TREE_CHAIN (lists) == thisblock->data.block.outer_cleanups)
            {
              start_sequence ();
              (*lang_hooks.decls.pushlevel) (0);
              (*lang_hooks.decls.set_block) (f->context);
              expand_cleanups (TREE_VALUE (lists), NULL_TREE, 1, 1);
              do_pending_stack_adjust ();
              cleanup_insns = get_insns ();
              (*lang_hooks.decls.poplevel) (1, 0, 0);
              end_sequence ();
              if (cleanup_insns != 0)
                f->before_jump
                  = emit_insn_after (cleanup_insns, f->before_jump);

              f->cleanup_list_list = TREE_CHAIN (lists);
            }

        if (stack_level)
          f->stack_level = stack_level;
      }
}
\f
/* Return the number of times character C occurs in string S.  */
static int
n_occurrences (c, s)
     int c;
     const char *s;
{
  int n = 0;
  while (*s)
    n += (*s++ == c);
  return n;
}
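
/* For example, n_occurrences ('=', "=rm") is 1 and
   n_occurrences ('%', "a%b%") is 2.  */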
\f
/* Generate RTL for an asm statement (explicit assembler code).
   BODY is a STRING_CST node containing the assembler code text,
   or an ADDR_EXPR containing a STRING_CST.  */

void
expand_asm (body)
     tree body;
{
  if (TREE_CODE (body) == ADDR_EXPR)
    body = TREE_OPERAND (body, 0);

  emit_insn (gen_rtx_ASM_INPUT (VOIDmode,
                                TREE_STRING_POINTER (body)));
  clear_last_expr ();
}
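
/* This handles only the no-operand form, e.g.

       asm ("nop");

   An asm with output, input or clobber lists instead goes through
   expand_asm_operands below.  */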

/* Parse the output constraint pointed to by *CONSTRAINT_P.  It is the
   OPERAND_NUMth output operand, indexed from zero.  There are NINPUTS
   inputs and NOUTPUTS outputs to this extended-asm.  Upon return,
   *ALLOWS_MEM will be TRUE iff the constraint allows the use of a
   memory operand.  Similarly, *ALLOWS_REG will be TRUE iff the
   constraint allows the use of a register operand.  And, *IS_INOUT
   will be true if the operand is read-write, i.e., if it is used as
   an input as well as an output.  If *CONSTRAINT_P is not in
   canonical form, it will be made canonical.  (Note that `+' will be
   replaced with `=' as part of this process.)

   Returns TRUE if all went well; FALSE if an error occurred.  */

bool
parse_output_constraint (constraint_p, operand_num, ninputs, noutputs,
                         allows_mem, allows_reg, is_inout)
     const char **constraint_p;
     int operand_num;
     int ninputs;
     int noutputs;
     bool *allows_mem;
     bool *allows_reg;
     bool *is_inout;
{
  const char *constraint = *constraint_p;
  const char *p;

  /* Assume the constraint doesn't allow the use of either a register
     or memory.  */
  *allows_mem = false;
  *allows_reg = false;

  /* Allow the `=' or `+' to not be at the beginning of the string,
     since it wasn't explicitly documented that way, and there is a
     large body of code that puts it last.  Swap the character to
     the front, so as not to uglify any place else.  */
  p = strchr (constraint, '=');
  if (!p)
    p = strchr (constraint, '+');

  /* If the string doesn't contain an `=', issue an error
     message.  */
  if (!p)
    {
      error ("output operand constraint lacks `='");
      return false;
    }

  /* If the constraint begins with `+', then the operand is both read
     from and written to.  */
  *is_inout = (*p == '+');

  /* Canonicalize the output constraint so that it begins with `='.  */
  if (p != constraint || *is_inout)
    {
      char *buf;
      size_t c_len = strlen (constraint);

      if (p != constraint)
        warning ("output constraint `%c' for operand %d is not at the beginning",
                 *p, operand_num);

      /* Make a copy of the constraint.  */
      buf = alloca (c_len + 1);
      strcpy (buf, constraint);
      /* Swap the first character and the `=' or `+'.  */
      buf[p - constraint] = buf[0];
      /* Make sure the first character is an `='.  (Until we do this,
         it might be a `+'.)  */
      buf[0] = '=';
      /* Replace the constraint with the canonicalized string.  */
      *constraint_p = ggc_alloc_string (buf, c_len);
      constraint = *constraint_p;
    }

  /* Loop through the constraint string.  */
  for (p = constraint + 1; *p; ++p)
    switch (*p)
      {
      case '+':
      case '=':
        error ("operand constraint contains incorrectly positioned '+' or '='");
        return false;

      case '%':
        if (operand_num + 1 == ninputs + noutputs)
          {
            error ("`%%' constraint used with last operand");
            return false;
          }
        break;

      case 'V':  case 'm':  case 'o':
        *allows_mem = true;
        break;

      case '?':  case '!':  case '*':  case '&':  case '#':
      case 'E':  case 'F':  case 'G':  case 'H':
      case 's':  case 'i':  case 'n':
      case 'I':  case 'J':  case 'K':  case 'L':  case 'M':
      case 'N':  case 'O':  case 'P':  case ',':
        break;

      case '0':  case '1':  case '2':  case '3':  case '4':
      case '5':  case '6':  case '7':  case '8':  case '9':
      case '[':
        error ("matching constraint not valid in output operand");
        return false;

      case '<':  case '>':
        /* ??? Before flow, auto inc/dec insns are not supposed to exist,
           excepting those that expand_call created.  So match memory
           and hope.  */
        *allows_mem = true;
        break;

      case 'g':  case 'X':
        *allows_reg = true;
        *allows_mem = true;
        break;

      case 'p':  case 'r':
        *allows_reg = true;
        break;

      default:
        if (!ISALPHA (*p))
          break;
        if (REG_CLASS_FROM_LETTER (*p) != NO_REGS)
          *allows_reg = true;
#ifdef EXTRA_CONSTRAINT
        else if (EXTRA_ADDRESS_CONSTRAINT (*p))
          *allows_reg = true;
        else if (EXTRA_MEMORY_CONSTRAINT (*p))
          *allows_mem = true;
        else
          {
            /* Otherwise we can't assume anything about the nature of
               the constraint except that it isn't purely registers.
               Treat it like "g" and hope for the best.  */
            *allows_reg = true;
            *allows_mem = true;
          }
#endif
        break;
      }

  return true;
}
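
/* Worked example: for an output constraint "+r" the code above sets
   *IS_INOUT and canonicalizes the string to "=r"; for "r=" it warns
   and also produces "=r"; a constraint containing no `=' or `+' at all
   is rejected with an error.  */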

/* Similar, but for input constraints.  */

static bool
parse_input_constraint (constraint_p, input_num, ninputs, noutputs, ninout,
                        constraints, allows_mem, allows_reg)
     const char **constraint_p;
     int input_num;
     int ninputs;
     int noutputs;
     int ninout;
     const char * const * constraints;
     bool *allows_mem;
     bool *allows_reg;
{
  const char *constraint = *constraint_p;
  const char *orig_constraint = constraint;
  size_t c_len = strlen (constraint);
  size_t j;

  /* Assume the constraint doesn't allow the use of either
     a register or memory.  */
  *allows_mem = false;
  *allows_reg = false;

  /* Make sure constraint has neither `=', `+', nor '&'.  */

  for (j = 0; j < c_len; j++)
    switch (constraint[j])
      {
      case '+':  case '=':  case '&':
        if (constraint == orig_constraint)
          {
            error ("input operand constraint contains `%c'", constraint[j]);
            return false;
          }
        break;

      case '%':
        if (constraint == orig_constraint
            && input_num + 1 == ninputs - ninout)
          {
            error ("`%%' constraint used with last operand");
            return false;
          }
        break;

      case 'V':  case 'm':  case 'o':
        *allows_mem = true;
        break;

      case '<':  case '>':
      case '?':  case '!':  case '*':  case '#':
      case 'E':  case 'F':  case 'G':  case 'H':
      case 's':  case 'i':  case 'n':
      case 'I':  case 'J':  case 'K':  case 'L':  case 'M':
      case 'N':  case 'O':  case 'P':  case ',':
        break;

        /* Whether or not a numeric constraint allows a register is
           decided by the matching constraint, and so there is no need
           to do anything special with them.  We must handle them in
           the default case, so that we don't unnecessarily force
           operands to memory.  */
      case '0':  case '1':  case '2':  case '3':  case '4':
      case '5':  case '6':  case '7':  case '8':  case '9':
        {
          char *end;
          unsigned long match;

          match = strtoul (constraint + j, &end, 10);
          if (match >= (unsigned long) noutputs)
            {
              error ("matching constraint references invalid operand number");
              return false;
            }

          /* Try and find the real constraint for this dup.  Only do this
             if the matching constraint is the only alternative.  */
          if (*end == '\0'
              && (j == 0 || (j == 1 && constraint[0] == '%')))
            {
              constraint = constraints[match];
              *constraint_p = constraint;
              c_len = strlen (constraint);
              j = 0;
              break;
            }
          else
            j = end - constraint;
        }
        /* Fall through.  */

      case 'p':  case 'r':
        *allows_reg = true;
        break;

      case 'g':  case 'X':
        *allows_reg = true;
        *allows_mem = true;
        break;

      default:
        if (! ISALPHA (constraint[j]))
          {
            error ("invalid punctuation `%c' in constraint", constraint[j]);
            return false;
          }
        if (REG_CLASS_FROM_LETTER (constraint[j]) != NO_REGS)
          *allows_reg = true;
#ifdef EXTRA_CONSTRAINT
        else if (EXTRA_ADDRESS_CONSTRAINT (constraint[j]))
          *allows_reg = true;
        else if (EXTRA_MEMORY_CONSTRAINT (constraint[j]))
          *allows_mem = true;
        else
          {
            /* Otherwise we can't assume anything about the nature of
               the constraint except that it isn't purely registers.
               Treat it like "g" and hope for the best.  */
            *allows_reg = true;
            *allows_mem = true;
          }
#endif
        break;
      }

  return true;
}
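
/* Worked example: in

       asm ("..." : "=r" (x) : "0" (y));

   the input constraint "0" matches output operand 0, so the digit case
   above substitutes that output's constraint string ("=r") and resumes
   scanning after its leading `=', leaving *ALLOWS_REG set.  */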

/* Check for overlap between registers marked in CLOBBERED_REGS and
   anything inappropriate in DECL.  Emit error and return TRUE for error,
   FALSE for ok.  */

static bool
decl_conflicts_with_clobbers_p (decl, clobbered_regs)
     tree decl;
     const HARD_REG_SET clobbered_regs;
{
  /* Conflicts between asm-declared register variables and the clobber
     list are not allowed.  */
  if ((TREE_CODE (decl) == VAR_DECL || TREE_CODE (decl) == PARM_DECL)
      && DECL_REGISTER (decl)
      && REG_P (DECL_RTL (decl))
      && REGNO (DECL_RTL (decl)) < FIRST_PSEUDO_REGISTER)
    {
      rtx reg = DECL_RTL (decl);
      unsigned int regno;

      for (regno = REGNO (reg);
           regno < (REGNO (reg)
                    + HARD_REGNO_NREGS (REGNO (reg), GET_MODE (reg)));
           regno++)
        if (TEST_HARD_REG_BIT (clobbered_regs, regno))
          {
            error ("asm-specifier for variable `%s' conflicts with asm clobber list",
                   IDENTIFIER_POINTER (DECL_NAME (decl)));

            /* Reset registerness to stop multiple errors emitted for a
               single variable.  */
            DECL_REGISTER (decl) = 0;
            return true;
          }
    }
  return false;
}
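
/* For instance (illustrative, with i386 register names), this rejects

       register int x asm ("ebx");
       asm ("..." : "=r" (x) : : "ebx");

   because the operand's home register is also on the clobber list.  */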

/* Generate RTL for an asm statement with arguments.
   STRING is the instruction template.
   OUTPUTS is a list of output arguments (lvalues); INPUTS a list of inputs.
   Each output or input has an expression in the TREE_VALUE and
   a tree list in TREE_PURPOSE which in turn contains a constraint
   name in TREE_VALUE (or NULL_TREE) and a constraint string
   in TREE_PURPOSE.
   CLOBBERS is a list of STRING_CST nodes each naming a hard register
   that is clobbered by this insn.

   Not all kinds of lvalue that may appear in OUTPUTS can be stored directly.
   Some elements of OUTPUTS may be replaced with trees representing temporary
   values.  The caller should copy those temporary values to the originally
   specified lvalues.

   VOL nonzero means the insn is volatile; don't optimize it.  */
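
/* An extended asm of the general shape (sketch; `insn', `out' and `in'
   are placeholders)

       asm volatile ("insn %1, %0" : "=r" (out) : "r" (in) : "memory");

   arrives here with STRING, OUTPUTS, INPUTS and CLOBBERS holding the
   corresponding pieces.  */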

void
expand_asm_operands (string, outputs, inputs, clobbers, vol, filename, line)
     tree string, outputs, inputs, clobbers;
     int vol;
     const char *filename;
     int line;
{
  rtvec argvec, constraintvec;
  rtx body;
  int ninputs = list_length (inputs);
  int noutputs = list_length (outputs);
  int ninout;
  int nclobbers;
  HARD_REG_SET clobbered_regs;
  int clobber_conflict_found = 0;
  tree tail;
  int i;
  /* Vector of RTX's of evaluated output operands.  */
  rtx *output_rtx = (rtx *) alloca (noutputs * sizeof (rtx));
  int *inout_opnum = (int *) alloca (noutputs * sizeof (int));
  rtx *real_output_rtx = (rtx *) alloca (noutputs * sizeof (rtx));
  enum machine_mode *inout_mode
    = (enum machine_mode *) alloca (noutputs * sizeof (enum machine_mode));
  const char **constraints
    = (const char **) alloca ((noutputs + ninputs) * sizeof (const char *));
  int old_generating_concat_p = generating_concat_p;

  /* An ASM with no outputs needs to be treated as volatile, for now.  */
  if (noutputs == 0)
    vol = 1;

  if (! check_operand_nalternatives (outputs, inputs))
    return;

  if (! check_unique_operand_names (outputs, inputs))
    return;

  string = resolve_operand_names (string, outputs, inputs, constraints);

#ifdef MD_ASM_CLOBBERS
  /* Sometimes we wish to automatically clobber registers across an asm.
     Case in point is when the i386 backend moved from cc0 to a hard reg --
     maintaining source-level compatibility means automatically clobbering
     the flags register.  */
  MD_ASM_CLOBBERS (clobbers);
#endif

  /* Count the number of meaningful clobbered registers, ignoring what
     we would ignore later.  */
  nclobbers = 0;
  CLEAR_HARD_REG_SET (clobbered_regs);
  for (tail = clobbers; tail; tail = TREE_CHAIN (tail))
    {
      const char *regname = TREE_STRING_POINTER (TREE_VALUE (tail));

      i = decode_reg_name (regname);
      if (i >= 0 || i == -4)
        ++nclobbers;
      else if (i == -2)
        error ("unknown register name `%s' in `asm'", regname);

      /* Mark clobbered registers.  */
      if (i >= 0)
        SET_HARD_REG_BIT (clobbered_regs, i);
    }

  clear_last_expr ();

  /* First pass over inputs and outputs checks validity and sets
     mark_addressable if needed.  */

  ninout = 0;
  for (i = 0, tail = outputs; tail; tail = TREE_CHAIN (tail), i++)
    {
      tree val = TREE_VALUE (tail);
      tree type = TREE_TYPE (val);
      const char *constraint;
      bool is_inout;
      bool allows_reg;
      bool allows_mem;

      /* If there's an erroneous arg, emit no insn.  */
      if (type == error_mark_node)
        return;

      /* Try to parse the output constraint.  If that fails, there's
         no point in going further.  */
      constraint = constraints[i];
      if (!parse_output_constraint (&constraint, i, ninputs, noutputs,
                                    &allows_mem, &allows_reg, &is_inout))
        return;

      if (! allows_reg
          && (allows_mem
              || is_inout
              || (DECL_P (val)
                  && GET_CODE (DECL_RTL (val)) == REG
                  && GET_MODE (DECL_RTL (val)) != TYPE_MODE (type))))
        (*lang_hooks.mark_addressable) (val);

      if (is_inout)
        ninout++;
    }

  ninputs += ninout;
  if (ninputs + noutputs > MAX_RECOG_OPERANDS)
    {
      error ("more than %d operands in `asm'", MAX_RECOG_OPERANDS);
      return;
    }

  for (i = 0, tail = inputs; tail; i++, tail = TREE_CHAIN (tail))
    {
      bool allows_reg, allows_mem;
      const char *constraint;

      /* If there's an erroneous arg, emit no insn, because the ASM_INPUT
         would get VOIDmode and that could cause a crash in reload.  */
      if (TREE_TYPE (TREE_VALUE (tail)) == error_mark_node)
        return;

      constraint = constraints[i + noutputs];
      if (! parse_input_constraint (&constraint, i, ninputs, noutputs, ninout,
                                    constraints, &allows_mem, &allows_reg))
        return;

      if (! allows_reg && allows_mem)
        (*lang_hooks.mark_addressable) (TREE_VALUE (tail));
    }

  /* Second pass evaluates arguments.  */

  ninout = 0;
  for (i = 0, tail = outputs; tail; tail = TREE_CHAIN (tail), i++)
    {
      tree val = TREE_VALUE (tail);
      tree type = TREE_TYPE (val);
      bool is_inout;
      bool allows_reg;
      bool allows_mem;

      if (!parse_output_constraint (&constraints[i], i, ninputs,
                                    noutputs, &allows_mem, &allows_reg,
                                    &is_inout))
        abort ();

      /* If an output operand is not a decl or indirect ref and our constraint
         allows a register, make a temporary to act as an intermediate.
         Make the asm insn write into that, then our caller will copy it to
         the real output operand.  Likewise for promoted variables.  */

      generating_concat_p = 0;

      real_output_rtx[i] = NULL_RTX;
      if ((TREE_CODE (val) == INDIRECT_REF
           && allows_mem)
          || (DECL_P (val)
              && (allows_mem || GET_CODE (DECL_RTL (val)) == REG)
              && ! (GET_CODE (DECL_RTL (val)) == REG
                    && GET_MODE (DECL_RTL (val)) != TYPE_MODE (type)))
          || ! allows_reg
          || is_inout)
        {
          output_rtx[i] = expand_expr (val, NULL_RTX, VOIDmode, EXPAND_WRITE);

          if (! allows_reg && GET_CODE (output_rtx[i]) != MEM)
            error ("output number %d not directly addressable", i);
          if ((! allows_mem && GET_CODE (output_rtx[i]) == MEM)
              || GET_CODE (output_rtx[i]) == CONCAT)
            {
              real_output_rtx[i] = protect_from_queue (output_rtx[i], 1);
              output_rtx[i] = gen_reg_rtx (GET_MODE (output_rtx[i]));
              if (is_inout)
                emit_move_insn (output_rtx[i], real_output_rtx[i]);
            }
        }
      else
        {
          output_rtx[i] = assign_temp (type, 0, 0, 1);
          TREE_VALUE (tail) = make_tree (type, output_rtx[i]);
        }

      generating_concat_p = old_generating_concat_p;

      if (is_inout)
        {
          inout_mode[ninout] = TYPE_MODE (type);
          inout_opnum[ninout++] = i;
        }

      if (decl_conflicts_with_clobbers_p (val, clobbered_regs))
        clobber_conflict_found = 1;
    }

  /* Make vectors for the expression-rtx, constraint strings,
     and named operands.  */

  argvec = rtvec_alloc (ninputs);
  constraintvec = rtvec_alloc (ninputs);

  body = gen_rtx_ASM_OPERANDS ((noutputs == 0 ? VOIDmode
                                : GET_MODE (output_rtx[0])),
                               TREE_STRING_POINTER (string),
                               empty_string, 0, argvec, constraintvec,
                               filename, line);

  MEM_VOLATILE_P (body) = vol;

  /* Eval the inputs and put them into ARGVEC.
     Put their constraints into ASM_INPUTs and store in CONSTRAINTS.  */

  for (i = 0, tail = inputs; tail; tail = TREE_CHAIN (tail), ++i)
    {
      bool allows_reg, allows_mem;
      const char *constraint;
      tree val, type;
      rtx op;

      constraint = constraints[i + noutputs];
      if (! parse_input_constraint (&constraint, i, ninputs, noutputs, ninout,
                                    constraints, &allows_mem, &allows_reg))
        abort ();

      generating_concat_p = 0;

      val = TREE_VALUE (tail);
      type = TREE_TYPE (val);
      op = expand_expr (val, NULL_RTX, VOIDmode, 0);

      /* Never pass a CONCAT to an ASM.  */
      if (GET_CODE (op) == CONCAT)
        op = force_reg (GET_MODE (op), op);

      if (asm_operand_ok (op, constraint) <= 0)
        {
          if (allows_reg)
            op = force_reg (TYPE_MODE (type), op);
          else if (!allows_mem)
            warning ("asm operand %d probably doesn't match constraints",
                     i + noutputs);
          else if (CONSTANT_P (op))
            op = force_const_mem (TYPE_MODE (type), op);
          else if (GET_CODE (op) == REG
                   || GET_CODE (op) == SUBREG
                   || GET_CODE (op) == ADDRESSOF
                   || GET_CODE (op) == CONCAT)
            {
              tree qual_type = build_qualified_type (type,
                                                     (TYPE_QUALS (type)
                                                      | TYPE_QUAL_CONST));
              rtx memloc = assign_temp (qual_type, 1, 1, 1);

              emit_move_insn (memloc, op);
              op = memloc;
            }

          else if (GET_CODE (op) == MEM && MEM_VOLATILE_P (op))
            {
1707 /* We won't recognize volatile memory as available as
1708 a memory_operand at this point. Ignore it. */
1709 }
1710 else if (queued_subexp_p (op))
1711 ;
1712 else
1713 /* ??? Leave this only until we have experience with what
1714 happens in combine and elsewhere when constraints are
1715 not satisfied. */
1716 warning ("asm operand %d probably doesn't match constraints",
1717 i + noutputs);
1718 }
1719
1720 generating_concat_p = old_generating_concat_p;
1721 ASM_OPERANDS_INPUT (body, i) = op;
1722
1723 ASM_OPERANDS_INPUT_CONSTRAINT_EXP (body, i)
1724 = gen_rtx_ASM_INPUT (TYPE_MODE (type), constraints[i + noutputs]);
1725
1726 if (decl_conflicts_with_clobbers_p (val, clobbered_regs))
1727 clobber_conflict_found = 1;
1728 }
1729
1730 /* Protect all the operands from the queue now that they have all been
1731 evaluated. */
1732
1733 generating_concat_p = 0;
1734
1735 for (i = 0; i < ninputs - ninout; i++)
1736 ASM_OPERANDS_INPUT (body, i)
1737 = protect_from_queue (ASM_OPERANDS_INPUT (body, i), 0);
1738
1739 for (i = 0; i < noutputs; i++)
1740 output_rtx[i] = protect_from_queue (output_rtx[i], 1);
1741
1742 /* For in-out operands, copy output rtx to input rtx. */
1743 for (i = 0; i < ninout; i++)
1744 {
1745 int j = inout_opnum[i];
1746 char buffer[16];
1747
1748 ASM_OPERANDS_INPUT (body, ninputs - ninout + i)
1749 = output_rtx[j];
1750
1751 sprintf (buffer, "%d", j);
1752 ASM_OPERANDS_INPUT_CONSTRAINT_EXP (body, ninputs - ninout + i)
1753 = gen_rtx_ASM_INPUT (inout_mode[i], ggc_alloc_string (buffer, -1));
1754 }
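
/* Illustrative example (an assumption for exposition, not from the
   original sources): an in-out operand written "+r" is recorded by
   parse_output_constraint as an output with IS_INOUT set. For

       asm ("addl %1,%0" : "+r" (x) : "g" (y));

   X becomes output 0, and the loop above appends a hidden matched
   input whose constraint is the digit string "0", tying it to output
   0; the rtx of that hidden input is output_rtx[0] itself. */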
1755
1756 generating_concat_p = old_generating_concat_p;
1757
1758 /* Now, for each output, construct an rtx
1759 (set OUTPUT (asm_operands INSN OUTPUTCONSTRAINT OUTPUTNUMBER
1760 ARGVEC CONSTRAINTS OPNAMES))
1761 If there is more than one, put them inside a PARALLEL. */
1762
1763 if (noutputs == 1 && nclobbers == 0)
1764 {
1765 ASM_OPERANDS_OUTPUT_CONSTRAINT (body) = constraints[0];
1766 emit_insn (gen_rtx_SET (VOIDmode, output_rtx[0], body));
1767 }
1768
1769 else if (noutputs == 0 && nclobbers == 0)
1770 {
1771 /* No output operands: put in a raw ASM_OPERANDS rtx. */
1772 emit_insn (body);
1773 }
1774
1775 else
1776 {
1777 rtx obody = body;
1778 int num = noutputs;
1779
1780 if (num == 0)
1781 num = 1;
1782
1783 body = gen_rtx_PARALLEL (VOIDmode, rtvec_alloc (num + nclobbers));
1784
1785 /* For each output operand, store a SET. */
1786 for (i = 0, tail = outputs; tail; tail = TREE_CHAIN (tail), i++)
1787 {
1788 XVECEXP (body, 0, i)
1789 = gen_rtx_SET (VOIDmode,
1790 output_rtx[i],
1791 gen_rtx_ASM_OPERANDS
1792 (GET_MODE (output_rtx[i]),
1793 TREE_STRING_POINTER (string),
1794 constraints[i], i, argvec, constraintvec,
1795 filename, line));
1796
1797 MEM_VOLATILE_P (SET_SRC (XVECEXP (body, 0, i))) = vol;
1798 }
1799
1800 /* If there are no outputs (but there are some clobbers)
1801 store the bare ASM_OPERANDS into the PARALLEL. */
1802
1803 if (i == 0)
1804 XVECEXP (body, 0, i++) = obody;
1805
1806 /* Store (clobber REG) for each clobbered register specified. */
1807
1808 for (tail = clobbers; tail; tail = TREE_CHAIN (tail))
1809 {
1810 const char *regname = TREE_STRING_POINTER (TREE_VALUE (tail));
1811 int j = decode_reg_name (regname);
1812 rtx clobbered_reg;
1813
1814 if (j < 0)
1815 {
1816 if (j == -3) /* `cc', which is not a register */
1817 continue;
1818
1819 if (j == -4) /* `memory', don't cache memory across asm */
1820 {
1821 XVECEXP (body, 0, i++)
1822 = gen_rtx_CLOBBER (VOIDmode,
1823 gen_rtx_MEM
1824 (BLKmode,
1825 gen_rtx_SCRATCH (VOIDmode)));
1826 continue;
1827 }
1828
1829 /* Ignore unknown register, error already signaled. */
1830 continue;
1831 }
1832
1833 /* Use QImode since that's guaranteed to clobber just one reg. */
1834 clobbered_reg = gen_rtx_REG (QImode, j);
1835
1836 /* Sanity check for any overlap between clobbers and the inputs
1837 or outputs that has not been handled. Such overlap should have
1838 been detected and reported above. */
1839 if (!clobber_conflict_found)
1840 {
1841 int opno;
1842
1843 /* We test the old body (obody) contents to avoid tripping
1844 over the under-construction body. */
1845 for (opno = 0; opno < noutputs; opno++)
1846 if (reg_overlap_mentioned_p (clobbered_reg, output_rtx[opno]))
1847 internal_error ("asm clobber conflict with output operand");
1848
1849 for (opno = 0; opno < ninputs - ninout; opno++)
1850 if (reg_overlap_mentioned_p (clobbered_reg,
1851 ASM_OPERANDS_INPUT (obody, opno)))
1852 internal_error ("asm clobber conflict with input operand");
1853 }
1854
1855 XVECEXP (body, 0, i++)
1856 = gen_rtx_CLOBBER (VOIDmode, clobbered_reg);
1857 }
1858
1859 emit_insn (body);
1860 }
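
/* For example (illustrative only), on IA-32

       asm ("xchg %0,%1" : "=r" (a), "=r" (b) : "0" (a), "1" (b));

   yields roughly

       (parallel [(set (reg A) (asm_operands "xchg %0,%1" "=r" 0 ...))
                  (set (reg B) (asm_operands "xchg %0,%1" "=r" 1 ...))])

   with one ASM_OPERANDS per output, all sharing the same ARGVEC and
   CONSTRAINTVEC vectors built above. */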
1861
1862 /* For any outputs that needed reloading into registers, spill them
1863 back to where they belong. */
1864 for (i = 0; i < noutputs; ++i)
1865 if (real_output_rtx[i])
1866 emit_move_insn (real_output_rtx[i], output_rtx[i]);
1867
1868 free_temp_slots ();
1869 }
1870
1871 /* A subroutine of expand_asm_operands. Check that all operands have
1872 the same number of alternatives. Return true if so. */
1873
1874 static bool
1875 check_operand_nalternatives (outputs, inputs)
1876 tree outputs, inputs;
1877 {
1878 if (outputs || inputs)
1879 {
1880 tree tmp = TREE_PURPOSE (outputs ? outputs : inputs);
1881 int nalternatives
1882 = n_occurrences (',', TREE_STRING_POINTER (TREE_VALUE (tmp)));
1883 tree next = inputs;
1884
1885 if (nalternatives + 1 > MAX_RECOG_ALTERNATIVES)
1886 {
1887 error ("too many alternatives in `asm'");
1888 return false;
1889 }
1890
1891 tmp = outputs;
1892 while (tmp)
1893 {
1894 const char *constraint
1895 = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (tmp)));
1896
1897 if (n_occurrences (',', constraint) != nalternatives)
1898 {
1899 error ("operand constraints for `asm' differ in number of alternatives");
1900 return false;
1901 }
1902
1903 if (TREE_CHAIN (tmp))
1904 tmp = TREE_CHAIN (tmp);
1905 else
1906 tmp = next, next = 0;
1907 }
1908 }
1909
1910 return true;
1911 }
1912
1913 /* A subroutine of expand_asm_operands. Check that all operand names
1914 are unique. Return true if so. We rely on the fact that these names
1915 are identifiers, and so have been canonicalized by get_identifier,
1916 so all we need are pointer comparisons. */
1917
1918 static bool
1919 check_unique_operand_names (outputs, inputs)
1920 tree outputs, inputs;
1921 {
1922 tree i, j;
1923
1924 for (i = outputs; i ; i = TREE_CHAIN (i))
1925 {
1926 tree i_name = TREE_PURPOSE (TREE_PURPOSE (i));
1927 if (! i_name)
1928 continue;
1929
1930 for (j = TREE_CHAIN (i); j ; j = TREE_CHAIN (j))
1931 if (simple_cst_equal (i_name, TREE_PURPOSE (TREE_PURPOSE (j))))
1932 goto failure;
1933 }
1934
1935 for (i = inputs; i ; i = TREE_CHAIN (i))
1936 {
1937 tree i_name = TREE_PURPOSE (TREE_PURPOSE (i));
1938 if (! i_name)
1939 continue;
1940
1941 for (j = TREE_CHAIN (i); j ; j = TREE_CHAIN (j))
1942 if (simple_cst_equal (i_name, TREE_PURPOSE (TREE_PURPOSE (j))))
1943 goto failure;
1944 for (j = outputs; j ; j = TREE_CHAIN (j))
1945 if (simple_cst_equal (i_name, TREE_PURPOSE (TREE_PURPOSE (j))))
1946 goto failure;
1947 }
1948
1949 return true;
1950
1951 failure:
1952 error ("duplicate asm operand name '%s'",
1953 TREE_STRING_POINTER (TREE_PURPOSE (TREE_PURPOSE (i))));
1954 return false;
1955 }
1956
1957 /* A subroutine of expand_asm_operands. Resolve the names of the operands
1958 in *POUTPUTS and *PINPUTS to numbers, and replace the name expansions in
1959 STRING and in the constraints to those numbers. */
1960
1961 static tree
1962 resolve_operand_names (string, outputs, inputs, pconstraints)
1963 tree string;
1964 tree outputs, inputs;
1965 const char **pconstraints;
1966 {
1967 char *buffer = xstrdup (TREE_STRING_POINTER (string));
1968 char *p;
1969 tree t;
1970
1971 /* Assume that we will not need extra space to perform the substitution.
1972 This is because we get to remove '[' and ']', which means we cannot have
1973 a problem until we have more than 999 operands. */
1974
1975 p = buffer;
1976 while ((p = strchr (p, '%')) != NULL)
1977 {
1978 if (p[1] == '[')
1979 p += 1;
1980 else if (ISALPHA (p[1]) && p[2] == '[')
1981 p += 2;
1982 else
1983 {
1984 p += 2; /* Skip the '%' and the character it quotes, e.g. "%%". */
1985 continue;
1986 }
1987
1988 p = resolve_operand_name_1 (p, outputs, inputs);
1989 }
1990
1991 string = build_string (strlen (buffer), buffer);
1992 free (buffer);
1993
1994 /* Collect output constraints here because it's convenient.
1995 There should be no named operands here; this is verified
1996 in expand_asm_operands. */
1997 for (t = outputs; t ; t = TREE_CHAIN (t), pconstraints++)
1998 *pconstraints = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (t)));
1999
2000 /* Substitute [<name>] in input constraint strings. */
2001 for (t = inputs; t ; t = TREE_CHAIN (t), pconstraints++)
2002 {
2003 const char *c = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (t)));
2004 if (strchr (c, '[') == NULL)
2005 *pconstraints = c;
2006 else
2007 {
2008 p = buffer = xstrdup (c);
2009 while ((p = strchr (p, '[')) != NULL)
2010 p = resolve_operand_name_1 (p, outputs, inputs);
2011
2012 *pconstraints = ggc_alloc_string (buffer, -1);
2013 free (buffer);
2014 }
2015 }
2016
2017 return string;
2018 }
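
/* Worked example (illustrative only): given

       asm ("mov %[src], %[dst]" : [dst] "=r" (d) : [src] "r" (s));

   resolve_operand_names rewrites the template to "mov %1, %0", since
   [dst] is output operand 0 and [src] is input operand 1 (inputs are
   numbered after all outputs). Any [name] references inside the
   constraint strings themselves are rewritten the same way. */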
2019
2020 /* A subroutine of resolve_operand_names. P points to the '[' for a
2021 potential named operand of the form [<name>]. In place, replace
2022 the name and brackets with a number. Return a pointer to the
2023 balance of the string after substitution. */
2024
2025 static char *
2026 resolve_operand_name_1 (p, outputs, inputs)
2027 char *p;
2028 tree outputs, inputs;
2029 {
2030 char *q;
2031 int op;
2032 tree t;
2033 size_t len;
2034
2035 /* Collect the operand name. */
2036 q = strchr (p, ']');
2037 if (!q)
2038 {
2039 error ("missing close brace for named operand");
2040 return strchr (p, '\0');
2041 }
2042 len = q - p - 1;
2043
2044 /* Resolve the name to a number. */
2045 for (op = 0, t = outputs; t ; t = TREE_CHAIN (t), op++)
2046 {
2047 tree name = TREE_PURPOSE (TREE_PURPOSE (t));
2048 if (name)
2049 {
2050 const char *c = TREE_STRING_POINTER (name);
2051 if (strncmp (c, p + 1, len) == 0 && c[len] == '\0')
2052 goto found;
2053 }
2054 }
2055 for (t = inputs; t ; t = TREE_CHAIN (t), op++)
2056 {
2057 tree name = TREE_PURPOSE (TREE_PURPOSE (t));
2058 if (name)
2059 {
2060 const char *c = TREE_STRING_POINTER (name);
2061 if (strncmp (c, p + 1, len) == 0 && c[len] == '\0')
2062 goto found;
2063 }
2064 }
2065
2066 *q = '\0';
2067 error ("undefined named operand '%s'", p + 1);
2068 op = 0;
2069 found:
2070
2071 /* Replace the name with the number. Unfortunately, not all libraries
2072 get the return value of sprintf correct, so search for the end of the
2073 generated string by hand. */
2074 sprintf (p, "%d", op);
2075 p = strchr (p, '\0');
2076
2077 /* Verify the no extra buffer space assumption. */
2078 if (p > q)
2079 abort ();
2080
2081 /* Shift the rest of the buffer down to fill the gap. */
2082 memmove (p, q + 1, strlen (q + 1) + 1);
2083
2084 return p;
2085 }
2086 \f
2087 /* Generate RTL to evaluate the expression EXP
2088 and remember it in case this is the VALUE in a ({... VALUE; }) construct.
2089 Provided just for backward compatibility. expand_expr_stmt_value()
2090 should be used for new code. */
2091
2092 void
2093 expand_expr_stmt (exp)
2094 tree exp;
2095 {
2096 expand_expr_stmt_value (exp, -1, 1);
2097 }
2098
2099 /* Generate RTL to evaluate the expression EXP. WANT_VALUE tells
2100 whether to (1) save the value of the expression, (0) discard it, or
2101 (-1) use expr_stmts_for_value to tell. The use of -1 is
2102 deprecated, and retained only for backward compatibility. */
2103
2104 void
2105 expand_expr_stmt_value (exp, want_value, maybe_last)
2106 tree exp;
2107 int want_value, maybe_last;
2108 {
2109 rtx value;
2110 tree type;
2111
2112 if (want_value == -1)
2113 want_value = expr_stmts_for_value != 0;
2114
2115 /* If -W, warn about statements with no side effects,
2116 except for an explicit cast to void (e.g. for assert()), and
2117 except for the last statement in ({...}), where they may be useful. */
2118 if (! want_value
2119 && (expr_stmts_for_value == 0 || ! maybe_last)
2120 && exp != error_mark_node)
2121 {
2122 if (! TREE_SIDE_EFFECTS (exp))
2123 {
2124 if ((extra_warnings || warn_unused_value)
2125 && !(TREE_CODE (exp) == CONVERT_EXPR
2126 && VOID_TYPE_P (TREE_TYPE (exp))))
2127 warning_with_file_and_line (emit_filename, emit_lineno,
2128 "statement with no effect");
2129 }
2130 else if (warn_unused_value)
2131 warn_if_unused_value (exp);
2132 }
2133
2134 /* If EXP is of function type and we are expanding statements for
2135 value, convert it to pointer-to-function. */
2136 if (want_value && TREE_CODE (TREE_TYPE (exp)) == FUNCTION_TYPE)
2137 exp = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (exp)), exp);
2138
2139 /* The call to `expand_expr' could cause last_expr_type and
2140 last_expr_value to get reset. Therefore, we set last_expr_value
2141 and last_expr_type *after* calling expand_expr. */
2142 value = expand_expr (exp, want_value ? NULL_RTX : const0_rtx,
2143 VOIDmode, 0);
2144 type = TREE_TYPE (exp);
2145
2146 /* If all we do is reference a volatile value in memory,
2147 copy it to a register to be sure it is actually touched. */
2148 if (value && GET_CODE (value) == MEM && TREE_THIS_VOLATILE (exp))
2149 {
2150 if (TYPE_MODE (type) == VOIDmode)
2151 ;
2152 else if (TYPE_MODE (type) != BLKmode)
2153 value = copy_to_reg (value);
2154 else
2155 {
2156 rtx lab = gen_label_rtx ();
2157
2158 /* Compare the value with itself to reference it. */
2159 emit_cmp_and_jump_insns (value, value, EQ,
2160 expand_expr (TYPE_SIZE (type),
2161 NULL_RTX, VOIDmode, 0),
2162 BLKmode, 0, lab);
2163 emit_label (lab);
2164 }
2165 }
2166
2167 /* If this expression is part of a ({...}) and is in memory, we may have
2168 to preserve temporaries. */
2169 preserve_temp_slots (value);
2170
2171 /* Free any temporaries used to evaluate this expression. Any temporary
2172 used as a result of this expression will already have been preserved
2173 above. */
2174 free_temp_slots ();
2175
2176 if (want_value)
2177 {
2178 last_expr_value = value;
2179 last_expr_type = type;
2180 }
2181
2182 emit_queue ();
2183 }
2184
2185 /* Warn if EXP contains any computations whose results are not used.
2186 Return 1 if a warning is printed; 0 otherwise. */
2187
2188 int
2189 warn_if_unused_value (exp)
2190 tree exp;
2191 {
2192 if (TREE_USED (exp))
2193 return 0;
2194
2195 /* Don't warn about void constructs. This includes casting to void,
2196 void function calls, and statement expressions with a final cast
2197 to void. */
2198 if (VOID_TYPE_P (TREE_TYPE (exp)))
2199 return 0;
2200
2201 switch (TREE_CODE (exp))
2202 {
2203 case PREINCREMENT_EXPR:
2204 case POSTINCREMENT_EXPR:
2205 case PREDECREMENT_EXPR:
2206 case POSTDECREMENT_EXPR:
2207 case MODIFY_EXPR:
2208 case INIT_EXPR:
2209 case TARGET_EXPR:
2210 case CALL_EXPR:
2211 case METHOD_CALL_EXPR:
2212 case RTL_EXPR:
2213 case TRY_CATCH_EXPR:
2214 case WITH_CLEANUP_EXPR:
2215 case EXIT_EXPR:
2216 return 0;
2217
2218 case BIND_EXPR:
2219 /* For a binding, warn if no side effect within it. */
2220 return warn_if_unused_value (TREE_OPERAND (exp, 1));
2221
2222 case SAVE_EXPR:
2223 return warn_if_unused_value (TREE_OPERAND (exp, 0));
2224
2225 case TRUTH_ORIF_EXPR:
2226 case TRUTH_ANDIF_EXPR:
2227 /* In && or ||, warn if 2nd operand has no side effect. */
2228 return warn_if_unused_value (TREE_OPERAND (exp, 1));
2229
2230 case COMPOUND_EXPR:
2231 if (TREE_NO_UNUSED_WARNING (exp))
2232 return 0;
2233 if (warn_if_unused_value (TREE_OPERAND (exp, 0)))
2234 return 1;
2235 /* Let people do `(foo (), 0)' without a warning. */
2236 if (TREE_CONSTANT (TREE_OPERAND (exp, 1)))
2237 return 0;
2238 return warn_if_unused_value (TREE_OPERAND (exp, 1));
2239
2240 case NOP_EXPR:
2241 case CONVERT_EXPR:
2242 case NON_LVALUE_EXPR:
2243 /* Don't warn about conversions not explicit in the user's program. */
2244 if (TREE_NO_UNUSED_WARNING (exp))
2245 return 0;
2246 /* Assignment to a cast usually results in a cast of a modify.
2247 Don't complain about that. There can be an arbitrary number of
2248 casts before the modify, so we must loop until we find the first
2249 non-cast expression and then test to see if that is a modify. */
2250 {
2251 tree tem = TREE_OPERAND (exp, 0);
2252
2253 while (TREE_CODE (tem) == CONVERT_EXPR || TREE_CODE (tem) == NOP_EXPR)
2254 tem = TREE_OPERAND (tem, 0);
2255
2256 if (TREE_CODE (tem) == MODIFY_EXPR || TREE_CODE (tem) == INIT_EXPR
2257 || TREE_CODE (tem) == CALL_EXPR)
2258 return 0;
2259 }
2260 goto maybe_warn;
2261
2262 case INDIRECT_REF:
2263 /* Don't warn about automatic dereferencing of references, since
2264 the user cannot control it. */
2265 if (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == REFERENCE_TYPE)
2266 return warn_if_unused_value (TREE_OPERAND (exp, 0));
2267 /* Fall through. */
2268
2269 default:
2270 /* Referencing a volatile value is a side effect, so don't warn. */
2271 if ((DECL_P (exp)
2272 || TREE_CODE_CLASS (TREE_CODE (exp)) == 'r')
2273 && TREE_THIS_VOLATILE (exp))
2274 return 0;
2275
2276 /* If this is an expression which has no operands, there is no value
2277 to be unused. There are no such language-independent codes,
2278 but front ends may define them. */
2279 if (TREE_CODE_CLASS (TREE_CODE (exp)) == 'e'
2280 && TREE_CODE_LENGTH (TREE_CODE (exp)) == 0)
2281 return 0;
2282
2283 maybe_warn:
2284 /* If this is an expression with side effects, don't warn. */
2285 if (TREE_SIDE_EFFECTS (exp))
2286 return 0;
2287
2288 warning_with_file_and_line (emit_filename, emit_lineno,
2289 "value computed is not used");
2290 return 1;
2291 }
2292 }
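
/* Examples of the policy above (illustrative only):

       i++, i;         => warns about the unused `i' (COMPOUND_EXPR case)
       (void) f ();    => no warning (explicit conversion to void)
       (foo (), 0);    => no warning (constant second operand)

   The VOID_TYPE_P check and the COMPOUND_EXPR case above implement the
   last two exceptions. */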
2293
2294 /* Clear out the memory of the last expression evaluated. */
2295
2296 void
2297 clear_last_expr ()
2298 {
2299 last_expr_type = NULL_TREE;
2300 last_expr_value = NULL_RTX;
2301 }
2302
2303 /* Begin a statement-expression, i.e., a series of statements which
2304 may return a value. Return the RTL_EXPR for this statement expr.
2305 The caller must save that value and pass it to
2306 expand_end_stmt_expr. If HAS_SCOPE is nonzero, temporaries created
2307 in the statement-expression are deallocated at the end of the
2308 expression. */
2309
2310 tree
2311 expand_start_stmt_expr (has_scope)
2312 int has_scope;
2313 {
2314 tree t;
2315
2316 /* Make the RTL_EXPR node temporary, not momentary,
2317 so that rtl_expr_chain doesn't become garbage. */
2318 t = make_node (RTL_EXPR);
2319 do_pending_stack_adjust ();
2320 if (has_scope)
2321 start_sequence_for_rtl_expr (t);
2322 else
2323 start_sequence ();
2324 NO_DEFER_POP;
2325 expr_stmts_for_value++;
2326 return t;
2327 }
2328
2329 /* Restore the previous state at the end of a statement that returns a value.
2330 Returns a tree node representing the statement's value and the
2331 insns to compute the value.
2332
2333 The nodes of that expression have been freed by now, so we cannot use them.
2334 But we don't want to do that anyway; the expression has already been
2335 evaluated and now we just want to use the value. So generate a RTL_EXPR
2336 with the proper type and RTL value.
2337
2338 If the last substatement was not an expression,
2339 return something with type `void'. */
2340
2341 tree
2342 expand_end_stmt_expr (t)
2343 tree t;
2344 {
2345 OK_DEFER_POP;
2346
2347 if (! last_expr_value || ! last_expr_type)
2348 {
2349 last_expr_value = const0_rtx;
2350 last_expr_type = void_type_node;
2351 }
2352 else if (GET_CODE (last_expr_value) != REG && ! CONSTANT_P (last_expr_value))
2353 /* Remove any possible QUEUED. */
2354 last_expr_value = protect_from_queue (last_expr_value, 0);
2355
2356 emit_queue ();
2357
2358 TREE_TYPE (t) = last_expr_type;
2359 RTL_EXPR_RTL (t) = last_expr_value;
2360 RTL_EXPR_SEQUENCE (t) = get_insns ();
2361
2362 rtl_expr_chain = tree_cons (NULL_TREE, t, rtl_expr_chain);
2363
2364 end_sequence ();
2365
2366 /* Don't consider deleting this expr or containing exprs at tree level. */
2367 TREE_SIDE_EFFECTS (t) = 1;
2368 /* Propagate volatility of the actual RTL expr. */
2369 TREE_THIS_VOLATILE (t) = volatile_refs_p (last_expr_value);
2370
2371 clear_last_expr ();
2372 expr_stmts_for_value--;
2373
2374 return t;
2375 }
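
/* Sketch of the intended call sequence (illustrative; the real callers
   are the front ends' statement-expression handlers):

       tree t = expand_start_stmt_expr (1);    enter ({ ... })
       ... expand the component statements ...
       t = expand_end_stmt_expr (t);           T is now an RTL_EXPR

   The value of the last expression statement, recorded in
   last_expr_value/last_expr_type, becomes the value of T. */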
2376 \f
2377 /* Generate RTL for the start of an if-then. COND is the expression
2378 whose truth should be tested.
2379
2380 If EXITFLAG is nonzero, this conditional is visible to
2381 `exit_something'. */
2382
2383 void
2384 expand_start_cond (cond, exitflag)
2385 tree cond;
2386 int exitflag;
2387 {
2388 struct nesting *thiscond = ALLOC_NESTING ();
2389
2390 /* Make an entry on cond_stack for the cond we are entering. */
2391
2392 thiscond->desc = COND_NESTING;
2393 thiscond->next = cond_stack;
2394 thiscond->all = nesting_stack;
2395 thiscond->depth = ++nesting_depth;
2396 thiscond->data.cond.next_label = gen_label_rtx ();
2397 /* Before we encounter an `else', we don't need a separate exit label
2398 unless there are supposed to be exit statements
2399 to exit this conditional. */
2400 thiscond->exit_label = exitflag ? gen_label_rtx () : 0;
2401 thiscond->data.cond.endif_label = thiscond->exit_label;
2402 cond_stack = thiscond;
2403 nesting_stack = thiscond;
2404
2405 do_jump (cond, thiscond->data.cond.next_label, NULL_RTX);
2406 }
2407
2408 /* Generate RTL between the then-clause and the elseif-clause
2409 of an if-then-elseif-.... */
2410
2411 void
2412 expand_start_elseif (cond)
2413 tree cond;
2414 {
2415 if (cond_stack->data.cond.endif_label == 0)
2416 cond_stack->data.cond.endif_label = gen_label_rtx ();
2417 emit_jump (cond_stack->data.cond.endif_label);
2418 emit_label (cond_stack->data.cond.next_label);
2419 cond_stack->data.cond.next_label = gen_label_rtx ();
2420 do_jump (cond, cond_stack->data.cond.next_label, NULL_RTX);
2421 }
2422
2423 /* Generate RTL between the then-clause and the else-clause
2424 of an if-then-else. */
2425
2426 void
2427 expand_start_else ()
2428 {
2429 if (cond_stack->data.cond.endif_label == 0)
2430 cond_stack->data.cond.endif_label = gen_label_rtx ();
2431
2432 emit_jump (cond_stack->data.cond.endif_label);
2433 emit_label (cond_stack->data.cond.next_label);
2434 cond_stack->data.cond.next_label = 0; /* No more _else or _elseif calls. */
2435 }
2436
2437 /* After calling expand_start_else, turn this "else" into an "else if"
2438 by providing another condition. */
2439
2440 void
2441 expand_elseif (cond)
2442 tree cond;
2443 {
2444 cond_stack->data.cond.next_label = gen_label_rtx ();
2445 do_jump (cond, cond_stack->data.cond.next_label, NULL_RTX);
2446 }
2447
2448 /* Generate RTL for the end of an if-then.
2449 Pop the record for it off of cond_stack. */
2450
2451 void
2452 expand_end_cond ()
2453 {
2454 struct nesting *thiscond = cond_stack;
2455
2456 do_pending_stack_adjust ();
2457 if (thiscond->data.cond.next_label)
2458 emit_label (thiscond->data.cond.next_label);
2459 if (thiscond->data.cond.endif_label)
2460 emit_label (thiscond->data.cond.endif_label);
2461
2462 POPSTACK (cond_stack);
2463 clear_last_expr ();
2464 }
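
/* Sketch of how a front end drives these routines for
   `if (a) A; else if (b) B; else C;' (illustrative only):

       expand_start_cond (a, 0);   ... expand A ...
       expand_start_elseif (b);    ... expand B ...
       expand_start_else ();       ... expand C ...
       expand_end_cond ();

   expand_start_elseif jumps to the endif label, emits the label the
   previous test branches to on falsehood, and tests the new condition;
   expand_end_cond emits any pending labels. */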
2465 \f
2466 /* Generate RTL for the start of a loop. EXIT_FLAG is nonzero if this
2467 loop should be exited by `exit_something'. This is a loop for which
2468 `expand_continue' will jump to the top of the loop.
2469
2470 Make an entry on loop_stack to record the labels associated with
2471 this loop. */
2472
2473 struct nesting *
2474 expand_start_loop (exit_flag)
2475 int exit_flag;
2476 {
2477 struct nesting *thisloop = ALLOC_NESTING ();
2478
2479 /* Make an entry on loop_stack for the loop we are entering. */
2480
2481 thisloop->desc = LOOP_NESTING;
2482 thisloop->next = loop_stack;
2483 thisloop->all = nesting_stack;
2484 thisloop->depth = ++nesting_depth;
2485 thisloop->data.loop.start_label = gen_label_rtx ();
2486 thisloop->data.loop.end_label = gen_label_rtx ();
2487 thisloop->data.loop.continue_label = thisloop->data.loop.start_label;
2488 thisloop->exit_label = exit_flag ? thisloop->data.loop.end_label : 0;
2489 loop_stack = thisloop;
2490 nesting_stack = thisloop;
2491
2492 do_pending_stack_adjust ();
2493 emit_queue ();
2494 emit_note (NULL, NOTE_INSN_LOOP_BEG);
2495 emit_label (thisloop->data.loop.start_label);
2496
2497 return thisloop;
2498 }
2499
2500 /* Like expand_start_loop but for a loop where the continuation point
2501 (for expand_continue_loop) will be specified explicitly. */
2502
2503 struct nesting *
2504 expand_start_loop_continue_elsewhere (exit_flag)
2505 int exit_flag;
2506 {
2507 struct nesting *thisloop = expand_start_loop (exit_flag);
2508 loop_stack->data.loop.continue_label = gen_label_rtx ();
2509 return thisloop;
2510 }
2511
2512 /* Begin a null "loop", aka do { } while (0). But since the contents
2513 of said loop can still contain a break, we must frob the loop nest. */
2514
2515 struct nesting *
2516 expand_start_null_loop ()
2517 {
2518 struct nesting *thisloop = ALLOC_NESTING ();
2519
2520 /* Make an entry on loop_stack for the loop we are entering. */
2521
2522 thisloop->desc = LOOP_NESTING;
2523 thisloop->next = loop_stack;
2524 thisloop->all = nesting_stack;
2525 thisloop->depth = ++nesting_depth;
2526 thisloop->data.loop.start_label = emit_note (NULL, NOTE_INSN_DELETED);
2527 thisloop->data.loop.end_label = gen_label_rtx ();
2528 thisloop->data.loop.continue_label = thisloop->data.loop.end_label;
2529 thisloop->exit_label = thisloop->data.loop.end_label;
2530 loop_stack = thisloop;
2531 nesting_stack = thisloop;
2532
2533 return thisloop;
2534 }
2535
2536 /* Specify the continuation point for a loop started with
2537 expand_start_loop_continue_elsewhere.
2538 Use this at the point in the code to which a continue statement
2539 should jump. */
2540
2541 void
2542 expand_loop_continue_here ()
2543 {
2544 do_pending_stack_adjust ();
2545 emit_note (NULL, NOTE_INSN_LOOP_CONT);
2546 emit_label (loop_stack->data.loop.continue_label);
2547 }
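
/* Sketch of the expected use for a C `for' loop, where `continue'
   must jump to the increment expression (illustrative only):

       expand_start_loop_continue_elsewhere (1);
       ... expand the loop test and body ...
       expand_loop_continue_here ();
       ... expand the increment expression ...
       expand_end_loop ();
*/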
2548
2549 /* Finish a loop. Generate a jump back to the top and the loop-exit label.
2550 Pop the block off of loop_stack. */
2551
2552 void
2553 expand_end_loop ()
2554 {
2555 rtx start_label = loop_stack->data.loop.start_label;
2556 rtx etc_note;
2557 int eh_regions, debug_blocks;
2558 bool empty_test;
2559
2560 /* Mark the continue-point at the top of the loop if none elsewhere. */
2561 if (start_label == loop_stack->data.loop.continue_label)
2562 emit_note_before (NOTE_INSN_LOOP_CONT, start_label);
2563
2564 do_pending_stack_adjust ();
2565
2566 /* If the loop starts with a loop exit, roll that to the end where
2567 it will optimize together with the jump back.
2568
2569 If the loop presently looks like this (in pseudo-C):
2570
2571 LOOP_BEG
2572 start_label:
2573 if (test) goto end_label;
2574 LOOP_END_TOP_COND
2575 body;
2576 goto start_label;
2577 end_label:
2578
2579 transform it to look like:
2580
2581 LOOP_BEG
2582 goto start_label;
2583 top_label:
2584 body;
2585 start_label:
2586 if (test) goto end_label;
2587 goto top_label;
2588 end_label:
2589
2590 We rely on the presence of NOTE_INSN_LOOP_END_TOP_COND to mark
2591 the end of the entry conditional. Without this, our lexical scan
2592 can't tell the difference between an entry conditional and a
2593 body conditional that exits the loop. Mistaking the two means
2594 that we can misplace the NOTE_INSN_LOOP_CONT note, which can
2595 screw up loop unrolling.
2596
2597 Things will be oh so much better when loop optimization is done
2598 off of a proper control flow graph... */
2599
2600 /* Scan insns from the top of the loop looking for the END_TOP_COND note. */
2601
2602 empty_test = true;
2603 eh_regions = debug_blocks = 0;
2604 for (etc_note = start_label; etc_note ; etc_note = NEXT_INSN (etc_note))
2605 if (GET_CODE (etc_note) == NOTE)
2606 {
2607 if (NOTE_LINE_NUMBER (etc_note) == NOTE_INSN_LOOP_END_TOP_COND)
2608 break;
2609
2610 /* We must not walk into a nested loop. */
2611 else if (NOTE_LINE_NUMBER (etc_note) == NOTE_INSN_LOOP_BEG)
2612 {
2613 etc_note = NULL_RTX;
2614 break;
2615 }
2616
2617 /* At the same time, scan for EH region notes, as we don't want
2618 to scrog region nesting. This shouldn't happen, but... */
2619 else if (NOTE_LINE_NUMBER (etc_note) == NOTE_INSN_EH_REGION_BEG)
2620 eh_regions++;
2621 else if (NOTE_LINE_NUMBER (etc_note) == NOTE_INSN_EH_REGION_END)
2622 {
2623 if (--eh_regions < 0)
2624 /* We've come to the end of an EH region, but never saw the
2625 beginning of that region. That means that an EH region
2626 begins before the top of the loop, and ends in the middle
2627 of it. The existence of such a situation violates a basic
2628 assumption in this code, since that would imply that even
2629 when EH_REGIONS is zero, we might move code out of an
2630 exception region. */
2631 abort ();
2632 }
2633
2634 /* Likewise for debug scopes. In this case we'll either (1) move
2635 all of the notes if they are properly nested or (2) leave the
2636 notes alone and only rotate the loop at high optimization
2637 levels when we expect to scrog debug info. */
2638 else if (NOTE_LINE_NUMBER (etc_note) == NOTE_INSN_BLOCK_BEG)
2639 debug_blocks++;
2640 else if (NOTE_LINE_NUMBER (etc_note) == NOTE_INSN_BLOCK_END)
2641 debug_blocks--;
2642 }
2643 else if (INSN_P (etc_note))
2644 empty_test = false;
2645
2646 if (etc_note
2647 && optimize
2648 && ! empty_test
2649 && eh_regions == 0
2650 && (debug_blocks == 0 || optimize >= 2)
2651 && NEXT_INSN (etc_note) != NULL_RTX
2652 && ! any_condjump_p (get_last_insn ()))
2653 {
2654 /* We found one. Move everything from START to ETC to the end
2655 of the loop, and add a jump from the top of the loop. */
2656 rtx top_label = gen_label_rtx ();
2657 rtx start_move = start_label;
2658
2659 /* If the start label is preceded by a NOTE_INSN_LOOP_CONT note,
2660 then we want to move this note also. */
2661 if (GET_CODE (PREV_INSN (start_move)) == NOTE
2662 && NOTE_LINE_NUMBER (PREV_INSN (start_move)) == NOTE_INSN_LOOP_CONT)
2663 start_move = PREV_INSN (start_move);
2664
2665 emit_label_before (top_label, start_move);
2666
2667 /* Actually move the insns. If the debug scopes are nested, we
2668 can move everything at once. Otherwise we have to move them
2669 one by one and squeeze out the block notes. */
2670 if (debug_blocks == 0)
2671 reorder_insns (start_move, etc_note, get_last_insn ());
2672 else
2673 {
2674 rtx insn, next_insn;
2675 for (insn = start_move; insn; insn = next_insn)
2676 {
2677 /* Figure out which insn comes after this one. We have
2678 to do this before we move INSN. */
2679 next_insn = (insn == etc_note ? NULL : NEXT_INSN (insn));
2680
2681 if (GET_CODE (insn) == NOTE
2682 && (NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_BEG
2683 || NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_END))
2684 continue;
2685
2686 reorder_insns (insn, insn, get_last_insn ());
2687 }
2688 }
2689
2690 /* Add the jump from the top of the loop. */
2691 emit_jump_insn_before (gen_jump (start_label), top_label);
2692 emit_barrier_before (top_label);
2693 start_label = top_label;
2694 }
2695
2696 emit_jump (start_label);
2697 emit_note (NULL, NOTE_INSN_LOOP_END);
2698 emit_label (loop_stack->data.loop.end_label);
2699
2700 POPSTACK (loop_stack);
2701
2702 clear_last_expr ();
2703 }
2704
2705 /* Finish a null loop, aka do { } while (0). */
2706
2707 void
2708 expand_end_null_loop ()
2709 {
2710 do_pending_stack_adjust ();
2711 emit_label (loop_stack->data.loop.end_label);
2712
2713 POPSTACK (loop_stack);
2714
2715 clear_last_expr ();
2716 }
2717
2718 /* Generate a jump to the current loop's continue-point.
2719 This is usually the top of the loop, but may be specified
2720 explicitly elsewhere. If not currently inside a loop,
2721 return 0 and do nothing; caller will print an error message. */
2722
2723 int
2724 expand_continue_loop (whichloop)
2725 struct nesting *whichloop;
2726 {
2727 /* Emit information for branch prediction. */
2728 rtx note;
2729
2730 note = emit_note (NULL, NOTE_INSN_PREDICTION);
2731 NOTE_PREDICTION (note) = NOTE_PREDICT (PRED_CONTINUE, IS_TAKEN);
2732 clear_last_expr ();
2733 if (whichloop == 0)
2734 whichloop = loop_stack;
2735 if (whichloop == 0)
2736 return 0;
2737 expand_goto_internal (NULL_TREE, whichloop->data.loop.continue_label,
2738 NULL_RTX);
2739 return 1;
2740 }
2741
2742 /* Generate a jump to exit the current loop. If not currently inside a loop,
2743 return 0 and do nothing; caller will print an error message. */
2744
2745 int
2746 expand_exit_loop (whichloop)
2747 struct nesting *whichloop;
2748 {
2749 clear_last_expr ();
2750 if (whichloop == 0)
2751 whichloop = loop_stack;
2752 if (whichloop == 0)
2753 return 0;
2754 expand_goto_internal (NULL_TREE, whichloop->data.loop.end_label, NULL_RTX);
2755 return 1;
2756 }
2757
2758 /* Generate a conditional jump to exit the current loop if COND
2759 evaluates to zero. If not currently inside a loop,
2760 return 0 and do nothing; caller will print an error message. */
2761
2762 int
2763 expand_exit_loop_if_false (whichloop, cond)
2764 struct nesting *whichloop;
2765 tree cond;
2766 {
2767 rtx label;
2768 clear_last_expr ();
2769
2770 if (whichloop == 0)
2771 whichloop = loop_stack;
2772 if (whichloop == 0)
2773 return 0;
2774
2775 if (integer_nonzerop (cond))
2776 return 1;
2777 if (integer_zerop (cond))
2778 return expand_exit_loop (whichloop);
2779
2780 /* Check if we definitely won't need a fixup. */
2781 if (whichloop == nesting_stack)
2782 {
2783 jumpifnot (cond, whichloop->data.loop.end_label);
2784 return 1;
2785 }
2786
2787 /* In order to handle fixups, we actually create a conditional jump
2788 around an unconditional branch to exit the loop. If fixups are
2789 necessary, they go before the unconditional branch. */
2790
2791 label = gen_label_rtx ();
2792 jumpif (cond, label);
2793 expand_goto_internal (NULL_TREE, whichloop->data.loop.end_label,
2794 NULL_RTX);
2795 emit_label (label);
2796
2797 return 1;
2798 }
2799
2800 /* Like expand_exit_loop_if_false except also emit a note marking
2801 the end of the conditional. Should only be used immediately
2802 after expand_start_loop. */
2803
2804 int
2805 expand_exit_loop_top_cond (whichloop, cond)
2806 struct nesting *whichloop;
2807 tree cond;
2808 {
2809 if (! expand_exit_loop_if_false (whichloop, cond))
2810 return 0;
2811
2812 emit_note (NULL, NOTE_INSN_LOOP_END_TOP_COND);
2813 return 1;
2814 }
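
/* Sketch of the expected use for `while (cond) body;' (illustrative):

       expand_start_loop (1);
       expand_exit_loop_top_cond (0, cond);    exit if COND is false
       ... expand body ...
       expand_end_loop ();

   The NOTE_INSN_LOOP_END_TOP_COND emitted here is what lets
   expand_end_loop recognize and rotate the entry test. */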
2815
2816 /* Return nonzero if we should preserve sub-expressions as separate
2817 pseudos. We never do so if we aren't optimizing. We always do so
2818 if -fexpensive-optimizations.
2819
2820 Otherwise, we only do so if we are in the "early" part of a loop. I.e.,
2821 the loop may still be a small one. */
2822
2823 int
2824 preserve_subexpressions_p ()
2825 {
2826 rtx insn;
2827
2828 if (flag_expensive_optimizations)
2829 return 1;
2830
2831 if (optimize == 0 || cfun == 0 || cfun->stmt == 0 || loop_stack == 0)
2832 return 0;
2833
2834 insn = get_last_insn_anywhere ();
2835
2836 return (insn
2837 && (INSN_UID (insn) - INSN_UID (loop_stack->data.loop.start_label)
2838 < n_non_fixed_regs * 3));
2839
2840 }
2841
2842 /* Generate a jump to exit the current loop, conditional, binding contour
2843 or case statement. Not all such constructs are visible to this function,
2844 only those started with EXIT_FLAG nonzero. Individual languages use
2845 the EXIT_FLAG parameter to control which kinds of constructs you can
2846 exit this way.
2847
2848 If not currently inside anything that can be exited,
2849 return 0 and do nothing; caller will print an error message. */
2850
2851 int
2852 expand_exit_something ()
2853 {
2854 struct nesting *n;
2855 clear_last_expr ();
2856 for (n = nesting_stack; n; n = n->all)
2857 if (n->exit_label != 0)
2858 {
2859 expand_goto_internal (NULL_TREE, n->exit_label, NULL_RTX);
2860 return 1;
2861 }
2862
2863 return 0;
2864 }
2865 \f
2866 /* Generate RTL to return from the current function, with no value.
2867 (That is, we do not do anything about returning any value.) */
2868
2869 void
2870 expand_null_return ()
2871 {
2872 rtx last_insn;
2873
2874 last_insn = get_last_insn ();
2875
2876 /* If this function was declared to return a value, but we
2877 didn't, clobber the return registers so that they are not
2878 propagated live to the rest of the function. */
2879 clobber_return_register ();
2880
2881 expand_null_return_1 (last_insn);
2882 }
2883
2884 /* Try to guess whether the returned value indicates an error code. */
2885 static enum br_predictor
2886 return_prediction (val)
2887 rtx val;
2888 {
2889 /* Different heuristics for pointers and scalars. */
2890 if (POINTER_TYPE_P (TREE_TYPE (DECL_RESULT (current_function_decl))))
2891 {
2892 /* NULL is usually not returned. */
2893 if (val == const0_rtx)
2894 return PRED_NULL_RETURN;
2895 }
2896 else
2897 {
2898 /* Negative return values are often used to indicate
2899 errors. */
2900 if (GET_CODE (val) == CONST_INT
2901 && INTVAL (val) < 0)
2902 return PRED_NEGATIVE_RETURN;
2903 /* Constant return values are also usually errors;
2904 zero and one often mean booleans, so exclude them from the
2905 heuristics. */
2906 if (CONSTANT_P (val)
2907 && (val != const0_rtx && val != const1_rtx))
2908 return PRED_CONST_RETURN;
2909 }
2910 return PRED_NO_PREDICTION;
2911 }
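
/* For example (illustrative): `return NULL;' from a pointer-valued
   function yields PRED_NULL_RETURN, `return -1;' yields
   PRED_NEGATIVE_RETURN, and `return 42;' yields PRED_CONST_RETURN,
   while `return 0;' and `return 1;' from a non-pointer function get
   no prediction, since they commonly encode booleans. */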
2912
2913 /* Generate RTL to return from the current function, with value VAL. */
2914
2915 static void
2916 expand_value_return (val)
2917 rtx val;
2918 {
2919 rtx last_insn;
2920 rtx return_reg;
2921 enum br_predictor pred;
2922
2923 if ((pred = return_prediction (val)) != PRED_NO_PREDICTION)
2924 {
2925 /* Emit information for branch prediction. */
2926 rtx note;
2927
2928 note = emit_note (NULL, NOTE_INSN_PREDICTION);
2929
2930 NOTE_PREDICTION (note) = NOTE_PREDICT (pred, NOT_TAKEN);
2931
2932 }
2933
2934 last_insn = get_last_insn ();
2935 return_reg = DECL_RTL (DECL_RESULT (current_function_decl));
2936
2937 /* Copy the value to the return location
2938 unless it's already there. */
2939
2940 if (return_reg != val)
2941 {
2942 tree type = TREE_TYPE (DECL_RESULT (current_function_decl));
2943 #ifdef PROMOTE_FUNCTION_RETURN
2944 int unsignedp = TREE_UNSIGNED (type);
2945 enum machine_mode old_mode
2946 = DECL_MODE (DECL_RESULT (current_function_decl));
2947 enum machine_mode mode
2948 = promote_mode (type, old_mode, &unsignedp, 1);
2949
2950 if (mode != old_mode)
2951 val = convert_modes (mode, old_mode, val, unsignedp);
2952 #endif
2953 if (GET_CODE (return_reg) == PARALLEL)
2954 emit_group_load (return_reg, val, int_size_in_bytes (type));
2955 else
2956 emit_move_insn (return_reg, val);
2957 }
2958
2959 expand_null_return_1 (last_insn);
2960 }
2961
2962 /* Output a return with no value. If LAST_INSN is nonzero,
2963 pretend that the return takes place after LAST_INSN. */
2964
2965 static void
2966 expand_null_return_1 (last_insn)
2967 rtx last_insn;
2968 {
2969 rtx end_label = cleanup_label ? cleanup_label : return_label;
2970
2971 clear_pending_stack_adjust ();
2972 do_pending_stack_adjust ();
2973 clear_last_expr ();
2974
2975 if (end_label == 0)
2976 end_label = return_label = gen_label_rtx ();
2977 expand_goto_internal (NULL_TREE, end_label, last_insn);
2978 }
2979 \f
2980 /* Generate RTL to evaluate the expression RETVAL and return it
2981 from the current function. */
2982
2983 void
2984 expand_return (retval)
2985 tree retval;
2986 {
2987 /* If there are any cleanups to be performed, then they will
2988 be inserted following LAST_INSN. It is desirable
2989 that the last_insn, for such purposes, should be the
2990 last insn before computing the return value. Otherwise, cleanups
2991 which call functions can clobber the return value. */
2992 /* ??? rms: I think that is erroneous, because in C++ it would
2993 run destructors on variables that might be used in the subsequent
2994 computation of the return value. */
2995 rtx last_insn = 0;
2996 rtx result_rtl;
2997 rtx val = 0;
2998 tree retval_rhs;
2999
3000 /* If function wants no value, give it none. */
3001 if (TREE_CODE (TREE_TYPE (TREE_TYPE (current_function_decl))) == VOID_TYPE)
3002 {
3003 expand_expr (retval, NULL_RTX, VOIDmode, 0);
3004 emit_queue ();
3005 expand_null_return ();
3006 return;
3007 }
3008
3009 if (retval == error_mark_node)
3010 {
3011 /* Treat this like a return of no value from a function that
3012 returns a value. */
3013 expand_null_return ();
3014 return;
3015 }
3016 else if (TREE_CODE (retval) == RESULT_DECL)
3017 retval_rhs = retval;
3018 else if ((TREE_CODE (retval) == MODIFY_EXPR || TREE_CODE (retval) == INIT_EXPR)
3019 && TREE_CODE (TREE_OPERAND (retval, 0)) == RESULT_DECL)
3020 retval_rhs = TREE_OPERAND (retval, 1);
3021 else if (VOID_TYPE_P (TREE_TYPE (retval)))
3022 /* Recognize tail-recursive call to void function. */
3023 retval_rhs = retval;
3024 else
3025 retval_rhs = NULL_TREE;
3026
3027 last_insn = get_last_insn ();
3028
3029 /* Distribute the return down a conditional expr if either of the sides
3030 may involve tail recursion (see test below). This increases the number
3031 of tail recursions we see. Don't do this always, since it can produce
3032 sub-optimal code in some cases, and we already distribute assignments
3033 into conditional expressions when that would help. */
3034
3035 if (optimize && retval_rhs != 0
3036 && frame_offset == 0
3037 && TREE_CODE (retval_rhs) == COND_EXPR
3038 && (TREE_CODE (TREE_OPERAND (retval_rhs, 1)) == CALL_EXPR
3039 || TREE_CODE (TREE_OPERAND (retval_rhs, 2)) == CALL_EXPR))
3040 {
3041 rtx label = gen_label_rtx ();
3042 tree expr;
3043
3044 do_jump (TREE_OPERAND (retval_rhs, 0), label, NULL_RTX);
3045 start_cleanup_deferral ();
3046 expr = build (MODIFY_EXPR, TREE_TYPE (TREE_TYPE (current_function_decl)),
3047 DECL_RESULT (current_function_decl),
3048 TREE_OPERAND (retval_rhs, 1));
3049 TREE_SIDE_EFFECTS (expr) = 1;
3050 expand_return (expr);
3051 emit_label (label);
3052
3053 expr = build (MODIFY_EXPR, TREE_TYPE (TREE_TYPE (current_function_decl)),
3054 DECL_RESULT (current_function_decl),
3055 TREE_OPERAND (retval_rhs, 2));
3056 TREE_SIDE_EFFECTS (expr) = 1;
3057 expand_return (expr);
3058 end_cleanup_deferral ();
3059 return;
3060 }
3061
3062 result_rtl = DECL_RTL (DECL_RESULT (current_function_decl));
3063
3064 /* If the result is an aggregate that is being returned in one (or more)
3065 registers, load the registers here. The compiler currently can't handle
3066 copying a BLKmode value into registers. We could put this code in a
3067 more general area (for use by everyone instead of just function
3068 call/return), but until this feature is generally usable it is kept here
3069 (and in expand_call). The value must go into a pseudo in case there
3070 are cleanups that will clobber the real return register. */
3071
3072 if (retval_rhs != 0
3073 && TYPE_MODE (TREE_TYPE (retval_rhs)) == BLKmode
3074 && GET_CODE (result_rtl) == REG)
3075 {
3076 int i;
3077 unsigned HOST_WIDE_INT bitpos, xbitpos;
3078 unsigned HOST_WIDE_INT big_endian_correction = 0;
3079 unsigned HOST_WIDE_INT bytes
3080 = int_size_in_bytes (TREE_TYPE (retval_rhs));
3081 int n_regs = (bytes + UNITS_PER_WORD - 1) / UNITS_PER_WORD;
3082 unsigned int bitsize
3083 = MIN (TYPE_ALIGN (TREE_TYPE (retval_rhs)), BITS_PER_WORD);
3084 rtx *result_pseudos = (rtx *) alloca (sizeof (rtx) * n_regs);
3085 rtx result_reg, src = NULL_RTX, dst = NULL_RTX;
3086 rtx result_val = expand_expr (retval_rhs, NULL_RTX, VOIDmode, 0);
3087 enum machine_mode tmpmode, result_reg_mode;
3088
3089 if (bytes == 0)
3090 {
3091 expand_null_return ();
3092 return;
3093 }
3094
3095 /* Structures whose size is not a multiple of a word are aligned
3096 to the least significant byte (to the right). On a BYTES_BIG_ENDIAN
3097 machine, this means we must skip the empty high order bytes when
3098 calculating the bit offset. */
3099 if (BYTES_BIG_ENDIAN
3100 && bytes % UNITS_PER_WORD)
3101 big_endian_correction = (BITS_PER_WORD - ((bytes % UNITS_PER_WORD)
3102 * BITS_PER_UNIT));
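
/* Worked example (illustrative): with 32-bit words (UNITS_PER_WORD == 4,
   BITS_PER_UNIT == 8) and a 6-byte structure, bytes % UNITS_PER_WORD == 2,
   so big_endian_correction == 32 - 2*8 == 16: the first destination
   word is filled starting at bit 16, skipping the 2 empty high-order
   bytes. */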
3103
3104 /* Copy the structure BITSIZE bits at a time. */
3105 for (bitpos = 0, xbitpos = big_endian_correction;
3106 bitpos < bytes * BITS_PER_UNIT;
3107 bitpos += bitsize, xbitpos += bitsize)
3108 {
3109 /* We need a new destination pseudo each time xbitpos is
3110 on a word boundary and when xbitpos == big_endian_correction
3111 (the first time through). */
3112 if (xbitpos % BITS_PER_WORD == 0
3113 || xbitpos == big_endian_correction)
3114 {
3115 /* Generate an appropriate register. */
3116 dst = gen_reg_rtx (word_mode);
3117 result_pseudos[xbitpos / BITS_PER_WORD] = dst;
3118
3119 /* Clear the destination before we move anything into it. */
3120 emit_move_insn (dst, CONST0_RTX (GET_MODE (dst)));
3121 }
3122
3123 /* We need a new source operand each time bitpos is on a word
3124 boundary. */
3125 if (bitpos % BITS_PER_WORD == 0)
3126 src = operand_subword_force (result_val,
3127 bitpos / BITS_PER_WORD,
3128 BLKmode);
3129
3130 /* Use bitpos for the source extraction (left justified) and
3131 xbitpos for the destination store (right justified). */
3132 store_bit_field (dst, bitsize, xbitpos % BITS_PER_WORD, word_mode,
3133 extract_bit_field (src, bitsize,
3134 bitpos % BITS_PER_WORD, 1,
3135 NULL_RTX, word_mode, word_mode,
3136 BITS_PER_WORD),
3137 BITS_PER_WORD);
3138 }
3139
3140 /* Find the smallest integer mode large enough to hold the
3141 entire structure and use that mode instead of BLKmode
3142 on the USE insn for the return register. */
3143 for (tmpmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
3144 tmpmode != VOIDmode;
3145 tmpmode = GET_MODE_WIDER_MODE (tmpmode))
3146 /* Have we found a large enough mode? */
3147 if (GET_MODE_SIZE (tmpmode) >= bytes)
3148 break;
3149
3150 /* No suitable mode found. */
3151 if (tmpmode == VOIDmode)
3152 abort ();
3153
3154 PUT_MODE (result_rtl, tmpmode);
3155
3156 if (GET_MODE_SIZE (tmpmode) < GET_MODE_SIZE (word_mode))
3157 result_reg_mode = word_mode;
3158 else
3159 result_reg_mode = tmpmode;
3160 result_reg = gen_reg_rtx (result_reg_mode);
3161
3162 emit_queue ();
3163 for (i = 0; i < n_regs; i++)
3164 emit_move_insn (operand_subword (result_reg, i, 0, result_reg_mode),
3165 result_pseudos[i]);
3166
3167 if (tmpmode != result_reg_mode)
3168 result_reg = gen_lowpart (tmpmode, result_reg);
3169
3170 expand_value_return (result_reg);
3171 }
3172 else if (retval_rhs != 0
3173 && !VOID_TYPE_P (TREE_TYPE (retval_rhs))
3174 && (GET_CODE (result_rtl) == REG
3175 || (GET_CODE (result_rtl) == PARALLEL)))
3176 {
3177 /* Calculate the return value into a temporary (usually a pseudo
3178 reg). */
3179 tree ot = TREE_TYPE (DECL_RESULT (current_function_decl));
3180 tree nt = build_qualified_type (ot, TYPE_QUALS (ot) | TYPE_QUAL_CONST);
3181
3182 val = assign_temp (nt, 0, 0, 1);
3183 val = expand_expr (retval_rhs, val, GET_MODE (val), 0);
3184 val = force_not_mem (val);
3185 emit_queue ();
3186 /* Return the calculated value, doing cleanups first. */
3187 expand_value_return (val);
3188 }
3189 else
3190 {
3191 /* No cleanups or no hard reg used;
3192 calculate value into hard return reg. */
3193 expand_expr (retval, const0_rtx, VOIDmode, 0);
3194 emit_queue ();
3195 expand_value_return (result_rtl);
3196 }
3197 }
3198 \f
3199 /* Attempt to optimize a potential tail recursion call into a goto.
3200 ARGUMENTS are the arguments to a CALL_EXPR; LAST_INSN indicates
3201 where to place the jump to the tail recursion label.
3202
3203 Return TRUE if the call was optimized into a goto. */
3204
3205 int
3206 optimize_tail_recursion (arguments, last_insn)
3207 tree arguments;
3208 rtx last_insn;
3209 {
3210 /* Finish checking validity, and if valid emit code to set the
3211 argument variables for the new call. */
3212 if (tail_recursion_args (arguments, DECL_ARGUMENTS (current_function_decl)))
3213 {
3214 if (tail_recursion_label == 0)
3215 {
3216 tail_recursion_label = gen_label_rtx ();
3217 emit_label_after (tail_recursion_label,
3218 tail_recursion_reentry);
3219 }
3220 emit_queue ();
3221 expand_goto_internal (NULL_TREE, tail_recursion_label, last_insn);
3222 emit_barrier ();
3223 return 1;
3224 }
3225 return 0;
3226 }
3227
3228 /* Emit code to alter this function's formal parms for a tail-recursive call.
3229 ACTUALS is a list of actual parameter expressions (chain of TREE_LISTs).
3230 FORMALS is the chain of decls of formals.
3231 Return 1 if this can be done;
3232 otherwise return 0 and do not emit any code. */
3233
3234 static int
3235 tail_recursion_args (actuals, formals)
3236 tree actuals, formals;
3237 {
3238 tree a = actuals, f = formals;
3239 int i;
3240 rtx *argvec;
3241
3242 /* Check that number and types of actuals are compatible
3243 with the formals. This is not always true in valid C code.
3244 Also check that no formal needs to be addressable
3245 and that all formals are scalars. */
3246
3247 /* Also count the args. */
3248
3249 for (a = actuals, f = formals, i = 0; a && f; a = TREE_CHAIN (a), f = TREE_CHAIN (f), i++)
3250 {
3251 if (TYPE_MAIN_VARIANT (TREE_TYPE (TREE_VALUE (a)))
3252 != TYPE_MAIN_VARIANT (TREE_TYPE (f)))
3253 return 0;
3254 if (GET_CODE (DECL_RTL (f)) != REG || DECL_MODE (f) == BLKmode)
3255 return 0;
3256 }
3257 if (a != 0 || f != 0)
3258 return 0;
3259
3260 /* Compute all the actuals. */
3261
3262 argvec = (rtx *) alloca (i * sizeof (rtx));
3263
3264 for (a = actuals, i = 0; a; a = TREE_CHAIN (a), i++)
3265 argvec[i] = expand_expr (TREE_VALUE (a), NULL_RTX, VOIDmode, 0);
3266
3267 /* Find which actual values refer to current values of previous formals.
3268 Copy each of them now, before any formal is changed. */
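
/* Example of why the copies are needed (illustrative): for a
   tail-recursive call `f (b, a)' inside `f (int a, int b)', argvec[1]
   holds DECL_RTL (a), but formal A is overwritten first by the store
   loop below. The scan here notices that argvec[1] mentions an
   earlier formal and copies the value to a fresh pseudo before any
   formal changes. */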
3269
3270 for (a = actuals, i = 0; a; a = TREE_CHAIN (a), i++)
3271 {
3272 int copy = 0;
3273 int j;
3274 for (f = formals, j = 0; j < i; f = TREE_CHAIN (f), j++)
3275 if (reg_mentioned_p (DECL_RTL (f), argvec[i]))
3276 {
3277 copy = 1;
3278 break;
3279 }
3280 if (copy)
3281 argvec[i] = copy_to_reg (argvec[i]);
3282 }
3283
3284 /* Store the values of the actuals into the formals. */
3285
3286 for (f = formals, a = actuals, i = 0; f;
3287 f = TREE_CHAIN (f), a = TREE_CHAIN (a), i++)
3288 {
3289 if (GET_MODE (DECL_RTL (f)) == GET_MODE (argvec[i]))
3290 emit_move_insn (DECL_RTL (f), argvec[i]);
3291 else
3292 {
3293 rtx tmp = argvec[i];
3294
3295 if (DECL_MODE (f) != GET_MODE (DECL_RTL (f)))
3296 {
3297 tmp = gen_reg_rtx (DECL_MODE (f));
3298 convert_move (tmp, argvec[i],
3299 TREE_UNSIGNED (TREE_TYPE (TREE_VALUE (a))));
3300 }
3301 convert_move (DECL_RTL (f), tmp,
3302 TREE_UNSIGNED (TREE_TYPE (TREE_VALUE (a))));
3303 }
3304 }
3305
3306 free_temp_slots ();
3307 return 1;
3308 }
3309 \f
3310 /* Generate the RTL code for entering a binding contour.
3311 The variables are declared one by one, by calls to `expand_decl'.
3312
3313 FLAGS is a bitwise or of the following flags:
3314
3315 1 - Nonzero if this construct should be visible to
3316 `exit_something'.
3317
3318 2 - Nonzero if this contour does not require a
3319 NOTE_INSN_BLOCK_BEG note. Virtually all calls from
3320 language-independent code should set this flag because they
3321 will not create corresponding BLOCK nodes. (There should be
3322 a one-to-one correspondence between NOTE_INSN_BLOCK_BEG notes
3323 and BLOCKs.) If this flag is set, MARK_ENDS should be zero
3324 when expand_end_bindings is called.
3325
3326 If we are creating a NOTE_INSN_BLOCK_BEG note, a BLOCK may
3327 optionally be supplied. If so, it becomes the NOTE_BLOCK for the
3328 note. */
3329
3330 void
3331 expand_start_bindings_and_block (flags, block)
3332 int flags;
3333 tree block;
3334 {
3335 struct nesting *thisblock = ALLOC_NESTING ();
3336 rtx note;
3337 int exit_flag = ((flags & 1) != 0);
3338 int block_flag = ((flags & 2) == 0);
3339
3340 /* If a BLOCK is supplied, then the caller should be requesting a
3341 NOTE_INSN_BLOCK_BEG note. */
3342 if (!block_flag && block)
3343 abort ();
3344
3345 /* Create a note to mark the beginning of the block. */
3346 if (block_flag)
3347 {
3348 note = emit_note (NULL, NOTE_INSN_BLOCK_BEG);
3349 NOTE_BLOCK (note) = block;
3350 }
3351 else
3352 note = emit_note (NULL, NOTE_INSN_DELETED);
3353
3354 /* Make an entry on block_stack for the block we are entering. */
3355
3356 thisblock->desc = BLOCK_NESTING;
3357 thisblock->next = block_stack;
3358 thisblock->all = nesting_stack;
3359 thisblock->depth = ++nesting_depth;
3360 thisblock->data.block.stack_level = 0;
3361 thisblock->data.block.cleanups = 0;
3362 thisblock->data.block.n_function_calls = 0;
3363 thisblock->data.block.exception_region = 0;
3364 thisblock->data.block.block_target_temp_slot_level = target_temp_slot_level;
3365
3366 thisblock->data.block.conditional_code = 0;
3367 thisblock->data.block.last_unconditional_cleanup = note;
3368 /* When we insert instructions after the last unconditional cleanup,
3369 we don't adjust last_insn. That means that a later add_insn will
3370 clobber the instructions we've just added. The easiest way to
3371 fix this is to just insert another instruction here, so that the
3372 instructions inserted after the last unconditional cleanup are
3373 never the last instruction. */
3374 emit_note (NULL, NOTE_INSN_DELETED);
3375
3376 if (block_stack
3377 && !(block_stack->data.block.cleanups == NULL_TREE
3378 && block_stack->data.block.outer_cleanups == NULL_TREE))
3379 thisblock->data.block.outer_cleanups
3380 = tree_cons (NULL_TREE, block_stack->data.block.cleanups,
3381 block_stack->data.block.outer_cleanups);
3382 else
3383 thisblock->data.block.outer_cleanups = 0;
3384 thisblock->data.block.label_chain = 0;
3385 thisblock->data.block.innermost_stack_block = stack_block_stack;
3386 thisblock->data.block.first_insn = note;
3387 thisblock->data.block.block_start_count = ++current_block_start_count;
3388 thisblock->exit_label = exit_flag ? gen_label_rtx () : 0;
3389 block_stack = thisblock;
3390 nesting_stack = thisblock;
3391
3392 /* Make a new level for allocating stack slots. */
3393 push_temp_slots ();
3394 }
3395
3396 /* Specify the scope of temporaries created by TARGET_EXPRs. Similar
3397 to CLEANUP_POINT_EXPR, but handles cases when a series of calls to
3398 expand_expr is made. After we end the region, we know that all
3399 space for all temporaries that were created by TARGET_EXPRs will be
3400 destroyed and their space freed for reuse. */
3401
3402 void
3403 expand_start_target_temps ()
3404 {
3405 /* This is so that even if the result is preserved, the space
3406 allocated will be freed, as we know that it is no longer in use. */
3407 push_temp_slots ();
3408
3409 /* Start a new binding layer that will keep track of all cleanup
3410 actions to be performed. */
3411 expand_start_bindings (2);
3412
3413 target_temp_slot_level = temp_slot_level;
3414 }
3415
3416 void
3417 expand_end_target_temps ()
3418 {
3419 expand_end_bindings (NULL_TREE, 0, 0);
3420
3421 /* This is so that even if the result is preserved, the space
3422 allocated will be freed, as we know that it is no longer in use. */
3423 pop_temp_slots ();
3424 }
3425
3426 /* Given a pointer to a BLOCK node return nonzero if (and only if) the node
3427 in question represents the outermost pair of curly braces (i.e. the "body
3428 block") of a function or method.
3429
3430 For any BLOCK node representing a "body block" of a function or method, the
3431 BLOCK_SUPERCONTEXT of the node will point to another BLOCK node which
3432 represents the outermost (function) scope for the function or method (i.e.
3433 the one which includes the formal parameters). The BLOCK_SUPERCONTEXT of
3434 *that* node in turn will point to the relevant FUNCTION_DECL node. */
3435
3436 int
3437 is_body_block (stmt)
3438 tree stmt;
3439 {
3440 if (TREE_CODE (stmt) == BLOCK)
3441 {
3442 tree parent = BLOCK_SUPERCONTEXT (stmt);
3443
3444 if (parent && TREE_CODE (parent) == BLOCK)
3445 {
3446 tree grandparent = BLOCK_SUPERCONTEXT (parent);
3447
3448 if (grandparent && TREE_CODE (grandparent) == FUNCTION_DECL)
3449 return 1;
3450 }
3451 }
3452
3453 return 0;
3454 }
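
/* Illustration (comment only): for a C function

     int f (int x) { int y; return x + y; }

   the chain that is_body_block walks is

     BLOCK for `{ int y; ... }'      <- the "body block" (STMT)
       -> BLOCK for the outermost scope, covering the formal `x'
         -> FUNCTION_DECL for `f'

   where each arrow is one BLOCK_SUPERCONTEXT link, so the body
   block's grandparent is the FUNCTION_DECL and is_body_block
   returns 1 for it.  */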
3455
3456 /* True if we are currently emitting insns in an area of output code
3457 that is controlled by a conditional expression. This is used by
3458 the cleanup handling code to generate conditional cleanup actions. */
3459
3460 int
3461 conditional_context ()
3462 {
3463 return block_stack && block_stack->data.block.conditional_code;
3464 }
3465
3466 /* Return an opaque pointer to the current nesting level, so frontend code
3467 can check its own sanity. */
3468
3469 struct nesting *
3470 current_nesting_level ()
3471 {
3472 return cfun ? block_stack : 0;
3473 }
3474
3475 /* Emit a handler label for a nonlocal goto handler.
3476 Also emit code to store the handler label in SLOT before BEFORE_INSN. */
3477
3478 static rtx
3479 expand_nl_handler_label (slot, before_insn)
3480 rtx slot, before_insn;
3481 {
3482 rtx insns;
3483 rtx handler_label = gen_label_rtx ();
3484
3485 /* Don't let cleanup_cfg delete the handler. */
3486 LABEL_PRESERVE_P (handler_label) = 1;
3487
3488 start_sequence ();
3489 emit_move_insn (slot, gen_rtx_LABEL_REF (Pmode, handler_label));
3490 insns = get_insns ();
3491 end_sequence ();
3492 emit_insn_before (insns, before_insn);
3493
3494 emit_label (handler_label);
3495
3496 return handler_label;
3497 }
3498
3499 /* Emit code to restore vital registers at the beginning of a nonlocal goto
3500 handler. */
3501 static void
3502 expand_nl_goto_receiver ()
3503 {
3504 #ifdef HAVE_nonlocal_goto
3505 if (! HAVE_nonlocal_goto)
3506 #endif
3507 /* First adjust our frame pointer to its actual value. It was
3508 previously set to the start of the virtual area corresponding to
3509 the stacked variables when we branched here and now needs to be
3510 adjusted to the actual hardware fp value.
3511
3512 Assignments to virtual registers are converted by
3513 instantiate_virtual_regs into the corresponding assignment
3514 to the underlying register (fp in this case) that makes
3515 the original assignment true.
3516 So the following insn will actually be
3517 decrementing fp by STARTING_FRAME_OFFSET. */
3518 emit_move_insn (virtual_stack_vars_rtx, hard_frame_pointer_rtx);
3519
3520 #if ARG_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
3521 if (fixed_regs[ARG_POINTER_REGNUM])
3522 {
3523 #ifdef ELIMINABLE_REGS
3524 /* If the argument pointer can be eliminated in favor of the
3525 frame pointer, we don't need to restore it. We assume here
3526 that if such an elimination is present, it can always be used.
3527 This is the case on all known machines; if we don't make this
3528 assumption, we do unnecessary saving on many machines. */
3529 static const struct elims {const int from, to;} elim_regs[] = ELIMINABLE_REGS;
3530 size_t i;
3531
3532 for (i = 0; i < ARRAY_SIZE (elim_regs); i++)
3533 if (elim_regs[i].from == ARG_POINTER_REGNUM
3534 && elim_regs[i].to == HARD_FRAME_POINTER_REGNUM)
3535 break;
3536
3537 if (i == ARRAY_SIZE (elim_regs))
3538 #endif
3539 {
3540 /* Now restore our arg pointer from the address at which it
3541 was saved in our stack frame. */
3542 emit_move_insn (virtual_incoming_args_rtx,
3543 copy_to_reg (get_arg_pointer_save_area (cfun)));
3544 }
3545 }
3546 #endif
3547
3548 #ifdef HAVE_nonlocal_goto_receiver
3549 if (HAVE_nonlocal_goto_receiver)
3550 emit_insn (gen_nonlocal_goto_receiver ());
3551 #endif
3552 }
3553
3554 /* Make handlers for nonlocal gotos taking place in the function calls in
3555 block THISBLOCK. */
3556
3557 static void
3558 expand_nl_goto_receivers (thisblock)
3559 struct nesting *thisblock;
3560 {
3561 tree link;
3562 rtx afterward = gen_label_rtx ();
3563 rtx insns, slot;
3564 rtx label_list;
3565 int any_invalid;
3566
3567 /* Record the handler address in the stack slot for that purpose,
3568 during this block, saving and restoring the outer value. */
3569 if (thisblock->next != 0)
3570 for (slot = nonlocal_goto_handler_slots; slot; slot = XEXP (slot, 1))
3571 {
3572 rtx save_receiver = gen_reg_rtx (Pmode);
3573 emit_move_insn (XEXP (slot, 0), save_receiver);
3574
3575 start_sequence ();
3576 emit_move_insn (save_receiver, XEXP (slot, 0));
3577 insns = get_insns ();
3578 end_sequence ();
3579 emit_insn_before (insns, thisblock->data.block.first_insn);
3580 }
3581
3582 /* Jump around the handlers; they run only when specially invoked. */
3583 emit_jump (afterward);
3584
3585 /* Make a separate handler for each label. */
3586 link = nonlocal_labels;
3587 slot = nonlocal_goto_handler_slots;
3588 label_list = NULL_RTX;
3589 for (; link; link = TREE_CHAIN (link), slot = XEXP (slot, 1))
3590 /* Skip any labels we shouldn't be able to jump to from here;
3591 we generate one special handler for all of them below, which
3592 just expands a trap. */
3593 if (! DECL_TOO_LATE (TREE_VALUE (link)))
3594 {
3595 rtx lab;
3596 lab = expand_nl_handler_label (XEXP (slot, 0),
3597 thisblock->data.block.first_insn);
3598 label_list = gen_rtx_EXPR_LIST (VOIDmode, lab, label_list);
3599
3600 expand_nl_goto_receiver ();
3601
3602 /* Jump to the "real" nonlocal label. */
3603 expand_goto (TREE_VALUE (link));
3604 }
3605
3606 /* A second pass over all nonlocal labels; this time we handle those
3607 we should not be able to jump to at this point. */
3608 link = nonlocal_labels;
3609 slot = nonlocal_goto_handler_slots;
3610 any_invalid = 0;
3611 for (; link; link = TREE_CHAIN (link), slot = XEXP (slot, 1))
3612 if (DECL_TOO_LATE (TREE_VALUE (link)))
3613 {
3614 rtx lab;
3615 lab = expand_nl_handler_label (XEXP (slot, 0),
3616 thisblock->data.block.first_insn);
3617 label_list = gen_rtx_EXPR_LIST (VOIDmode, lab, label_list);
3618 any_invalid = 1;
3619 }
3620
3621 if (any_invalid)
3622 {
3623 expand_nl_goto_receiver ();
3624 expand_builtin_trap ();
3625 }
3626
3627 nonlocal_goto_handler_labels = label_list;
3628 emit_label (afterward);
3629 }
3630
3631 /* Warn about any unused VARS (which may contain nodes other than
3632 VAR_DECLs, but such nodes are ignored). The nodes are connected
3633 via the TREE_CHAIN field. */
3634
3635 void
3636 warn_about_unused_variables (vars)
3637 tree vars;
3638 {
3639 tree decl;
3640
3641 if (warn_unused_variable)
3642 for (decl = vars; decl; decl = TREE_CHAIN (decl))
3643 if (TREE_CODE (decl) == VAR_DECL
3644 && ! TREE_USED (decl)
3645 && ! DECL_IN_SYSTEM_HEADER (decl)
3646 && DECL_NAME (decl) && ! DECL_ARTIFICIAL (decl))
3647 warning_with_decl (decl, "unused variable `%s'");
3648 }
3649
3650 /* Generate RTL code to terminate a binding contour.
3651
3652 VARS is the chain of VAR_DECL nodes for the variables bound in this
3653 contour. There may actually be other nodes in this chain, but any
3654 nodes other than VAR_DECLS are ignored.
3655
3656 MARK_ENDS is nonzero if we should put a note at the beginning
3657 and end of this binding contour.
3658
3659 DONT_JUMP_IN is nonzero if it is not valid to jump into this contour.
3660 (That is true automatically if the contour has a saved stack level.) */
3661
3662 void
3663 expand_end_bindings (vars, mark_ends, dont_jump_in)
3664 tree vars;
3665 int mark_ends;
3666 int dont_jump_in;
3667 {
3668 struct nesting *thisblock = block_stack;
3669
3670 /* If any of the variables in this scope were not used, warn the
3671 user. */
3672 warn_about_unused_variables (vars);
3673
3674 if (thisblock->exit_label)
3675 {
3676 do_pending_stack_adjust ();
3677 emit_label (thisblock->exit_label);
3678 }
3679
3680 /* If necessary, make handlers for nonlocal gotos taking
3681 place in the function calls in this block. */
3682 if (function_call_count != thisblock->data.block.n_function_calls
3683 && nonlocal_labels
3684 /* Make handler for outermost block
3685 if there were any nonlocal gotos to this function. */
3686 && (thisblock->next == 0 ? current_function_has_nonlocal_label
3687 /* Make handler for inner block if it has something
3688 special to do when you jump out of it. */
3689 : (thisblock->data.block.cleanups != 0
3690 || thisblock->data.block.stack_level != 0)))
3691 expand_nl_goto_receivers (thisblock);
3692
3693 /* Don't allow jumping into a block that has a stack level.
3694 Cleanups are allowed, though. */
3695 if (dont_jump_in
3696 || thisblock->data.block.stack_level != 0)
3697 {
3698 struct label_chain *chain;
3699
3700 /* Any labels in this block are no longer valid to go to.
3701 Mark them to cause an error message. */
3702 for (chain = thisblock->data.block.label_chain; chain; chain = chain->next)
3703 {
3704 DECL_TOO_LATE (chain->label) = 1;
3705 /* If any goto without a fixup came to this label,
3706 that must be an error, because gotos without fixups
3707 come from outside all saved stack-levels. */
3708 if (TREE_ADDRESSABLE (chain->label))
3709 error_with_decl (chain->label,
3710 "label `%s' used before containing binding contour");
3711 }
3712 }
3713
3714 /* Restore stack level in effect before the block
3715 (only if variable-size objects allocated). */
3716 /* Perform any cleanups associated with the block. */
3717
3718 if (thisblock->data.block.stack_level != 0
3719 || thisblock->data.block.cleanups != 0)
3720 {
3721 int reachable;
3722 rtx insn;
3723
3724 /* Don't let cleanups affect ({...}) constructs. */
3725 int old_expr_stmts_for_value = expr_stmts_for_value;
3726 rtx old_last_expr_value = last_expr_value;
3727 tree old_last_expr_type = last_expr_type;
3728 expr_stmts_for_value = 0;
3729
3730 /* Only clean up here if this point can actually be reached. */
3731 insn = get_last_insn ();
3732 if (GET_CODE (insn) == NOTE)
3733 insn = prev_nonnote_insn (insn);
3734 reachable = (! insn || GET_CODE (insn) != BARRIER);
3735
3736 /* Do the cleanups. */
3737 expand_cleanups (thisblock->data.block.cleanups, NULL_TREE, 0, reachable);
3738 if (reachable)
3739 do_pending_stack_adjust ();
3740
3741 expr_stmts_for_value = old_expr_stmts_for_value;
3742 last_expr_value = old_last_expr_value;
3743 last_expr_type = old_last_expr_type;
3744
3745 /* Restore the stack level. */
3746
3747 if (reachable && thisblock->data.block.stack_level != 0)
3748 {
3749 emit_stack_restore (thisblock->next ? SAVE_BLOCK : SAVE_FUNCTION,
3750 thisblock->data.block.stack_level, NULL_RTX);
3751 if (nonlocal_goto_handler_slots != 0)
3752 emit_stack_save (SAVE_NONLOCAL, &nonlocal_goto_stack_level,
3753 NULL_RTX);
3754 }
3755
3756 /* Any gotos out of this block must also do these things.
3757 Also report any gotos with fixups that came to labels in this
3758 level. */
3759 fixup_gotos (thisblock,
3760 thisblock->data.block.stack_level,
3761 thisblock->data.block.cleanups,
3762 thisblock->data.block.first_insn,
3763 dont_jump_in);
3764 }
3765
3766 /* Mark the beginning and end of the scope if requested.
3767 We do this now, after running cleanups on the variables
3768 just going out of scope, so they are in scope for their cleanups. */
3769
3770 if (mark_ends)
3771 {
3772 rtx note = emit_note (NULL, NOTE_INSN_BLOCK_END);
3773 NOTE_BLOCK (note) = NOTE_BLOCK (thisblock->data.block.first_insn);
3774 }
3775 else
3776 /* Get rid of the beginning-mark if we don't make an end-mark. */
3777 NOTE_LINE_NUMBER (thisblock->data.block.first_insn) = NOTE_INSN_DELETED;
3778
3779 /* Restore the temporary level of TARGET_EXPRs. */
3780 target_temp_slot_level = thisblock->data.block.block_target_temp_slot_level;
3781
3782 /* Restore block_stack level for containing block. */
3783
3784 stack_block_stack = thisblock->data.block.innermost_stack_block;
3785 POPSTACK (block_stack);
3786
3787 /* Pop the stack slot nesting and free any slots at this level. */
3788 pop_temp_slots ();
3789 }
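
/* A minimal sketch (illustrative, not part of the compiler) of how a
   frontend pairs contour entry and exit when expanding a braced
   scope.  DECLS is a hypothetical name for the scope's chain of
   VAR_DECLs; the argument values mirror the pairing used by
   expand_start_target_temps / expand_end_target_temps above.  */
#if 0
  expand_start_bindings (2);
  /* ... expand the declarations and statements of the scope ... */
  expand_end_bindings (decls, 0, 0);
#endif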
3790 \f
3791 /* Generate code to save the stack pointer at the start of the current block
3792 and set up to restore it on exit. */
3793
3794 void
3795 save_stack_pointer ()
3796 {
3797 struct nesting *thisblock = block_stack;
3798
3799 if (thisblock->data.block.stack_level == 0)
3800 {
3801 emit_stack_save (thisblock->next ? SAVE_BLOCK : SAVE_FUNCTION,
3802 &thisblock->data.block.stack_level,
3803 thisblock->data.block.first_insn);
3804 stack_block_stack = thisblock;
3805 }
3806 }
3807 \f
3808 /* Generate RTL for the automatic variable declaration DECL.
3809 (Other kinds of declarations are simply ignored if seen here.) */
3810
3811 void
3812 expand_decl (decl)
3813 tree decl;
3814 {
3815 tree type;
3816
3817 type = TREE_TYPE (decl);
3818
3819 /* For a CONST_DECL, set mode, alignment, and sizes from those of the
3820 type in case this node is used in a reference. */
3821 if (TREE_CODE (decl) == CONST_DECL)
3822 {
3823 DECL_MODE (decl) = TYPE_MODE (type);
3824 DECL_ALIGN (decl) = TYPE_ALIGN (type);
3825 DECL_SIZE (decl) = TYPE_SIZE (type);
3826 DECL_SIZE_UNIT (decl) = TYPE_SIZE_UNIT (type);
3827 return;
3828 }
3829
3830 /* Otherwise, only automatic variables need any expansion done. Static and
3831 external variables, and external functions, will be handled by
3832 `assemble_variable' (called from finish_decl). TYPE_DECL requires
3833 nothing. PARM_DECLs are handled in `assign_parms'. */
3834 if (TREE_CODE (decl) != VAR_DECL)
3835 return;
3836
3837 if (TREE_STATIC (decl) || DECL_EXTERNAL (decl))
3838 return;
3839
3840 /* Create the RTL representation for the variable. */
3841
3842 if (type == error_mark_node)
3843 SET_DECL_RTL (decl, gen_rtx_MEM (BLKmode, const0_rtx));
3844
3845 else if (DECL_SIZE (decl) == 0)
3846 /* Variable with incomplete type. */
3847 {
3848 rtx x;
3849 if (DECL_INITIAL (decl) == 0)
3850 /* Error message was already done; now avoid a crash. */
3851 x = gen_rtx_MEM (BLKmode, const0_rtx);
3852 else
3853 /* An initializer is going to decide the size of this array.
3854 Until we know the size, represent its address with a reg. */
3855 x = gen_rtx_MEM (BLKmode, gen_reg_rtx (Pmode));
3856
3857 set_mem_attributes (x, decl, 1);
3858 SET_DECL_RTL (decl, x);
3859 }
3860 else if (DECL_MODE (decl) != BLKmode
3861 /* If -ffloat-store, don't put explicit float vars
3862 into regs. */
3863 && !(flag_float_store
3864 && TREE_CODE (type) == REAL_TYPE)
3865 && ! TREE_THIS_VOLATILE (decl)
3866 && (DECL_REGISTER (decl) || optimize))
3867 {
3868 /* Automatic variable that can go in a register. */
3869 int unsignedp = TREE_UNSIGNED (type);
3870 enum machine_mode reg_mode
3871 = promote_mode (type, DECL_MODE (decl), &unsignedp, 0);
3872
3873 SET_DECL_RTL (decl, gen_reg_rtx (reg_mode));
3874
3875 if (GET_CODE (DECL_RTL (decl)) == REG)
3876 REGNO_DECL (REGNO (DECL_RTL (decl))) = decl;
3877 else if (GET_CODE (DECL_RTL (decl)) == CONCAT)
3878 {
3879 REGNO_DECL (REGNO (XEXP (DECL_RTL (decl), 0))) = decl;
3880 REGNO_DECL (REGNO (XEXP (DECL_RTL (decl), 1))) = decl;
3881 }
3882
3883 mark_user_reg (DECL_RTL (decl));
3884
3885 if (POINTER_TYPE_P (type))
3886 mark_reg_pointer (DECL_RTL (decl),
3887 TYPE_ALIGN (TREE_TYPE (TREE_TYPE (decl))));
3888
3889 maybe_set_unchanging (DECL_RTL (decl), decl);
3890
3891 /* If something wants our address, try to use ADDRESSOF. */
3892 if (TREE_ADDRESSABLE (decl))
3893 put_var_into_stack (decl);
3894 }
3895
3896 else if (TREE_CODE (DECL_SIZE_UNIT (decl)) == INTEGER_CST
3897 && ! (flag_stack_check && ! STACK_CHECK_BUILTIN
3898 && 0 < compare_tree_int (DECL_SIZE_UNIT (decl),
3899 STACK_CHECK_MAX_VAR_SIZE)))
3900 {
3901 /* Variable of fixed size that goes on the stack. */
3902 rtx oldaddr = 0;
3903 rtx addr;
3904 rtx x;
3905
3906 /* If we previously made RTL for this decl, it must be an array
3907 whose size was determined by the initializer.
3908 The old address was a register; set that register now
3909 to the proper address. */
3910 if (DECL_RTL_SET_P (decl))
3911 {
3912 if (GET_CODE (DECL_RTL (decl)) != MEM
3913 || GET_CODE (XEXP (DECL_RTL (decl), 0)) != REG)
3914 abort ();
3915 oldaddr = XEXP (DECL_RTL (decl), 0);
3916 }
3917
3918 /* Set alignment we actually gave this decl. */
3919 DECL_ALIGN (decl) = (DECL_MODE (decl) == BLKmode ? BIGGEST_ALIGNMENT
3920 : GET_MODE_BITSIZE (DECL_MODE (decl)));
3921 DECL_USER_ALIGN (decl) = 0;
3922
3923 x = assign_temp (decl, 1, 1, 1);
3924 set_mem_attributes (x, decl, 1);
3925 SET_DECL_RTL (decl, x);
3926
3927 if (oldaddr)
3928 {
3929 addr = force_operand (XEXP (DECL_RTL (decl), 0), oldaddr);
3930 if (addr != oldaddr)
3931 emit_move_insn (oldaddr, addr);
3932 }
3933 }
3934 else
3935 /* Dynamic-size object: must push space on the stack. */
3936 {
3937 rtx address, size, x;
3938
3939 /* Record the stack pointer on entry to the block, if we have
3940 not already done so. */
3941 do_pending_stack_adjust ();
3942 save_stack_pointer ();
3943
3944 /* In function-at-a-time mode, variable_size doesn't expand this,
3945 so do it now. */
3946 if (TREE_CODE (type) == ARRAY_TYPE && TYPE_DOMAIN (type))
3947 expand_expr (TYPE_MAX_VALUE (TYPE_DOMAIN (type)),
3948 const0_rtx, VOIDmode, 0);
3949
3950 /* Compute the variable's size, in bytes. */
3951 size = expand_expr (DECL_SIZE_UNIT (decl), NULL_RTX, VOIDmode, 0);
3952 free_temp_slots ();
3953
3954 /* Allocate space on the stack for the variable. Note that
3955 DECL_ALIGN says how the variable is to be aligned and we
3956 cannot use it to conclude anything about the alignment of
3957 the size. */
3958 address = allocate_dynamic_stack_space (size, NULL_RTX,
3959 TYPE_ALIGN (TREE_TYPE (decl)));
3960
3961 /* Reference the variable indirectly through that rtx. */
3962 x = gen_rtx_MEM (DECL_MODE (decl), address);
3963 set_mem_attributes (x, decl, 1);
3964 SET_DECL_RTL (decl, x);
3965
3967 /* Indicate the alignment we actually gave this variable. */
3968 #ifdef STACK_BOUNDARY
3969 DECL_ALIGN (decl) = STACK_BOUNDARY;
3970 #else
3971 DECL_ALIGN (decl) = BIGGEST_ALIGNMENT;
3972 #endif
3973 DECL_USER_ALIGN (decl) = 0;
3974 }
3975 }
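
/* Illustration (comment only): assuming -O, no -ffloat-store, and no
   address-taken variables, the paths above give roughly:

     int i;           -> pseudo register, in the promoted mode
     volatile int v;  -> fixed stack slot (volatile bars registers)
     char buf[64];    -> fixed stack slot (constant-size BLKmode)
     int a[n];        -> run-time stack space from
                         allocate_dynamic_stack_space  */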
3976 \f
3977 /* Emit code to perform the initialization of a declaration DECL. */
3978
3979 void
3980 expand_decl_init (decl)
3981 tree decl;
3982 {
3983 int was_used = TREE_USED (decl);
3984
3985 /* If this is a CONST_DECL, we don't have to generate any code. Likewise
3986 for static decls. */
3987 if (TREE_CODE (decl) == CONST_DECL
3988 || TREE_STATIC (decl))
3989 return;
3990
3991 /* Compute and store the initial value now. */
3992
3993 if (DECL_INITIAL (decl) == error_mark_node)
3994 {
3995 enum tree_code code = TREE_CODE (TREE_TYPE (decl));
3996
3997 if (code == INTEGER_TYPE || code == REAL_TYPE || code == ENUMERAL_TYPE
3998 || code == POINTER_TYPE || code == REFERENCE_TYPE)
3999 expand_assignment (decl, convert (TREE_TYPE (decl), integer_zero_node),
4000 0, 0);
4001 emit_queue ();
4002 }
4003 else if (DECL_INITIAL (decl) && TREE_CODE (DECL_INITIAL (decl)) != TREE_LIST)
4004 {
4005 emit_line_note (DECL_SOURCE_FILE (decl), DECL_SOURCE_LINE (decl));
4006 expand_assignment (decl, DECL_INITIAL (decl), 0, 0);
4007 emit_queue ();
4008 }
4009
4010 /* Don't let the initialization count as "using" the variable. */
4011 TREE_USED (decl) = was_used;
4012
4013 /* Free any temporaries we made while initializing the decl. */
4014 preserve_temp_slots (NULL_RTX);
4015 free_temp_slots ();
4016 }
4017
4018 /* CLEANUP is an expression to be executed at exit from this binding contour;
4019 for example, in C++, it might call the destructor for this variable.
4020
4021 We wrap CLEANUP in an UNSAVE_EXPR node, so that we can expand the
4022 CLEANUP multiple times, and have the correct semantics. This
4023 happens in exception handling, and for gotos, returns, and breaks
4024 that leave the current scope.
4025
4026 If CLEANUP is nonzero and DECL is zero, we record a cleanup
4027 that is not associated with any particular variable. */
4028
4029 int
4030 expand_decl_cleanup (decl, cleanup)
4031 tree decl, cleanup;
4032 {
4033 struct nesting *thisblock;
4034
4035 /* Error if we are not in any block. */
4036 if (cfun == 0 || block_stack == 0)
4037 return 0;
4038
4039 thisblock = block_stack;
4040
4041 /* Record the cleanup if there is one. */
4042
4043 if (cleanup != 0)
4044 {
4045 tree t;
4046 rtx seq;
4047 tree *cleanups = &thisblock->data.block.cleanups;
4048 int cond_context = conditional_context ();
4049
4050 if (cond_context)
4051 {
4052 rtx flag = gen_reg_rtx (word_mode);
4053 rtx set_flag_0;
4054 tree cond;
4055
4056 start_sequence ();
4057 emit_move_insn (flag, const0_rtx);
4058 set_flag_0 = get_insns ();
4059 end_sequence ();
4060
4061 thisblock->data.block.last_unconditional_cleanup
4062 = emit_insn_after (set_flag_0,
4063 thisblock->data.block.last_unconditional_cleanup);
4064
4065 emit_move_insn (flag, const1_rtx);
4066
4067 cond = build_decl (VAR_DECL, NULL_TREE,
4068 (*lang_hooks.types.type_for_mode) (word_mode, 1));
4069 SET_DECL_RTL (cond, flag);
4070
4071 /* Conditionalize the cleanup. */
4072 cleanup = build (COND_EXPR, void_type_node,
4073 (*lang_hooks.truthvalue_conversion) (cond),
4074 cleanup, integer_zero_node);
4075 cleanup = fold (cleanup);
4076
4077 cleanups = &thisblock->data.block.cleanups;
4078 }
4079
4080 cleanup = unsave_expr (cleanup);
4081
4082 t = *cleanups = tree_cons (decl, cleanup, *cleanups);
4083
4084 if (! cond_context)
4085 /* If this block has a cleanup, it belongs in stack_block_stack. */
4086 stack_block_stack = thisblock;
4087
4088 if (cond_context)
4089 {
4090 start_sequence ();
4091 }
4092
4093 if (! using_eh_for_cleanups_p)
4094 TREE_ADDRESSABLE (t) = 1;
4095 else
4096 expand_eh_region_start ();
4097
4098 if (cond_context)
4099 {
4100 seq = get_insns ();
4101 end_sequence ();
4102 if (seq)
4103 thisblock->data.block.last_unconditional_cleanup
4104 = emit_insn_after (seq,
4105 thisblock->data.block.last_unconditional_cleanup);
4106 }
4107 else
4108 {
4109 thisblock->data.block.last_unconditional_cleanup
4110 = get_last_insn ();
4111 /* When we insert instructions after the last unconditional cleanup,
4112 we don't adjust last_insn. That means that a later add_insn will
4113 clobber the instructions we've just added. The easiest way to
4114 fix this is to just insert another instruction here, so that the
4115 instructions inserted after the last unconditional cleanup are
4116 never the last instruction. */
4117 emit_note (NULL, NOTE_INSN_DELETED);
4118 }
4119 }
4120 return 1;
4121 }
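
/* A minimal caller sketch (illustrative, not part of the compiler): a
   C++-like frontend registering a destructor call for a local object.
   DECL and DTOR_CALL are hypothetical trees for the VAR_DECL and the
   destructor CALL_EXPR.  Inside an `if' arm, conditional_context ()
   is nonzero and the code above guards the cleanup with a run-time
   flag so it only runs if the object was actually constructed.  */
#if 0
  if (! expand_decl_cleanup (decl, dtor_call))
    error ("destructor needed but no binding contour is open");
#endif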
4122
4123 /* Like expand_decl_cleanup, but maybe only run the cleanup if an exception
4124 is thrown. */
4125
4126 int
4127 expand_decl_cleanup_eh (decl, cleanup, eh_only)
4128 tree decl, cleanup;
4129 int eh_only;
4130 {
4131 int ret = expand_decl_cleanup (decl, cleanup);
4132 if (cleanup && ret)
4133 {
4134 tree node = block_stack->data.block.cleanups;
4135 CLEANUP_EH_ONLY (node) = eh_only;
4136 }
4137 return ret;
4138 }
4139 \f
4140 /* DECL is an anonymous union. CLEANUP is a cleanup for DECL.
4141 DECL_ELTS is the list of elements that belong to DECL's type.
4142 In each, the TREE_VALUE is a VAR_DECL, and the TREE_PURPOSE a cleanup. */
4143
4144 void
4145 expand_anon_union_decl (decl, cleanup, decl_elts)
4146 tree decl, cleanup, decl_elts;
4147 {
4148 struct nesting *thisblock = cfun == 0 ? 0 : block_stack;
4149 rtx x;
4150 tree t;
4151
4152 /* If any of the elements are addressable, so is the entire union. */
4153 for (t = decl_elts; t; t = TREE_CHAIN (t))
4154 if (TREE_ADDRESSABLE (TREE_VALUE (t)))
4155 {
4156 TREE_ADDRESSABLE (decl) = 1;
4157 break;
4158 }
4159
4160 expand_decl (decl);
4161 expand_decl_cleanup (decl, cleanup);
4162 x = DECL_RTL (decl);
4163
4164 /* Go through the elements, assigning RTL to each. */
4165 for (t = decl_elts; t; t = TREE_CHAIN (t))
4166 {
4167 tree decl_elt = TREE_VALUE (t);
4168 tree cleanup_elt = TREE_PURPOSE (t);
4169 enum machine_mode mode = TYPE_MODE (TREE_TYPE (decl_elt));
4170
4171 /* If any of the elements are used, so is the entire
4172 union. */
4173 if (TREE_USED (decl_elt))
4174 TREE_USED (decl) = 1;
4175
4176 /* Propagate the union's alignment to the elements. */
4177 DECL_ALIGN (decl_elt) = DECL_ALIGN (decl);
4178 DECL_USER_ALIGN (decl_elt) = DECL_USER_ALIGN (decl);
4179
4180 /* If the element has BLKmode and the union doesn't, the union is
4181 aligned such that the element doesn't need to have BLKmode, so
4182 change the element's mode to the appropriate one for its size. */
4183 if (mode == BLKmode && DECL_MODE (decl) != BLKmode)
4184 DECL_MODE (decl_elt) = mode
4185 = mode_for_size_tree (DECL_SIZE (decl_elt), MODE_INT, 1);
4186
4187 /* (SUBREG (MEM ...)) at RTL generation time is invalid, so we
4188 instead create a new MEM rtx with the proper mode. */
4189 if (GET_CODE (x) == MEM)
4190 {
4191 if (mode == GET_MODE (x))
4192 SET_DECL_RTL (decl_elt, x);
4193 else
4194 SET_DECL_RTL (decl_elt, adjust_address_nv (x, mode, 0));
4195 }
4196 else if (GET_CODE (x) == REG)
4197 {
4198 if (mode == GET_MODE (x))
4199 SET_DECL_RTL (decl_elt, x);
4200 else
4201 SET_DECL_RTL (decl_elt, gen_lowpart_SUBREG (mode, x));
4202 }
4203 else
4204 abort ();
4205
4206 /* Record the cleanup if there is one. */
4207
4208 if (cleanup != 0)
4209 thisblock->data.block.cleanups
4210 = tree_cons (decl_elt, cleanup_elt,
4211 thisblock->data.block.cleanups);
4212 }
4213 }
4214 \f
4215 /* Expand a list of cleanups LIST.
4216 Elements may be expressions or may be nested lists.
4217
4218 If DONT_DO is nonnull, then any list-element
4219 whose TREE_PURPOSE matches DONT_DO is omitted.
4220 This is sometimes used to avoid a cleanup associated with
4221 a value that is being returned out of the scope.
4222
4223 If IN_FIXUP is nonzero, we are generating this cleanup for a fixup
4224 goto and handle protection regions specially in that case.
4225
4226 If REACHABLE, we emit code, otherwise just inform the exception handling
4227 code about this finalization. */
4228
4229 static void
4230 expand_cleanups (list, dont_do, in_fixup, reachable)
4231 tree list;
4232 tree dont_do;
4233 int in_fixup;
4234 int reachable;
4235 {
4236 tree tail;
4237 for (tail = list; tail; tail = TREE_CHAIN (tail))
4238 if (dont_do == 0 || TREE_PURPOSE (tail) != dont_do)
4239 {
4240 if (TREE_CODE (TREE_VALUE (tail)) == TREE_LIST)
4241 expand_cleanups (TREE_VALUE (tail), dont_do, in_fixup, reachable);
4242 else
4243 {
4244 if (! in_fixup && using_eh_for_cleanups_p)
4245 expand_eh_region_end_cleanup (TREE_VALUE (tail));
4246
4247 if (reachable && !CLEANUP_EH_ONLY (tail))
4248 {
4249 /* Cleanups may be run multiple times. For example,
4250 when exiting a binding contour, we expand the
4251 cleanups associated with that contour. When a goto
4252 within that binding contour has a target outside that
4253 contour, it will expand all cleanups from its scope to
4254 the target. Though the cleanups are expanded multiple
4255 times, the control paths are non-overlapping so the
4256 cleanups will not be executed twice. */
4257
4258 /* We may need to protect from outer cleanups. */
4259 if (in_fixup && using_eh_for_cleanups_p)
4260 {
4261 expand_eh_region_start ();
4262
4263 expand_expr (TREE_VALUE (tail), const0_rtx, VOIDmode, 0);
4264
4265 expand_eh_region_end_fixup (TREE_VALUE (tail));
4266 }
4267 else
4268 expand_expr (TREE_VALUE (tail), const0_rtx, VOIDmode, 0);
4269
4270 free_temp_slots ();
4271 }
4272 }
4273 }
4274 }
4275
4276 /* Mark the context we are emitting RTL for as a conditional
4277 context, so that any cleanup actions we register with
4278 expand_decl_cleanup will be properly conditionalized when those
4279 cleanup actions are later performed. Must be called before any
4280 expression (tree) is expanded that is within a conditional context. */
4281
4282 void
4283 start_cleanup_deferral ()
4284 {
4285 /* block_stack can be NULL if we are inside the parameter list. It is
4286 OK to do nothing, because cleanups aren't possible here. */
4287 if (block_stack)
4288 ++block_stack->data.block.conditional_code;
4289 }
4290
4291 /* Mark the end of a conditional region of code. Because cleanup
4292 deferrals may be nested, we may still be in a conditional region
4293 after we end the currently deferred cleanups; only after we end all
4294 deferred cleanups are we back in unconditional code. */
4295
4296 void
4297 end_cleanup_deferral ()
4298 {
4299 /* block_stack can be NULL if we are inside the parameter list. It is
4300 OK to do nothing, because cleanups aren't possible here. */
4301 if (block_stack)
4302 --block_stack->data.block.conditional_code;
4303 }
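
/* A minimal sketch (illustrative, not part of the compiler): a
   frontend expanding only one arm of a conditional brackets that arm
   so cleanups registered inside it are conditionalized as above.  */
#if 0
  start_cleanup_deferral ();
  /* ... expand the then-clause; expand_decl_cleanup calls made here
     see conditional_context () != 0 ... */
  end_cleanup_deferral ();
#endif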
4304
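/* Return the chain of cleanups recorded for the innermost binding
   contour, or 0 if there is no enclosing contour.  */
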
4305 tree
4306 last_cleanup_this_contour ()
4307 {
4308 if (block_stack == 0)
4309 return 0;
4310
4311 return block_stack->data.block.cleanups;
4312 }
4313
4314 /* Return 1 if there are any pending cleanups at this point.
4315 If THIS_CONTOUR is nonzero, check the current contour as well.
4316 Otherwise, look only at the contours that enclose this one. */
4317
4318 int
4319 any_pending_cleanups (this_contour)
4320 int this_contour;
4321 {
4322 struct nesting *block;
4323
4324 if (cfun == NULL || cfun->stmt == NULL || block_stack == 0)
4325 return 0;
4326
4327 if (this_contour && block_stack->data.block.cleanups != NULL)
4328 return 1;
4329 if (block_stack->data.block.cleanups == 0
4330 && block_stack->data.block.outer_cleanups == 0)
4331 return 0;
4332
4333 for (block = block_stack->next; block; block = block->next)
4334 if (block->data.block.cleanups != 0)
4335 return 1;
4336
4337 return 0;
4338 }
4339 \f
4340 /* Enter a case (Pascal) or switch (C) statement.
4341 Push a block onto case_stack and nesting_stack
4342 to accumulate the case-labels that are seen
4343 and to record the labels generated for the statement.
4344
4345 EXIT_FLAG is nonzero if `exit_something' should exit this case stmt.
4346 Otherwise, this construct is transparent for `exit_something'.
4347
4348 EXPR is the index-expression to be dispatched on.
4349 TYPE is its nominal type. We could simply convert EXPR to this type,
4350 but instead we take short cuts. */
4351
4352 void
4353 expand_start_case (exit_flag, expr, type, printname)
4354 int exit_flag;
4355 tree expr;
4356 tree type;
4357 const char *printname;
4358 {
4359 struct nesting *thiscase = ALLOC_NESTING ();
4360
4361 /* Make an entry on case_stack for the case we are entering. */
4362
4363 thiscase->desc = CASE_NESTING;
4364 thiscase->next = case_stack;
4365 thiscase->all = nesting_stack;
4366 thiscase->depth = ++nesting_depth;
4367 thiscase->exit_label = exit_flag ? gen_label_rtx () : 0;
4368 thiscase->data.case_stmt.case_list = 0;
4369 thiscase->data.case_stmt.index_expr = expr;
4370 thiscase->data.case_stmt.nominal_type = type;
4371 thiscase->data.case_stmt.default_label = 0;
4372 thiscase->data.case_stmt.printname = printname;
4373 thiscase->data.case_stmt.line_number_status = force_line_numbers ();
4374 case_stack = thiscase;
4375 nesting_stack = thiscase;
4376
4377 do_pending_stack_adjust ();
4378
4379 /* Make sure case_stmt.start points to something that won't
4380 need any transformation before expand_end_case. */
4381 if (GET_CODE (get_last_insn ()) != NOTE)
4382 emit_note (NULL, NOTE_INSN_DELETED);
4383
4384 thiscase->data.case_stmt.start = get_last_insn ();
4385
4386 start_cleanup_deferral ();
4387 }
4388
4389 /* Start a "dummy case statement" within which case labels are invalid
4390 and are not connected to any larger real case statement.
4391 This can be used if you don't want to let a case statement jump
4392 into the middle of certain kinds of constructs. */
4393
4394 void
4395 expand_start_case_dummy ()
4396 {
4397 struct nesting *thiscase = ALLOC_NESTING ();
4398
4399 /* Make an entry on case_stack for the dummy. */
4400
4401 thiscase->desc = CASE_NESTING;
4402 thiscase->next = case_stack;
4403 thiscase->all = nesting_stack;
4404 thiscase->depth = ++nesting_depth;
4405 thiscase->exit_label = 0;
4406 thiscase->data.case_stmt.case_list = 0;
4407 thiscase->data.case_stmt.start = 0;
4408 thiscase->data.case_stmt.nominal_type = 0;
4409 thiscase->data.case_stmt.default_label = 0;
4410 case_stack = thiscase;
4411 nesting_stack = thiscase;
4412 start_cleanup_deferral ();
4413 }
4414 \f
4415 static void
4416 check_seenlabel ()
4417 {
4418 /* If this is the first label, warn if any insns have been emitted. */
4419 if (case_stack->data.case_stmt.line_number_status >= 0)
4420 {
4421 rtx insn;
4422
4423 restore_line_number_status
4424 (case_stack->data.case_stmt.line_number_status);
4425 case_stack->data.case_stmt.line_number_status = -1;
4426
4427 for (insn = case_stack->data.case_stmt.start;
4428 insn;
4429 insn = NEXT_INSN (insn))
4430 {
4431 if (GET_CODE (insn) == CODE_LABEL)
4432 break;
4433 if (GET_CODE (insn) != NOTE
4434 && (GET_CODE (insn) != INSN || GET_CODE (PATTERN (insn)) != USE))
4435 {
4436 do
4437 insn = PREV_INSN (insn);
4438 while (insn && (GET_CODE (insn) != NOTE || NOTE_LINE_NUMBER (insn) < 0));
4439
4440 /* If insn is zero, then there must have been a syntax error. */
4441 if (insn)
4442 warning_with_file_and_line (NOTE_SOURCE_FILE (insn),
4443 NOTE_LINE_NUMBER (insn),
4444 "unreachable code at beginning of %s",
4445 case_stack->data.case_stmt.printname);
4446 break;
4447 }
4448 }
4449 }
4450 }
4451
4452 /* Accumulate one case or default label inside a case or switch statement.
4453 VALUE is the value of the case (a null pointer, for a default label).
4454 The function CONVERTER, when applied to arguments T and V,
4455 converts the value V to the type T.
4456
4457 If not currently inside a case or switch statement, return 1 and do
4458 nothing. The caller will print a language-specific error message.
4459 If VALUE is a duplicate or overlaps, return 2 and do nothing
4460 except store the (first) duplicate node in *DUPLICATE.
4461 If VALUE is out of range, return 3 and do nothing.
4462 If we are jumping into the scope of a cleanup or var-sized array, return 5.
4463 Return 0 on success.
4464
4465 Extended to handle range statements. */
4466
4467 int
4468 pushcase (value, converter, label, duplicate)
4469 tree value;
4470 tree (*converter) PARAMS ((tree, tree));
4471 tree label;
4472 tree *duplicate;
4473 {
4474 tree index_type;
4475 tree nominal_type;
4476
4477 /* Fail if not inside a real case statement. */
4478 if (! (case_stack && case_stack->data.case_stmt.start))
4479 return 1;
4480
4481 if (stack_block_stack
4482 && stack_block_stack->depth > case_stack->depth)
4483 return 5;
4484
4485 index_type = TREE_TYPE (case_stack->data.case_stmt.index_expr);
4486 nominal_type = case_stack->data.case_stmt.nominal_type;
4487
4488 /* If the index is erroneous, avoid more problems: pretend to succeed. */
4489 if (index_type == error_mark_node)
4490 return 0;
4491
4492 /* Convert VALUE to the type in which the comparisons are nominally done. */
4493 if (value != 0)
4494 value = (*converter) (nominal_type, value);
4495
4496 check_seenlabel ();
4497
4498 /* Fail if this value is out of range for the actual type of the index
4499 (which may be narrower than NOMINAL_TYPE). */
4500 if (value != 0
4501 && (TREE_CONSTANT_OVERFLOW (value)
4502 || ! int_fits_type_p (value, index_type)))
4503 return 3;
4504
4505 return add_case_node (value, value, label, duplicate);
4506 }
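
/* A minimal caller sketch (illustrative, not part of the compiler),
   mapping the return codes documented above to diagnostics.  VALUE
   and LABEL stand in for the frontend's own trees, the message texts
   are placeholders, and `convert' is the generic tree conversion
   routine.  */
#if 0
  tree duplicate = NULL_TREE;
  switch (pushcase (value, convert, label, &duplicate))
    {
    case 0: break;                                        /* success */
    case 1: error ("case label not within a switch statement"); break;
    case 2: error_with_decl (duplicate, "duplicate case value"); break;
    case 3: error ("case label value out of range"); break;
    case 5: error ("case label within scope of cleanup or variable array"); break;
    }
#endif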
4507
4508 /* Like pushcase but this case applies to all values between VALUE1 and
4509 VALUE2 (inclusive). If VALUE1 is NULL, the range starts at the lowest
4510 value of the index type and ends at VALUE2. If VALUE2 is NULL, the range
4511 starts at VALUE1 and ends at the highest value of the index type.
4512 If both are NULL, this case applies to all values.
4513
4514 The return value is the same as that of pushcase but there is one
4515 additional error code: 4 means the specified range was empty. */
4516
4517 int
4518 pushcase_range (value1, value2, converter, label, duplicate)
4519 tree value1, value2;
4520 tree (*converter) PARAMS ((tree, tree));
4521 tree label;
4522 tree *duplicate;
4523 {
4524 tree index_type;
4525 tree nominal_type;
4526
4527 /* Fail if not inside a real case statement. */
4528 if (! (case_stack && case_stack->data.case_stmt.start))
4529 return 1;
4530
4531 if (stack_block_stack
4532 && stack_block_stack->depth > case_stack->depth)
4533 return 5;
4534
4535 index_type = TREE_TYPE (case_stack->data.case_stmt.index_expr);
4536 nominal_type = case_stack->data.case_stmt.nominal_type;
4537
4538 /* If the index is erroneous, avoid more problems: pretend to succeed. */
4539 if (index_type == error_mark_node)
4540 return 0;
4541
4542 check_seenlabel ();
4543
4544 /* Convert VALUEs to type in which the comparisons are nominally done
4545 and replace any unspecified value with the corresponding bound. */
4546 if (value1 == 0)
4547 value1 = TYPE_MIN_VALUE (index_type);
4548 if (value2 == 0)
4549 value2 = TYPE_MAX_VALUE (index_type);
4550
4551 /* Fail if the range is empty. Do this before any conversion since
4552 we want to allow out-of-range empty ranges. */
4553 if (value2 != 0 && tree_int_cst_lt (value2, value1))
4554 return 4;
4555
4556 /* If the max was unbounded, use the max of the nominal_type we are
4557 converting to. Do this after the < check above to suppress false
4558 positives. */
4559 if (value2 == 0)
4560 value2 = TYPE_MAX_VALUE (nominal_type);
4561
4562 value1 = (*converter) (nominal_type, value1);
4563 value2 = (*converter) (nominal_type, value2);
4564
4565 /* Fail if these values are out of range. */
4566 if (TREE_CONSTANT_OVERFLOW (value1)
4567 || ! int_fits_type_p (value1, index_type))
4568 return 3;
4569
4570 if (TREE_CONSTANT_OVERFLOW (value2)
4571 || ! int_fits_type_p (value2, index_type))
4572 return 3;
4573
4574 return add_case_node (value1, value2, label, duplicate);
4575 }
4576
4577 /* Do the actual insertion of a case label for pushcase and pushcase_range
4578 into case_stack->data.case_stmt.case_list. Use an AVL tree to avoid
4579 slowdown for large switch statements. */
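
/* Illustration (comment only): the BALANCE field follows the usual
   AVL convention, height of right subtree minus height of left
   subtree, kept in {-1, 0, +1}.  Inserting the ordered values 1, 2, 3
   makes node 1 right-heavy twice over, which the L-Rotation below
   repairs:

       1                  2
        \                / \
         2      ==>     1   3
          \
           3                                                         */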
4580
4581 int
4582 add_case_node (low, high, label, duplicate)
4583 tree low, high;
4584 tree label;
4585 tree *duplicate;
4586 {
4587 struct case_node *p, **q, *r;
4588
4589 /* If there's no HIGH value, then this is not a case range; it's
4590 just a simple case label. But that's just a degenerate case
4591 range. */
4592 if (!high)
4593 high = low;
4594
4595 /* Handle default labels specially. */
4596 if (!high && !low)
4597 {
4598 if (case_stack->data.case_stmt.default_label != 0)
4599 {
4600 *duplicate = case_stack->data.case_stmt.default_label;
4601 return 2;
4602 }
4603 case_stack->data.case_stmt.default_label = label;
4604 expand_label (label);
4605 return 0;
4606 }
4607
4608 q = &case_stack->data.case_stmt.case_list;
4609 p = *q;
4610
4611 while ((r = *q))
4612 {
4613 p = r;
4614
4615 /* Keep going past elements distinctly greater than HIGH. */
4616 if (tree_int_cst_lt (high, p->low))
4617 q = &p->left;
4618
4619 /* or distinctly less than LOW. */
4620 else if (tree_int_cst_lt (p->high, low))
4621 q = &p->right;
4622
4623 else
4624 {
4625 /* We have an overlap; this is an error. */
4626 *duplicate = p->code_label;
4627 return 2;
4628 }
4629 }
4630
4631 /* Add this label to the chain, and succeed. */
4632
4633 r = (struct case_node *) ggc_alloc (sizeof (struct case_node));
4634 r->low = low;
4635
4636 /* If the bounds are equal, turn this into the one-value case. */
4637 if (tree_int_cst_equal (low, high))
4638 r->high = r->low;
4639 else
4640 r->high = high;
4641
4642 r->code_label = label;
4643 expand_label (label);
4644
4645 *q = r;
4646 r->parent = p;
4647 r->left = 0;
4648 r->right = 0;
4649 r->balance = 0;
4650
4651 while (p)
4652 {
4653 struct case_node *s;
4654
4655 if (r == p->left)
4656 {
4657 int b;
4658
4659 if (! (b = p->balance))
4660 /* Growth propagation from left side. */
4661 p->balance = -1;
4662 else if (b < 0)
4663 {
4664 if (r->balance < 0)
4665 {
4666 /* R-Rotation */
4667 if ((p->left = s = r->right))
4668 s->parent = p;
4669
4670 r->right = p;
4671 p->balance = 0;
4672 r->balance = 0;
4673 s = p->parent;
4674 p->parent = r;
4675
4676 if ((r->parent = s))
4677 {
4678 if (s->left == p)
4679 s->left = r;
4680 else
4681 s->right = r;
4682 }
4683 else
4684 case_stack->data.case_stmt.case_list = r;
4685 }
4686 else
4687 /* r->balance == +1 */
4688 {
4689 /* LR-Rotation */
4690
4691 int b2;
4692 struct case_node *t = r->right;
4693
4694 if ((p->left = s = t->right))
4695 s->parent = p;
4696
4697 t->right = p;
4698 if ((r->right = s = t->left))
4699 s->parent = r;
4700
4701 t->left = r;
4702 b = t->balance;
4703 b2 = b < 0;
4704 p->balance = b2;
4705 b2 = -b2 - b;
4706 r->balance = b2;
4707 t->balance = 0;
4708 s = p->parent;
4709 p->parent = t;
4710 r->parent = t;
4711
4712 if ((t->parent = s))
4713 {
4714 if (s->left == p)
4715 s->left = t;
4716 else
4717 s->right = t;
4718 }
4719 else
4720 case_stack->data.case_stmt.case_list = t;
4721 }
4722 break;
4723 }
4724
4725 else
4726 {
4727 /* p->balance == +1; growth of left side balances the node. */
4728 p->balance = 0;
4729 break;
4730 }
4731 }
4732 else
4733 /* r == p->right */
4734 {
4735 int b;
4736
4737 if (! (b = p->balance))
4738 /* Growth propagation from right side. */
4739 p->balance++;
4740 else if (b > 0)
4741 {
4742 if (r->balance > 0)
4743 {
4744 /* L-Rotation */
4745
4746 if ((p->right = s = r->left))
4747 s->parent = p;
4748
4749 r->left = p;
4750 p->balance = 0;
4751 r->balance = 0;
4752 s = p->parent;
4753 p->parent = r;
4754 if ((r->parent = s))
4755 {
4756 if (s->left == p)
4757 s->left = r;
4758 else
4759 s->right = r;
4760 }
4761
4762 else
4763 case_stack->data.case_stmt.case_list = r;
4764 }
4765
4766 else
4767 /* r->balance == -1 */
4768 {
4769 /* RL-Rotation */
4770 int b2;
4771 struct case_node *t = r->left;
4772
4773 if ((p->right = s = t->left))
4774 s->parent = p;
4775
4776 t->left = p;
4777
4778 if ((r->left = s = t->right))
4779 s->parent = r;
4780
4781 t->right = r;
4782 b = t->balance;
4783 b2 = b < 0;
4784 r->balance = b2;
4785 b2 = -b2 - b;
4786 p->balance = b2;
4787 t->balance = 0;
4788 s = p->parent;
4789 p->parent = t;
4790 r->parent = t;
4791
4792 if ((t->parent = s))
4793 {
4794 if (s->left == p)
4795 s->left = t;
4796 else
4797 s->right = t;
4798 }
4799
4800 else
4801 case_stack->data.case_stmt.case_list = t;
4802 }
4803 break;
4804 }
4805 else
4806 {
4807 /* p->balance == -1; growth of right side balances the node. */
4808 p->balance = 0;
4809 break;
4810 }
4811 }
4812
4813 r = p;
4814 p = p->parent;
4815 }
4816
4817 return 0;
4818 }
4819 \f
4820 /* Returns the number of possible values of TYPE.
4821 Returns -1 if the number is unknown, variable, or if the number does not
4822 fit in a HOST_WIDE_INT.
4823 Sets *SPARSENESS to 2 if TYPE is an ENUMERAL_TYPE whose values
4824 do not increase monotonically (there may be duplicates);
4825 to 1 if the values increase monotonically, but not always by 1;
4826 otherwise sets it to 0. */
4827
4828 HOST_WIDE_INT
4829 all_cases_count (type, sparseness)
4830 tree type;
4831 int *sparseness;
4832 {
4833 tree t;
4834 HOST_WIDE_INT count, minval, lastval;
4835
4836 *sparseness = 0;
4837
4838 switch (TREE_CODE (type))
4839 {
4840 case BOOLEAN_TYPE:
4841 count = 2;
4842 break;
4843
4844 case CHAR_TYPE:
4845 count = 1 << BITS_PER_UNIT;
4846 break;
4847
4848 default:
4849 case INTEGER_TYPE:
4850 if (TYPE_MAX_VALUE (type) != 0
4851 && 0 != (t = fold (build (MINUS_EXPR, type, TYPE_MAX_VALUE (type),
4852 TYPE_MIN_VALUE (type))))
4853 && 0 != (t = fold (build (PLUS_EXPR, type, t,
4854 convert (type, integer_zero_node))))
4855 && host_integerp (t, 1))
4856 count = tree_low_cst (t, 1);
4857 else
4858 return -1;
4859 break;
4860
4861 case ENUMERAL_TYPE:
4862 /* Don't waste time with enumeral types with huge values. */
4863 if (! host_integerp (TYPE_MIN_VALUE (type), 0)
4864 || TYPE_MAX_VALUE (type) == 0
4865 || ! host_integerp (TYPE_MAX_VALUE (type), 0))
4866 return -1;
4867
4868 lastval = minval = tree_low_cst (TYPE_MIN_VALUE (type), 0);
4869 count = 0;
4870
4871 for (t = TYPE_VALUES (type); t != NULL_TREE; t = TREE_CHAIN (t))
4872 {
4873 HOST_WIDE_INT thisval = tree_low_cst (TREE_VALUE (t), 0);
4874
4875 if (*sparseness == 2 || thisval <= lastval)
4876 *sparseness = 2;
4877 else if (thisval != minval + count)
4878 *sparseness = 1;
4879
4880 lastval = thisval;
4881 count++;
4882 }
4883 }
4884
4885 return count;
4886 }
4887
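/* Illustration (comment only), per the contract documented above:

     enum { A, B, C }       -> count 3 (consecutive values)
     enum { A = 0, B = 4 }  -> count 2, *SPARSENESS 1 (monotonic, gaps)
     enum { A = 1, B = 1 }  -> count 2, *SPARSENESS 2 (duplicates)  */
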
4888 #define BITARRAY_TEST(ARRAY, INDEX) \
4889 ((ARRAY)[(unsigned) (INDEX) / HOST_BITS_PER_CHAR]\
4890 & (1 << ((unsigned) (INDEX) % HOST_BITS_PER_CHAR)))
4891 #define BITARRAY_SET(ARRAY, INDEX) \
4892 ((ARRAY)[(unsigned) (INDEX) / HOST_BITS_PER_CHAR]\
4893 |= 1 << ((unsigned) (INDEX) % HOST_BITS_PER_CHAR))
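
/* A minimal usage sketch (illustrative, not part of the compiler):
   the bitstring is an array of chars, one bit per case-value offset.
   SEEN, N and ALREADY_SEEN are hypothetical names.  */
#if 0
  unsigned char seen[4];                /* 4 * HOST_BITS_PER_CHAR bits */
  int n = 9, already_seen;
  memset (seen, 0, sizeof seen);
  BITARRAY_SET (seen, n);               /* record offset N */
  already_seen = BITARRAY_TEST (seen, n) != 0;    /* now 1 */
#endif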
4894
4895 /* Set the elements of the bitstring CASES_SEEN (which has length COUNT),
4896 with the case values we have seen, assuming the case expression
4897 has the given TYPE.
4898 SPARSENESS is as determined by all_cases_count.
4899
4900 The time needed is proportional to COUNT, unless
4901 SPARSENESS is 2, in which case quadratic time is needed. */
4902
4903 void
4904 mark_seen_cases (type, cases_seen, count, sparseness)
4905 tree type;
4906 unsigned char *cases_seen;
4907 HOST_WIDE_INT count;
4908 int sparseness;
4909 {
4910 tree next_node_to_try = NULL_TREE;
4911 HOST_WIDE_INT next_node_offset = 0;
4912
4913 struct case_node *n, *root = case_stack->data.case_stmt.case_list;
4914 tree val = make_node (INTEGER_CST);
4915
4916 TREE_TYPE (val) = type;
4917 if (! root)
4918 /* Do nothing. */
4919 ;
4920 else if (sparseness == 2)
4921 {
4922 tree t;
4923 unsigned HOST_WIDE_INT xlo;
4924
4925 /* This less efficient loop is only needed to handle
4926 duplicate case values (multiple enum constants
4927 with the same value). */
4928 TREE_TYPE (val) = TREE_TYPE (root->low);
4929 for (t = TYPE_VALUES (type), xlo = 0; t != NULL_TREE;
4930 t = TREE_CHAIN (t), xlo++)
4931 {
4932 TREE_INT_CST_LOW (val) = TREE_INT_CST_LOW (TREE_VALUE (t));
4933 TREE_INT_CST_HIGH (val) = TREE_INT_CST_HIGH (TREE_VALUE (t));
4934 n = root;
4935 do
4936 {
4937 /* Keep going past elements distinctly greater than VAL. */
4938 if (tree_int_cst_lt (val, n->low))
4939 n = n->left;
4940
4941 /* or distinctly less than VAL. */
4942 else if (tree_int_cst_lt (n->high, val))
4943 n = n->right;
4944
4945 else
4946 {
4947 /* We have found a matching range. */
4948 BITARRAY_SET (cases_seen, xlo);
4949 break;
4950 }
4951 }
4952 while (n);
4953 }
4954 }
4955 else
4956 {
4957 if (root->left)
4958 case_stack->data.case_stmt.case_list = root = case_tree2list (root, 0);
4959
4960 for (n = root; n; n = n->right)
4961 {
4962 TREE_INT_CST_LOW (val) = TREE_INT_CST_LOW (n->low);
4963 TREE_INT_CST_HIGH (val) = TREE_INT_CST_HIGH (n->low);
4964 while (! tree_int_cst_lt (n->high, val))
4965 {
4966 /* Calculate (into xlo) the "offset" of the integer (val).
4967 The element with lowest value has offset 0, the next smallest
4968 element has offset 1, etc. */
4969
4970 unsigned HOST_WIDE_INT xlo;
4971 HOST_WIDE_INT xhi;
4972 tree t;
4973
4974 if (sparseness && TYPE_VALUES (type) != NULL_TREE)
4975 {
4976 /* The TYPE_VALUES will be in increasing order, so
4977 start searching where we last ended. */
4978 t = next_node_to_try;
4979 xlo = next_node_offset;
4980 xhi = 0;
4981 for (;;)
4982 {
4983 if (t == NULL_TREE)
4984 {
4985 t = TYPE_VALUES (type);
4986 xlo = 0;
4987 }
4988 if (tree_int_cst_equal (val, TREE_VALUE (t)))
4989 {
4990 next_node_to_try = TREE_CHAIN (t);
4991 next_node_offset = xlo + 1;
4992 break;
4993 }
4994 xlo++;
4995 t = TREE_CHAIN (t);
4996 if (t == next_node_to_try)
4997 {
4998 xlo = -1;
4999 break;
5000 }
5001 }
5002 }
5003 else
5004 {
5005 t = TYPE_MIN_VALUE (type);
5006 if (t)
5007 neg_double (TREE_INT_CST_LOW (t), TREE_INT_CST_HIGH (t),
5008 &xlo, &xhi);
5009 else
5010 xlo = xhi = 0;
5011 add_double (xlo, xhi,
5012 TREE_INT_CST_LOW (val), TREE_INT_CST_HIGH (val),
5013 &xlo, &xhi);
5014 }
5015
5016 if (xhi == 0 && xlo < (unsigned HOST_WIDE_INT) count)
5017 BITARRAY_SET (cases_seen, xlo);
5018
5019 add_double (TREE_INT_CST_LOW (val), TREE_INT_CST_HIGH (val),
5020 1, 0,
5021 &TREE_INT_CST_LOW (val), &TREE_INT_CST_HIGH (val));
5022 }
5023 }
5024 }
5025 }
5026
5027 /* Given a switch statement with an expression that is an enumeration
5028 type, warn if any of the enumeration type's literals are not
5029 covered by the case expressions of the switch. Also, warn if there
5030 are any extra switch cases that are *not* elements of the
5031 enumerated type.
5032
5033 Historical note:
5034
5035 At one stage this function would: ``If all enumeration literals
5036 were covered by the case expressions, turn one of the expressions
5037 into the default expression since it should not be possible to fall
5038 through such a switch.''
5039
5040 That code has since been removed as: ``This optimization is
5041 disabled because it causes valid programs to fail. ANSI C does not
5042 guarantee that an expression with enum type will have a value that
5043 is the same as one of the enumeration literals.'' */
5044
5045 void
5046 check_for_full_enumeration_handling (type)
5047 tree type;
5048 {
5049 struct case_node *n;
5050 tree chain;
5051
5052 /* True iff the selector type is a numbered set mode. */
5053 int sparseness = 0;
5054
5055 /* The number of possible selector values. */
5056 HOST_WIDE_INT size;
5057
5058 /* For each possible selector value, a one iff it has been matched
5059 by a case value alternative. */
5060 unsigned char *cases_seen;
5061
5062 /* The allocated size of cases_seen, in chars. */
5063 HOST_WIDE_INT bytes_needed;
5064
5065 size = all_cases_count (type, &sparseness);
5066 bytes_needed = (size + HOST_BITS_PER_CHAR) / HOST_BITS_PER_CHAR;
5067
5068 if (size > 0 && size < 600000
5069 /* We deliberately use calloc here, not xcalloc, so that we can suppress
5070 this warning if we don't have enough memory rather than
5071 aborting, as xcalloc would do. */
5072 && (cases_seen =
5073 (unsigned char *) really_call_calloc (bytes_needed, 1)) != NULL)
5074 {
5075 HOST_WIDE_INT i;
5076 tree v = TYPE_VALUES (type);
5077
5078 /* The time complexity of this code is normally O(N), where
5079 N is the number of members in the enumerated type.
5080 However, if type is an ENUMERAL_TYPE whose values do not
5081 increase monotonically, O(N*log(N)) time may be needed. */
5082
5083 mark_seen_cases (type, cases_seen, size, sparseness);
5084
5085 for (i = 0; v != NULL_TREE && i < size; i++, v = TREE_CHAIN (v))
5086 if (BITARRAY_TEST (cases_seen, i) == 0)
5087 warning ("enumeration value `%s' not handled in switch",
5088 IDENTIFIER_POINTER (TREE_PURPOSE (v)));
5089
5090 free (cases_seen);
5091 }
5092
5093 /* Now we go the other way around; we warn if there are case
5094 expressions that don't correspond to enumerators. This can
5095 occur since C and C++ don't enforce type-checking of
5096 assignments to enumeration variables. */
5097
5098 if (case_stack->data.case_stmt.case_list
5099 && case_stack->data.case_stmt.case_list->left)
5100 case_stack->data.case_stmt.case_list
5101 = case_tree2list (case_stack->data.case_stmt.case_list, 0);
5102 for (n = case_stack->data.case_stmt.case_list; n; n = n->right)
5103 {
5104 for (chain = TYPE_VALUES (type);
5105 chain && !tree_int_cst_equal (n->low, TREE_VALUE (chain));
5106 chain = TREE_CHAIN (chain))
5107 ;
5108
5109 if (!chain)
5110 {
5111 if (TYPE_NAME (type) == 0)
5112 warning ("case value `%ld' not in enumerated type",
5113 (long) TREE_INT_CST_LOW (n->low));
5114 else
5115 warning ("case value `%ld' not in enumerated type `%s'",
5116 (long) TREE_INT_CST_LOW (n->low),
5117 IDENTIFIER_POINTER ((TREE_CODE (TYPE_NAME (type))
5118 == IDENTIFIER_NODE)
5119 ? TYPE_NAME (type)
5120 : DECL_NAME (TYPE_NAME (type))));
5121 }
5122 if (!tree_int_cst_equal (n->low, n->high))
5123 {
5124 for (chain = TYPE_VALUES (type);
5125 chain && !tree_int_cst_equal (n->high, TREE_VALUE (chain));
5126 chain = TREE_CHAIN (chain))
5127 ;
5128
5129 if (!chain)
5130 {
5131 if (TYPE_NAME (type) == 0)
5132 warning ("case value `%ld' not in enumerated type",
5133 (long) TREE_INT_CST_LOW (n->high));
5134 else
5135 warning ("case value `%ld' not in enumerated type `%s'",
5136 (long) TREE_INT_CST_LOW (n->high),
5137 IDENTIFIER_POINTER ((TREE_CODE (TYPE_NAME (type))
5138 == IDENTIFIER_NODE)
5139 ? TYPE_NAME (type)
5140 : DECL_NAME (TYPE_NAME (type))));
5141 }
5142 }
5143 }
5144 }
5145
5146 \f
5147
5148 /* Terminate a case (Pascal) or switch (C) statement
5149 in which ORIG_INDEX is the expression to be tested.
5150 If ORIG_TYPE is not NULL, it is the original ORIG_INDEX
5151 type as given in the source before any compiler conversions.
5152 Generate the code to test it and jump to the right place. */
5153
5154 void
5155 expand_end_case_type (orig_index, orig_type)
5156 tree orig_index, orig_type;
5157 {
5158 tree minval = NULL_TREE, maxval = NULL_TREE, range = NULL_TREE;
5159 rtx default_label = 0;
5160 struct case_node *n;
5161 unsigned int count;
5162 rtx index;
5163 rtx table_label;
5164 int ncases;
5165 rtx *labelvec;
5166 int i;
5167 rtx before_case, end;
5168 struct nesting *thiscase = case_stack;
5169 tree index_expr, index_type;
5170 int unsignedp;
5171
5172 /* Don't crash due to previous errors. */
5173 if (thiscase == NULL)
5174 return;
5175
5176 table_label = gen_label_rtx ();
5177 index_expr = thiscase->data.case_stmt.index_expr;
5178 index_type = TREE_TYPE (index_expr);
5179 unsignedp = TREE_UNSIGNED (index_type);
5180 if (orig_type == NULL)
5181 orig_type = TREE_TYPE (orig_index);
5182
5183 do_pending_stack_adjust ();
5184
5185 /* This might get a spurious warning in the presence of a syntax error;
5186 it could be fixed by moving the call to check_seenlabel after the
5187 check for error_mark_node, and copying the code of check_seenlabel that
5188 deals with case_stack->data.case_stmt.line_number_status /
5189 restore_line_number_status in front of the call to end_cleanup_deferral;
5190 However, this might miss some useful warnings in the presence of
5191 non-syntax errors. */
5192 check_seenlabel ();
5193
5194 /* An ERROR_MARK occurs for various reasons including invalid data type. */
5195 if (index_type != error_mark_node)
5196 {
5197 /* If the switch expression was an enumerated type, check that
5198 exactly all enumeration literals are covered by the cases.
5199 The check is made when -Wswitch was specified and there is no
5200 default case, or when -Wswitch-enum was specified. */
5201 if (((warn_switch && !thiscase->data.case_stmt.default_label)
5202 || warn_switch_enum)
5203 && TREE_CODE (orig_type) == ENUMERAL_TYPE
5204 && TREE_CODE (index_expr) != INTEGER_CST)
5205 check_for_full_enumeration_handling (orig_type);
5206
5207 if (warn_switch_default && !thiscase->data.case_stmt.default_label)
5208 warning ("switch missing default case");
5209
5210 /* If we don't have a default-label, create one here,
5211 after the body of the switch. */
5212 if (thiscase->data.case_stmt.default_label == 0)
5213 {
5214 thiscase->data.case_stmt.default_label
5215 = build_decl (LABEL_DECL, NULL_TREE, NULL_TREE);
5216 expand_label (thiscase->data.case_stmt.default_label);
5217 }
5218 default_label = label_rtx (thiscase->data.case_stmt.default_label);
5219
5220 before_case = get_last_insn ();
5221
5222 if (thiscase->data.case_stmt.case_list
5223 && thiscase->data.case_stmt.case_list->left)
5224 thiscase->data.case_stmt.case_list
5225 = case_tree2list (thiscase->data.case_stmt.case_list, 0);
5226
5227 /* Simplify the case-list before we count it. */
5228 group_case_nodes (thiscase->data.case_stmt.case_list);
5229
5230 /* Get upper and lower bounds of case values.
5231 Also convert all the case values to the index expr's data type. */
5232
5233 count = 0;
5234 for (n = thiscase->data.case_stmt.case_list; n; n = n->right)
5235 {
5236 /* Check low and high label values are integers. */
5237 if (TREE_CODE (n->low) != INTEGER_CST)
5238 abort ();
5239 if (TREE_CODE (n->high) != INTEGER_CST)
5240 abort ();
5241
5242 n->low = convert (index_type, n->low);
5243 n->high = convert (index_type, n->high);
5244
5245 /* Count the elements and track the largest and smallest
5246 of them (treating them as signed even if they are not). */
5247 if (count++ == 0)
5248 {
5249 minval = n->low;
5250 maxval = n->high;
5251 }
5252 else
5253 {
5254 if (INT_CST_LT (n->low, minval))
5255 minval = n->low;
5256 if (INT_CST_LT (maxval, n->high))
5257 maxval = n->high;
5258 }
5259 /* A range counts double, since it requires two compares. */
5260 if (! tree_int_cst_equal (n->low, n->high))
5261 count++;
5262 }
5263
5264 /* Compute span of values. */
5265 if (count != 0)
5266 range = fold (build (MINUS_EXPR, index_type, maxval, minval));
5267
5268 end_cleanup_deferral ();
5269
5270 if (count == 0)
5271 {
5272 expand_expr (index_expr, const0_rtx, VOIDmode, 0);
5273 emit_queue ();
5274 emit_jump (default_label);
5275 }
5276
5277 /* If range of values is much bigger than number of values,
5278 make a sequence of conditional branches instead of a dispatch.
5279 If the switch-index is a constant, do it this way
5280 because we can optimize it. */
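/* Worked example of the heuristic below (illustrative numbers): with
   count == 8 labels whose values span range == 100, 10 * count == 80
   is less than the range, so compare_tree_int returns a positive
   value and we build the branch tree; the same 8 labels spanning
   range == 30 would fail every disqualifier and reach the
   casesi/tablejump code instead, assuming a non-constant index and a
   default case_values_threshold.  */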
5281
5282 else if (count < case_values_threshold ()
5283 || compare_tree_int (range, 10 * count) > 0
5284 /* RANGE may be signed, and really large ranges will show up
5285 as negative numbers. */
5286 || compare_tree_int (range, 0) < 0
5287 #ifndef ASM_OUTPUT_ADDR_DIFF_ELT
5288 || flag_pic
5289 #endif
5290 || TREE_CODE (index_expr) == INTEGER_CST
5291 || (TREE_CODE (index_expr) == COMPOUND_EXPR
5292 && TREE_CODE (TREE_OPERAND (index_expr, 1)) == INTEGER_CST))
5293 {
5294 index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0);
5295
5296 /* If the index is a short or char for which we do not have
5297 an insn to handle comparisons directly, convert it to
5298 a full integer now, rather than letting each comparison
5299 generate the conversion. */
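/* For example, on a hypothetical target whose only compare insn
   operates on full words, a QImode index would be converted here
   once, to the first wider mode that has a compare insn, instead of
   being widened again at every compare in the decision tree.  */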
5300
5301 if (GET_MODE_CLASS (GET_MODE (index)) == MODE_INT
5302 && ! have_insn_for (COMPARE, GET_MODE (index)))
5303 {
5304 enum machine_mode wider_mode;
5305 for (wider_mode = GET_MODE (index); wider_mode != VOIDmode;
5306 wider_mode = GET_MODE_WIDER_MODE (wider_mode))
5307 if (have_insn_for (COMPARE, wider_mode))
5308 {
5309 index = convert_to_mode (wider_mode, index, unsignedp);
5310 break;
5311 }
5312 }
5313
5314 emit_queue ();
5315 do_pending_stack_adjust ();
5316
5317 index = protect_from_queue (index, 0);
5318 if (GET_CODE (index) == MEM)
5319 index = copy_to_reg (index);
5320 if (GET_CODE (index) == CONST_INT
5321 || TREE_CODE (index_expr) == INTEGER_CST)
5322 {
5323 /* Make a tree node with the proper constant value
5324 if we don't already have one. */
5325 if (TREE_CODE (index_expr) != INTEGER_CST)
5326 {
5327 index_expr
5328 = build_int_2 (INTVAL (index),
5329 unsignedp || INTVAL (index) >= 0 ? 0 : -1);
5330 index_expr = convert (index_type, index_expr);
5331 }
5332
5333 /* For constant index expressions we need only
5334 issue an unconditional branch to the appropriate
5335 target code. The job of removing any unreachable
5336 code is left to the optimization phase if the
5337 "-O" option is specified. */
5338 for (n = thiscase->data.case_stmt.case_list; n; n = n->right)
5339 if (! tree_int_cst_lt (index_expr, n->low)
5340 && ! tree_int_cst_lt (n->high, index_expr))
5341 break;
5342
5343 if (n)
5344 emit_jump (label_rtx (n->code_label));
5345 else
5346 emit_jump (default_label);
5347 }
5348 else
5349 {
5350 /* If the index expression is not constant we generate
5351 a binary decision tree to select the appropriate
5352 target code. This is done as follows:
5353
5354 The list of cases is rearranged into a binary tree,
5355 nearly optimal assuming equal probability for each case.
5356
5357 The tree is transformed into RTL, eliminating
5358 redundant test conditions at the same time.
5359
5360 If program flow could reach the end of the
5361 decision tree an unconditional jump to the
5362 default code is emitted. */
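/* As a sketch of the result: for the case values 1, 3, 7, 10 and
   15, with no cost table in use, the pivot chosen is 7, so the
   emitted RTL first compares INDEX against 7 and then against
   1 and 3 on one side or 10 and 15 on the other, giving a path
   length logarithmic in the number of cases.  */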
5363
5364 use_cost_table
5365 = (TREE_CODE (orig_type) != ENUMERAL_TYPE
5366 && estimate_case_costs (thiscase->data.case_stmt.case_list));
5367 balance_case_nodes (&thiscase->data.case_stmt.case_list, NULL);
5368 emit_case_nodes (index, thiscase->data.case_stmt.case_list,
5369 default_label, index_type);
5370 emit_jump_if_reachable (default_label);
5371 }
5372 }
5373 else
5374 {
5375 if (! try_casesi (index_type, index_expr, minval, range,
5376 table_label, default_label))
5377 {
5378 index_type = thiscase->data.case_stmt.nominal_type;
5379
5380 /* Index jumptables from zero for suitable values of
5381 minval to avoid a subtraction. */
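/* For instance, with case values 1, 2 and 3, MINVAL is 1, which
   lies strictly between 0 and 3; we pretend the minimum is 0, let
   RANGE grow to MAXVAL, and let table slot 0 carry the default
   label, so the generated code can index the table by INDEX with
   no subtract insn.  */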
5382 if (! optimize_size
5383 && compare_tree_int (minval, 0) > 0
5384 && compare_tree_int (minval, 3) < 0)
5385 {
5386 minval = integer_zero_node;
5387 range = maxval;
5388 }
5389
5390 if (! try_tablejump (index_type, index_expr, minval, range,
5391 table_label, default_label))
5392 abort ();
5393 }
5394
5395 /* Get table of labels to jump to, in order of case index. */
5396
5397 ncases = tree_low_cst (range, 0) + 1;
5398 labelvec = (rtx *) alloca (ncases * sizeof (rtx));
5399 memset ((char *) labelvec, 0, ncases * sizeof (rtx));
5400
5401 for (n = thiscase->data.case_stmt.case_list; n; n = n->right)
5402 {
5403 /* Compute the low and high bounds relative to the minimum
5404 value since that should fit in a HOST_WIDE_INT while the
5405 actual values may not. */
5406 HOST_WIDE_INT i_low
5407 = tree_low_cst (fold (build (MINUS_EXPR, index_type,
5408 n->low, minval)), 1);
5409 HOST_WIDE_INT i_high
5410 = tree_low_cst (fold (build (MINUS_EXPR, index_type,
5411 n->high, minval)), 1);
5412 HOST_WIDE_INT i;
5413
5414 for (i = i_low; i <= i_high; i ++)
5415 labelvec[i]
5416 = gen_rtx_LABEL_REF (Pmode, label_rtx (n->code_label));
5417 }
5418
5419 /* Fill in the gaps with the default. */
5420 for (i = 0; i < ncases; i++)
5421 if (labelvec[i] == 0)
5422 labelvec[i] = gen_rtx_LABEL_REF (Pmode, default_label);
5423
5424 /* Output the table. */
5425 emit_label (table_label);
5426
5427 if (CASE_VECTOR_PC_RELATIVE || flag_pic)
5428 emit_jump_insn (gen_rtx_ADDR_DIFF_VEC (CASE_VECTOR_MODE,
5429 gen_rtx_LABEL_REF (Pmode, table_label),
5430 gen_rtvec_v (ncases, labelvec),
5431 const0_rtx, const0_rtx));
5432 else
5433 emit_jump_insn (gen_rtx_ADDR_VEC (CASE_VECTOR_MODE,
5434 gen_rtvec_v (ncases, labelvec)));
5435
5436 /* If the case insn drops through the table,
5437 after the table we must jump to the default-label.
5438 Otherwise record no drop-through after the table. */
5439 #ifdef CASE_DROPS_THROUGH
5440 emit_jump (default_label);
5441 #else
5442 emit_barrier ();
5443 #endif
5444 }
5445
5446 before_case = NEXT_INSN (before_case);
5447 end = get_last_insn ();
5448 if (squeeze_notes (&before_case, &end))
5449 abort ();
5450 reorder_insns (before_case, end,
5451 thiscase->data.case_stmt.start);
5452 }
5453 else
5454 end_cleanup_deferral ();
5455
5456 if (thiscase->exit_label)
5457 emit_label (thiscase->exit_label);
5458
5459 POPSTACK (case_stack);
5460
5461 free_temp_slots ();
5462 }
5463
5464 /* Convert the tree NODE into a list linked by the right field, with the left
5465 field zeroed. RIGHT is used for recursion; it is a list to be placed
5466 rightmost in the resulting list. */
5467
5468 static struct case_node *
5469 case_tree2list (node, right)
5470 struct case_node *node, *right;
5471 {
5472 struct case_node *left;
5473
5474 if (node->right)
5475 right = case_tree2list (node->right, right);
5476
5477 node->right = right;
5478 if ((left = node->left))
5479 {
5480 node->left = 0;
5481 return case_tree2list (left, node);
5482 }
5483
5484 return node;
5485 }
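/* As an illustration of the transformation: given the tree

          4
         / \
        2   6
       / \
      1   3

   case_tree2list (root, 0) yields the list 1 -> 2 -> 3 -> 4 -> 6,
   i.e. an in-order walk linked through the RIGHT fields with every
   LEFT field zeroed.  */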
5486
5487 /* Generate code to jump to LABEL if OP1 and OP2 are equal. */
5488
5489 static void
5490 do_jump_if_equal (op1, op2, label, unsignedp)
5491 rtx op1, op2, label;
5492 int unsignedp;
5493 {
5494 if (GET_CODE (op1) == CONST_INT && GET_CODE (op2) == CONST_INT)
5495 {
5496 if (INTVAL (op1) == INTVAL (op2))
5497 emit_jump (label);
5498 }
5499 else
5500 emit_cmp_and_jump_insns (op1, op2, EQ, NULL_RTX,
5501 (GET_MODE (op1) == VOIDmode
5502 ? GET_MODE (op2) : GET_MODE (op1)),
5503 unsignedp, label);
5504 }
5505 \f
5506 /* Not all case values are encountered equally. This function
5507 uses a heuristic to weight case labels, in cases where that
5508 looks like a reasonable thing to do.
5509
5510 Right now, all we try to guess is text, and we establish the
5511 following weights:
5512
5513 chars above space: 16
5514 digits: 16
5515 default: 12
5516 space, punct: 8
5517 tab: 4
5518 newline: 2
5519 other "\" chars: 1
5520 remaining chars: 0
5521
5522 If we find any cases in the switch that are not either -1 or in the range
5523 of valid ASCII characters, or are control characters other than those
5524 commonly used with "\", don't treat this switch as scanning text.
5525
5526 Return 1 if these nodes are suitable for cost estimation, otherwise
5527 return 0. */
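/* For instance, a switch over the characters 'a', '0' and ' ' looks
   like text and yields the weights 16, 16 and 8, whereas a switch
   containing the value 5000, or an unusual control character such
   as '\033', makes this function return 0 and the caller fall back
   to treating every case value alike.  */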
5528
5529 static int
5530 estimate_case_costs (node)
5531 case_node_ptr node;
5532 {
5533 tree min_ascii = integer_minus_one_node;
5534 tree max_ascii = convert (TREE_TYPE (node->high), build_int_2 (127, 0));
5535 case_node_ptr n;
5536 int i;
5537
5538 /* If we haven't already made the cost table, make it now. Note that the
5539 lower bound of the table is -1, not zero. */
5540
5541 if (! cost_table_initialized)
5542 {
5543 cost_table_initialized = 1;
5544
5545 for (i = 0; i < 128; i++)
5546 {
5547 if (ISALNUM (i))
5548 COST_TABLE (i) = 16;
5549 else if (ISPUNCT (i))
5550 COST_TABLE (i) = 8;
5551 else if (ISCNTRL (i))
5552 COST_TABLE (i) = -1;
5553 }
5554
5555 COST_TABLE (' ') = 8;
5556 COST_TABLE ('\t') = 4;
5557 COST_TABLE ('\0') = 4;
5558 COST_TABLE ('\n') = 2;
5559 COST_TABLE ('\f') = 1;
5560 COST_TABLE ('\v') = 1;
5561 COST_TABLE ('\b') = 1;
5562 }
5563
5564 /* See if all the case expressions look like text. It is text if the
5565 constant is >= -1 and the highest constant is <= 127. Do all comparisons
5566 as signed arithmetic since we don't want to ever access cost_table with a
5567 value less than -1. Also check that none of the constants in a range
5568 are strange control characters. */
5569
5570 for (n = node; n; n = n->right)
5571 {
5572 if ((INT_CST_LT (n->low, min_ascii)) || INT_CST_LT (max_ascii, n->high))
5573 return 0;
5574
5575 for (i = (HOST_WIDE_INT) TREE_INT_CST_LOW (n->low);
5576 i <= (HOST_WIDE_INT) TREE_INT_CST_LOW (n->high); i++)
5577 if (COST_TABLE (i) < 0)
5578 return 0;
5579 }
5580
5581 /* All interesting values are within the range of interesting
5582 ASCII characters. */
5583 return 1;
5584 }
5585
5586 /* Scan an ordered list of case nodes
5587 combining those with consecutive values or ranges.
5588
5589 E.g. three separate entries 1: 2: 3: become one entry 1..3: */
5590
5591 static void
5592 group_case_nodes (head)
5593 case_node_ptr head;
5594 {
5595 case_node_ptr node = head;
5596
5597 while (node)
5598 {
5599 rtx lb = next_real_insn (label_rtx (node->code_label));
5600 rtx lb2;
5601 case_node_ptr np = node;
5602
5603 /* Try to group the successors of NODE with NODE. */
5604 while (((np = np->right) != 0)
5605 /* Do they jump to the same place? */
5606 && ((lb2 = next_real_insn (label_rtx (np->code_label))) == lb
5607 || (lb != 0 && lb2 != 0
5608 && simplejump_p (lb)
5609 && simplejump_p (lb2)
5610 && rtx_equal_p (SET_SRC (PATTERN (lb)),
5611 SET_SRC (PATTERN (lb2)))))
5612 /* Are their ranges consecutive? */
5613 && tree_int_cst_equal (np->low,
5614 fold (build (PLUS_EXPR,
5615 TREE_TYPE (node->high),
5616 node->high,
5617 integer_one_node)))
5618 /* An overflow is not consecutive. */
5619 && tree_int_cst_lt (node->high,
5620 fold (build (PLUS_EXPR,
5621 TREE_TYPE (node->high),
5622 node->high,
5623 integer_one_node))))
5624 {
5625 node->high = np->high;
5626 }
5627 /* NP is the first node after NODE which can't be grouped with it.
5628 Delete the nodes in between, and move on to that node. */
5629 node->right = np;
5630 node = np;
5631 }
5632 }
5633
5634 /* Take an ordered list of case nodes
5635 and transform them into a near optimal binary tree,
5636 on the assumption that any target code selection value is as
5637 likely as any other.
5638
5639 The transformation is performed by splitting the ordered
5640 list into two equal sections plus a pivot. The parts are
5641 then attached to the pivot as left and right branches. Each
5642 branch is then transformed recursively. */
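/* A small worked example: for the list 1 -> 2 -> 3 -> 4 -> 5 with
   no cost table, the split point is the third node, so 3 becomes
   the pivot, 1 -> 2 its left branch and 4 -> 5 its right branch,
   and both halves are then balanced recursively.  */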
5643
5644 static void
5645 balance_case_nodes (head, parent)
5646 case_node_ptr *head;
5647 case_node_ptr parent;
5648 {
5649 case_node_ptr np;
5650
5651 np = *head;
5652 if (np)
5653 {
5654 int cost = 0;
5655 int i = 0;
5656 int ranges = 0;
5657 case_node_ptr *npp;
5658 case_node_ptr left;
5659
5660 /* Count the number of entries in this branch. Also count the ranges. */
5661
5662 while (np)
5663 {
5664 if (!tree_int_cst_equal (np->low, np->high))
5665 {
5666 ranges++;
5667 if (use_cost_table)
5668 cost += COST_TABLE (TREE_INT_CST_LOW (np->high));
5669 }
5670
5671 if (use_cost_table)
5672 cost += COST_TABLE (TREE_INT_CST_LOW (np->low));
5673
5674 i++;
5675 np = np->right;
5676 }
5677
5678 if (i > 2)
5679 {
5680 /* Split this list if it is long enough for that to help. */
5681 npp = head;
5682 left = *npp;
5683 if (use_cost_table)
5684 {
5685 /* Find the place in the list that bisects the list's total cost;
5686 here I gets half the total cost. */
5687 int n_moved = 0;
5688 i = (cost + 1) / 2;
5689 while (1)
5690 {
5691 /* Skip nodes while their cost does not reach that amount. */
5692 if (!tree_int_cst_equal ((*npp)->low, (*npp)->high))
5693 i -= COST_TABLE (TREE_INT_CST_LOW ((*npp)->high));
5694 i -= COST_TABLE (TREE_INT_CST_LOW ((*npp)->low));
5695 if (i <= 0)
5696 break;
5697 npp = &(*npp)->right;
5698 n_moved += 1;
5699 }
5700 if (n_moved == 0)
5701 {
5702 /* Leave this branch lopsided, but optimize left-hand
5703 side and fill in `parent' fields for right-hand side. */
5704 np = *head;
5705 np->parent = parent;
5706 balance_case_nodes (&np->left, np);
5707 for (; np->right; np = np->right)
5708 np->right->parent = np;
5709 return;
5710 }
5711 }
5712 /* If there are just three nodes, split at the middle one. */
5713 else if (i == 3)
5714 npp = &(*npp)->right;
5715 else
5716 {
5717 /* Find the place in the list that bisects the list's total cost,
5718 where ranges count as 2.
5719 Here I gets half the total cost. */
5720 i = (i + ranges + 1) / 2;
5721 while (1)
5722 {
5723 /* Skip nodes while their cost does not reach that amount. */
5724 if (!tree_int_cst_equal ((*npp)->low, (*npp)->high))
5725 i--;
5726 i--;
5727 if (i <= 0)
5728 break;
5729 npp = &(*npp)->right;
5730 }
5731 }
5732 *head = np = *npp;
5733 *npp = 0;
5734 np->parent = parent;
5735 np->left = left;
5736
5737 /* Optimize each of the two split parts. */
5738 balance_case_nodes (&np->left, np);
5739 balance_case_nodes (&np->right, np);
5740 }
5741 else
5742 {
5743 /* Else leave this branch as one level,
5744 but fill in `parent' fields. */
5745 np = *head;
5746 np->parent = parent;
5747 for (; np->right; np = np->right)
5748 np->right->parent = np;
5749 }
5750 }
5751 }
5752 \f
5753 /* Search the parent sections of the case node tree
5754 to see if a test for the lower bound of NODE would be redundant.
5755 INDEX_TYPE is the type of the index expression.
5756
5757 The instructions to generate the case decision tree are
5758 output in the same order as nodes are processed, so it is
5759 known that if a parent node checks the range of the current
5760 node minus one, then the current node is bounded at its lower
5761 span. Thus the test would be redundant. */
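/* For example, if NODE covers 5 ... 7 and some ancestor's high
   bound is 4, control reaches NODE's compare code only on the path
   where INDEX > 4 has already been established, so the low-bound
   test INDEX >= 5 is redundant; the loop below detects this by
   matching low - 1 == 4 against the ancestor's high value.  */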
5762
5763 static int
5764 node_has_low_bound (node, index_type)
5765 case_node_ptr node;
5766 tree index_type;
5767 {
5768 tree low_minus_one;
5769 case_node_ptr pnode;
5770
5771 /* If the lower bound of this node is the lowest value in the index type,
5772 we need not test it. */
5773
5774 if (tree_int_cst_equal (node->low, TYPE_MIN_VALUE (index_type)))
5775 return 1;
5776
5777 /* If this node has a left branch, the value at the left must be less
5778 than that at this node, so it cannot be bounded at the bottom and
5779 we need not bother testing any further. */
5780
5781 if (node->left)
5782 return 0;
5783
5784 low_minus_one = fold (build (MINUS_EXPR, TREE_TYPE (node->low),
5785 node->low, integer_one_node));
5786
5787 /* If the subtraction above overflowed, we can't verify anything.
5788 Otherwise, look for a parent that tests our value - 1. */
5789
5790 if (! tree_int_cst_lt (low_minus_one, node->low))
5791 return 0;
5792
5793 for (pnode = node->parent; pnode; pnode = pnode->parent)
5794 if (tree_int_cst_equal (low_minus_one, pnode->high))
5795 return 1;
5796
5797 return 0;
5798 }
5799
5800 /* Search the parent sections of the case node tree
5801 to see if a test for the upper bound of NODE would be redundant.
5802 INDEX_TYPE is the type of the index expression.
5803
5804 The instructions to generate the case decision tree are
5805 output in the same order as nodes are processed, so it is
5806 known that if a parent node checks the range of the current
5807 node plus one, then the current node is bounded at its upper
5808 span. Thus the test would be redundant. */
5809
5810 static int
5811 node_has_high_bound (node, index_type)
5812 case_node_ptr node;
5813 tree index_type;
5814 {
5815 tree high_plus_one;
5816 case_node_ptr pnode;
5817
5818 /* If there is no upper bound, obviously no test is needed. */
5819
5820 if (TYPE_MAX_VALUE (index_type) == NULL)
5821 return 1;
5822
5823 /* If the upper bound of this node is the highest value in the type
5824 of the index expression, we need not test against it. */
5825
5826 if (tree_int_cst_equal (node->high, TYPE_MAX_VALUE (index_type)))
5827 return 1;
5828
5829 /* If this node has a right branch, the value at the right must be greater
5830 than that at this node, so it cannot be bounded at the top and
5831 we need not bother testing any further. */
5832
5833 if (node->right)
5834 return 0;
5835
5836 high_plus_one = fold (build (PLUS_EXPR, TREE_TYPE (node->high),
5837 node->high, integer_one_node));
5838
5839 /* If the addition above overflowed, we can't verify anything.
5840 Otherwise, look for a parent that tests our value + 1. */
5841
5842 if (! tree_int_cst_lt (node->high, high_plus_one))
5843 return 0;
5844
5845 for (pnode = node->parent; pnode; pnode = pnode->parent)
5846 if (tree_int_cst_equal (high_plus_one, pnode->low))
5847 return 1;
5848
5849 return 0;
5850 }
5851
5852 /* Search the parent sections of the
5853 case node tree to see if both tests for the upper and lower
5854 bounds of NODE would be redundant. */
5855
5856 static int
5857 node_is_bounded (node, index_type)
5858 case_node_ptr node;
5859 tree index_type;
5860 {
5861 return (node_has_low_bound (node, index_type)
5862 && node_has_high_bound (node, index_type));
5863 }
5864
5865 /* Emit an unconditional jump to LABEL unless it would be dead code. */
5866
5867 static void
5868 emit_jump_if_reachable (label)
5869 rtx label;
5870 {
5871 if (GET_CODE (get_last_insn ()) != BARRIER)
5872 emit_jump (label);
5873 }
5874 \f
5875 /* Emit step-by-step code to select a case for the value of INDEX.
5876 The decision tree thus generated follows the form of the
5877 case-node binary tree NODE, whose nodes represent test conditions.
5878 INDEX_TYPE is the type of the index of the switch.
5879
5880 Care is taken to prune redundant tests from the decision tree
5881 by detecting any boundary conditions already checked by
5882 emitted rtx. (See node_has_high_bound, node_has_low_bound
5883 and node_is_bounded, above.)
5884
5885 Where the test conditions can be shown to be redundant we emit
5886 an unconditional jump to the target code. As a further
5887 optimization, the subordinates of a tree node are examined to
5888 check for bounded nodes. In this case conditional and/or
5889 unconditional jumps resulting from the boundary check for the
5890 current node are arranged to target the subordinates' associated
5891 code for out-of-bound conditions on the current node.
5892
5893 We can assume that when control reaches the code generated here,
5894 the index value has already been compared with the parents
5895 of this node, and determined to be on the same side of each parent
5896 as this node is. Thus, if this node tests for the value 51,
5897 and a parent tested for 52, we don't need to consider
5898 the possibility of a value greater than 51. If another parent
5899 tests for the value 50, then this node need not test anything. */
5900
5901 static void
5902 emit_case_nodes (index, node, default_label, index_type)
5903 rtx index;
5904 case_node_ptr node;
5905 rtx default_label;
5906 tree index_type;
5907 {
5908 /* If INDEX has an unsigned type, we must make unsigned branches. */
5909 int unsignedp = TREE_UNSIGNED (index_type);
5910 enum machine_mode mode = GET_MODE (index);
5911 enum machine_mode imode = TYPE_MODE (index_type);
5912
5913 /* See if our parents have already tested everything for us.
5914 If they have, emit an unconditional jump for this node. */
5915 if (node_is_bounded (node, index_type))
5916 emit_jump (label_rtx (node->code_label));
5917
5918 else if (tree_int_cst_equal (node->low, node->high))
5919 {
5920 /* Node is single valued. First see if the index expression matches
5921 this node and then check our children, if any. */
5922
5923 do_jump_if_equal (index,
5924 convert_modes (mode, imode,
5925 expand_expr (node->low, NULL_RTX,
5926 VOIDmode, 0),
5927 unsignedp),
5928 label_rtx (node->code_label), unsignedp);
5929
5930 if (node->right != 0 && node->left != 0)
5931 {
5932 /* This node has children on both sides.
5933 Dispatch to one side or the other
5934 by comparing the index value with this node's value.
5935 If one subtree is bounded, check that one first,
5936 so we can avoid real branches in the tree. */
5937
5938 if (node_is_bounded (node->right, index_type))
5939 {
5940 emit_cmp_and_jump_insns (index,
5941 convert_modes
5942 (mode, imode,
5943 expand_expr (node->high, NULL_RTX,
5944 VOIDmode, 0),
5945 unsignedp),
5946 GT, NULL_RTX, mode, unsignedp,
5947 label_rtx (node->right->code_label));
5948 emit_case_nodes (index, node->left, default_label, index_type);
5949 }
5950
5951 else if (node_is_bounded (node->left, index_type))
5952 {
5953 emit_cmp_and_jump_insns (index,
5954 convert_modes
5955 (mode, imode,
5956 expand_expr (node->high, NULL_RTX,
5957 VOIDmode, 0),
5958 unsignedp),
5959 LT, NULL_RTX, mode, unsignedp,
5960 label_rtx (node->left->code_label));
5961 emit_case_nodes (index, node->right, default_label, index_type);
5962 }
5963
5964 else
5965 {
5966 /* Neither node is bounded. First distinguish the two sides;
5967 then emit the code for one side at a time. */
5968
5969 tree test_label = build_decl (LABEL_DECL, NULL_TREE, NULL_TREE);
5970
5971 /* See if the value is on the right. */
5972 emit_cmp_and_jump_insns (index,
5973 convert_modes
5974 (mode, imode,
5975 expand_expr (node->high, NULL_RTX,
5976 VOIDmode, 0),
5977 unsignedp),
5978 GT, NULL_RTX, mode, unsignedp,
5979 label_rtx (test_label));
5980
5981 /* Value must be on the left.
5982 Handle the left-hand subtree. */
5983 emit_case_nodes (index, node->left, default_label, index_type);
5984 /* If left-hand subtree does nothing,
5985 go to default. */
5986 emit_jump_if_reachable (default_label);
5987
5988 /* Code branches here for the right-hand subtree. */
5989 expand_label (test_label);
5990 emit_case_nodes (index, node->right, default_label, index_type);
5991 }
5992 }
5993
5994 else if (node->right != 0 && node->left == 0)
5995 {
5996 /* Here we have a right child but no left, so we issue a conditional
5997 branch to default and process the right child.
5998
5999 Omit the conditional branch to default if it would avoid only one
6000 right child; it costs too much space to save so little time. */
6001
6002 if (node->right->right || node->right->left
6003 || !tree_int_cst_equal (node->right->low, node->right->high))
6004 {
6005 if (!node_has_low_bound (node, index_type))
6006 {
6007 emit_cmp_and_jump_insns (index,
6008 convert_modes
6009 (mode, imode,
6010 expand_expr (node->high, NULL_RTX,
6011 VOIDmode, 0),
6012 unsignedp),
6013 LT, NULL_RTX, mode, unsignedp,
6014 default_label);
6015 }
6016
6017 emit_case_nodes (index, node->right, default_label, index_type);
6018 }
6019 else
6020 /* We cannot process node->right normally
6021 since we haven't ruled out the numbers less than
6022 this node's value. So handle node->right explicitly. */
6023 do_jump_if_equal (index,
6024 convert_modes
6025 (mode, imode,
6026 expand_expr (node->right->low, NULL_RTX,
6027 VOIDmode, 0),
6028 unsignedp),
6029 label_rtx (node->right->code_label), unsignedp);
6030 }
6031
6032 else if (node->right == 0 && node->left != 0)
6033 {
6034 /* Just one subtree, on the left. */
6035 if (node->left->left || node->left->right
6036 || !tree_int_cst_equal (node->left->low, node->left->high))
6037 {
6038 if (!node_has_high_bound (node, index_type))
6039 {
6040 emit_cmp_and_jump_insns (index,
6041 convert_modes
6042 (mode, imode,
6043 expand_expr (node->high, NULL_RTX,
6044 VOIDmode, 0),
6045 unsignedp),
6046 GT, NULL_RTX, mode, unsignedp,
6047 default_label);
6048 }
6049
6050 emit_case_nodes (index, node->left, default_label, index_type);
6051 }
6052 else
6053 /* We cannot process node->left normally
6054 since we haven't ruled out the numbers greater than
6055 this node's value. So handle node->left explicitly. */
6056 do_jump_if_equal (index,
6057 convert_modes
6058 (mode, imode,
6059 expand_expr (node->left->low, NULL_RTX,
6060 VOIDmode, 0),
6061 unsignedp),
6062 label_rtx (node->left->code_label), unsignedp);
6063 }
6064 }
6065 else
6066 {
6067 /* Node is a range. These cases are very similar to those for a single
6068 value, except that we do not start by testing whether this node
6069 is the one to branch to. */
6070
6071 if (node->right != 0 && node->left != 0)
6072 {
6073 /* Node has subtrees on both sides.
6074 If the right-hand subtree is bounded,
6075 test for it first, since we can go straight there.
6076 Otherwise, we need to make a branch in the control structure,
6077 then handle the two subtrees. */
6078 tree test_label = 0;
6079
6080 if (node_is_bounded (node->right, index_type))
6081 /* Right hand node is fully bounded so we can eliminate any
6082 testing and branch directly to the target code. */
6083 emit_cmp_and_jump_insns (index,
6084 convert_modes
6085 (mode, imode,
6086 expand_expr (node->high, NULL_RTX,
6087 VOIDmode, 0),
6088 unsignedp),
6089 GT, NULL_RTX, mode, unsignedp,
6090 label_rtx (node->right->code_label));
6091 else
6092 {
6093 /* Right hand node requires testing.
6094 Branch to a label where we will handle it later. */
6095
6096 test_label = build_decl (LABEL_DECL, NULL_TREE, NULL_TREE);
6097 emit_cmp_and_jump_insns (index,
6098 convert_modes
6099 (mode, imode,
6100 expand_expr (node->high, NULL_RTX,
6101 VOIDmode, 0),
6102 unsignedp),
6103 GT, NULL_RTX, mode, unsignedp,
6104 label_rtx (test_label));
6105 }
6106
6107 /* Value belongs to this node or to the left-hand subtree. */
6108
6109 emit_cmp_and_jump_insns (index,
6110 convert_modes
6111 (mode, imode,
6112 expand_expr (node->low, NULL_RTX,
6113 VOIDmode, 0),
6114 unsignedp),
6115 GE, NULL_RTX, mode, unsignedp,
6116 label_rtx (node->code_label));
6117
6118 /* Handle the left-hand subtree. */
6119 emit_case_nodes (index, node->left, default_label, index_type);
6120
6121 /* If right node had to be handled later, do that now. */
6122
6123 if (test_label)
6124 {
6125 /* If the left-hand subtree fell through,
6126 don't let it fall into the right-hand subtree. */
6127 emit_jump_if_reachable (default_label);
6128
6129 expand_label (test_label);
6130 emit_case_nodes (index, node->right, default_label, index_type);
6131 }
6132 }
6133
6134 else if (node->right != 0 && node->left == 0)
6135 {
6136 /* Deal with values to the left of this node,
6137 if they are possible. */
6138 if (!node_has_low_bound (node, index_type))
6139 {
6140 emit_cmp_and_jump_insns (index,
6141 convert_modes
6142 (mode, imode,
6143 expand_expr (node->low, NULL_RTX,
6144 VOIDmode, 0),
6145 unsignedp),
6146 LT, NULL_RTX, mode, unsignedp,
6147 default_label);
6148 }
6149
6150 /* Value belongs to this node or to the right-hand subtree. */
6151
6152 emit_cmp_and_jump_insns (index,
6153 convert_modes
6154 (mode, imode,
6155 expand_expr (node->high, NULL_RTX,
6156 VOIDmode, 0),
6157 unsignedp),
6158 LE, NULL_RTX, mode, unsignedp,
6159 label_rtx (node->code_label));
6160
6161 emit_case_nodes (index, node->right, default_label, index_type);
6162 }
6163
6164 else if (node->right == 0 && node->left != 0)
6165 {
6166 /* Deal with values to the right of this node,
6167 if they are possible. */
6168 if (!node_has_high_bound (node, index_type))
6169 {
6170 emit_cmp_and_jump_insns (index,
6171 convert_modes
6172 (mode, imode,
6173 expand_expr (node->high, NULL_RTX,
6174 VOIDmode, 0),
6175 unsignedp),
6176 GT, NULL_RTX, mode, unsignedp,
6177 default_label);
6178 }
6179
6180 /* Value belongs to this node or to the left-hand subtree. */
6181
6182 emit_cmp_and_jump_insns (index,
6183 convert_modes
6184 (mode, imode,
6185 expand_expr (node->low, NULL_RTX,
6186 VOIDmode, 0),
6187 unsignedp),
6188 GE, NULL_RTX, mode, unsignedp,
6189 label_rtx (node->code_label));
6190
6191 emit_case_nodes (index, node->left, default_label, index_type);
6192 }
6193
6194 else
6195 {
6196 /* Node has no children so we check low and high bounds to remove
6197 redundant tests. Only one of the bounds can exist,
6198 since otherwise this node is bounded--a case tested already. */
6199 int high_bound = node_has_high_bound (node, index_type);
6200 int low_bound = node_has_low_bound (node, index_type);
6201
6202 if (!high_bound && low_bound)
6203 {
6204 emit_cmp_and_jump_insns (index,
6205 convert_modes
6206 (mode, imode,
6207 expand_expr (node->high, NULL_RTX,
6208 VOIDmode, 0),
6209 unsignedp),
6210 GT, NULL_RTX, mode, unsignedp,
6211 default_label);
6212 }
6213
6214 else if (!low_bound && high_bound)
6215 {
6216 emit_cmp_and_jump_insns (index,
6217 convert_modes
6218 (mode, imode,
6219 expand_expr (node->low, NULL_RTX,
6220 VOIDmode, 0),
6221 unsignedp),
6222 LT, NULL_RTX, mode, unsignedp,
6223 default_label);
6224 }
6225 else if (!low_bound && !high_bound)
6226 {
6227 /* Widen LOW and HIGH to the same width as INDEX. */
6228 tree type = (*lang_hooks.types.type_for_mode) (mode, unsignedp);
6229 tree low = build1 (CONVERT_EXPR, type, node->low);
6230 tree high = build1 (CONVERT_EXPR, type, node->high);
6231 rtx low_rtx, new_index, new_bound;
6232
6233 /* Instead of doing two branches, emit one unsigned branch for
6234 (index-low) > (high-low). */
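/* Concretely: with low == 3 and high == 7 the single test is
   (unsigned) (INDEX - 3) > 4.  If INDEX < 3 the subtraction wraps
   to a very large unsigned value, and if INDEX > 7 the difference
   exceeds 4, so one unsigned comparison subsumes both of the
   ordinary bound checks.  */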
6235 low_rtx = expand_expr (low, NULL_RTX, mode, 0);
6236 new_index = expand_simple_binop (mode, MINUS, index, low_rtx,
6237 NULL_RTX, unsignedp,
6238 OPTAB_WIDEN);
6239 new_bound = expand_expr (fold (build (MINUS_EXPR, type,
6240 high, low)),
6241 NULL_RTX, mode, 0);
6242
6243 emit_cmp_and_jump_insns (new_index, new_bound, GT, NULL_RTX,
6244 mode, 1, default_label);
6245 }
6246
6247 emit_jump (label_rtx (node->code_label));
6248 }
6249 }
6250 }
6251
6252 #include "gt-stmt.h"