/* Expands front end tree to back end RTL for GCC
   Copyright (C) 1987, 1988, 1989, 1992, 1993, 1994, 1995, 1996, 1997,
   1998, 1999, 2000, 2001, 2002, 2003, 2004 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 2, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING.  If not, write to the Free
Software Foundation, 59 Temple Place - Suite 330, Boston, MA
02111-1307, USA.  */

/* This file handles the generation of rtl code from tree structure
   above the level of expressions, using subroutines in exp*.c and emit-rtl.c.
   It also creates the rtl expressions for parameters and auto variables
   and has full responsibility for allocating stack slots.

   The functions whose names start with `expand_' are called by the
   parser to generate RTL instructions for various kinds of constructs.

   Some control and binding constructs require calling several such
   functions at different times.  For example, a simple if-then
   is expanded by calling `expand_start_cond' (with the condition-expression
   as argument) before parsing the then-clause and calling `expand_end_cond'
   after parsing the then-clause.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"

#include "rtl.h"
#include "tree.h"
#include "tm_p.h"
#include "flags.h"
#include "except.h"
#include "function.h"
#include "insn-config.h"
#include "expr.h"
#include "libfuncs.h"
#include "hard-reg-set.h"
#include "loop.h"
#include "recog.h"
#include "machmode.h"
#include "toplev.h"
#include "output.h"
#include "ggc.h"
#include "langhooks.h"
#include "predict.h"
#include "optabs.h"
#include "target.h"
#include "regs.h"
\f
/* Functions and data structures for expanding case statements.  */

/* Case label structure, used to hold info on labels within case
   statements.  We handle "range" labels; for a single-value label
   as in C, the high and low limits are the same.

   An AVL tree of case nodes is initially created, and later transformed
   to a list linked via the RIGHT fields in the nodes.  Nodes with
   higher case values are later in the list.

   Switch statements can be output in one of two forms.  A branch table
   is used if there are more than a few labels and the labels are dense
   within the range between the smallest and largest case value.  If a
   branch table is used, no further manipulations are done with the case
   node chain.

   The alternative to the use of a branch table is to generate a series
   of compare and jump insns.  When that is done, we use the LEFT, RIGHT,
   and PARENT fields to hold a binary tree.  Initially the tree is
   totally unbalanced, with everything on the right.  We balance the tree
   with nodes on the left having lower case values than the parent
   and nodes on the right having higher values.  We then output the tree
   in order.  */
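
/* Illustrative example (not from the original sources): a C switch such as

     switch (i)
       {
       case 1:       ...
       case 4 ... 6: ...
       default:      ...
       }

   yields one case_node per label.  The `case 1' node has LOW == HIGH == 1,
   while the GNU C range label `case 4 ... 6' becomes a single node with
   LOW = 4 and HIGH = 6.  Whether these nodes end up as a branch table or
   as a balanced tree of compare-and-jump insns is decided later, once the
   whole case statement has been seen.  */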

struct case_node GTY(())
{
  struct case_node *left;	/* Left son in binary tree */
  struct case_node *right;	/* Right son in binary tree; also node chain */
  struct case_node *parent;	/* Parent of node in binary tree */
  tree low;			/* Lowest index value for this label */
  tree high;			/* Highest index value for this label */
  tree code_label;		/* Label to jump to when node matches */
  int balance;
};

typedef struct case_node case_node;
typedef struct case_node *case_node_ptr;

/* These are used by estimate_case_costs and balance_case_nodes.  */

/* This must be a signed type, and non-ANSI compilers lack signed char.  */
static short cost_table_[129];
static int use_cost_table;
static int cost_table_initialized;

/* Special care is needed because we allow -1, but TREE_INT_CST_LOW
   is unsigned.  */
#define COST_TABLE(I)  cost_table_[(unsigned HOST_WIDE_INT) ((I) + 1)]
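
/* For instance, COST_TABLE (-1) expands to cost_table_[0], so values -1
   through 127 are covered without ever forming a negative array index.  */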
\f
/* Stack of control and binding constructs we are currently inside.

   These constructs begin when you call `expand_start_WHATEVER'
   and end when you call `expand_end_WHATEVER'.  This stack records
   info about how the construct began that tells the end-function
   what to do.  It also may provide information about the construct
   to alter the behavior of other constructs within the body.
   For example, they may affect the behavior of C `break' and `continue'.

   Each construct gets one `struct nesting' object.
   All of these objects are chained through the `all' field.
   `nesting_stack' points to the first object (innermost construct).
   The position of an entry on `nesting_stack' is in its `depth' field.

   Each type of construct has its own individual stack.
   For example, conditionals have `cond_stack'.  Each object points to the
   next object of the same type through the `next' field.

   Some constructs are visible to `break' exit-statements and others
   are not.  Which constructs are visible depends on the language.
   Therefore, the data structure allows each construct to be visible
   or not, according to the args given when the construct is started.
   The construct is visible if the `exit_label' field is non-null.
   In that case, the value should be a CODE_LABEL rtx.  */

struct nesting GTY(())
{
  struct nesting *all;
  struct nesting *next;
  int depth;
  rtx exit_label;
  enum nesting_desc {
    COND_NESTING,
    BLOCK_NESTING,
    CASE_NESTING
  } desc;
  union nesting_u
    {
      /* For conds (if-then and if-then-else statements).  */
      struct nesting_cond
	{
	  /* Label for the end of the if construct.
	     There is none if EXITFLAG was not set
	     and no `else' has been seen yet.  */
	  rtx endif_label;
	  /* Label for the end of this alternative.
	     This may be the end of the if or the next else/elseif.  */
	  rtx next_label;
	} GTY ((tag ("COND_NESTING"))) cond;
      /* For variable binding contours.  */
      struct nesting_block
	{
	  /* Sequence number of this binding contour within the function,
	     in order of entry.  */
	  int block_start_count;
	  /* The NOTE that starts this contour.
	     Used by expand_goto to check whether the destination
	     is within each contour or not.  */
	  rtx first_insn;
	  /* The saved target_temp_slot_level from our outer block.
	     We may reset target_temp_slot_level to be the level of
	     this block; if that is done, target_temp_slot_level
	     reverts to the saved target_temp_slot_level at the very
	     end of the block.  */
	  int block_target_temp_slot_level;
	  /* True if we are currently emitting insns in an area of
	     output code that is controlled by a conditional
	     expression.  This is used by the cleanup handling code to
	     generate conditional cleanup actions.  */
	  int conditional_code;
	  /* A place to move the start of the exception region for any
	     of the conditional cleanups, must be at the end or after
	     the start of the last unconditional cleanup, and before any
	     conditional branch points.  */
	  rtx last_unconditional_cleanup;
	} GTY ((tag ("BLOCK_NESTING"))) block;
      /* For switch (C) or case (Pascal) statements.  */
      struct nesting_case
	{
	  /* The insn after which the case dispatch should finally
	     be emitted.  Zero for a dummy.  */
	  rtx start;
	  /* A list of case labels; it is first built as an AVL tree.
	     During expand_end_case, this is converted to a list, and may be
	     rearranged into a nearly balanced binary tree.  */
	  struct case_node *case_list;
	  /* Label to jump to if no case matches.  */
	  tree default_label;
	  /* The expression to be dispatched on.  */
	  tree index_expr;
	  /* Type that INDEX_EXPR should be converted to.  */
	  tree nominal_type;
	  /* Name of this kind of statement, for warnings.  */
	  const char *printname;
	  /* Used to save no_line_numbers till we see the first case label.
	     We set this to -1 when we see the first case label in this
	     case statement.  */
	  int line_number_status;
	} GTY ((tag ("CASE_NESTING"))) case_stmt;
    } GTY ((desc ("%1.desc"))) data;
};

/* Allocate and return a new `struct nesting'.  */

#define ALLOC_NESTING() ggc_alloc (sizeof (struct nesting))

/* Pop the nesting stack element by element until we pop off
   the element which is at the top of STACK.
   Update all the other stacks, popping off elements from them
   as we pop them from nesting_stack.  */

#define POPSTACK(STACK)					\
do { struct nesting *target = STACK;			\
     struct nesting *this;				\
     do { this = nesting_stack;				\
	  if (cond_stack == this)			\
	    cond_stack = cond_stack->next;		\
	  if (block_stack == this)			\
	    block_stack = block_stack->next;		\
	  if (case_stack == this)			\
	    case_stack = case_stack->next;		\
	  nesting_depth = nesting_stack->depth - 1;	\
	  nesting_stack = this->all; }			\
     while (this != target); } while (0)
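
/* Illustrative use: `POPSTACK (case_stack);' discards every nesting object
   pushed after the current case statement, and then that case statement's
   own object, keeping cond_stack, block_stack and case_stack consistent
   with nesting_stack.  */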
\f
/* In some cases it is impossible to generate code for a forward goto
   until the label definition is seen.  This happens when it may be necessary
   for the goto to reset the stack pointer: we don't yet know how to do that.
   So expand_goto puts an entry on this fixup list.
   Each time a binding contour that resets the stack is exited,
   we check each fixup.
   If the target label has now been defined, we can insert the proper code.  */

struct goto_fixup GTY(())
{
  /* Points to following fixup.  */
  struct goto_fixup *next;
  /* Points to the insn before the jump insn.
     If more code must be inserted, it goes after this insn.  */
  rtx before_jump;
  /* The LABEL_DECL that this jump is jumping to, or 0
     for break, continue or return.  */
  tree target;
  /* The BLOCK for the place where this goto was found.  */
  tree context;
  /* The CODE_LABEL rtx that this is jumping to.  */
  rtx target_rtl;
  /* Number of binding contours started in current function
     before the label reference.  */
  int block_start_count;
};

struct stmt_status GTY(())
{
  /* Chain of all pending binding contours.  */
  struct nesting * x_block_stack;

  /* If any new stacks are added here, add them to POPSTACKS too.  */

  /* Chain of all pending conditional statements.  */
  struct nesting * x_cond_stack;

  /* Chain of all pending case or switch statements.  */
  struct nesting * x_case_stack;

  /* Separate chain including all of the above,
     chained through the `all' field.  */
  struct nesting * x_nesting_stack;

  /* Number of entries on nesting_stack now.  */
  int x_nesting_depth;

  /* Number of binding contours started so far in this function.  */
  int x_block_start_count;

  /* Location of last line-number note, whether we actually
     emitted it or not.  */
  location_t x_emit_locus;

  struct goto_fixup *x_goto_fixup_chain;
};

#define block_stack (cfun->stmt->x_block_stack)
#define cond_stack (cfun->stmt->x_cond_stack)
#define case_stack (cfun->stmt->x_case_stack)
#define nesting_stack (cfun->stmt->x_nesting_stack)
#define nesting_depth (cfun->stmt->x_nesting_depth)
#define current_block_start_count (cfun->stmt->x_block_start_count)
#define emit_locus (cfun->stmt->x_emit_locus)
#define goto_fixup_chain (cfun->stmt->x_goto_fixup_chain)

/* Nonzero if we are using EH to handle cleanups.  */
int using_eh_for_cleanups_p = 0;

static int n_occurrences (int, const char *);
static bool decl_conflicts_with_clobbers_p (tree, const HARD_REG_SET);
static void expand_nl_goto_receiver (void);
static bool check_operand_nalternatives (tree, tree);
static bool check_unique_operand_names (tree, tree);
static char *resolve_operand_name_1 (char *, tree, tree);
static void expand_null_return_1 (void);
static enum br_predictor return_prediction (rtx);
static rtx shift_return_value (rtx);
static void expand_value_return (rtx);
static void do_jump_if_equal (rtx, rtx, rtx, int);
static int estimate_case_costs (case_node_ptr);
static bool same_case_target_p (rtx, rtx);
static void strip_default_case_nodes (case_node_ptr *, rtx);
static bool lshift_cheap_p (void);
static int case_bit_test_cmp (const void *, const void *);
static void emit_case_bit_tests (tree, tree, tree, tree, case_node_ptr, rtx);
static void group_case_nodes (case_node_ptr);
static void balance_case_nodes (case_node_ptr *, case_node_ptr);
static int node_has_low_bound (case_node_ptr, tree);
static int node_has_high_bound (case_node_ptr, tree);
static int node_is_bounded (case_node_ptr, tree);
static void emit_jump_if_reachable (rtx);
static void emit_case_nodes (rtx, case_node_ptr, rtx, tree);
static struct case_node *case_tree2list (case_node *, case_node *);
\f
void
using_eh_for_cleanups (void)
{
  using_eh_for_cleanups_p = 1;
}

void
init_stmt_for_function (void)
{
  cfun->stmt = ggc_alloc_cleared (sizeof (struct stmt_status));
}
\f
/* Record the current file and line.  Called from emit_line_note.  */

void
set_file_and_line_for_stmt (location_t location)
{
  /* If we're outputting an inline function, and we add a line note,
     there may be no CFUN->STMT information.  So, there's no need to
     update it.  */
  if (cfun->stmt)
    emit_locus = location;
}

/* Emit a no-op instruction.  */

void
emit_nop (void)
{
  rtx last_insn;

  last_insn = get_last_insn ();
  if (!optimize
      && (LABEL_P (last_insn)
	  || (NOTE_P (last_insn)
	      && prev_real_insn (last_insn) == 0)))
    emit_insn (gen_nop ());
}
\f
/* Return the rtx-label that corresponds to a LABEL_DECL,
   creating it if necessary.  */

rtx
label_rtx (tree label)
{
  if (TREE_CODE (label) != LABEL_DECL)
    abort ();

  if (!DECL_RTL_SET_P (label))
    {
      rtx r = gen_label_rtx ();
      SET_DECL_RTL (label, r);
      if (FORCED_LABEL (label) || DECL_NONLOCAL (label))
	LABEL_PRESERVE_P (r) = 1;
    }

  return DECL_RTL (label);
}

/* As above, but also put it on the forced-reference list of the
   function that contains it.  */
rtx
force_label_rtx (tree label)
{
  rtx ref = label_rtx (label);
  tree function = decl_function_context (label);
  struct function *p;

  if (!function)
    abort ();

  if (function != current_function_decl)
    p = find_function_data (function);
  else
    p = cfun;

  p->expr->x_forced_labels = gen_rtx_EXPR_LIST (VOIDmode, ref,
						p->expr->x_forced_labels);
  return ref;
}

/* Add an unconditional jump to LABEL as the next sequential instruction.  */

void
emit_jump (rtx label)
{
  do_pending_stack_adjust ();
  emit_jump_insn (gen_jump (label));
  emit_barrier ();
}

/* Emit code to jump to the address
   specified by the pointer expression EXP.  */

void
expand_computed_goto (tree exp)
{
  rtx x = expand_expr (exp, NULL_RTX, VOIDmode, 0);

  x = convert_memory_address (Pmode, x);

  emit_queue ();
  do_pending_stack_adjust ();
  emit_indirect_jump (x);
}
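
/* Illustrative example: a GNU C computed goto such as

     static void *targets[] = { &&a, &&b };
     goto *targets[i];

   reaches this function with the pointer expression as EXP; it is simply
   expanded and used as the target of an indirect jump.  */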
\f
/* Handle goto statements and the labels that they can go to.  */

/* Specify the location in the RTL code of a label LABEL,
   which is a LABEL_DECL tree node.

   This is used for the kind of label that the user can jump to with a
   goto statement, and for alternatives of a switch or case statement.
   RTL labels generated for loops and conditionals don't go through here;
   they are generated directly at the RTL level, by other functions below.

   Note that this has nothing to do with defining label *names*.
   Languages vary in how they do that and what that even means.  */

void
expand_label (tree label)
{
  rtx label_r = label_rtx (label);

  do_pending_stack_adjust ();
  emit_label (label_r);
  if (DECL_NAME (label))
    LABEL_NAME (DECL_RTL (label)) = IDENTIFIER_POINTER (DECL_NAME (label));

  if (DECL_NONLOCAL (label))
    {
      expand_nl_goto_receiver ();
      nonlocal_goto_handler_labels
	= gen_rtx_EXPR_LIST (VOIDmode, label_r,
			     nonlocal_goto_handler_labels);
    }

  if (FORCED_LABEL (label))
    forced_labels = gen_rtx_EXPR_LIST (VOIDmode, label_r, forced_labels);

  if (DECL_NONLOCAL (label) || FORCED_LABEL (label))
    maybe_set_first_label_num (label_r);
}

/* Generate RTL code for a `goto' statement with target label LABEL.
   LABEL should be a LABEL_DECL tree node that was or will later be
   defined with `expand_label'.  */

void
expand_goto (tree label)
{
#ifdef ENABLE_CHECKING
  /* Check for a nonlocal goto to a containing function.  Should have
     gotten translated to __builtin_nonlocal_goto.  */
  tree context = decl_function_context (label);
  if (context != 0 && context != current_function_decl)
    abort ();
#endif

  emit_jump (label_rtx (label));
}
\f
/* Return the number of times character C occurs in string S.  */
static int
n_occurrences (int c, const char *s)
{
  int n = 0;
  while (*s)
    n += (*s++ == c);
  return n;
}
\f
/* Generate RTL for an asm statement (explicit assembler code).
   STRING is a STRING_CST node containing the assembler code text,
   or an ADDR_EXPR containing a STRING_CST.  VOL nonzero means the
   insn is volatile; don't optimize it.  */

void
expand_asm (tree string, int vol)
{
  rtx body;

  if (TREE_CODE (string) == ADDR_EXPR)
    string = TREE_OPERAND (string, 0);

  body = gen_rtx_ASM_INPUT (VOIDmode, TREE_STRING_POINTER (string));

  MEM_VOLATILE_P (body) = vol;

  emit_insn (body);
}
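
/* Illustrative example: a basic asm with no operands, such as

     asm volatile ("nop");

   comes through here.  The whole template becomes a single ASM_INPUT body,
   is marked volatile, and is emitted as one insn.  */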

/* Parse the output constraint pointed to by *CONSTRAINT_P.  It is the
   OPERAND_NUMth output operand, indexed from zero.  There are NINPUTS
   inputs and NOUTPUTS outputs to this extended-asm.  Upon return,
   *ALLOWS_MEM will be TRUE iff the constraint allows the use of a
   memory operand.  Similarly, *ALLOWS_REG will be TRUE iff the
   constraint allows the use of a register operand.  And, *IS_INOUT
   will be true if the operand is read-write, i.e., if it is used as
   an input as well as an output.  If *CONSTRAINT_P is not in
   canonical form, it will be made canonical.  (Note that `+' will be
   replaced with `=' as part of this process.)

   Returns TRUE if all went well; FALSE if an error occurred.  */

bool
parse_output_constraint (const char **constraint_p, int operand_num,
			 int ninputs, int noutputs, bool *allows_mem,
			 bool *allows_reg, bool *is_inout)
{
  const char *constraint = *constraint_p;
  const char *p;

  /* Assume the constraint doesn't allow the use of either a register
     or memory.  */
  *allows_mem = false;
  *allows_reg = false;

  /* Allow the `=' or `+' to not be at the beginning of the string,
     since it wasn't explicitly documented that way, and there is a
     large body of code that puts it last.  Swap the character to
     the front, so as not to uglify any place else.  */
  p = strchr (constraint, '=');
  if (!p)
    p = strchr (constraint, '+');

  /* If the string doesn't contain an `=', issue an error
     message.  */
  if (!p)
    {
      error ("output operand constraint lacks `='");
      return false;
    }

  /* If the constraint begins with `+', then the operand is both read
     from and written to.  */
  *is_inout = (*p == '+');

  /* Canonicalize the output constraint so that it begins with `='.  */
  if (p != constraint || *is_inout)
    {
      char *buf;
      size_t c_len = strlen (constraint);

      if (p != constraint)
	warning ("output constraint `%c' for operand %d is not at the beginning",
		 *p, operand_num);

      /* Make a copy of the constraint.  */
      buf = alloca (c_len + 1);
      strcpy (buf, constraint);
      /* Swap the first character and the `=' or `+'.  */
      buf[p - constraint] = buf[0];
      /* Make sure the first character is an `='.  (Until we do this,
	 it might be a `+'.)  */
      buf[0] = '=';
      /* Replace the constraint with the canonicalized string.  */
      *constraint_p = ggc_alloc_string (buf, c_len);
      constraint = *constraint_p;
    }

  /* Loop through the constraint string.  */
  for (p = constraint + 1; *p; p += CONSTRAINT_LEN (*p, p))
    switch (*p)
      {
      case '+':
      case '=':
	error ("operand constraint contains incorrectly positioned '+' or '='");
	return false;

      case '%':
	if (operand_num + 1 == ninputs + noutputs)
	  {
	    error ("`%%' constraint used with last operand");
	    return false;
	  }
	break;

      case 'V': case 'm': case 'o':
	*allows_mem = true;
	break;

      case '?': case '!': case '*': case '&': case '#':
      case 'E': case 'F': case 'G': case 'H':
      case 's': case 'i': case 'n':
      case 'I': case 'J': case 'K': case 'L': case 'M':
      case 'N': case 'O': case 'P': case ',':
	break;

      case '0': case '1': case '2': case '3': case '4':
      case '5': case '6': case '7': case '8': case '9':
      case '[':
	error ("matching constraint not valid in output operand");
	return false;

      case '<': case '>':
	/* ??? Before flow, auto inc/dec insns are not supposed to exist,
	   excepting those that expand_call created.  So match memory
	   and hope.  */
	*allows_mem = true;
	break;

      case 'g': case 'X':
	*allows_reg = true;
	*allows_mem = true;
	break;

      case 'p': case 'r':
	*allows_reg = true;
	break;

      default:
	if (!ISALPHA (*p))
	  break;
	if (REG_CLASS_FROM_CONSTRAINT (*p, p) != NO_REGS)
	  *allows_reg = true;
#ifdef EXTRA_CONSTRAINT_STR
	else if (EXTRA_ADDRESS_CONSTRAINT (*p, p))
	  *allows_reg = true;
	else if (EXTRA_MEMORY_CONSTRAINT (*p, p))
	  *allows_mem = true;
	else
	  {
	    /* Otherwise we can't assume anything about the nature of
	       the constraint except that it isn't purely registers.
	       Treat it like "g" and hope for the best.  */
	    *allows_reg = true;
	    *allows_mem = true;
	  }
#endif
	break;
      }

  return true;
}
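
/* Illustrative example: the output constraint "+r" is canonicalized to
   "=r", *IS_INOUT is set because of the `+', and *ALLOWS_REG is set by
   the `r'; a constraint of "=m" instead sets *ALLOWS_MEM.  */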

/* Similar, but for input constraints.  */

bool
parse_input_constraint (const char **constraint_p, int input_num,
			int ninputs, int noutputs, int ninout,
			const char * const * constraints,
			bool *allows_mem, bool *allows_reg)
{
  const char *constraint = *constraint_p;
  const char *orig_constraint = constraint;
  size_t c_len = strlen (constraint);
  size_t j;
  bool saw_match = false;

  /* Assume the constraint doesn't allow the use of either
     a register or memory.  */
  *allows_mem = false;
  *allows_reg = false;

  /* Make sure constraint has neither `=', `+', nor '&'.  */

  for (j = 0; j < c_len; j += CONSTRAINT_LEN (constraint[j], constraint+j))
    switch (constraint[j])
      {
      case '+': case '=': case '&':
	if (constraint == orig_constraint)
	  {
	    error ("input operand constraint contains `%c'", constraint[j]);
	    return false;
	  }
	break;

      case '%':
	if (constraint == orig_constraint
	    && input_num + 1 == ninputs - ninout)
	  {
	    error ("`%%' constraint used with last operand");
	    return false;
	  }
	break;

      case 'V': case 'm': case 'o':
	*allows_mem = true;
	break;

      case '<': case '>':
      case '?': case '!': case '*': case '#':
      case 'E': case 'F': case 'G': case 'H':
      case 's': case 'i': case 'n':
      case 'I': case 'J': case 'K': case 'L': case 'M':
      case 'N': case 'O': case 'P': case ',':
	break;

	/* Whether or not a numeric constraint allows a register is
	   decided by the matching constraint, and so there is no need
	   to do anything special with them.  We must handle them in
	   the default case, so that we don't unnecessarily force
	   operands to memory.  */
      case '0': case '1': case '2': case '3': case '4':
      case '5': case '6': case '7': case '8': case '9':
	{
	  char *end;
	  unsigned long match;

	  saw_match = true;

	  match = strtoul (constraint + j, &end, 10);
	  if (match >= (unsigned long) noutputs)
	    {
	      error ("matching constraint references invalid operand number");
	      return false;
	    }

	  /* Try and find the real constraint for this dup.  Only do this
	     if the matching constraint is the only alternative.  */
	  if (*end == '\0'
	      && (j == 0 || (j == 1 && constraint[0] == '%')))
	    {
	      constraint = constraints[match];
	      *constraint_p = constraint;
	      c_len = strlen (constraint);
	      j = 0;
	      /* ??? At the end of the loop, we will skip the first part of
		 the matched constraint.  This assumes not only that the
		 other constraint is an output constraint, but also that
		 the '=' or '+' come first.  */
	      break;
	    }
	  else
	    j = end - constraint;
	  /* Anticipate increment at end of loop.  */
	  j--;
	}
	/* Fall through.  */

      case 'p': case 'r':
	*allows_reg = true;
	break;

      case 'g': case 'X':
	*allows_reg = true;
	*allows_mem = true;
	break;

      default:
	if (! ISALPHA (constraint[j]))
	  {
	    error ("invalid punctuation `%c' in constraint", constraint[j]);
	    return false;
	  }
	if (REG_CLASS_FROM_CONSTRAINT (constraint[j], constraint + j)
	    != NO_REGS)
	  *allows_reg = true;
#ifdef EXTRA_CONSTRAINT_STR
	else if (EXTRA_ADDRESS_CONSTRAINT (constraint[j], constraint + j))
	  *allows_reg = true;
	else if (EXTRA_MEMORY_CONSTRAINT (constraint[j], constraint + j))
	  *allows_mem = true;
	else
	  {
	    /* Otherwise we can't assume anything about the nature of
	       the constraint except that it isn't purely registers.
	       Treat it like "g" and hope for the best.  */
	    *allows_reg = true;
	    *allows_mem = true;
	  }
#endif
	break;
      }

  if (saw_match && !*allows_reg)
    warning ("matching constraint does not allow a register");

  return true;
}
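
/* Illustrative example: an input constraint of "0" refers back to output
   operand 0, so its register/memory properties are taken from that
   output's constraint; "0" paired with an "=r" output therefore allows a
   register, while a plain "m" input allows only memory.  */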

/* INPUT is one of the input operands from EXPR, an ASM_EXPR.  Returns true
   if it is an operand which must be passed in memory (i.e. an "m"
   constraint), false otherwise.  */

bool
asm_op_is_mem_input (tree input, tree expr)
{
  const char *constraint = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (input)));
  tree outputs = ASM_OUTPUTS (expr);
  int noutputs = list_length (outputs);
  const char **constraints
    = (const char **) alloca ((noutputs) * sizeof (const char *));
  int i = 0;
  bool allows_mem, allows_reg;
  tree t;

  /* Collect output constraints.  */
  for (t = outputs; t ; t = TREE_CHAIN (t), i++)
    constraints[i] = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (t)));

  /* We pass 0 for input_num, ninputs and ninout; they are only used for
     error checking which will be done at expand time.  */
  parse_input_constraint (&constraint, 0, 0, noutputs, 0, constraints,
			  &allows_mem, &allows_reg);
  return (!allows_reg && allows_mem);
}
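
/* So, illustratively, an ASM_EXPR input written `"m" (x)' makes this
   return true, while `"rm" (x)' returns false because a register is also
   acceptable.  */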

/* Check for overlap between registers marked in CLOBBERED_REGS and
   anything inappropriate in DECL.  Emit error and return TRUE for error,
   FALSE for ok.  */

static bool
decl_conflicts_with_clobbers_p (tree decl, const HARD_REG_SET clobbered_regs)
{
  /* Conflicts between asm-declared register variables and the clobber
     list are not allowed.  */
  if ((TREE_CODE (decl) == VAR_DECL || TREE_CODE (decl) == PARM_DECL)
      && DECL_REGISTER (decl)
      && REG_P (DECL_RTL (decl))
      && REGNO (DECL_RTL (decl)) < FIRST_PSEUDO_REGISTER)
    {
      rtx reg = DECL_RTL (decl);
      unsigned int regno;

      for (regno = REGNO (reg);
	   regno < (REGNO (reg)
		    + hard_regno_nregs[REGNO (reg)][GET_MODE (reg)]);
	   regno++)
	if (TEST_HARD_REG_BIT (clobbered_regs, regno))
	  {
	    error ("asm-specifier for variable `%s' conflicts with asm clobber list",
		   IDENTIFIER_POINTER (DECL_NAME (decl)));

	    /* Reset registerness to stop multiple errors emitted for a
	       single variable.  */
	    DECL_REGISTER (decl) = 0;
	    return true;
	  }
    }
  return false;
}
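
/* The conflict diagnosed here looks like the following (illustrative only;
   the register name is target-specific):

     register int x asm ("r0");
     asm ("..." : "=r" (y) : "r" (x) : "r0");

   where a variable tied to a hard register is used as an operand while
   that same register appears in the clobber list.  */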

/* Generate RTL for an asm statement with arguments.
   STRING is the instruction template.
   OUTPUTS is a list of output arguments (lvalues); INPUTS a list of inputs.
   Each output or input has an expression in the TREE_VALUE and
   a tree list in TREE_PURPOSE which in turn contains a constraint
   name in TREE_VALUE (or NULL_TREE) and a constraint string
   in TREE_PURPOSE.
   CLOBBERS is a list of STRING_CST nodes each naming a hard register
   that is clobbered by this insn.

   Not all kinds of lvalue that may appear in OUTPUTS can be stored directly.
   Some elements of OUTPUTS may be replaced with trees representing temporary
   values.  The caller should copy those temporary values to the originally
   specified lvalues.

   VOL nonzero means the insn is volatile; don't optimize it.  */
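
/* As a concrete (illustrative) example, the GNU C statement

     asm volatile ("add %1,%0" : "=r" (out) : "r" (in) : "cc");

   arrives here with OUTPUTS holding one element whose TREE_VALUE is the
   lvalue `out' and whose constraint string is "=r", INPUTS holding one
   element for `in' with constraint "r", and CLOBBERS holding the
   STRING_CST "cc".  */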

void
expand_asm_operands (tree string, tree outputs, tree inputs,
		     tree clobbers, int vol, location_t locus)
{
  rtvec argvec, constraintvec;
  rtx body;
  int ninputs = list_length (inputs);
  int noutputs = list_length (outputs);
  int ninout;
  int nclobbers;
  HARD_REG_SET clobbered_regs;
  int clobber_conflict_found = 0;
  tree tail;
  tree t;
  int i;
  /* Vector of RTX's of evaluated output operands.  */
  rtx *output_rtx = alloca (noutputs * sizeof (rtx));
  int *inout_opnum = alloca (noutputs * sizeof (int));
  rtx *real_output_rtx = alloca (noutputs * sizeof (rtx));
  enum machine_mode *inout_mode
    = alloca (noutputs * sizeof (enum machine_mode));
  const char **constraints
    = alloca ((noutputs + ninputs) * sizeof (const char *));
  int old_generating_concat_p = generating_concat_p;

  /* An ASM with no outputs needs to be treated as volatile, for now.  */
  if (noutputs == 0)
    vol = 1;

  if (! check_operand_nalternatives (outputs, inputs))
    return;

  string = resolve_asm_operand_names (string, outputs, inputs);

  /* Collect constraints.  */
  i = 0;
  for (t = outputs; t ; t = TREE_CHAIN (t), i++)
    constraints[i] = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (t)));
  for (t = inputs; t ; t = TREE_CHAIN (t), i++)
    constraints[i] = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (t)));

  /* Sometimes we wish to automatically clobber registers across an asm.
     Case in point is when the i386 backend moved from cc0 to a hard reg --
     maintaining source-level compatibility means automatically clobbering
     the flags register.  */
  clobbers = targetm.md_asm_clobbers (clobbers);

  /* Count the number of meaningful clobbered registers, ignoring what
     we would ignore later.  */
  nclobbers = 0;
  CLEAR_HARD_REG_SET (clobbered_regs);
  for (tail = clobbers; tail; tail = TREE_CHAIN (tail))
    {
      const char *regname = TREE_STRING_POINTER (TREE_VALUE (tail));

      i = decode_reg_name (regname);
      if (i >= 0 || i == -4)
	++nclobbers;
      else if (i == -2)
	error ("unknown register name `%s' in `asm'", regname);

      /* Mark clobbered registers.  */
      if (i >= 0)
	{
	  /* Clobbering the PIC register is an error.  */
	  if (i == (int) PIC_OFFSET_TABLE_REGNUM)
	    {
	      error ("PIC register `%s' clobbered in `asm'", regname);
	      return;
	    }

	  SET_HARD_REG_BIT (clobbered_regs, i);
	}
    }

  /* First pass over inputs and outputs checks validity and sets
     mark_addressable if needed.  */

  ninout = 0;
  for (i = 0, tail = outputs; tail; tail = TREE_CHAIN (tail), i++)
    {
      tree val = TREE_VALUE (tail);
      tree type = TREE_TYPE (val);
      const char *constraint;
      bool is_inout;
      bool allows_reg;
      bool allows_mem;

      /* If there's an erroneous arg, emit no insn.  */
      if (type == error_mark_node)
	return;

      /* Try to parse the output constraint.  If that fails, there's
	 no point in going further.  */
      constraint = constraints[i];
      if (!parse_output_constraint (&constraint, i, ninputs, noutputs,
				    &allows_mem, &allows_reg, &is_inout))
	return;

      if (! allows_reg
	  && (allows_mem
	      || is_inout
	      || (DECL_P (val)
		  && REG_P (DECL_RTL (val))
		  && GET_MODE (DECL_RTL (val)) != TYPE_MODE (type))))
	lang_hooks.mark_addressable (val);

      if (is_inout)
	ninout++;
    }

  ninputs += ninout;
  if (ninputs + noutputs > MAX_RECOG_OPERANDS)
    {
      error ("more than %d operands in `asm'", MAX_RECOG_OPERANDS);
      return;
    }

  for (i = 0, tail = inputs; tail; i++, tail = TREE_CHAIN (tail))
    {
      bool allows_reg, allows_mem;
      const char *constraint;

      /* If there's an erroneous arg, emit no insn, because the ASM_INPUT
	 would get VOIDmode and that could cause a crash in reload.  */
      if (TREE_TYPE (TREE_VALUE (tail)) == error_mark_node)
	return;

      constraint = constraints[i + noutputs];
      if (! parse_input_constraint (&constraint, i, ninputs, noutputs, ninout,
				    constraints, &allows_mem, &allows_reg))
	return;

      if (! allows_reg && allows_mem)
	lang_hooks.mark_addressable (TREE_VALUE (tail));
    }

  /* Second pass evaluates arguments.  */

  ninout = 0;
  for (i = 0, tail = outputs; tail; tail = TREE_CHAIN (tail), i++)
    {
      tree val = TREE_VALUE (tail);
      tree type = TREE_TYPE (val);
      bool is_inout;
      bool allows_reg;
      bool allows_mem;
      rtx op;

      if (!parse_output_constraint (&constraints[i], i, ninputs,
				    noutputs, &allows_mem, &allows_reg,
				    &is_inout))
	abort ();

      /* If an output operand is not a decl or indirect ref and our constraint
	 allows a register, make a temporary to act as an intermediate.
	 Make the asm insn write into that, then our caller will copy it to
	 the real output operand.  Likewise for promoted variables.  */

      generating_concat_p = 0;

      real_output_rtx[i] = NULL_RTX;
      if ((TREE_CODE (val) == INDIRECT_REF
	   && allows_mem)
	  || (DECL_P (val)
	      && (allows_mem || REG_P (DECL_RTL (val)))
	      && ! (REG_P (DECL_RTL (val))
		    && GET_MODE (DECL_RTL (val)) != TYPE_MODE (type)))
	  || ! allows_reg
	  || is_inout)
	{
	  op = expand_expr (val, NULL_RTX, VOIDmode, EXPAND_WRITE);
	  if (MEM_P (op))
	    op = validize_mem (op);

	  if (! allows_reg && !MEM_P (op))
	    error ("output number %d not directly addressable", i);
	  if ((! allows_mem && MEM_P (op))
	      || GET_CODE (op) == CONCAT)
	    {
	      real_output_rtx[i] = protect_from_queue (op, 1);
	      op = gen_reg_rtx (GET_MODE (op));
	      if (is_inout)
		emit_move_insn (op, real_output_rtx[i]);
	    }
	}
      else
	{
	  op = assign_temp (type, 0, 0, 1);
	  op = validize_mem (op);
	  TREE_VALUE (tail) = make_tree (type, op);
	}
      output_rtx[i] = op;

      generating_concat_p = old_generating_concat_p;

      if (is_inout)
	{
	  inout_mode[ninout] = TYPE_MODE (type);
	  inout_opnum[ninout++] = i;
	}

      if (decl_conflicts_with_clobbers_p (val, clobbered_regs))
	clobber_conflict_found = 1;
    }

  /* Make vectors for the expression-rtx, constraint strings,
     and named operands.  */

  argvec = rtvec_alloc (ninputs);
  constraintvec = rtvec_alloc (ninputs);

  body = gen_rtx_ASM_OPERANDS ((noutputs == 0 ? VOIDmode
				: GET_MODE (output_rtx[0])),
			       TREE_STRING_POINTER (string),
			       empty_string, 0, argvec, constraintvec,
			       locus);

  MEM_VOLATILE_P (body) = vol;

  /* Eval the inputs and put them into ARGVEC.
     Put their constraints into ASM_INPUTs and store in CONSTRAINTS.  */

  for (i = 0, tail = inputs; tail; tail = TREE_CHAIN (tail), ++i)
    {
      bool allows_reg, allows_mem;
      const char *constraint;
      tree val, type;
      rtx op;

      constraint = constraints[i + noutputs];
      if (! parse_input_constraint (&constraint, i, ninputs, noutputs, ninout,
				    constraints, &allows_mem, &allows_reg))
	abort ();

      generating_concat_p = 0;

      val = TREE_VALUE (tail);
      type = TREE_TYPE (val);
      op = expand_expr (val, NULL_RTX, VOIDmode,
			(allows_mem && !allows_reg
			 ? EXPAND_MEMORY : EXPAND_NORMAL));

      /* Never pass a CONCAT to an ASM.  */
      if (GET_CODE (op) == CONCAT)
	op = force_reg (GET_MODE (op), op);
      else if (MEM_P (op))
	op = validize_mem (op);

      if (asm_operand_ok (op, constraint) <= 0)
	{
	  if (allows_reg)
	    op = force_reg (TYPE_MODE (type), op);
	  else if (!allows_mem)
	    warning ("asm operand %d probably doesn't match constraints",
		     i + noutputs);
	  else if (MEM_P (op))
	    {
	      /* We won't recognize either volatile memory or memory
		 with a queued address as a valid memory_operand
		 at this point.  Ignore it: clearly this *is* a memory.  */
	    }
	  else
	    {
	      warning ("use of memory input without lvalue in "
		       "asm operand %d is deprecated", i + noutputs);

	      if (CONSTANT_P (op))
		{
		  rtx mem = force_const_mem (TYPE_MODE (type), op);
		  if (mem)
		    op = validize_mem (mem);
		  else
		    op = force_reg (TYPE_MODE (type), op);
		}
	      if (REG_P (op)
		  || GET_CODE (op) == SUBREG
		  || GET_CODE (op) == CONCAT)
		{
		  tree qual_type = build_qualified_type (type,
							 (TYPE_QUALS (type)
							  | TYPE_QUAL_CONST));
		  rtx memloc = assign_temp (qual_type, 1, 1, 1);
		  memloc = validize_mem (memloc);
		  emit_move_insn (memloc, op);
		  op = memloc;
		}
	    }
	}

      generating_concat_p = old_generating_concat_p;
      ASM_OPERANDS_INPUT (body, i) = op;

      ASM_OPERANDS_INPUT_CONSTRAINT_EXP (body, i)
	= gen_rtx_ASM_INPUT (TYPE_MODE (type), constraints[i + noutputs]);

      if (decl_conflicts_with_clobbers_p (val, clobbered_regs))
	clobber_conflict_found = 1;
    }

  /* Protect all the operands from the queue now that they have all been
     evaluated.  */

  generating_concat_p = 0;

  for (i = 0; i < ninputs - ninout; i++)
    ASM_OPERANDS_INPUT (body, i)
      = protect_from_queue (ASM_OPERANDS_INPUT (body, i), 0);

  for (i = 0; i < noutputs; i++)
    output_rtx[i] = protect_from_queue (output_rtx[i], 1);

  /* For in-out operands, copy output rtx to input rtx.  */
  for (i = 0; i < ninout; i++)
    {
      int j = inout_opnum[i];
      char buffer[16];

      ASM_OPERANDS_INPUT (body, ninputs - ninout + i)
	= output_rtx[j];

      sprintf (buffer, "%d", j);
      ASM_OPERANDS_INPUT_CONSTRAINT_EXP (body, ninputs - ninout + i)
	= gen_rtx_ASM_INPUT (inout_mode[i], ggc_strdup (buffer));
    }

  generating_concat_p = old_generating_concat_p;

  /* Now, for each output, construct an rtx
     (set OUTPUT (asm_operands INSN OUTPUTCONSTRAINT OUTPUTNUMBER
		  ARGVEC CONSTRAINTS OPNAMES))
     If there is more than one, put them inside a PARALLEL.  */

  if (noutputs == 1 && nclobbers == 0)
    {
      ASM_OPERANDS_OUTPUT_CONSTRAINT (body) = constraints[0];
      emit_insn (gen_rtx_SET (VOIDmode, output_rtx[0], body));
    }

  else if (noutputs == 0 && nclobbers == 0)
    {
      /* No output operands: put in a raw ASM_OPERANDS rtx.  */
      emit_insn (body);
    }

  else
    {
      rtx obody = body;
      int num = noutputs;

      if (num == 0)
	num = 1;

      body = gen_rtx_PARALLEL (VOIDmode, rtvec_alloc (num + nclobbers));

      /* For each output operand, store a SET.  */
      for (i = 0, tail = outputs; tail; tail = TREE_CHAIN (tail), i++)
	{
	  XVECEXP (body, 0, i)
	    = gen_rtx_SET (VOIDmode,
			   output_rtx[i],
			   gen_rtx_ASM_OPERANDS
			   (GET_MODE (output_rtx[i]),
			    TREE_STRING_POINTER (string),
			    constraints[i], i, argvec, constraintvec,
			    locus));

	  MEM_VOLATILE_P (SET_SRC (XVECEXP (body, 0, i))) = vol;
	}

      /* If there are no outputs (but there are some clobbers)
	 store the bare ASM_OPERANDS into the PARALLEL.  */

      if (i == 0)
	XVECEXP (body, 0, i++) = obody;

      /* Store (clobber REG) for each clobbered register specified.  */

      for (tail = clobbers; tail; tail = TREE_CHAIN (tail))
	{
	  const char *regname = TREE_STRING_POINTER (TREE_VALUE (tail));
	  int j = decode_reg_name (regname);
	  rtx clobbered_reg;

	  if (j < 0)
	    {
	      if (j == -3)	/* `cc', which is not a register */
		continue;

	      if (j == -4)	/* `memory', don't cache memory across asm */
		{
		  XVECEXP (body, 0, i++)
		    = gen_rtx_CLOBBER (VOIDmode,
				       gen_rtx_MEM
				       (BLKmode,
					gen_rtx_SCRATCH (VOIDmode)));
		  continue;
		}

	      /* Ignore unknown register, error already signaled.  */
	      continue;
	    }

	  /* Use QImode since that's guaranteed to clobber just one reg.  */
	  clobbered_reg = gen_rtx_REG (QImode, j);

	  /* Do sanity check for overlap between clobbers and respectively
	     input and outputs that hasn't been handled.  Such overlap
	     should have been detected and reported above.  */
	  if (!clobber_conflict_found)
	    {
	      int opno;

	      /* We test the old body (obody) contents to avoid tripping
		 over the under-construction body.  */
	      for (opno = 0; opno < noutputs; opno++)
		if (reg_overlap_mentioned_p (clobbered_reg, output_rtx[opno]))
		  internal_error ("asm clobber conflict with output operand");

	      for (opno = 0; opno < ninputs - ninout; opno++)
		if (reg_overlap_mentioned_p (clobbered_reg,
					     ASM_OPERANDS_INPUT (obody, opno)))
		  internal_error ("asm clobber conflict with input operand");
	    }

	  XVECEXP (body, 0, i++)
	    = gen_rtx_CLOBBER (VOIDmode, clobbered_reg);
	}

      emit_insn (body);
    }

  /* For any outputs that needed reloading into registers, spill them
     back to where they belong.  */
  for (i = 0; i < noutputs; ++i)
    if (real_output_rtx[i])
      emit_move_insn (real_output_rtx[i], output_rtx[i]);

  free_temp_slots ();
}

void
expand_asm_expr (tree exp)
{
  int noutputs, i;
  tree outputs, tail;
  tree *o;

  if (ASM_INPUT_P (exp))
    {
      expand_asm (ASM_STRING (exp), ASM_VOLATILE_P (exp));
      return;
    }

  outputs = ASM_OUTPUTS (exp);
  noutputs = list_length (outputs);
  /* o[I] is the place that output number I should be written.  */
  o = (tree *) alloca (noutputs * sizeof (tree));

  /* Record the contents of OUTPUTS before it is modified.  */
  for (i = 0, tail = outputs; tail; tail = TREE_CHAIN (tail), i++)
    o[i] = TREE_VALUE (tail);

  /* Generate the ASM_OPERANDS insn; store into the TREE_VALUEs of
     OUTPUTS some trees for where the values were actually stored.  */
  expand_asm_operands (ASM_STRING (exp), outputs, ASM_INPUTS (exp),
		       ASM_CLOBBERS (exp), ASM_VOLATILE_P (exp),
		       input_location);

  /* Copy all the intermediate outputs into the specified outputs.  */
  for (i = 0, tail = outputs; tail; tail = TREE_CHAIN (tail), i++)
    {
      if (o[i] != TREE_VALUE (tail))
	{
	  expand_assignment (o[i], TREE_VALUE (tail), 0);
	  free_temp_slots ();

	  /* Restore the original value so that it's correct the next
	     time we expand this function.  */
	  TREE_VALUE (tail) = o[i];
	}
    }

  /* Those MODIFY_EXPRs could do autoincrements.  */
  emit_queue ();
}

/* A subroutine of expand_asm_operands.  Check that all operands have
   the same number of alternatives.  Return true if so.  */

static bool
check_operand_nalternatives (tree outputs, tree inputs)
{
  if (outputs || inputs)
    {
      tree tmp = TREE_PURPOSE (outputs ? outputs : inputs);
      int nalternatives
	= n_occurrences (',', TREE_STRING_POINTER (TREE_VALUE (tmp)));
      tree next = inputs;

      if (nalternatives + 1 > MAX_RECOG_ALTERNATIVES)
	{
	  error ("too many alternatives in `asm'");
	  return false;
	}

      tmp = outputs;
      while (tmp)
	{
	  const char *constraint
	    = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (tmp)));

	  if (n_occurrences (',', constraint) != nalternatives)
	    {
	      error ("operand constraints for `asm' differ in number of alternatives");
	      return false;
	    }

	  if (TREE_CHAIN (tmp))
	    tmp = TREE_CHAIN (tmp);
	  else
	    tmp = next, next = 0;
	}
    }

  return true;
}

/* A subroutine of expand_asm_operands.  Check that all operand names
   are unique.  Return true if so.  The names are STRING_CST nodes, so
   they are compared with simple_cst_equal rather than by pointer.  */

static bool
check_unique_operand_names (tree outputs, tree inputs)
{
  tree i, j;

  for (i = outputs; i ; i = TREE_CHAIN (i))
    {
      tree i_name = TREE_PURPOSE (TREE_PURPOSE (i));
      if (! i_name)
	continue;

      for (j = TREE_CHAIN (i); j ; j = TREE_CHAIN (j))
	if (simple_cst_equal (i_name, TREE_PURPOSE (TREE_PURPOSE (j))))
	  goto failure;
    }

  for (i = inputs; i ; i = TREE_CHAIN (i))
    {
      tree i_name = TREE_PURPOSE (TREE_PURPOSE (i));
      if (! i_name)
	continue;

      for (j = TREE_CHAIN (i); j ; j = TREE_CHAIN (j))
	if (simple_cst_equal (i_name, TREE_PURPOSE (TREE_PURPOSE (j))))
	  goto failure;
      for (j = outputs; j ; j = TREE_CHAIN (j))
	if (simple_cst_equal (i_name, TREE_PURPOSE (TREE_PURPOSE (j))))
	  goto failure;
    }

  return true;

 failure:
  error ("duplicate asm operand name '%s'",
	 TREE_STRING_POINTER (TREE_PURPOSE (TREE_PURPOSE (i))));
  return false;
}

/* A subroutine of expand_asm_operands.  Resolve the names of the operands
   in OUTPUTS and INPUTS to numbers, and replace the name expansions in
   STRING and in the constraints to those numbers.  */

tree
resolve_asm_operand_names (tree string, tree outputs, tree inputs)
{
  char *buffer;
  char *p;
  const char *c;
  tree t;

  check_unique_operand_names (outputs, inputs);

  /* Substitute [<name>] in input constraint strings.  There should be no
     named operands in output constraints.  */
  for (t = inputs; t ; t = TREE_CHAIN (t))
    {
      c = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (t)));
      if (strchr (c, '[') != NULL)
	{
	  p = buffer = xstrdup (c);
	  while ((p = strchr (p, '[')) != NULL)
	    p = resolve_operand_name_1 (p, outputs, inputs);
	  TREE_VALUE (TREE_PURPOSE (t))
	    = build_string (strlen (buffer), buffer);
	  free (buffer);
	}
    }

  /* Now check for any needed substitutions in the template.  */
  c = TREE_STRING_POINTER (string);
  while ((c = strchr (c, '%')) != NULL)
    {
      if (c[1] == '[')
	break;
      else if (ISALPHA (c[1]) && c[2] == '[')
	break;
      else
	{
	  c += 1;
	  continue;
	}
    }

  if (c)
    {
      /* OK, we need to make a copy so we can perform the substitutions.
	 Assume that we will not need extra space--we get to remove '['
	 and ']', which means we cannot have a problem until we have more
	 than 999 operands.  */
      buffer = xstrdup (TREE_STRING_POINTER (string));
      p = buffer + (c - TREE_STRING_POINTER (string));

      while ((p = strchr (p, '%')) != NULL)
	{
	  if (p[1] == '[')
	    p += 1;
	  else if (ISALPHA (p[1]) && p[2] == '[')
	    p += 2;
	  else
	    {
	      p += 1;
	      continue;
	    }

	  p = resolve_operand_name_1 (p, outputs, inputs);
	}

      string = build_string (strlen (buffer), buffer);
      free (buffer);
    }

  return string;
}
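
/* Illustrative example: given

     asm ("mov %[src],%[dst]" : [dst] "=r" (x) : [src] "r" (y));

   this pass rewrites the template to "mov %1,%0" and replaces the named
   references in the constraints with plain operand numbers before the
   asm is expanded (outputs are numbered first, then inputs).  */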
1529
1530/* A subroutine of resolve_operand_names. P points to the '[' for a
1531 potential named operand of the form [<name>]. In place, replace
786de7eb 1532 the name and brackets with a number. Return a pointer to the
84b72302
RH
1533 balance of the string after substitution. */
1534
1535static char *
46c5ad27 1536resolve_operand_name_1 (char *p, tree outputs, tree inputs)
84b72302
RH
1537{
1538 char *q;
1539 int op;
1540 tree t;
1541 size_t len;
1542
1543 /* Collect the operand name. */
1544 q = strchr (p, ']');
1545 if (!q)
1546 {
1547 error ("missing close brace for named operand");
1548 return strchr (p, '\0');
1549 }
1550 len = q - p - 1;
1551
1552 /* Resolve the name to a number. */
1553 for (op = 0, t = outputs; t ; t = TREE_CHAIN (t), op++)
1554 {
fc552851
RS
1555 tree name = TREE_PURPOSE (TREE_PURPOSE (t));
1556 if (name)
edd1967d 1557 {
fc552851 1558 const char *c = TREE_STRING_POINTER (name);
edd1967d
RH
1559 if (strncmp (c, p + 1, len) == 0 && c[len] == '\0')
1560 goto found;
1561 }
84b72302
RH
1562 }
1563 for (t = inputs; t ; t = TREE_CHAIN (t), op++)
1564 {
fc552851
RS
1565 tree name = TREE_PURPOSE (TREE_PURPOSE (t));
1566 if (name)
edd1967d 1567 {
fc552851 1568 const char *c = TREE_STRING_POINTER (name);
edd1967d
RH
1569 if (strncmp (c, p + 1, len) == 0 && c[len] == '\0')
1570 goto found;
1571 }
84b72302
RH
1572 }
1573
1574 *q = '\0';
1575 error ("undefined named operand '%s'", p + 1);
1576 op = 0;
1577 found:
1578
1579 /* Replace the name with the number. Unfortunately, not all libraries
1580 get the return value of sprintf correct, so search for the end of the
1581 generated string by hand. */
1582 sprintf (p, "%d", op);
1583 p = strchr (p, '\0');
1584
1585 /* Verify the no extra buffer space assumption. */
1586 if (p > q)
1587 abort ();
1588
1589 /* Shift the rest of the buffer down to fill the gap. */
1590 memmove (p, q + 1, strlen (q + 1) + 1);
1591
1592 return p;
1593}
28d81abb 1594\f
4dfa0342 1595/* Generate RTL to evaluate the expression EXP. */
28d81abb
RK
1596
1597void
46c5ad27 1598expand_expr_stmt (tree exp)
1574ef13
AO
1599{
1600 rtx value;
1601 tree type;
b6ec8c5f 1602
4dfa0342 1603 value = expand_expr (exp, const0_rtx, VOIDmode, 0);
1574ef13 1604 type = TREE_TYPE (exp);
28d81abb
RK
1605
1606 /* If all we do is reference a volatile value in memory,
1607 copy it to a register to be sure it is actually touched. */
3c0cb5de 1608 if (value && MEM_P (value) && TREE_THIS_VOLATILE (exp))
28d81abb 1609 {
1574ef13 1610 if (TYPE_MODE (type) == VOIDmode)
6a5bbbe6 1611 ;
1574ef13
AO
1612 else if (TYPE_MODE (type) != BLKmode)
1613 value = copy_to_reg (value);
28d81abb 1614 else
ddbe9812
RS
1615 {
1616 rtx lab = gen_label_rtx ();
4381f7c2 1617
ddbe9812 1618 /* Compare the value with itself to reference it. */
1574ef13
AO
1619 emit_cmp_and_jump_insns (value, value, EQ,
1620 expand_expr (TYPE_SIZE (type),
c5d5d461 1621 NULL_RTX, VOIDmode, 0),
d43e0b7d 1622 BLKmode, 0, lab);
ddbe9812
RS
1623 emit_label (lab);
1624 }
28d81abb
RK
1625 }
1626
4dfa0342 1627 /* Free any temporaries used to evaluate this expression. */
28d81abb
RK
1628 free_temp_slots ();
1629
1630 emit_queue ();
1631}
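/* Illustrative sketch (added for exposition): the volatile case handled
   above.  In

       volatile int flag;
       ...
       flag;

   the expression statement's value is unused, but the access must still
   happen, so expand_expr_stmt copies the MEM into a register (or, for a
   BLKmode value, emits a compare of the value against itself) instead of
   simply discarding it.  */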
1632
1633/* Warn if EXP contains any computations whose results are not used.
b9861bff
RH
1634 Return 1 if a warning is printed; 0 otherwise. LOCUS is the
1635 (potential) location of the expression. */
28d81abb 1636
150a992a 1637int
b9861bff 1638warn_if_unused_value (tree exp, location_t locus)
28d81abb 1639{
b9861bff 1640 restart:
28d81abb
RK
1641 if (TREE_USED (exp))
1642 return 0;
1643
9790cefd
RH
1644 /* Don't warn about void constructs. This includes casting to void,
1645 void function calls, and statement expressions with a final cast
1646 to void. */
1647 if (VOID_TYPE_P (TREE_TYPE (exp)))
1648 return 0;
1649
b9861bff
RH
1650 if (EXPR_LOCUS (exp))
1651 locus = *EXPR_LOCUS (exp);
1652
28d81abb
RK
1653 switch (TREE_CODE (exp))
1654 {
1655 case PREINCREMENT_EXPR:
1656 case POSTINCREMENT_EXPR:
1657 case PREDECREMENT_EXPR:
1658 case POSTDECREMENT_EXPR:
1659 case MODIFY_EXPR:
1660 case INIT_EXPR:
1661 case TARGET_EXPR:
1662 case CALL_EXPR:
81797aba 1663 case TRY_CATCH_EXPR:
28d81abb
RK
1664 case WITH_CLEANUP_EXPR:
1665 case EXIT_EXPR:
28d81abb
RK
1666 return 0;
1667
1668 case BIND_EXPR:
1669 /* For a binding, warn if no side effect within it. */
b9861bff
RH
1670 exp = BIND_EXPR_BODY (exp);
1671 goto restart;
28d81abb 1672
de73f171 1673 case SAVE_EXPR:
b9861bff
RH
1674 exp = TREE_OPERAND (exp, 0);
1675 goto restart;
de73f171 1676
28d81abb
RK
1677 case TRUTH_ORIF_EXPR:
1678 case TRUTH_ANDIF_EXPR:
1679 /* In && or ||, warn if 2nd operand has no side effect. */
b9861bff
RH
1680 exp = TREE_OPERAND (exp, 1);
1681 goto restart;
28d81abb
RK
1682
1683 case COMPOUND_EXPR:
6de9cd9a 1684 if (TREE_NO_WARNING (exp))
a646a211 1685 return 0;
b9861bff 1686 if (warn_if_unused_value (TREE_OPERAND (exp, 0), locus))
28d81abb 1687 return 1;
4d23e509
RS
1688 /* Let people do `(foo (), 0)' without a warning. */
1689 if (TREE_CONSTANT (TREE_OPERAND (exp, 1)))
1690 return 0;
b9861bff
RH
1691 exp = TREE_OPERAND (exp, 1);
1692 goto restart;
28d81abb
RK
1693
1694 case NOP_EXPR:
1695 case CONVERT_EXPR:
b4ac57ab 1696 case NON_LVALUE_EXPR:
28d81abb 1697 /* Don't warn about conversions not explicit in the user's program. */
6de9cd9a 1698 if (TREE_NO_WARNING (exp))
28d81abb
RK
1699 return 0;
1700 /* Assignment to a cast usually results in a cast of a modify.
55cd1c09
JW
1701 Don't complain about that. There can be an arbitrary number of
1702 casts before the modify, so we must loop until we find the first
1703 non-cast expression and then test to see if that is a modify. */
1704 {
1705 tree tem = TREE_OPERAND (exp, 0);
1706
1707 while (TREE_CODE (tem) == CONVERT_EXPR || TREE_CODE (tem) == NOP_EXPR)
1708 tem = TREE_OPERAND (tem, 0);
1709
de73f171
RK
1710 if (TREE_CODE (tem) == MODIFY_EXPR || TREE_CODE (tem) == INIT_EXPR
1711 || TREE_CODE (tem) == CALL_EXPR)
55cd1c09
JW
1712 return 0;
1713 }
7133e992 1714 goto maybe_warn;
28d81abb 1715
d1e1adfb
JM
1716 case INDIRECT_REF:
1717 /* Don't warn about automatic dereferencing of references, since
1718 the user cannot control it. */
1719 if (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == REFERENCE_TYPE)
b9861bff
RH
1720 {
1721 exp = TREE_OPERAND (exp, 0);
1722 goto restart;
1723 }
4381f7c2
KH
1724 /* Fall through. */
1725
28d81abb 1726 default:
ddbe9812 1727 /* Referencing a volatile value is a side effect, so don't warn. */
2f939d94 1728 if ((DECL_P (exp)
ddbe9812
RS
1729 || TREE_CODE_CLASS (TREE_CODE (exp)) == 'r')
1730 && TREE_THIS_VOLATILE (exp))
1731 return 0;
8d5e6e25
RK
1732
1733 /* If this is an expression which has no operands, there is no value
1734 to be unused. There are no such language-independent codes,
1735 but front ends may define such. */
1736 if (TREE_CODE_CLASS (TREE_CODE (exp)) == 'e'
1737 && TREE_CODE_LENGTH (TREE_CODE (exp)) == 0)
1738 return 0;
1739
7133e992
JJ
1740 maybe_warn:
1741 /* If this is an expression with side effects, don't warn. */
1742 if (TREE_SIDE_EFFECTS (exp))
1743 return 0;
1744
b9861bff 1745 warning ("%Hvalue computed is not used", &locus);
28d81abb
RK
1746 return 1;
1747 }
1748}
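/* Illustrative sketch (added for exposition): how the checks above play
   out for a few C statements.

       x + 1;           warns -- the computed value is discarded
       (void) (x + 1);  no warning -- explicit cast to void
       x++;             no warning -- POSTINCREMENT_EXPR is exempt
       (foo (), 0);     no warning -- trailing constant is tolerated

   (This diagnostic is typically enabled via -Wunused-value.)  */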
28d81abb 1749\f
28d81abb
RK
1750/* Generate RTL for the start of an if-then. COND is the expression
1751 whose truth should be tested.
1752
1753 If EXITFLAG is nonzero, this conditional is visible to
1754 `exit_something'. */
1755
1756void
46c5ad27 1757expand_start_cond (tree cond, int exitflag)
28d81abb
RK
1758{
1759 struct nesting *thiscond = ALLOC_NESTING ();
1760
1761 /* Make an entry on cond_stack for the cond we are entering. */
1762
e2500fed 1763 thiscond->desc = COND_NESTING;
28d81abb
RK
1764 thiscond->next = cond_stack;
1765 thiscond->all = nesting_stack;
1766 thiscond->depth = ++nesting_depth;
1767 thiscond->data.cond.next_label = gen_label_rtx ();
1768 /* Before we encounter an `else', we don't need a separate exit label
1769 unless there are supposed to be exit statements
1770 to exit this conditional. */
1771 thiscond->exit_label = exitflag ? gen_label_rtx () : 0;
1772 thiscond->data.cond.endif_label = thiscond->exit_label;
1773 cond_stack = thiscond;
1774 nesting_stack = thiscond;
1775
b93a436e 1776 do_jump (cond, thiscond->data.cond.next_label, NULL_RTX);
28d81abb
RK
1777}
1778
1779/* Generate RTL between the then-clause and the elseif-clause
1780 of an if-then-elseif-.... */
1781
1782void
46c5ad27 1783expand_start_elseif (tree cond)
28d81abb
RK
1784{
1785 if (cond_stack->data.cond.endif_label == 0)
1786 cond_stack->data.cond.endif_label = gen_label_rtx ();
1787 emit_jump (cond_stack->data.cond.endif_label);
1788 emit_label (cond_stack->data.cond.next_label);
1789 cond_stack->data.cond.next_label = gen_label_rtx ();
37366632 1790 do_jump (cond, cond_stack->data.cond.next_label, NULL_RTX);
28d81abb
RK
1791}
1792
1793/* Generate RTL between the then-clause and the else-clause
1794 of an if-then-else. */
1795
1796void
46c5ad27 1797expand_start_else (void)
28d81abb
RK
1798{
1799 if (cond_stack->data.cond.endif_label == 0)
1800 cond_stack->data.cond.endif_label = gen_label_rtx ();
ca695ac9 1801
28d81abb
RK
1802 emit_jump (cond_stack->data.cond.endif_label);
1803 emit_label (cond_stack->data.cond.next_label);
0f41302f 1804 cond_stack->data.cond.next_label = 0; /* No more _else or _elseif calls. */
28d81abb
RK
1805}
1806
d947ba59
RK
1807/* After calling expand_start_else, turn this "else" into an "else if"
1808 by providing another condition. */
1809
1810void
46c5ad27 1811expand_elseif (tree cond)
d947ba59
RK
1812{
1813 cond_stack->data.cond.next_label = gen_label_rtx ();
1814 do_jump (cond, cond_stack->data.cond.next_label, NULL_RTX);
1815}
1816
28d81abb
RK
1817/* Generate RTL for the end of an if-then.
1818 Pop the record for it off of cond_stack. */
1819
1820void
46c5ad27 1821expand_end_cond (void)
28d81abb
RK
1822{
1823 struct nesting *thiscond = cond_stack;
1824
b93a436e
JL
1825 do_pending_stack_adjust ();
1826 if (thiscond->data.cond.next_label)
1827 emit_label (thiscond->data.cond.next_label);
1828 if (thiscond->data.cond.endif_label)
1829 emit_label (thiscond->data.cond.endif_label);
28d81abb
RK
1830
1831 POPSTACK (cond_stack);
28d81abb
RK
1832}
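/* Illustrative sketch (added for exposition): the call sequence a front
   end would use, with the routines above, to expand

       if (a) s1; else if (b) s2; else s3;

   namely

       expand_start_cond (a, 0);     ... emit s1 ...
       expand_start_elseif (b);      ... emit s2 ...
       expand_start_else ();         ... emit s3 ...
       expand_end_cond ();
*/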
1833\f
0e9e1e0a 1834/* Return nonzero if we should preserve sub-expressions as separate
28d81abb 1835 pseudos. We never do so if we aren't optimizing. We always do so
6af8eb57 1836 if -fexpensive-optimizations. */
28d81abb
RK
1837
1838int
46c5ad27 1839preserve_subexpressions_p (void)
28d81abb 1840{
28d81abb
RK
1841 if (flag_expensive_optimizations)
1842 return 1;
1843
6af8eb57 1844 if (optimize == 0 || cfun == 0 || cfun->stmt == 0)
28d81abb
RK
1845 return 0;
1846
6af8eb57 1847 return 1;
28d81abb
RK
1848}
1849
28d81abb
RK
1850\f
1851/* Generate RTL to return from the current function, with no value.
1852 (That is, we do not do anything about returning any value.) */
1853
1854void
46c5ad27 1855expand_null_return (void)
28d81abb 1856{
4381f7c2 1857 /* If this function was declared to return a value, but we
bd695e1e 1858 didn't, clobber the return registers so that they are not
a1f300c0 1859 propagated live to the rest of the function. */
c13fde05 1860 clobber_return_register ();
28d81abb 1861
ac45df5d 1862 expand_null_return_1 ();
28d81abb
RK
1863}
1864
6e3077c6
EB
1865/* Generate RTL to return directly from the current function.
1866 (That is, we bypass any return value.) */
1867
1868void
1869expand_naked_return (void)
1870{
ac45df5d 1871 rtx end_label;
6e3077c6
EB
1872
1873 clear_pending_stack_adjust ();
1874 do_pending_stack_adjust ();
6e3077c6 1875
ac45df5d 1876 end_label = naked_return_label;
6e3077c6
EB
1877 if (end_label == 0)
1878 end_label = naked_return_label = gen_label_rtx ();
ac45df5d
RH
1879
1880 emit_jump (end_label);
6e3077c6
EB
1881}
1882
969d70ca
JH
1883/* Try to guess whether the value of return means error code. */
1884static enum br_predictor
46c5ad27 1885return_prediction (rtx val)
969d70ca
JH
1886{
1887 /* Different heuristics for pointers and scalars. */
1888 if (POINTER_TYPE_P (TREE_TYPE (DECL_RESULT (current_function_decl))))
1889 {
1890 /* NULL is usually not returned. */
1891 if (val == const0_rtx)
1892 return PRED_NULL_RETURN;
1893 }
1894 else
1895 {
1896 /* Negative return values are often used to indicate
1897 errors. */
1898 if (GET_CODE (val) == CONST_INT
1899 && INTVAL (val) < 0)
1900 return PRED_NEGATIVE_RETURN;
 1901 /* Constant return values are also usually errors,
1902 zero/one often mean booleans so exclude them from the
1903 heuristics. */
1904 if (CONSTANT_P (val)
1905 && (val != const0_rtx && val != const1_rtx))
1906 return PRED_CONST_RETURN;
1907 }
1908 return PRED_NO_PREDICTION;
1909}
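/* Illustrative sketch (added for exposition): what the heuristics above
   predict for typical return statements; the prediction marks that
   return path as unlikely.

       return NULL;     PRED_NULL_RETURN      (pointer result, null)
       return -1;       PRED_NEGATIVE_RETURN  (negative constant)
       return 42;       PRED_CONST_RETURN     (constant, not 0 or 1)
       return 0;        no prediction         (0/1 often mean booleans)
       return x + y;    no prediction         (not a constant)
*/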
1910
c988af2b
RS
1911
1912/* If the current function returns values in the most significant part
1913 of a register, shift return value VAL appropriately. The mode of
1914 the function's return type is known not to be BLKmode. */
1915
1916static rtx
1917shift_return_value (rtx val)
1918{
1919 tree type;
1920
1921 type = TREE_TYPE (DECL_RESULT (current_function_decl));
1922 if (targetm.calls.return_in_msb (type))
1923 {
1924 rtx target;
1925 HOST_WIDE_INT shift;
1926
1927 target = DECL_RTL (DECL_RESULT (current_function_decl));
1928 shift = (GET_MODE_BITSIZE (GET_MODE (target))
1929 - BITS_PER_UNIT * int_size_in_bytes (type));
1930 if (shift > 0)
09b52670 1931 val = expand_shift (LSHIFT_EXPR, GET_MODE (target),
c988af2b 1932 gen_lowpart (GET_MODE (target), val),
09b52670 1933 build_int_2 (shift, 0), target, 1);
c988af2b
RS
1934 }
1935 return val;
1936}
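/* Illustrative sketch (added for exposition): the arithmetic above on a
   target whose ABI returns aggregates in the most significant end of a
   register.  For a 3-byte structure returned in a 32-bit register,

       shift = 32 - 8 * 3 = 8,

   so the value is shifted left by 8 bits before it is moved into the
   return register.  */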
1937
1938
28d81abb
RK
1939/* Generate RTL to return from the current function, with value VAL. */
1940
8d800403 1941static void
46c5ad27 1942expand_value_return (rtx val)
28d81abb 1943{
969d70ca
JH
1944 rtx return_reg;
1945 enum br_predictor pred;
1946
d50672ef
JH
1947 if (flag_guess_branch_prob
1948 && (pred = return_prediction (val)) != PRED_NO_PREDICTION)
969d70ca
JH
1949 {
1950 /* Emit information for branch prediction. */
1951 rtx note;
1952
2e040219 1953 note = emit_note (NOTE_INSN_PREDICTION);
969d70ca
JH
1954
1955 NOTE_PREDICTION (note) = NOTE_PREDICT (pred, NOT_TAKEN);
1956
1957 }
1958
969d70ca 1959 return_reg = DECL_RTL (DECL_RESULT (current_function_decl));
28d81abb
RK
1960
1961 /* Copy the value to the return location
1962 unless it's already there. */
1963
1964 if (return_reg != val)
77636079 1965 {
77636079 1966 tree type = TREE_TYPE (DECL_RESULT (current_function_decl));
61f71b34
DD
1967 if (targetm.calls.promote_function_return (TREE_TYPE (current_function_decl)))
1968 {
8df83eae 1969 int unsignedp = TYPE_UNSIGNED (type);
61f71b34
DD
1970 enum machine_mode old_mode
1971 = DECL_MODE (DECL_RESULT (current_function_decl));
1972 enum machine_mode mode
1973 = promote_mode (type, old_mode, &unsignedp, 1);
1974
1975 if (mode != old_mode)
1976 val = convert_modes (mode, old_mode, val, unsignedp);
1977 }
14a774a9 1978 if (GET_CODE (return_reg) == PARALLEL)
6e985040 1979 emit_group_load (return_reg, val, type, int_size_in_bytes (type));
14a774a9 1980 else
77636079
RS
1981 emit_move_insn (return_reg, val);
1982 }
14a774a9 1983
ac45df5d 1984 expand_null_return_1 ();
28d81abb
RK
1985}
1986
ac45df5d 1987/* Output a return with no value. */
28d81abb
RK
1988
1989static void
ac45df5d 1990expand_null_return_1 (void)
28d81abb 1991{
ac45df5d 1992 rtx end_label;
28d81abb
RK
1993
1994 clear_pending_stack_adjust ();
1995 do_pending_stack_adjust ();
28d81abb 1996
ac45df5d 1997 end_label = return_label;
396ad517
JDA
1998 if (end_label == 0)
1999 end_label = return_label = gen_label_rtx ();
ac45df5d 2000 emit_jump (end_label);
28d81abb
RK
2001}
2002\f
2003/* Generate RTL to evaluate the expression RETVAL and return it
2004 from the current function. */
2005
2006void
46c5ad27 2007expand_return (tree retval)
28d81abb 2008{
19e7881c 2009 rtx result_rtl;
b3694847 2010 rtx val = 0;
28d81abb 2011 tree retval_rhs;
28d81abb
RK
2012
2013 /* If function wants no value, give it none. */
2014 if (TREE_CODE (TREE_TYPE (TREE_TYPE (current_function_decl))) == VOID_TYPE)
2015 {
37366632 2016 expand_expr (retval, NULL_RTX, VOIDmode, 0);
7e70e7c5 2017 emit_queue ();
28d81abb
RK
2018 expand_null_return ();
2019 return;
2020 }
2021
ea11ca7e 2022 if (retval == error_mark_node)
c9407e4c
MM
2023 {
2024 /* Treat this like a return of no value from a function that
2025 returns a value. */
2026 expand_null_return ();
786de7eb 2027 return;
c9407e4c 2028 }
ea11ca7e 2029 else if (TREE_CODE (retval) == RESULT_DECL)
28d81abb 2030 retval_rhs = retval;
ac45df5d
RH
2031 else if ((TREE_CODE (retval) == MODIFY_EXPR
2032 || TREE_CODE (retval) == INIT_EXPR)
28d81abb
RK
2033 && TREE_CODE (TREE_OPERAND (retval, 0)) == RESULT_DECL)
2034 retval_rhs = TREE_OPERAND (retval, 1);
28d81abb 2035 else
6de9cd9a 2036 retval_rhs = retval;
28d81abb 2037
19e7881c
MM
2038 result_rtl = DECL_RTL (DECL_RESULT (current_function_decl));
2039
4c485b63
JL
2040 /* If the result is an aggregate that is being returned in one (or more)
2041 registers, load the registers here. The compiler currently can't handle
2042 copying a BLKmode value into registers. We could put this code in a
2043 more general area (for use by everyone instead of just function
2044 call/return), but until this feature is generally usable it is kept here
ac45df5d 2045 (and in expand_call). */
4c485b63
JL
2046
2047 if (retval_rhs != 0
2048 && TYPE_MODE (TREE_TYPE (retval_rhs)) == BLKmode
f8cfc6aa 2049 && REG_P (result_rtl))
4c485b63 2050 {
770ae6cc
RK
2051 int i;
2052 unsigned HOST_WIDE_INT bitpos, xbitpos;
c988af2b 2053 unsigned HOST_WIDE_INT padding_correction = 0;
770ae6cc
RK
2054 unsigned HOST_WIDE_INT bytes
2055 = int_size_in_bytes (TREE_TYPE (retval_rhs));
4c485b63 2056 int n_regs = (bytes + UNITS_PER_WORD - 1) / UNITS_PER_WORD;
770ae6cc
RK
2057 unsigned int bitsize
2058 = MIN (TYPE_ALIGN (TREE_TYPE (retval_rhs)), BITS_PER_WORD);
703ad42b 2059 rtx *result_pseudos = alloca (sizeof (rtx) * n_regs);
c16ddde3 2060 rtx result_reg, src = NULL_RTX, dst = NULL_RTX;
4c485b63 2061 rtx result_val = expand_expr (retval_rhs, NULL_RTX, VOIDmode, 0);
af55da56 2062 enum machine_mode tmpmode, result_reg_mode;
4c485b63 2063
2954d7db
RK
2064 if (bytes == 0)
2065 {
2066 expand_null_return ();
2067 return;
2068 }
2069
c988af2b
RS
2070 /* If the structure doesn't take up a whole number of words, see
2071 whether the register value should be padded on the left or on
2072 the right. Set PADDING_CORRECTION to the number of padding
2073 bits needed on the left side.
2074
 2075 In most ABIs, the structure will be returned at the least significant end of
2076 the register, which translates to right padding on little-endian
2077 targets and left padding on big-endian targets. The opposite
2078 holds if the structure is returned at the most significant
2079 end of the register. */
2080 if (bytes % UNITS_PER_WORD != 0
2081 && (targetm.calls.return_in_msb (TREE_TYPE (retval_rhs))
2082 ? !BYTES_BIG_ENDIAN
2083 : BYTES_BIG_ENDIAN))
2084 padding_correction = (BITS_PER_WORD - ((bytes % UNITS_PER_WORD)
2085 * BITS_PER_UNIT));
a7f875d7 2086
4381f7c2 2087 /* Copy the structure BITSIZE bits at a time. */
c988af2b 2088 for (bitpos = 0, xbitpos = padding_correction;
a7f875d7
RK
2089 bitpos < bytes * BITS_PER_UNIT;
2090 bitpos += bitsize, xbitpos += bitsize)
4c485b63 2091 {
a7f875d7 2092 /* We need a new destination pseudo each time xbitpos is
c988af2b 2093 on a word boundary and when xbitpos == padding_correction
a7f875d7
RK
2094 (the first time through). */
2095 if (xbitpos % BITS_PER_WORD == 0
c988af2b 2096 || xbitpos == padding_correction)
4c485b63 2097 {
a7f875d7
RK
2098 /* Generate an appropriate register. */
2099 dst = gen_reg_rtx (word_mode);
2100 result_pseudos[xbitpos / BITS_PER_WORD] = dst;
2101
8a38ed86
AM
2102 /* Clear the destination before we move anything into it. */
2103 emit_move_insn (dst, CONST0_RTX (GET_MODE (dst)));
4c485b63 2104 }
a7f875d7
RK
2105
2106 /* We need a new source operand each time bitpos is on a word
2107 boundary. */
2108 if (bitpos % BITS_PER_WORD == 0)
2109 src = operand_subword_force (result_val,
2110 bitpos / BITS_PER_WORD,
2111 BLKmode);
2112
2113 /* Use bitpos for the source extraction (left justified) and
2114 xbitpos for the destination store (right justified). */
2115 store_bit_field (dst, bitsize, xbitpos % BITS_PER_WORD, word_mode,
2116 extract_bit_field (src, bitsize,
2117 bitpos % BITS_PER_WORD, 1,
19caa751 2118 NULL_RTX, word_mode, word_mode,
04050c69
RK
2119 BITS_PER_WORD),
2120 BITS_PER_WORD);
4c485b63
JL
2121 }
2122
c988af2b
RS
2123 tmpmode = GET_MODE (result_rtl);
2124 if (tmpmode == BLKmode)
2125 {
2126 /* Find the smallest integer mode large enough to hold the
2127 entire structure and use that mode instead of BLKmode
2128 on the USE insn for the return register. */
2129 for (tmpmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2130 tmpmode != VOIDmode;
2131 tmpmode = GET_MODE_WIDER_MODE (tmpmode))
2132 /* Have we found a large enough mode? */
2133 if (GET_MODE_SIZE (tmpmode) >= bytes)
2134 break;
4c485b63 2135
c988af2b
RS
2136 /* No suitable mode found. */
2137 if (tmpmode == VOIDmode)
2138 abort ();
4c485b63 2139
c988af2b
RS
2140 PUT_MODE (result_rtl, tmpmode);
2141 }
3ffeb8f1 2142
af55da56
JW
2143 if (GET_MODE_SIZE (tmpmode) < GET_MODE_SIZE (word_mode))
2144 result_reg_mode = word_mode;
2145 else
2146 result_reg_mode = tmpmode;
2147 result_reg = gen_reg_rtx (result_reg_mode);
2148
3ffeb8f1 2149 emit_queue ();
3ffeb8f1 2150 for (i = 0; i < n_regs; i++)
af55da56 2151 emit_move_insn (operand_subword (result_reg, i, 0, result_reg_mode),
3ffeb8f1 2152 result_pseudos[i]);
4c485b63 2153
af55da56
JW
2154 if (tmpmode != result_reg_mode)
2155 result_reg = gen_lowpart (tmpmode, result_reg);
2156
4c485b63
JL
2157 expand_value_return (result_reg);
2158 }
7cc8342c
RH
2159 else if (retval_rhs != 0
2160 && !VOID_TYPE_P (TREE_TYPE (retval_rhs))
f8cfc6aa 2161 && (REG_P (result_rtl)
7cc8342c 2162 || (GET_CODE (result_rtl) == PARALLEL)))
28d81abb 2163 {
14a774a9
RK
2164 /* Calculate the return value into a temporary (usually a pseudo
2165 reg). */
1da68f56
RK
2166 tree ot = TREE_TYPE (DECL_RESULT (current_function_decl));
2167 tree nt = build_qualified_type (ot, TYPE_QUALS (ot) | TYPE_QUAL_CONST);
2168
2169 val = assign_temp (nt, 0, 0, 1);
dd98f85c
JM
2170 val = expand_expr (retval_rhs, val, GET_MODE (val), 0);
2171 val = force_not_mem (val);
28d81abb 2172 emit_queue ();
ac45df5d 2173 /* Return the calculated value. */
c988af2b 2174 expand_value_return (shift_return_value (val));
28d81abb
RK
2175 }
2176 else
2177 {
ac45df5d 2178 /* No hard reg used; calculate value into hard return reg. */
cba389cd 2179 expand_expr (retval, const0_rtx, VOIDmode, 0);
28d81abb 2180 emit_queue ();
14a774a9 2181 expand_value_return (result_rtl);
28d81abb
RK
2182 }
2183}
28d81abb 2184\f
28d81abb
RK
2185/* Generate the RTL code for entering a binding contour.
2186 The variables are declared one by one, by calls to `expand_decl'.
2187
8e91754e
MM
2188 FLAGS is a bitwise or of the following flags:
2189
2190 1 - Nonzero if this construct should be visible to
2191 `exit_something'.
2192
2193 2 - Nonzero if this contour does not require a
2194 NOTE_INSN_BLOCK_BEG note. Virtually all calls from
2195 language-independent code should set this flag because they
2196 will not create corresponding BLOCK nodes. (There should be
2197 a one-to-one correspondence between NOTE_INSN_BLOCK_BEG notes
2198 and BLOCKs.) If this flag is set, MARK_ENDS should be zero
4381f7c2 2199 when expand_end_bindings is called.
a97901e6
MM
2200
2201 If we are creating a NOTE_INSN_BLOCK_BEG note, a BLOCK may
2202 optionally be supplied. If so, it becomes the NOTE_BLOCK for the
2203 note. */
28d81abb
RK
2204
2205void
46c5ad27 2206expand_start_bindings_and_block (int flags, tree block)
28d81abb
RK
2207{
2208 struct nesting *thisblock = ALLOC_NESTING ();
8e91754e
MM
2209 rtx note;
2210 int exit_flag = ((flags & 1) != 0);
2211 int block_flag = ((flags & 2) == 0);
4381f7c2 2212
a97901e6
MM
2213 /* If a BLOCK is supplied, then the caller should be requesting a
2214 NOTE_INSN_BLOCK_BEG note. */
2215 if (!block_flag && block)
2216 abort ();
8e91754e 2217
a97901e6 2218 /* Create a note to mark the beginning of the block. */
1ea463a2 2219 note = emit_note (NOTE_INSN_DELETED);
4381f7c2 2220
28d81abb
RK
2221 /* Make an entry on block_stack for the block we are entering. */
2222
e2500fed 2223 thisblock->desc = BLOCK_NESTING;
28d81abb
RK
2224 thisblock->next = block_stack;
2225 thisblock->all = nesting_stack;
2226 thisblock->depth = ++nesting_depth;
3f1d071b 2227 thisblock->data.block.block_target_temp_slot_level = target_temp_slot_level;
e976b8b2
MS
2228
2229 thisblock->data.block.conditional_code = 0;
2230 thisblock->data.block.last_unconditional_cleanup = note;
a571f7a0
MM
2231 /* When we insert instructions after the last unconditional cleanup,
2232 we don't adjust last_insn. That means that a later add_insn will
2233 clobber the instructions we've just added. The easiest way to
2234 fix this is to just insert another instruction here, so that the
2235 instructions inserted after the last unconditional cleanup are
2236 never the last instruction. */
2e040219 2237 emit_note (NOTE_INSN_DELETED);
e976b8b2 2238
28d81abb 2239 thisblock->data.block.first_insn = note;
3f1d071b 2240 thisblock->data.block.block_start_count = ++current_block_start_count;
28d81abb
RK
2241 thisblock->exit_label = exit_flag ? gen_label_rtx () : 0;
2242 block_stack = thisblock;
2243 nesting_stack = thisblock;
2244
b93a436e
JL
2245 /* Make a new level for allocating stack slots. */
2246 push_temp_slots ();
28d81abb
RK
2247}
2248
e976b8b2
MS
2249/* Specify the scope of temporaries created by TARGET_EXPRs. Similar
2250 to CLEANUP_POINT_EXPR, but handles cases when a series of calls to
2251 expand_expr are made. After we end the region, we know that all
2252 space for all temporaries that were created by TARGET_EXPRs will be
2253 destroyed and their space freed for reuse. */
2254
2255void
46c5ad27 2256expand_start_target_temps (void)
e976b8b2
MS
2257{
2258 /* This is so that even if the result is preserved, the space
2259 allocated will be freed, as we know that it is no longer in use. */
2260 push_temp_slots ();
2261
2262 /* Start a new binding layer that will keep track of all cleanup
2263 actions to be performed. */
8e91754e 2264 expand_start_bindings (2);
e976b8b2
MS
2265
2266 target_temp_slot_level = temp_slot_level;
2267}
2268
2269void
46c5ad27 2270expand_end_target_temps (void)
e976b8b2
MS
2271{
2272 expand_end_bindings (NULL_TREE, 0, 0);
4381f7c2 2273
e976b8b2
MS
2274 /* This is so that even if the result is preserved, the space
2275 allocated will be freed, as we know that it is no longer in use. */
2276 pop_temp_slots ();
2277}
2278
0e9e1e0a 2279/* Given a pointer to a BLOCK node return nonzero if (and only if) the node
deb5e280
JM
2280 in question represents the outermost pair of curly braces (i.e. the "body
2281 block") of a function or method.
2282
2283 For any BLOCK node representing a "body block" of a function or method, the
2284 BLOCK_SUPERCONTEXT of the node will point to another BLOCK node which
2285 represents the outermost (function) scope for the function or method (i.e.
2286 the one which includes the formal parameters). The BLOCK_SUPERCONTEXT of
4381f7c2 2287 *that* node in turn will point to the relevant FUNCTION_DECL node. */
deb5e280
JM
2288
2289int
46c5ad27 2290is_body_block (tree stmt)
deb5e280 2291{
2896d056
ZW
2292 if (lang_hooks.no_body_blocks)
2293 return 0;
2294
deb5e280
JM
2295 if (TREE_CODE (stmt) == BLOCK)
2296 {
2297 tree parent = BLOCK_SUPERCONTEXT (stmt);
2298
2299 if (parent && TREE_CODE (parent) == BLOCK)
2300 {
2301 tree grandparent = BLOCK_SUPERCONTEXT (parent);
2302
2303 if (grandparent && TREE_CODE (grandparent) == FUNCTION_DECL)
2304 return 1;
2305 }
2306 }
2307
2308 return 0;
2309}
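/* Illustrative sketch (added for exposition): the BLOCK nesting the
   predicate above checks.  For

       int f (int a) { int b; return a + b; }

   the body block is the BLOCK for "{ int b; ... }"; its
   BLOCK_SUPERCONTEXT is the outermost (function) scope BLOCK holding the
   parameter A, and that block's BLOCK_SUPERCONTEXT is the FUNCTION_DECL
   for f.  */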
2310
e976b8b2
MS
2311/* True if we are currently emitting insns in an area of output code
2312 that is controlled by a conditional expression. This is used by
2313 the cleanup handling code to generate conditional cleanup actions. */
2314
2315int
46c5ad27 2316conditional_context (void)
e976b8b2
MS
2317{
2318 return block_stack && block_stack->data.block.conditional_code;
2319}
2320
91088ddb
JM
2321/* Return an opaque pointer to the current nesting level, so frontend code
2322 can check its own sanity. */
2323
2324struct nesting *
46c5ad27 2325current_nesting_level (void)
91088ddb
JM
2326{
2327 return cfun ? block_stack : 0;
2328}
2329
ba716ac9
BS
2330/* Emit code to restore vital registers at the beginning of a nonlocal goto
2331 handler. */
2332static void
46c5ad27 2333expand_nl_goto_receiver (void)
ba716ac9 2334{
6de9cd9a 2335 /* The FP is clobbered when we get here, so we have to make sure it's
e292dbb0
WH
2336 marked as used by this function. */
2337 emit_insn (gen_rtx_USE (VOIDmode, hard_frame_pointer_rtx));
2338
2339 /* Mark the static chain as clobbered here so life information
2340 doesn't get messed up for it. */
2341 emit_insn (gen_rtx_CLOBBER (VOIDmode, static_chain_rtx));
2342
ba716ac9
BS
2343#ifdef HAVE_nonlocal_goto
2344 if (! HAVE_nonlocal_goto)
2345#endif
2346 /* First adjust our frame pointer to its actual value. It was
2347 previously set to the start of the virtual area corresponding to
2348 the stacked variables when we branched here and now needs to be
2349 adjusted to the actual hardware fp value.
2350
 2351 Assignments to virtual registers are converted by
2352 instantiate_virtual_regs into the corresponding assignment
2353 to the underlying register (fp in this case) that makes
2354 the original assignment true.
2355 So the following insn will actually be
2356 decrementing fp by STARTING_FRAME_OFFSET. */
2357 emit_move_insn (virtual_stack_vars_rtx, hard_frame_pointer_rtx);
2358
2359#if ARG_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
2360 if (fixed_regs[ARG_POINTER_REGNUM])
2361 {
2362#ifdef ELIMINABLE_REGS
2363 /* If the argument pointer can be eliminated in favor of the
2364 frame pointer, we don't need to restore it. We assume here
2365 that if such an elimination is present, it can always be used.
2366 This is the case on all known machines; if we don't make this
2367 assumption, we do unnecessary saving on many machines. */
8b60264b 2368 static const struct elims {const int from, to;} elim_regs[] = ELIMINABLE_REGS;
ba716ac9
BS
2369 size_t i;
2370
b6a1cbae 2371 for (i = 0; i < ARRAY_SIZE (elim_regs); i++)
ba716ac9
BS
2372 if (elim_regs[i].from == ARG_POINTER_REGNUM
2373 && elim_regs[i].to == HARD_FRAME_POINTER_REGNUM)
2374 break;
2375
b6a1cbae 2376 if (i == ARRAY_SIZE (elim_regs))
ba716ac9
BS
2377#endif
2378 {
2379 /* Now restore our arg pointer from the address at which it
278ed218 2380 was saved in our stack frame. */
ba716ac9 2381 emit_move_insn (virtual_incoming_args_rtx,
278ed218 2382 copy_to_reg (get_arg_pointer_save_area (cfun)));
ba716ac9
BS
2383 }
2384 }
2385#endif
2386
2387#ifdef HAVE_nonlocal_goto_receiver
2388 if (HAVE_nonlocal_goto_receiver)
2389 emit_insn (gen_nonlocal_goto_receiver ());
2390#endif
e292dbb0
WH
2391
2392 /* @@@ This is a kludge. Not all machine descriptions define a blockage
2393 insn, but we must not allow the code we just generated to be reordered
2394 by scheduling. Specifically, the update of the frame pointer must
 2395 happen immediately, not later. So emit an ASM_INPUT to act as a blockage
2396 insn. */
2397 emit_insn (gen_rtx_ASM_INPUT (VOIDmode, ""));
ba716ac9
BS
2398}
2399
ba716677
MM
2400/* Warn about any unused VARS (which may contain nodes other than
2401 VAR_DECLs, but such nodes are ignored). The nodes are connected
2402 via the TREE_CHAIN field. */
2403
2404void
46c5ad27 2405warn_about_unused_variables (tree vars)
ba716677
MM
2406{
2407 tree decl;
2408
078721e1 2409 if (warn_unused_variable)
ba716677 2410 for (decl = vars; decl; decl = TREE_CHAIN (decl))
4381f7c2 2411 if (TREE_CODE (decl) == VAR_DECL
ba716677
MM
2412 && ! TREE_USED (decl)
2413 && ! DECL_IN_SYSTEM_HEADER (decl)
4381f7c2 2414 && DECL_NAME (decl) && ! DECL_ARTIFICIAL (decl))
ddd2d57e 2415 warning ("%Junused variable '%D'", decl, decl);
ba716677
MM
2416}
2417
28d81abb 2418/* Generate RTL code to terminate a binding contour.
e97b5c12
MM
2419
2420 VARS is the chain of VAR_DECL nodes for the variables bound in this
2421 contour. There may actually be other nodes in this chain, but any
2422 nodes other than VAR_DECLS are ignored.
2423
28d81abb
RK
2424 MARK_ENDS is nonzero if we should put a note at the beginning
2425 and end of this binding contour.
2426
cda26058
RK
2427 DONT_JUMP_IN is positive if it is not valid to jump into this contour,
2428 zero if we can jump into this contour only if it does not have a saved
2429 stack level, and negative if we are not to check for invalid use of
2430 labels (because the front end does that). */
28d81abb
RK
2431
2432void
1ea463a2 2433expand_end_bindings (tree vars, int mark_ends ATTRIBUTE_UNUSED,
ac45df5d 2434 int dont_jump_in ATTRIBUTE_UNUSED)
28d81abb 2435{
b3694847 2436 struct nesting *thisblock = block_stack;
e976b8b2 2437
ba716677
MM
2438 /* If any of the variables in this scope were not used, warn the
2439 user. */
2440 warn_about_unused_variables (vars);
28d81abb 2441
28d81abb
RK
2442 if (thisblock->exit_label)
2443 {
2444 do_pending_stack_adjust ();
2445 emit_label (thisblock->exit_label);
2446 }
2447
ac45df5d 2448 /* Mark the beginning and end of the scope if requested. */
c7d2d61d 2449
1ea463a2
RH
2450 /* Get rid of the beginning-mark if we don't make an end-mark. */
2451 NOTE_LINE_NUMBER (thisblock->data.block.first_insn) = NOTE_INSN_DELETED;
c7d2d61d 2452
e976b8b2 2453 /* Restore the temporary level of TARGET_EXPRs. */
3f1d071b 2454 target_temp_slot_level = thisblock->data.block.block_target_temp_slot_level;
e976b8b2 2455
28d81abb
RK
2456 /* Restore block_stack level for containing block. */
2457
28d81abb
RK
2458 POPSTACK (block_stack);
2459
2460 /* Pop the stack slot nesting and free any slots at this level. */
2461 pop_temp_slots ();
2462}
2463\f
2464/* Generate RTL for the automatic variable declaration DECL.
ec5cd386 2465 (Other kinds of declarations are simply ignored if seen here.) */
28d81abb
RK
2466
2467void
46c5ad27 2468expand_decl (tree decl)
28d81abb 2469{
ca695ac9
JB
2470 tree type;
2471
ca695ac9 2472 type = TREE_TYPE (decl);
28d81abb 2473
eabb9ed0
RK
2474 /* For a CONST_DECL, set mode, alignment, and sizes from those of the
2475 type in case this node is used in a reference. */
2476 if (TREE_CODE (decl) == CONST_DECL)
2477 {
2478 DECL_MODE (decl) = TYPE_MODE (type);
2479 DECL_ALIGN (decl) = TYPE_ALIGN (type);
2480 DECL_SIZE (decl) = TYPE_SIZE (type);
2481 DECL_SIZE_UNIT (decl) = TYPE_SIZE_UNIT (type);
2482 return;
2483 }
28d81abb 2484
eabb9ed0
RK
2485 /* Otherwise, only automatic variables need any expansion done. Static and
2486 external variables, and external functions, will be handled by
2487 `assemble_variable' (called from finish_decl). TYPE_DECL requires
2488 nothing. PARM_DECLs are handled in `assign_parms'. */
28d81abb
RK
2489 if (TREE_CODE (decl) != VAR_DECL)
2490 return;
eabb9ed0 2491
44fe2e80 2492 if (TREE_STATIC (decl) || DECL_EXTERNAL (decl))
28d81abb
RK
2493 return;
2494
2495 /* Create the RTL representation for the variable. */
2496
2497 if (type == error_mark_node)
19e7881c 2498 SET_DECL_RTL (decl, gen_rtx_MEM (BLKmode, const0_rtx));
1da68f56 2499
28d81abb
RK
2500 else if (DECL_SIZE (decl) == 0)
2501 /* Variable with incomplete type. */
2502 {
abde42f7 2503 rtx x;
28d81abb
RK
2504 if (DECL_INITIAL (decl) == 0)
2505 /* Error message was already done; now avoid a crash. */
abde42f7 2506 x = gen_rtx_MEM (BLKmode, const0_rtx);
28d81abb
RK
2507 else
2508 /* An initializer is going to decide the size of this array.
2509 Until we know the size, represent its address with a reg. */
abde42f7 2510 x = gen_rtx_MEM (BLKmode, gen_reg_rtx (Pmode));
3bdf5ad1 2511
abde42f7
JH
2512 set_mem_attributes (x, decl, 1);
2513 SET_DECL_RTL (decl, x);
28d81abb 2514 }
8fff4fc1 2515 else if (use_register_for_decl (decl))
28d81abb
RK
2516 {
2517 /* Automatic variable that can go in a register. */
8df83eae 2518 int unsignedp = TYPE_UNSIGNED (type);
28612f9e
RK
2519 enum machine_mode reg_mode
2520 = promote_mode (type, DECL_MODE (decl), &unsignedp, 0);
98f3b471 2521
19e7881c 2522 SET_DECL_RTL (decl, gen_reg_rtx (reg_mode));
0d4903b8 2523
0b068ee9 2524 /* Note if the object is a user variable. */
7dc8b126 2525 if (!DECL_ARTIFICIAL (decl))
0b068ee9
JL
2526 {
2527 mark_user_reg (DECL_RTL (decl));
2528
2529 /* Trust user variables which have a pointer type to really
2530 be pointers. Do not trust compiler generated temporaries
2531 as our type system is totally busted as it relates to
2532 pointer arithmetic which translates into lots of compiler
2533 generated objects with pointer types, but which are not really
2534 pointers. */
2535 if (POINTER_TYPE_P (type))
2536 mark_reg_pointer (DECL_RTL (decl),
2537 TYPE_ALIGN (TREE_TYPE (TREE_TYPE (decl))));
2538 }
258a120b
JM
2539
2540 maybe_set_unchanging (DECL_RTL (decl), decl);
28d81abb 2541 }
0df15c2c 2542
4559fd9e 2543 else if (TREE_CODE (DECL_SIZE_UNIT (decl)) == INTEGER_CST
5e4ef18a 2544 && ! (flag_stack_check && ! STACK_CHECK_BUILTIN
05bccae2
RK
2545 && 0 < compare_tree_int (DECL_SIZE_UNIT (decl),
2546 STACK_CHECK_MAX_VAR_SIZE)))
28d81abb
RK
2547 {
2548 /* Variable of fixed size that goes on the stack. */
2549 rtx oldaddr = 0;
2550 rtx addr;
0d4903b8 2551 rtx x;
28d81abb
RK
2552
2553 /* If we previously made RTL for this decl, it must be an array
2554 whose size was determined by the initializer.
2555 The old address was a register; set that register now
2556 to the proper address. */
19e7881c 2557 if (DECL_RTL_SET_P (decl))
28d81abb 2558 {
3c0cb5de 2559 if (!MEM_P (DECL_RTL (decl))
f8cfc6aa 2560 || !REG_P (XEXP (DECL_RTL (decl), 0)))
28d81abb
RK
2561 abort ();
2562 oldaddr = XEXP (DECL_RTL (decl), 0);
2563 }
2564
28d81abb
RK
2565 /* Set alignment we actually gave this decl. */
2566 DECL_ALIGN (decl) = (DECL_MODE (decl) == BLKmode ? BIGGEST_ALIGNMENT
2567 : GET_MODE_BITSIZE (DECL_MODE (decl)));
11cf4d18 2568 DECL_USER_ALIGN (decl) = 0;
28d81abb 2569
9432c136 2570 x = assign_temp (decl, 1, 1, 1);
0d4903b8
RK
2571 set_mem_attributes (x, decl, 1);
2572 SET_DECL_RTL (decl, x);
2573
28d81abb
RK
2574 if (oldaddr)
2575 {
2576 addr = force_operand (XEXP (DECL_RTL (decl), 0), oldaddr);
2577 if (addr != oldaddr)
2578 emit_move_insn (oldaddr, addr);
2579 }
28d81abb
RK
2580 }
2581 else
2582 /* Dynamic-size object: must push space on the stack. */
2583 {
abde42f7 2584 rtx address, size, x;
28d81abb
RK
2585
2586 /* Record the stack pointer on entry to block, if have
2587 not already done so. */
7393c642 2588 do_pending_stack_adjust ();
28d81abb 2589
1c9766da
RK
2590 /* Compute the variable's size, in bytes. This will expand any
2591 needed SAVE_EXPRs for the first time. */
4559fd9e 2592 size = expand_expr (DECL_SIZE_UNIT (decl), NULL_RTX, VOIDmode, 0);
28d81abb
RK
2593 free_temp_slots ();
2594
ff91ad08 2595 /* Allocate space on the stack for the variable. Note that
4381f7c2 2596 DECL_ALIGN says how the variable is to be aligned and we
ff91ad08
RK
2597 cannot use it to conclude anything about the alignment of
2598 the size. */
37366632 2599 address = allocate_dynamic_stack_space (size, NULL_RTX,
ff91ad08 2600 TYPE_ALIGN (TREE_TYPE (decl)));
28d81abb 2601
28d81abb 2602 /* Reference the variable indirect through that rtx. */
abde42f7
JH
2603 x = gen_rtx_MEM (DECL_MODE (decl), address);
2604 set_mem_attributes (x, decl, 1);
2605 SET_DECL_RTL (decl, x);
28d81abb 2606
2207e295 2607
28d81abb
RK
2608 /* Indicate the alignment we actually gave this variable. */
2609#ifdef STACK_BOUNDARY
2610 DECL_ALIGN (decl) = STACK_BOUNDARY;
2611#else
2612 DECL_ALIGN (decl) = BIGGEST_ALIGNMENT;
2613#endif
11cf4d18 2614 DECL_USER_ALIGN (decl) = 0;
28d81abb 2615 }
28d81abb
RK
2616}
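/* Illustrative sketch (added for exposition): where the cases above send
   typical automatic variables.

       int i;          fixed size, may live in a pseudo register
       char buf[64];   fixed size, gets a stack slot via assign_temp
       char vla[n];    dynamic size, allocate_dynamic_stack_space

   (Whether a fixed-size variable actually gets a register is decided by
   use_register_for_decl.)  */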
2617\f
6de9cd9a
DN
2618/* Emit code to allocate T_SIZE bytes of dynamic stack space for ALLOC. */
2619void
2620expand_stack_alloc (tree alloc, tree t_size)
2621{
2622 rtx address, dest, size;
2623 tree var, type;
2624
2625 if (TREE_CODE (alloc) != ADDR_EXPR)
2626 abort ();
2627 var = TREE_OPERAND (alloc, 0);
2628 if (TREE_CODE (var) != VAR_DECL)
2629 abort ();
2630
2631 type = TREE_TYPE (var);
2632
6de9cd9a
DN
2633 /* Compute the variable's size, in bytes. */
2634 size = expand_expr (t_size, NULL_RTX, VOIDmode, 0);
2635 free_temp_slots ();
2636
2637 /* Allocate space on the stack for the variable. */
2638 address = XEXP (DECL_RTL (var), 0);
2639 dest = allocate_dynamic_stack_space (size, address, TYPE_ALIGN (type));
2640 if (dest != address)
2641 emit_move_insn (address, dest);
2642
2643 /* Indicate the alignment we actually gave this variable. */
2644#ifdef STACK_BOUNDARY
2645 DECL_ALIGN (var) = STACK_BOUNDARY;
2646#else
2647 DECL_ALIGN (var) = BIGGEST_ALIGNMENT;
2648#endif
2649 DECL_USER_ALIGN (var) = 0;
2650}
2651
2652/* Emit code to save the current value of the stack pointer. */
2653rtx
2654expand_stack_save (void)
2655{
2656 rtx ret = NULL_RTX;
2657
2658 do_pending_stack_adjust ();
2659 emit_stack_save (SAVE_BLOCK, &ret, NULL_RTX);
2660 return ret;
2661}
2662
2663/* Emit code to restore the stack pointer from the value saved in VAR. */
2664void
2665expand_stack_restore (tree var)
2666{
2667 rtx sa = DECL_RTL (var);
2668
2669 emit_stack_restore (SAVE_BLOCK, sa, NULL_RTX);
2670}
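/* Illustrative sketch (an assumption about the callers, not taken from
   this file): a scope containing a variable-length array is gimplified
   roughly as

       sa = __builtin_stack_save ();
       ... code using the VLA ...
       __builtin_stack_restore (sa);

   and expand_stack_save / expand_stack_restore above emit the RTL for
   that save/restore pair.  */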
2671\f
28d81abb
RK
2672/* Emit code to perform the initialization of a declaration DECL. */
2673
2674void
46c5ad27 2675expand_decl_init (tree decl)
28d81abb 2676{
b4ac57ab
RS
2677 int was_used = TREE_USED (decl);
2678
ac79cd5a
RK
2679 /* If this is a CONST_DECL, we don't have to generate any code. Likewise
2680 for static decls. */
2681 if (TREE_CODE (decl) == CONST_DECL
2682 || TREE_STATIC (decl))
28d81abb
RK
2683 return;
2684
2685 /* Compute and store the initial value now. */
2686
59a7f9bf
DJ
2687 push_temp_slots ();
2688
28d81abb
RK
2689 if (DECL_INITIAL (decl) == error_mark_node)
2690 {
2691 enum tree_code code = TREE_CODE (TREE_TYPE (decl));
e5e809f4 2692
28d81abb 2693 if (code == INTEGER_TYPE || code == REAL_TYPE || code == ENUMERAL_TYPE
e5e809f4 2694 || code == POINTER_TYPE || code == REFERENCE_TYPE)
28d81abb 2695 expand_assignment (decl, convert (TREE_TYPE (decl), integer_zero_node),
b90f141a 2696 0);
28d81abb
RK
2697 emit_queue ();
2698 }
2699 else if (DECL_INITIAL (decl) && TREE_CODE (DECL_INITIAL (decl)) != TREE_LIST)
2700 {
f31686a3 2701 emit_line_note (DECL_SOURCE_LOCATION (decl));
b90f141a 2702 expand_assignment (decl, DECL_INITIAL (decl), 0);
28d81abb
RK
2703 emit_queue ();
2704 }
2705
b4ac57ab
RS
2706 /* Don't let the initialization count as "using" the variable. */
2707 TREE_USED (decl) = was_used;
2708
28d81abb 2709 /* Free any temporaries we made while initializing the decl. */
ae8c59c0 2710 preserve_temp_slots (NULL_RTX);
28d81abb 2711 free_temp_slots ();
59a7f9bf 2712 pop_temp_slots ();
28d81abb
RK
2713}
2714
28d81abb
RK
2715\f
2716/* DECL is an anonymous union. CLEANUP is a cleanup for DECL.
2717 DECL_ELTS is the list of elements that belong to DECL's type.
2718 In each, the TREE_VALUE is a VAR_DECL, and the TREE_PURPOSE a cleanup. */
2719
2720void
ac45df5d
RH
2721expand_anon_union_decl (tree decl, tree cleanup ATTRIBUTE_UNUSED,
2722 tree decl_elts)
28d81abb 2723{
28d81abb 2724 rtx x;
8a693bd0 2725 tree t;
28d81abb 2726
8a693bd0
MM
2727 /* If any of the elements are addressable, so is the entire union. */
2728 for (t = decl_elts; t; t = TREE_CHAIN (t))
2729 if (TREE_ADDRESSABLE (TREE_VALUE (t)))
2730 {
2731 TREE_ADDRESSABLE (decl) = 1;
2732 break;
2733 }
4381f7c2 2734
ec5cd386 2735 expand_decl (decl);
28d81abb
RK
2736 x = DECL_RTL (decl);
2737
8a693bd0
MM
2738 /* Go through the elements, assigning RTL to each. */
2739 for (t = decl_elts; t; t = TREE_CHAIN (t))
28d81abb 2740 {
8a693bd0 2741 tree decl_elt = TREE_VALUE (t);
28d81abb
RK
2742 enum machine_mode mode = TYPE_MODE (TREE_TYPE (decl_elt));
2743
3256b817
JJ
 2744 /* If any of the elements are used, so is the entire
2745 union. */
2746 if (TREE_USED (decl_elt))
2747 TREE_USED (decl) = 1;
2748
7b9032dd
JM
2749 /* Propagate the union's alignment to the elements. */
2750 DECL_ALIGN (decl_elt) = DECL_ALIGN (decl);
11cf4d18 2751 DECL_USER_ALIGN (decl_elt) = DECL_USER_ALIGN (decl);
7b9032dd
JM
2752
2753 /* If the element has BLKmode and the union doesn't, the union is
2754 aligned such that the element doesn't need to have BLKmode, so
2755 change the element's mode to the appropriate one for its size. */
2756 if (mode == BLKmode && DECL_MODE (decl) != BLKmode)
2757 DECL_MODE (decl_elt) = mode
05bccae2 2758 = mode_for_size_tree (DECL_SIZE (decl_elt), MODE_INT, 1);
7b9032dd 2759
28d81abb
RK
2760 /* (SUBREG (MEM ...)) at RTL generation time is invalid, so we
2761 instead create a new MEM rtx with the proper mode. */
3c0cb5de 2762 if (MEM_P (x))
28d81abb
RK
2763 {
2764 if (mode == GET_MODE (x))
19e7881c 2765 SET_DECL_RTL (decl_elt, x);
28d81abb 2766 else
f1ec5147 2767 SET_DECL_RTL (decl_elt, adjust_address_nv (x, mode, 0));
28d81abb 2768 }
f8cfc6aa 2769 else if (REG_P (x))
28d81abb
RK
2770 {
2771 if (mode == GET_MODE (x))
19e7881c 2772 SET_DECL_RTL (decl_elt, x);
28d81abb 2773 else
ddef6bc7 2774 SET_DECL_RTL (decl_elt, gen_lowpart_SUBREG (mode, x));
28d81abb
RK
2775 }
2776 else
2777 abort ();
28d81abb
RK
2778 }
2779}
2780\f
28d81abb
RK
2781/* Enter a case (Pascal) or switch (C) statement.
2782 Push a block onto case_stack and nesting_stack
2783 to accumulate the case-labels that are seen
2784 and to record the labels generated for the statement.
2785
2786 EXIT_FLAG is nonzero if `exit_something' should exit this case stmt.
2787 Otherwise, this construct is transparent for `exit_something'.
2788
2789 EXPR is the index-expression to be dispatched on.
2790 TYPE is its nominal type. We could simply convert EXPR to this type,
2791 but instead we take short cuts. */
2792
2793void
46c5ad27
AJ
2794expand_start_case (int exit_flag, tree expr, tree type,
2795 const char *printname)
28d81abb 2796{
b3694847 2797 struct nesting *thiscase = ALLOC_NESTING ();
28d81abb
RK
2798
2799 /* Make an entry on case_stack for the case we are entering. */
2800
e2500fed 2801 thiscase->desc = CASE_NESTING;
28d81abb
RK
2802 thiscase->next = case_stack;
2803 thiscase->all = nesting_stack;
2804 thiscase->depth = ++nesting_depth;
2805 thiscase->exit_label = exit_flag ? gen_label_rtx () : 0;
2806 thiscase->data.case_stmt.case_list = 0;
2807 thiscase->data.case_stmt.index_expr = expr;
2808 thiscase->data.case_stmt.nominal_type = type;
2809 thiscase->data.case_stmt.default_label = 0;
28d81abb 2810 thiscase->data.case_stmt.printname = printname;
a11759a3 2811 thiscase->data.case_stmt.line_number_status = force_line_numbers ();
28d81abb
RK
2812 case_stack = thiscase;
2813 nesting_stack = thiscase;
2814
2815 do_pending_stack_adjust ();
f43f4314 2816 emit_queue ();
28d81abb
RK
2817
2818 /* Make sure case_stmt.start points to something that won't
2819 need any transformation before expand_end_case. */
4b4bf941 2820 if (!NOTE_P (get_last_insn ()))
2e040219 2821 emit_note (NOTE_INSN_DELETED);
28d81abb
RK
2822
2823 thiscase->data.case_stmt.start = get_last_insn ();
2824}
a11759a3 2825
28d81abb
RK
2826/* Accumulate one case or default label inside a case or switch statement.
2827 VALUE is the value of the case (a null pointer, for a default label).
f52fba84
PE
2828 The function CONVERTER, when applied to arguments T and V,
2829 converts the value V to the type T.
28d81abb
RK
2830
2831 If not currently inside a case or switch statement, return 1 and do
2832 nothing. The caller will print a language-specific error message.
2833 If VALUE is a duplicate or overlaps, return 2 and do nothing
2834 except store the (first) duplicate node in *DUPLICATE.
2835 If VALUE is out of range, return 3 and do nothing.
28d81abb
RK
2836 Return 0 on success.
2837
2838 Extended to handle range statements. */
2839
2840int
46c5ad27
AJ
2841pushcase (tree value, tree (*converter) (tree, tree), tree label,
2842 tree *duplicate)
28d81abb 2843{
28d81abb
RK
2844 tree index_type;
2845 tree nominal_type;
2846
2847 /* Fail if not inside a real case statement. */
2848 if (! (case_stack && case_stack->data.case_stmt.start))
2849 return 1;
2850
28d81abb
RK
2851 index_type = TREE_TYPE (case_stack->data.case_stmt.index_expr);
2852 nominal_type = case_stack->data.case_stmt.nominal_type;
2853
2854 /* If the index is erroneous, avoid more problems: pretend to succeed. */
2855 if (index_type == error_mark_node)
2856 return 0;
2857
2f985ca6
JW
2858 /* Convert VALUE to the type in which the comparisons are nominally done. */
2859 if (value != 0)
2860 value = (*converter) (nominal_type, value);
2861
28d81abb
RK
2862 /* Fail if this value is out of range for the actual type of the index
2863 (which may be narrower than NOMINAL_TYPE). */
14a774a9
RK
2864 if (value != 0
2865 && (TREE_CONSTANT_OVERFLOW (value)
2866 || ! int_fits_type_p (value, index_type)))
28d81abb
RK
2867 return 3;
2868
6de9cd9a 2869 return add_case_node (value, value, label, duplicate, false);
28d81abb
RK
2870}
2871
956d6950
JL
2872/* Like pushcase but this case applies to all values between VALUE1 and
2873 VALUE2 (inclusive). If VALUE1 is NULL, the range starts at the lowest
2874 value of the index type and ends at VALUE2. If VALUE2 is NULL, the range
2875 starts at VALUE1 and ends at the highest value of the index type.
2876 If both are NULL, this case applies to all values.
2877
2878 The return value is the same as that of pushcase but there is one
2879 additional error code: 4 means the specified range was empty. */
28d81abb
RK
2880
2881int
46c5ad27
AJ
2882pushcase_range (tree value1, tree value2, tree (*converter) (tree, tree),
2883 tree label, tree *duplicate)
28d81abb 2884{
28d81abb
RK
2885 tree index_type;
2886 tree nominal_type;
2887
2888 /* Fail if not inside a real case statement. */
2889 if (! (case_stack && case_stack->data.case_stmt.start))
2890 return 1;
2891
28d81abb
RK
2892 index_type = TREE_TYPE (case_stack->data.case_stmt.index_expr);
2893 nominal_type = case_stack->data.case_stmt.nominal_type;
2894
2895 /* If the index is erroneous, avoid more problems: pretend to succeed. */
2896 if (index_type == error_mark_node)
2897 return 0;
2898
956d6950
JL
2899 /* Convert VALUEs to type in which the comparisons are nominally done
2900 and replace any unspecified value with the corresponding bound. */
2901 if (value1 == 0)
1974bfb1 2902 value1 = TYPE_MIN_VALUE (index_type);
956d6950 2903 if (value2 == 0)
1974bfb1 2904 value2 = TYPE_MAX_VALUE (index_type);
956d6950
JL
2905
2906 /* Fail if the range is empty. Do this before any conversion since
2907 we want to allow out-of-range empty ranges. */
14a774a9 2908 if (value2 != 0 && tree_int_cst_lt (value2, value1))
956d6950
JL
2909 return 4;
2910
4381f7c2 2911 /* If the max was unbounded, use the max of the nominal_type we are
e1ee5cdc
RH
2912 converting to. Do this after the < check above to suppress false
2913 positives. */
14a774a9 2914 if (value2 == 0)
e1ee5cdc 2915 value2 = TYPE_MAX_VALUE (nominal_type);
28d81abb 2916
2f985ca6
JW
2917 value1 = (*converter) (nominal_type, value1);
2918 value2 = (*converter) (nominal_type, value2);
2919
28d81abb 2920 /* Fail if these values are out of range. */
956d6950
JL
2921 if (TREE_CONSTANT_OVERFLOW (value1)
2922 || ! int_fits_type_p (value1, index_type))
28d81abb
RK
2923 return 3;
2924
956d6950
JL
2925 if (TREE_CONSTANT_OVERFLOW (value2)
2926 || ! int_fits_type_p (value2, index_type))
28d81abb
RK
2927 return 3;
2928
6de9cd9a 2929 return add_case_node (value1, value2, label, duplicate, false);
57641239
RK
2930}
2931
2932/* Do the actual insertion of a case label for pushcase and pushcase_range
2933 into case_stack->data.case_stmt.case_list. Use an AVL tree to avoid
2934 slowdown for large switch statements. */
2935
56cb9733 2936int
6de9cd9a
DN
2937add_case_node (tree low, tree high, tree label, tree *duplicate,
2938 bool dont_expand_label)
57641239
RK
2939{
2940 struct case_node *p, **q, *r;
2941
56cb9733
MM
2942 /* If there's no HIGH value, then this is not a case range; it's
2943 just a simple case label. But that's just a degenerate case
2944 range. */
2945 if (!high)
2946 high = low;
2947
2948 /* Handle default labels specially. */
2949 if (!high && !low)
2950 {
2951 if (case_stack->data.case_stmt.default_label != 0)
2952 {
2953 *duplicate = case_stack->data.case_stmt.default_label;
2954 return 2;
2955 }
2956 case_stack->data.case_stmt.default_label = label;
6de9cd9a
DN
2957 if (!dont_expand_label)
2958 expand_label (label);
56cb9733
MM
2959 return 0;
2960 }
2961
57641239
RK
2962 q = &case_stack->data.case_stmt.case_list;
2963 p = *q;
2964
69d4ca36 2965 while ((r = *q))
28d81abb 2966 {
57641239
RK
2967 p = r;
2968
2969 /* Keep going past elements distinctly greater than HIGH. */
2970 if (tree_int_cst_lt (high, p->low))
2971 q = &p->left;
2972
2973 /* or distinctly less than LOW. */
2974 else if (tree_int_cst_lt (p->high, low))
2975 q = &p->right;
2976
2977 else
28d81abb 2978 {
57641239
RK
2979 /* We have an overlap; this is an error. */
2980 *duplicate = p->code_label;
28d81abb
RK
2981 return 2;
2982 }
2983 }
2984
1da68f56 2985 /* Add this label to the chain, and succeed. */
28d81abb 2986
703ad42b 2987 r = ggc_alloc (sizeof (struct case_node));
1da68f56 2988 r->low = low;
28d81abb 2989
57641239 2990 /* If the bounds are equal, turn this into the one-value case. */
57641239
RK
2991 if (tree_int_cst_equal (low, high))
2992 r->high = r->low;
2993 else
1da68f56 2994 r->high = high;
57641239
RK
2995
2996 r->code_label = label;
6de9cd9a
DN
2997 if (!dont_expand_label)
2998 expand_label (label);
28d81abb 2999
57641239
RK
3000 *q = r;
3001 r->parent = p;
3002 r->left = 0;
3003 r->right = 0;
3004 r->balance = 0;
3005
3006 while (p)
3007 {
3008 struct case_node *s;
3009
3010 if (r == p->left)
3011 {
3012 int b;
3013
3014 if (! (b = p->balance))
3015 /* Growth propagation from left side. */
3016 p->balance = -1;
3017 else if (b < 0)
3018 {
3019 if (r->balance < 0)
3020 {
3021 /* R-Rotation */
51723711 3022 if ((p->left = s = r->right))
57641239
RK
3023 s->parent = p;
3024
3025 r->right = p;
3026 p->balance = 0;
3027 r->balance = 0;
3028 s = p->parent;
3029 p->parent = r;
3030
51723711 3031 if ((r->parent = s))
57641239
RK
3032 {
3033 if (s->left == p)
3034 s->left = r;
3035 else
3036 s->right = r;
3037 }
3038 else
3039 case_stack->data.case_stmt.case_list = r;
3040 }
3041 else
3042 /* r->balance == +1 */
3043 {
5720c7e7
RK
3044 /* LR-Rotation */
3045
57641239
RK
3046 int b2;
3047 struct case_node *t = r->right;
3048
51723711 3049 if ((p->left = s = t->right))
57641239
RK
3050 s->parent = p;
3051
3052 t->right = p;
51723711 3053 if ((r->right = s = t->left))
57641239
RK
3054 s->parent = r;
3055
3056 t->left = r;
3057 b = t->balance;
3058 b2 = b < 0;
3059 p->balance = b2;
3060 b2 = -b2 - b;
3061 r->balance = b2;
3062 t->balance = 0;
3063 s = p->parent;
3064 p->parent = t;
3065 r->parent = t;
3066
51723711 3067 if ((t->parent = s))
57641239
RK
3068 {
3069 if (s->left == p)
3070 s->left = t;
3071 else
3072 s->right = t;
3073 }
3074 else
3075 case_stack->data.case_stmt.case_list = t;
3076 }
3077 break;
3078 }
3079
3080 else
3081 {
3082 /* p->balance == +1; growth of left side balances the node. */
3083 p->balance = 0;
3084 break;
3085 }
3086 }
3087 else
3088 /* r == p->right */
3089 {
3090 int b;
3091
3092 if (! (b = p->balance))
3093 /* Growth propagation from right side. */
3094 p->balance++;
3095 else if (b > 0)
3096 {
3097 if (r->balance > 0)
3098 {
3099 /* L-Rotation */
3100
51723711 3101 if ((p->right = s = r->left))
57641239
RK
3102 s->parent = p;
3103
3104 r->left = p;
3105 p->balance = 0;
3106 r->balance = 0;
3107 s = p->parent;
3108 p->parent = r;
51723711 3109 if ((r->parent = s))
57641239
RK
3110 {
3111 if (s->left == p)
3112 s->left = r;
3113 else
3114 s->right = r;
3115 }
3116
3117 else
3118 case_stack->data.case_stmt.case_list = r;
3119 }
3120
3121 else
3122 /* r->balance == -1 */
3123 {
3124 /* RL-Rotation */
3125 int b2;
3126 struct case_node *t = r->left;
3127
51723711 3128 if ((p->right = s = t->left))
57641239
RK
3129 s->parent = p;
3130
3131 t->left = p;
3132
51723711 3133 if ((r->left = s = t->right))
57641239
RK
3134 s->parent = r;
3135
3136 t->right = r;
3137 b = t->balance;
3138 b2 = b < 0;
3139 r->balance = b2;
3140 b2 = -b2 - b;
3141 p->balance = b2;
3142 t->balance = 0;
3143 s = p->parent;
3144 p->parent = t;
3145 r->parent = t;
3146
51723711 3147 if ((t->parent = s))
57641239
RK
3148 {
3149 if (s->left == p)
3150 s->left = t;
3151 else
3152 s->right = t;
3153 }
3154
3155 else
3156 case_stack->data.case_stmt.case_list = t;
3157 }
3158 break;
3159 }
3160 else
3161 {
3162 /* p->balance == -1; growth of right side balances the node. */
3163 p->balance = 0;
3164 break;
3165 }
3166 }
3167
3168 r = p;
3169 p = p->parent;
3170 }
28d81abb
RK
3171
3172 return 0;
3173}
28d81abb 3174\f
9bb231fd
RS
3175/* Maximum number of case bit tests. */
3176#define MAX_CASE_BIT_TESTS 3
3177
3178/* By default, enable case bit tests on targets with ashlsi3. */
3179#ifndef CASE_USE_BIT_TESTS
3180#define CASE_USE_BIT_TESTS (ashl_optab->handlers[word_mode].insn_code \
3181 != CODE_FOR_nothing)
3182#endif
3183
3184
3185/* A case_bit_test represents a set of case nodes that may be
3186 selected from using a bit-wise comparison. HI and LO hold
3187 the integer to be tested against, LABEL contains the label
3188 to jump to upon success and BITS counts the number of case
3189 nodes handled by this test, typically the number of bits
3190 set in HI:LO. */
3191
3192struct case_bit_test
3193{
3194 HOST_WIDE_INT hi;
3195 HOST_WIDE_INT lo;
3196 rtx label;
3197 int bits;
3198};
3199
3200/* Determine whether "1 << x" is relatively cheap in word_mode. */
3201
7e51717c
AJ
3202static
3203bool lshift_cheap_p (void)
9bb231fd
RS
3204{
3205 static bool init = false;
3206 static bool cheap = true;
3207
3208 if (!init)
3209 {
3210 rtx reg = gen_rtx_REG (word_mode, 10000);
3211 int cost = rtx_cost (gen_rtx_ASHIFT (word_mode, const1_rtx, reg), SET);
3212 cheap = cost < COSTS_N_INSNS (3);
3213 init = true;
3214 }
3215
3216 return cheap;
3217}
3218
3219/* Comparison function for qsort to order bit tests by decreasing
3220 number of case nodes, i.e. the node with the most cases gets
3221 tested first. */
3222
f667741c
SB
3223static int
3224case_bit_test_cmp (const void *p1, const void *p2)
9bb231fd
RS
3225{
3226 const struct case_bit_test *d1 = p1;
3227 const struct case_bit_test *d2 = p2;
3228
3229 return d2->bits - d1->bits;
3230}
3231
3232/* Expand a switch statement by a short sequence of bit-wise
3233 comparisons. "switch(x)" is effectively converted into
3234 "if ((1 << (x-MINVAL)) & CST)" where CST and MINVAL are
3235 integer constants.
3236
3237 INDEX_EXPR is the value being switched on, which is of
3238 type INDEX_TYPE. MINVAL is the lowest case value of in
3239 the case nodes, of INDEX_TYPE type, and RANGE is highest
3240 value minus MINVAL, also of type INDEX_TYPE. NODES is
3241 the set of case nodes, and DEFAULT_LABEL is the label to
3242 branch to should none of the cases match.
3243
3244 There *MUST* be MAX_CASE_BIT_TESTS or less unique case
3245 node targets. */
3246
3247static void
46c5ad27
AJ
3248emit_case_bit_tests (tree index_type, tree index_expr, tree minval,
3249 tree range, case_node_ptr nodes, rtx default_label)
9bb231fd
RS
3250{
3251 struct case_bit_test test[MAX_CASE_BIT_TESTS];
3252 enum machine_mode mode;
3253 rtx expr, index, label;
3254 unsigned int i,j,lo,hi;
3255 struct case_node *n;
3256 unsigned int count;
3257
3258 count = 0;
3259 for (n = nodes; n; n = n->right)
3260 {
3261 label = label_rtx (n->code_label);
3262 for (i = 0; i < count; i++)
3263 if (same_case_target_p (label, test[i].label))
3264 break;
3265
3266 if (i == count)
3267 {
3268 if (count >= MAX_CASE_BIT_TESTS)
3269 abort ();
3270 test[i].hi = 0;
3271 test[i].lo = 0;
3272 test[i].label = label;
3273 test[i].bits = 1;
3274 count++;
3275 }
3276 else
3277 test[i].bits++;
3278
3279 lo = tree_low_cst (fold (build (MINUS_EXPR, index_type,
3280 n->low, minval)), 1);
3281 hi = tree_low_cst (fold (build (MINUS_EXPR, index_type,
3282 n->high, minval)), 1);
3283 for (j = lo; j <= hi; j++)
3284 if (j >= HOST_BITS_PER_WIDE_INT)
3285 test[i].hi |= (HOST_WIDE_INT) 1 << (j - HOST_BITS_PER_INT);
3286 else
3287 test[i].lo |= (HOST_WIDE_INT) 1 << j;
3288 }
3289
3290 qsort (test, count, sizeof(*test), case_bit_test_cmp);
3291
3292 index_expr = fold (build (MINUS_EXPR, index_type,
3293 convert (index_type, index_expr),
3294 convert (index_type, minval)));
3295 index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0);
3296 emit_queue ();
3297 index = protect_from_queue (index, 0);
3298 do_pending_stack_adjust ();
3299
3300 mode = TYPE_MODE (index_type);
3301 expr = expand_expr (range, NULL_RTX, VOIDmode, 0);
3302 emit_cmp_and_jump_insns (index, expr, GTU, NULL_RTX, mode, 1,
3303 default_label);
3304
3305 index = convert_to_mode (word_mode, index, 0);
3306 index = expand_binop (word_mode, ashl_optab, const1_rtx,
3307 index, NULL_RTX, 1, OPTAB_WIDEN);
3308
3309 for (i = 0; i < count; i++)
3310 {
3311 expr = immed_double_const (test[i].lo, test[i].hi, word_mode);
3312 expr = expand_binop (word_mode, and_optab, index, expr,
3313 NULL_RTX, 1, OPTAB_WIDEN);
3314 emit_cmp_and_jump_insns (expr, const0_rtx, NE, NULL_RTX,
3315 word_mode, 1, test[i].label);
3316 }
3317
3318 emit_jump (default_label);
3319}
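
/* Illustrative sketch (not part of the original source): for a switch
   such as

       switch (x)
         {
         case 1: case 3: case 5: case 7:  do_odd ();    break;
         default:                         do_other ();  break;
         }

   MINVAL is 1 and RANGE is 6, so the code emitted above behaves like

       unsigned int tmp = x - 1;
       if (tmp > 6)
         goto default_label;
       if ((1 << tmp) & 0x55)   -- bits 0, 2, 4, 6, i.e. cases 1, 3, 5, 7
         goto odd_label;
       goto default_label;

   using a single case_bit_test entry whose LO mask is 0x55 and whose
   BITS count is 4.  The label and function names here are purely
   illustrative.  */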
3320
3321#ifndef HAVE_casesi
3322#define HAVE_casesi 0
3323#endif
3324
3325#ifndef HAVE_tablejump
3326#define HAVE_tablejump 0
3327#endif
3328
3329/* Terminate a case (Pascal) or switch (C) statement
3330   in which ORIG_INDEX is the expression to be tested.
3331 If ORIG_TYPE is not NULL, it is the original ORIG_INDEX
3332 type as given in the source before any compiler conversions.
3333 Generate the code to test it and jump to the right place. */
3334
3335void
3336expand_end_case_type (tree orig_index, tree orig_type)
3337{
3338  tree minval = NULL_TREE, maxval = NULL_TREE, range = NULL_TREE;
3339  rtx default_label = 0;
3340 struct case_node *n, *m;
3341 unsigned int count, uniq;
3342  rtx index;
3343  rtx table_label;
3344  int ncases;
3345  rtx *labelvec;
3346  int i;
3347  rtx before_case, end, lab;
3348  struct nesting *thiscase = case_stack;
3349  tree index_expr, index_type;
3350  bool exit_done = false;
3351 int unsignedp;
3352
3353 /* Don't crash due to previous errors. */
3354 if (thiscase == NULL)
3355 return;
3356
3357  index_expr = thiscase->data.case_stmt.index_expr;
3358  index_type = TREE_TYPE (index_expr);
3359  unsignedp = TYPE_UNSIGNED (index_type);
3360 if (orig_type == NULL)
3361 orig_type = TREE_TYPE (orig_index);
3362
3363 do_pending_stack_adjust ();
3364
3365 /* An ERROR_MARK occurs for various reasons including invalid data type. */
3366  if (index_type != error_mark_node)
3367    {
3368 /* If we don't have a default-label, create one here,
3369 after the body of the switch. */
3370 if (thiscase->data.case_stmt.default_label == 0)
3371 {
3372 thiscase->data.case_stmt.default_label
3373 = build_decl (LABEL_DECL, NULL_TREE, NULL_TREE);
3374 /* Share the exit label if possible. */
3375 if (thiscase->exit_label)
3376 {
3377 SET_DECL_RTL (thiscase->data.case_stmt.default_label,
3378 thiscase->exit_label);
3379 exit_done = true;
3380 }
3381 expand_label (thiscase->data.case_stmt.default_label);
3382 }
3383 default_label = label_rtx (thiscase->data.case_stmt.default_label);
3384
3385 before_case = get_last_insn ();
3386
3387 if (thiscase->data.case_stmt.case_list
3388 && thiscase->data.case_stmt.case_list->left)
3389        thiscase->data.case_stmt.case_list
3390          = case_tree2list (thiscase->data.case_stmt.case_list, 0);
3391
3392 /* Simplify the case-list before we count it. */
3393 group_case_nodes (thiscase->data.case_stmt.case_list);
3394 strip_default_case_nodes (&thiscase->data.case_stmt.case_list,
3395 default_label);
3396
3397 /* Get upper and lower bounds of case values.
3398 Also convert all the case values to the index expr's data type. */
3399
3400      uniq = 0;
3401 count = 0;
3402 for (n = thiscase->data.case_stmt.case_list; n; n = n->right)
3403 {
3404 /* Check low and high label values are integers. */
3405 if (TREE_CODE (n->low) != INTEGER_CST)
3406 abort ();
3407 if (TREE_CODE (n->high) != INTEGER_CST)
3408 abort ();
3409
3410 n->low = convert (index_type, n->low);
3411 n->high = convert (index_type, n->high);
3412
3413 /* Count the elements and track the largest and smallest
3414 of them (treating them as signed even if they are not). */
3415 if (count++ == 0)
3416 {
3417 minval = n->low;
3418 maxval = n->high;
3419 }
3420 else
3421 {
3422 if (INT_CST_LT (n->low, minval))
3423 minval = n->low;
3424 if (INT_CST_LT (maxval, n->high))
3425 maxval = n->high;
3426 }
3427 /* A range counts double, since it requires two compares. */
3428 if (! tree_int_cst_equal (n->low, n->high))
3429 count++;
3430
3431 /* Count the number of unique case node targets. */
3432 uniq++;
3433 lab = label_rtx (n->code_label);
3434 for (m = thiscase->data.case_stmt.case_list; m != n; m = m->right)
3435 if (same_case_target_p (label_rtx (m->code_label), lab))
3436 {
3437 uniq--;
3438 break;
3439 }
3440 }
3441
3442 /* Compute span of values. */
3443 if (count != 0)
3444        range = fold (build (MINUS_EXPR, index_type, maxval, minval));
3445
3446      if (count == 0)
3447 {
3448 expand_expr (index_expr, const0_rtx, VOIDmode, 0);
3449 emit_queue ();
3450 emit_jump (default_label);
3451 }
3452
3453 /* Try implementing this switch statement by a short sequence of
3454 bit-wise comparisons. However, we let the binary-tree case
3455 below handle constant index expressions. */
3456 else if (CASE_USE_BIT_TESTS
3457 && ! TREE_CONSTANT (index_expr)
3458 && compare_tree_int (range, GET_MODE_BITSIZE (word_mode)) < 0
3459               && compare_tree_int (range, 0) > 0
3460 && lshift_cheap_p ()
3461 && ((uniq == 1 && count >= 3)
3462 || (uniq == 2 && count >= 5)
3463 || (uniq == 3 && count >= 6)))
3464 {
3465 /* Optimize the case where all the case values fit in a
3466 word without having to subtract MINVAL. In this case,
3467 we can optimize away the subtraction. */
3468 if (compare_tree_int (minval, 0) > 0
3469 && compare_tree_int (maxval, GET_MODE_BITSIZE (word_mode)) < 0)
3470 {
3471 minval = integer_zero_node;
3472 range = maxval;
3473 }
3474 emit_case_bit_tests (index_type, index_expr, minval, range,
3475 thiscase->data.case_stmt.case_list,
3476 default_label);
3477 }
3478
3479 /* If range of values is much bigger than number of values,
3480 make a sequence of conditional branches instead of a dispatch.
3481 If the switch-index is a constant, do it this way
3482 because we can optimize it. */
3483
3484      else if (count < case_values_threshold ()
3485 || compare_tree_int (range,
3486 (optimize_size ? 3 : 10) * count) > 0
3487 /* RANGE may be signed, and really large ranges will show up
3488 as negative numbers. */
3489 || compare_tree_int (range, 0) < 0
3490#ifndef ASM_OUTPUT_ADDR_DIFF_ELT
3491 || flag_pic
3492#endif
3493 || TREE_CONSTANT (index_expr)
3494 /* If neither casesi or tablejump is available, we can
3495 only go this way. */
3496 || (!HAVE_casesi && !HAVE_tablejump))
3497        {
3498          index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0);
3499
3500 /* If the index is a short or char that we do not have
3501 an insn to handle comparisons directly, convert it to
3502 a full integer now, rather than letting each comparison
3503 generate the conversion. */
3504
3505 if (GET_MODE_CLASS (GET_MODE (index)) == MODE_INT
3506              && ! have_insn_for (COMPARE, GET_MODE (index)))
3507 {
3508 enum machine_mode wider_mode;
3509 for (wider_mode = GET_MODE (index); wider_mode != VOIDmode;
3510 wider_mode = GET_MODE_WIDER_MODE (wider_mode))
3511              if (have_insn_for (COMPARE, wider_mode))
3512 {
3513 index = convert_to_mode (wider_mode, index, unsignedp);
3514 break;
3515 }
3516 }
3517
3518 emit_queue ();
3519 do_pending_stack_adjust ();
3520
3521 index = protect_from_queue (index, 0);
3c0cb5de 3522 if (MEM_P (index))
28d81abb
RK
3523 index = copy_to_reg (index);
3524 if (GET_CODE (index) == CONST_INT
3525 || TREE_CODE (index_expr) == INTEGER_CST)
3526 {
3527 /* Make a tree node with the proper constant value
3528 if we don't already have one. */
3529 if (TREE_CODE (index_expr) != INTEGER_CST)
3530 {
3531 index_expr
3532 = build_int_2 (INTVAL (index),
3533                             unsignedp || INTVAL (index) >= 0 ? 0 : -1);
3534              index_expr = convert (index_type, index_expr);
3535 }
3536
3537 /* For constant index expressions we need only
3538             issue an unconditional branch to the appropriate
3539             target code.  The job of removing any unreachable
3540             code is left to the optimization phase if the
3541             "-O" option is specified.  */
3542 for (n = thiscase->data.case_stmt.case_list; n; n = n->right)
3543 if (! tree_int_cst_lt (index_expr, n->low)
3544 && ! tree_int_cst_lt (n->high, index_expr))
3545 break;
3546
3547 if (n)
3548 emit_jump (label_rtx (n->code_label));
3549 else
3550 emit_jump (default_label);
3551 }
3552 else
3553 {
3554 /* If the index expression is not constant we generate
3555 a binary decision tree to select the appropriate
3556 target code. This is done as follows:
3557
3558 The list of cases is rearranged into a binary tree,
3559 nearly optimal assuming equal probability for each case.
3560
3561 The tree is transformed into RTL, eliminating
3562 redundant test conditions at the same time.
3563
3564 If program flow could reach the end of the
3565 decision tree an unconditional jump to the
3566 default code is emitted. */
3567
3568 use_cost_table
3569            = (TREE_CODE (orig_type) != ENUMERAL_TYPE
3570               && estimate_case_costs (thiscase->data.case_stmt.case_list));
3571          balance_case_nodes (&thiscase->data.case_stmt.case_list, NULL);
3572          emit_case_nodes (index, thiscase->data.case_stmt.case_list,
3573                           default_label, index_type);
3574 emit_jump_if_reachable (default_label);
3575 }
3576 }
3577 else
3578 {
3579      table_label = gen_label_rtx ();
3580 if (! try_casesi (index_type, index_expr, minval, range,
3581 table_label, default_label))
3582        {
3583          index_type = thiscase->data.case_stmt.nominal_type;
3584
3585          /* Index jumptables from zero for suitable values of
3586             minval to avoid a subtraction.  */
3587 if (! optimize_size
3588 && compare_tree_int (minval, 0) > 0
3589 && compare_tree_int (minval, 3) < 0)
3590 {
3591 minval = integer_zero_node;
3592 range = maxval;
3593 }
3594
3595 if (! try_tablejump (index_type, index_expr, minval, range,
3596 table_label, default_label))
3597 abort ();
3598        }
3599
3600 /* Get table of labels to jump to, in order of case index. */
3601
3602      ncases = tree_low_cst (range, 0) + 1;
3603 labelvec = alloca (ncases * sizeof (rtx));
3604 memset (labelvec, 0, ncases * sizeof (rtx));
3605
3606 for (n = thiscase->data.case_stmt.case_list; n; n = n->right)
3607 {
3608 /* Compute the low and high bounds relative to the minimum
3609 value since that should fit in a HOST_WIDE_INT while the
3610 actual values may not. */
3611 HOST_WIDE_INT i_low
3612 = tree_low_cst (fold (build (MINUS_EXPR, index_type,
3613 n->low, minval)), 1);
3614          HOST_WIDE_INT i_high
3615 = tree_low_cst (fold (build (MINUS_EXPR, index_type,
3616 n->high, minval)), 1);
3617 HOST_WIDE_INT i;
3618
3619 for (i = i_low; i <= i_high; i ++)
3620 labelvec[i]
3621 = gen_rtx_LABEL_REF (Pmode, label_rtx (n->code_label));
3622 }
3623
3624 /* Fill in the gaps with the default. */
3625 for (i = 0; i < ncases; i++)
3626 if (labelvec[i] == 0)
3627          labelvec[i] = gen_rtx_LABEL_REF (Pmode, default_label);
3628
3629      /* Output the table.  */
3630 emit_label (table_label);
3631
3632      if (CASE_VECTOR_PC_RELATIVE || flag_pic)
3633 emit_jump_insn (gen_rtx_ADDR_DIFF_VEC (CASE_VECTOR_MODE,
3634 gen_rtx_LABEL_REF (Pmode, table_label),
3635                                               gen_rtvec_v (ncases, labelvec),
3636                                               const0_rtx, const0_rtx));
3637      else
3638 emit_jump_insn (gen_rtx_ADDR_VEC (CASE_VECTOR_MODE,
3639 gen_rtvec_v (ncases, labelvec)));
3640
3641 /* If the case insn drops through the table,
3642 after the table we must jump to the default-label.
3643 Otherwise record no drop-through after the table. */
3644#ifdef CASE_DROPS_THROUGH
3645 emit_jump (default_label);
3646#else
3647 emit_barrier ();
3648#endif
3649 }
3650
3651 before_case = NEXT_INSN (before_case);
3652 end = get_last_insn ();
3653 if (squeeze_notes (&before_case, &end))
3654 abort ();
3655      reorder_insns (before_case, end,
3656 thiscase->data.case_stmt.start);
3657 }
3658
3659  if (thiscase->exit_label && !exit_done)
3660 emit_label (thiscase->exit_label);
3661
3662 POPSTACK (case_stack);
3663
3664 free_temp_slots ();
3665}
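
/* Worked example (added for illustration; not in the original source):
   a switch over the eight consecutive values 1..8 with eight distinct
   targets has count == 8, uniq == 8 and range == 7.  The bit-test path
   above is rejected because uniq > 3, and range does not exceed 10 *
   count, so, assuming the index is not a compile-time constant, count
   is not below case_values_threshold () and casesi or tablejump is
   available, a compact jump table is emitted.  By contrast, cases 1,
   100 and 10000 give count == 3 and range == 9999 > 30, so that switch
   falls back to the balanced binary tree of compare-and-jump insns
   built by balance_case_nodes and emit_case_nodes.  */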
3666
3667/* Convert the tree NODE into a list linked by the right field, with the left
3668 field zeroed. RIGHT is used for recursion; it is a list to be placed
3669 rightmost in the resulting list. */
3670
3671static struct case_node *
3672case_tree2list (struct case_node *node, struct case_node *right)
3673{
3674 struct case_node *left;
3675
3676 if (node->right)
3677 right = case_tree2list (node->right, right);
3678
3679 node->right = right;
3680  if ((left = node->left))
3681 {
3682 node->left = 0;
3683 return case_tree2list (left, node);
3684 }
3685
3686 return node;
3687}
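
/* Example (for illustration only; not part of the original source):
   given the balanced case tree

            4
           / \
          2   6
         / \ / \
        1  3 5  7

   case_tree2list returns the chain 1 -> 2 -> 3 -> 4 -> 5 -> 6 -> 7,
   linked through the RIGHT fields and with every LEFT field zeroed.  */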
3688
3689/* Generate code to jump to LABEL if OP1 and OP2 are equal. */
3690
3691static void
3692do_jump_if_equal (rtx op1, rtx op2, rtx label, int unsignedp)
3693{
3694  if (GET_CODE (op1) == CONST_INT && GET_CODE (op2) == CONST_INT)
3695    {
3696      if (op1 == op2)
3697 emit_jump (label);
3698 }
3699 else
3700 emit_cmp_and_jump_insns (op1, op2, EQ, NULL_RTX,
3701 (GET_MODE (op1) == VOIDmode
3702 ? GET_MODE (op2) : GET_MODE (op1)),
3703 unsignedp, label);
3704}
3705\f
3706/* Not all case values are encountered equally. This function
3707 uses a heuristic to weight case labels, in cases where that
3708 looks like a reasonable thing to do.
3709
3710 Right now, all we try to guess is text, and we establish the
3711 following weights:
3712
3713 chars above space: 16
3714 digits: 16
3715 default: 12
3716 space, punct: 8
3717 tab: 4
3718 newline: 2
3719 other "\" chars: 1
3720 remaining chars: 0
3721
3722 If we find any cases in the switch that are not either -1 or in the range
3723 of valid ASCII characters, or are control characters other than those
3724   commonly used with "\", don't treat this switch as scanning text.
3725
3726 Return 1 if these nodes are suitable for cost estimation, otherwise
3727 return 0. */
3728
3729static int
3730estimate_case_costs (case_node_ptr node)
3731{
3732  tree min_ascii = integer_minus_one_node;
3733 tree max_ascii = convert (TREE_TYPE (node->high), build_int_2 (127, 0));
3734 case_node_ptr n;
3735 int i;
3736
3737 /* If we haven't already made the cost table, make it now. Note that the
3738 lower bound of the table is -1, not zero. */
3739
3740  if (! cost_table_initialized)
3741    {
3742      cost_table_initialized = 1;
3743
3744 for (i = 0; i < 128; i++)
3745 {
3746          if (ISALNUM (i))
3747            COST_TABLE (i) = 16;
3748          else if (ISPUNCT (i))
3749            COST_TABLE (i) = 8;
3750          else if (ISCNTRL (i))
3751            COST_TABLE (i) = -1;
3752 }
3753
3754 COST_TABLE (' ') = 8;
3755 COST_TABLE ('\t') = 4;
3756 COST_TABLE ('\0') = 4;
3757 COST_TABLE ('\n') = 2;
3758 COST_TABLE ('\f') = 1;
3759 COST_TABLE ('\v') = 1;
3760 COST_TABLE ('\b') = 1;
3761 }
3762
3763 /* See if all the case expressions look like text. It is text if the
3764 constant is >= -1 and the highest constant is <= 127. Do all comparisons
3765 as signed arithmetic since we don't want to ever access cost_table with a
3766 value less than -1. Also check that none of the constants in a range
3767 are strange control characters. */
3768
3769 for (n = node; n; n = n->right)
3770 {
3771 if ((INT_CST_LT (n->low, min_ascii)) || INT_CST_LT (max_ascii, n->high))
3772 return 0;
3773
3774 for (i = (HOST_WIDE_INT) TREE_INT_CST_LOW (n->low);
3775 i <= (HOST_WIDE_INT) TREE_INT_CST_LOW (n->high); i++)
3776        if (COST_TABLE (i) < 0)
3777 return 0;
3778 }
3779
3780 /* All interesting values are within the range of interesting
3781 ASCII characters. */
3782 return 1;
3783}
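
/* Worked example (illustration only, not in the original source): for a
   switch over the characters '\n', ' ' and 'a', the table above assigns
   costs 2, 8 and 16 for a total of 26.  balance_case_nodes then picks
   the pivot where the running cost first reaches half of that total
   (13), which makes 'a' the root of the decision tree.  */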
3784
3785/* Determine whether two case labels branch to the same target. */
3786
3787static bool
3788same_case_target_p (rtx l1, rtx l2)
3789{
3790#if 0
3791 rtx i1, i2;
3792
3793 if (l1 == l2)
3794 return true;
3795
3796 i1 = next_real_insn (l1);
3797 i2 = next_real_insn (l2);
3798 if (i1 == i2)
3799 return true;
3800
3801 if (i1 && simplejump_p (i1))
3802 {
3803 l1 = XEXP (SET_SRC (PATTERN (i1)), 0);
3804 }
3805
3806 if (i2 && simplejump_p (i2))
3807 {
3808 l2 = XEXP (SET_SRC (PATTERN (i2)), 0);
3809 }
3810#endif
3811 /* When coming from gimple, we usually won't have emitted either
3812 the labels or the body of the switch statement. The job being
3813 done here should be done via jump threading at the tree level.
3814 Cases that go the same place should have the same label. */
3815 return l1 == l2;
3816}
3817
3818/* Delete nodes that branch to the default label from a list of
3819 case nodes. Eg. case 5: default: becomes just default: */
3820
3821static void
3822strip_default_case_nodes (case_node_ptr *prev, rtx deflab)
3823{
3824 case_node_ptr ptr;
3825
3826 while (*prev)
3827 {
3828 ptr = *prev;
3829 if (same_case_target_p (label_rtx (ptr->code_label), deflab))
3830 *prev = ptr->right;
3831 else
3832 prev = &ptr->right;
3833 }
3834}
3835
3836/* Scan an ordered list of case nodes
3837 combining those with consecutive values or ranges.
3838
3839 Eg. three separate entries 1: 2: 3: become one entry 1..3: */
3840
3841static void
3842group_case_nodes (case_node_ptr head)
3843{
3844 case_node_ptr node = head;
3845
3846 while (node)
3847 {
3848      rtx lab;
3849 case_node_ptr np = node;
3850
3851 lab = label_rtx (node->code_label);
3852
3853 /* Try to group the successors of NODE with NODE. */
3854 while (((np = np->right) != 0)
3855 /* Do they jump to the same place? */
3856             && same_case_target_p (label_rtx (np->code_label), lab)
3857 /* Are their ranges consecutive? */
3858 && tree_int_cst_equal (np->low,
3859 fold (build (PLUS_EXPR,
3860 TREE_TYPE (node->high),
3861 node->high,
3862 integer_one_node)))
3863 /* An overflow is not consecutive. */
3864 && tree_int_cst_lt (node->high,
3865 fold (build (PLUS_EXPR,
3866 TREE_TYPE (node->high),
3867 node->high,
3868 integer_one_node))))
3869 {
3870 node->high = np->high;
3871 }
3872 /* NP is the first node after NODE which can't be grouped with it.
3873 Delete the nodes in between, and move on to that node. */
3874 node->right = np;
3875 node = np;
3876 }
3877}
3878
3879/* Take an ordered list of case nodes
3880 and transform them into a near optimal binary tree,
3881   on the assumption that any target code selection value is as
3882 likely as any other.
3883
3884 The transformation is performed by splitting the ordered
3885 list into two equal sections plus a pivot. The parts are
3886 then attached to the pivot as left and right branches. Each
3887   branch is then transformed recursively.  */
3888
3889static void
3890balance_case_nodes (case_node_ptr *head, case_node_ptr parent)
3891{
3892  case_node_ptr np;
3893
3894 np = *head;
3895 if (np)
3896 {
3897 int cost = 0;
3898 int i = 0;
3899 int ranges = 0;
3900      case_node_ptr *npp;
3901 case_node_ptr left;
3902
3903 /* Count the number of entries on branch. Also count the ranges. */
3904
3905 while (np)
3906 {
3907 if (!tree_int_cst_equal (np->low, np->high))
3908 {
3909 ranges++;
3910 if (use_cost_table)
3911              cost += COST_TABLE (TREE_INT_CST_LOW (np->high));
3912 }
3913
3914 if (use_cost_table)
3915            cost += COST_TABLE (TREE_INT_CST_LOW (np->low));
3916
3917 i++;
3918 np = np->right;
3919 }
3920
3921 if (i > 2)
3922 {
3923 /* Split this list if it is long enough for that to help. */
3924 npp = head;
3925 left = *npp;
3926 if (use_cost_table)
3927 {
3928 /* Find the place in the list that bisects the list's total cost,
3929 Here I gets half the total cost. */
3930 int n_moved = 0;
3931 i = (cost + 1) / 2;
3932 while (1)
3933 {
3934 /* Skip nodes while their cost does not reach that amount. */
3935 if (!tree_int_cst_equal ((*npp)->low, (*npp)->high))
3936 i -= COST_TABLE (TREE_INT_CST_LOW ((*npp)->high));
3937 i -= COST_TABLE (TREE_INT_CST_LOW ((*npp)->low));
3938 if (i <= 0)
3939 break;
3940 npp = &(*npp)->right;
3941 n_moved += 1;
3942 }
3943 if (n_moved == 0)
3944 {
3945 /* Leave this branch lopsided, but optimize left-hand
3946 side and fill in `parent' fields for right-hand side. */
3947 np = *head;
3948 np->parent = parent;
3949 balance_case_nodes (&np->left, np);
3950 for (; np->right; np = np->right)
3951 np->right->parent = np;
3952 return;
3953 }
3954 }
3955 /* If there are just three nodes, split at the middle one. */
3956 else if (i == 3)
3957 npp = &(*npp)->right;
3958 else
3959 {
3960 /* Find the place in the list that bisects the list's total cost,
3961 where ranges count as 2.
3962 Here I gets half the total cost. */
3963 i = (i + ranges + 1) / 2;
3964 while (1)
3965 {
3966 /* Skip nodes while their cost does not reach that amount. */
3967 if (!tree_int_cst_equal ((*npp)->low, (*npp)->high))
3968 i--;
3969 i--;
3970 if (i <= 0)
3971 break;
3972 npp = &(*npp)->right;
3973 }
3974 }
3975 *head = np = *npp;
3976 *npp = 0;
3977 np->parent = parent;
3978 np->left = left;
3979
3980 /* Optimize each of the two split parts. */
3981 balance_case_nodes (&np->left, np);
3982 balance_case_nodes (&np->right, np);
3983 }
3984 else
3985 {
3986 /* Else leave this branch as one level,
3987 but fill in `parent' fields. */
3988 np = *head;
3989 np->parent = parent;
3990 for (; np->right; np = np->right)
3991 np->right->parent = np;
3992 }
3993 }
3994}
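
/* Illustrative example (not part of the original source): with no cost
   table, an ordered list of the seven single-valued cases 1, 2, ..., 7
   is split at its middle element and each half is balanced recursively,
   yielding

            4
           / \
          2   6
         / \ / \
        1  3 5  7

   as the resulting case tree.  */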
3995\f
3996/* Search the parent sections of the case node tree
3997 to see if a test for the lower bound of NODE would be redundant.
3998 INDEX_TYPE is the type of the index expression.
3999
4000 The instructions to generate the case decision tree are
4001   output in the same order as nodes are processed, so it is
4002   known that if a parent node checks the range of the current
4003   node minus one, then the current node is bounded at its lower
4004 span. Thus the test would be redundant. */
4005
4006static int
4007node_has_low_bound (case_node_ptr node, tree index_type)
4008{
4009 tree low_minus_one;
4010 case_node_ptr pnode;
4011
4012 /* If the lower bound of this node is the lowest value in the index type,
4013 we need not test it. */
4014
4015 if (tree_int_cst_equal (node->low, TYPE_MIN_VALUE (index_type)))
4016 return 1;
4017
4018 /* If this node has a left branch, the value at the left must be less
4019 than that at this node, so it cannot be bounded at the bottom and
4020 we need not bother testing any further. */
4021
4022 if (node->left)
4023 return 0;
4024
4025 low_minus_one = fold (build (MINUS_EXPR, TREE_TYPE (node->low),
4026 node->low, integer_one_node));
4027
4028 /* If the subtraction above overflowed, we can't verify anything.
4029 Otherwise, look for a parent that tests our value - 1. */
4030
4031 if (! tree_int_cst_lt (low_minus_one, node->low))
4032 return 0;
4033
4034 for (pnode = node->parent; pnode; pnode = pnode->parent)
4035 if (tree_int_cst_equal (low_minus_one, pnode->high))
4036 return 1;
4037
4038 return 0;
4039}
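
/* Example (added for clarity; not in the original source): suppose an
   ancestor node covers the range 10..19 and the emitted decision tree
   has already branched to its right subtree because INDEX > 19.  A
   descendant node whose LOW is 20 then needs no lower-bound test: the
   ancestor's HIGH of 19 equals LOW - 1, which is exactly the condition
   the loop above looks for.  */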
4040
4041/* Search the parent sections of the case node tree
4042 to see if a test for the upper bound of NODE would be redundant.
4043 INDEX_TYPE is the type of the index expression.
4044
4045 The instructions to generate the case decision tree are
4046   output in the same order as nodes are processed, so it is
4047   known that if a parent node checks the range of the current
4048   node plus one, then the current node is bounded at its upper
4049 span. Thus the test would be redundant. */
4050
4051static int
4052node_has_high_bound (case_node_ptr node, tree index_type)
4053{
4054 tree high_plus_one;
4055 case_node_ptr pnode;
4056
4057 /* If there is no upper bound, obviously no test is needed. */
4058
4059 if (TYPE_MAX_VALUE (index_type) == NULL)
4060 return 1;
4061
4062 /* If the upper bound of this node is the highest value in the type
4063 of the index expression, we need not test against it. */
4064
4065 if (tree_int_cst_equal (node->high, TYPE_MAX_VALUE (index_type)))
4066 return 1;
4067
4068 /* If this node has a right branch, the value at the right must be greater
4069 than that at this node, so it cannot be bounded at the top and
4070 we need not bother testing any further. */
4071
4072 if (node->right)
4073 return 0;
4074
4075 high_plus_one = fold (build (PLUS_EXPR, TREE_TYPE (node->high),
4076 node->high, integer_one_node));
4077
4078 /* If the addition above overflowed, we can't verify anything.
4079 Otherwise, look for a parent that tests our value + 1. */
4080
4081 if (! tree_int_cst_lt (node->high, high_plus_one))
4082 return 0;
4083
4084 for (pnode = node->parent; pnode; pnode = pnode->parent)
4085 if (tree_int_cst_equal (high_plus_one, pnode->low))
4086 return 1;
4087
4088 return 0;
4089}
4090
4091/* Search the parent sections of the
4092 case node tree to see if both tests for the upper and lower
4093 bounds of NODE would be redundant. */
4094
4095static int
4096node_is_bounded (case_node_ptr node, tree index_type)
4097{
4098 return (node_has_low_bound (node, index_type)
4099 && node_has_high_bound (node, index_type));
4100}
4101
4102/* Emit an unconditional jump to LABEL unless it would be dead code. */
4103
4104static void
4105emit_jump_if_reachable (rtx label)
4106{
4107  if (!BARRIER_P (get_last_insn ()))
4108 emit_jump (label);
4109}
4110\f
4111/* Emit step-by-step code to select a case for the value of INDEX.
4112 The thus generated decision tree follows the form of the
4113 case-node binary tree NODE, whose nodes represent test conditions.
4114 INDEX_TYPE is the type of the index of the switch.
4115
4116 Care is taken to prune redundant tests from the decision tree
4117 by detecting any boundary conditions already checked by
4118 emitted rtx. (See node_has_high_bound, node_has_low_bound
4119 and node_is_bounded, above.)
4120
4121 Where the test conditions can be shown to be redundant we emit
4122 an unconditional jump to the target code. As a further
4123 optimization, the subordinates of a tree node are examined to
4124 check for bounded nodes. In this case conditional and/or
4125 unconditional jumps as a result of the boundary check for the
4126   current node are arranged to target the subordinates' associated
4127   code for out-of-bound conditions on the current node.
4128
4129   We can assume that when control reaches the code generated here,
4130 the index value has already been compared with the parents
4131 of this node, and determined to be on the same side of each parent
4132 as this node is. Thus, if this node tests for the value 51,
4133 and a parent tested for 52, we don't need to consider
4134 the possibility of a value greater than 51. If another parent
4135 tests for the value 50, then this node need not test anything. */
4136
4137static void
4138emit_case_nodes (rtx index, case_node_ptr node, rtx default_label,
4139 tree index_type)
4140{
4141 /* If INDEX has an unsigned type, we must make unsigned branches. */
4142  int unsignedp = TYPE_UNSIGNED (index_type);
4143  enum machine_mode mode = GET_MODE (index);
4144  enum machine_mode imode = TYPE_MODE (index_type);
4145
4146 /* See if our parents have already tested everything for us.
4147 If they have, emit an unconditional jump for this node. */
4148 if (node_is_bounded (node, index_type))
4149 emit_jump (label_rtx (node->code_label));
4150
4151 else if (tree_int_cst_equal (node->low, node->high))
4152 {
4153 /* Node is single valued. First see if the index expression matches
4154         this node and then check our children, if any.  */
4155
4156 do_jump_if_equal (index,
4157 convert_modes (mode, imode,
4158 expand_expr (node->low, NULL_RTX,
4159 VOIDmode, 0),
4160 unsignedp),
4161 label_rtx (node->code_label), unsignedp);
4162
4163 if (node->right != 0 && node->left != 0)
4164 {
4165 /* This node has children on both sides.
4166 Dispatch to one side or the other
4167 by comparing the index value with this node's value.
4168 If one subtree is bounded, check that one first,
4169 so we can avoid real branches in the tree. */
4170
4171 if (node_is_bounded (node->right, index_type))
4172 {
4381f7c2 4173 emit_cmp_and_jump_insns (index,
69107307
AO
4174 convert_modes
4175 (mode, imode,
4176 expand_expr (node->high, NULL_RTX,
4177 VOIDmode, 0),
4178 unsignedp),
d43e0b7d 4179 GT, NULL_RTX, mode, unsignedp,
4381f7c2 4180 label_rtx (node->right->code_label));
28d81abb
RK
4181 emit_case_nodes (index, node->left, default_label, index_type);
4182 }
4183
4184 else if (node_is_bounded (node->left, index_type))
4185 {
4381f7c2 4186 emit_cmp_and_jump_insns (index,
69107307
AO
4187 convert_modes
4188 (mode, imode,
4189 expand_expr (node->high, NULL_RTX,
4190 VOIDmode, 0),
4191 unsignedp),
d43e0b7d 4192 LT, NULL_RTX, mode, unsignedp,
c5d5d461 4193 label_rtx (node->left->code_label));
28d81abb
RK
4194 emit_case_nodes (index, node->right, default_label, index_type);
4195 }
4196
43a21dfc
KH
4197 /* If both children are single-valued cases with no
4198 children, finish up all the work. This way, we can save
4199 one ordered comparison. */
4200 else if (tree_int_cst_equal (node->right->low, node->right->high)
4201 && node->right->left == 0
4202 && node->right->right == 0
4203 && tree_int_cst_equal (node->left->low, node->left->high)
4204 && node->left->left == 0
4205 && node->left->right == 0)
4206 {
4207 /* Neither node is bounded. First distinguish the two sides;
4208 then emit the code for one side at a time. */
4209
4210 /* See if the value matches what the right hand side
4211 wants. */
4212 do_jump_if_equal (index,
4213 convert_modes (mode, imode,
4214 expand_expr (node->right->low,
4215 NULL_RTX,
4216 VOIDmode, 0),
4217 unsignedp),
4218 label_rtx (node->right->code_label),
4219 unsignedp);
4220
4221 /* See if the value matches what the left hand side
4222 wants. */
4223 do_jump_if_equal (index,
4224 convert_modes (mode, imode,
4225 expand_expr (node->left->low,
4226 NULL_RTX,
4227 VOIDmode, 0),
4228 unsignedp),
4229 label_rtx (node->left->code_label),
4230 unsignedp);
4231 }
4232
28d81abb
RK
4233 else
4234 {
4235 /* Neither node is bounded. First distinguish the two sides;
4236 then emit the code for one side at a time. */
4237
4381f7c2 4238 tree test_label = build_decl (LABEL_DECL, NULL_TREE, NULL_TREE);
28d81abb
RK
4239
4240 /* See if the value is on the right. */
4381f7c2 4241 emit_cmp_and_jump_insns (index,
69107307
AO
4242 convert_modes
4243 (mode, imode,
4244 expand_expr (node->high, NULL_RTX,
4245 VOIDmode, 0),
4246 unsignedp),
d43e0b7d 4247 GT, NULL_RTX, mode, unsignedp,
c5d5d461 4248 label_rtx (test_label));
28d81abb
RK
4249
4250 /* Value must be on the left.
4251 Handle the left-hand subtree. */
4252 emit_case_nodes (index, node->left, default_label, index_type);
4253 /* If left-hand subtree does nothing,
4254 go to default. */
4255 emit_jump_if_reachable (default_label);
4256
4257 /* Code branches here for the right-hand subtree. */
4258 expand_label (test_label);
4259 emit_case_nodes (index, node->right, default_label, index_type);
4260 }
4261 }
4262
4263 else if (node->right != 0 && node->left == 0)
4264 {
4265 /* Here we have a right child but no left so we issue conditional
4266 branch to default and process the right child.
4267
4268         Omit the conditional branch to default if it would avoid only one
4269 right child; it costs too much space to save so little time. */
4270
de14fd73 4271 if (node->right->right || node->right->left
28d81abb
RK
4272 || !tree_int_cst_equal (node->right->low, node->right->high))
4273 {
4274 if (!node_has_low_bound (node, index_type))
4275 {
4381f7c2 4276 emit_cmp_and_jump_insns (index,
69107307
AO
4277 convert_modes
4278 (mode, imode,
4279 expand_expr (node->high, NULL_RTX,
4280 VOIDmode, 0),
4281 unsignedp),
d43e0b7d 4282 LT, NULL_RTX, mode, unsignedp,
c5d5d461 4283 default_label);
28d81abb
RK
4284 }
4285
4286 emit_case_nodes (index, node->right, default_label, index_type);
4287 }
4288 else
4289 /* We cannot process node->right normally
4290 since we haven't ruled out the numbers less than
4291 this node's value. So handle node->right explicitly. */
4292 do_jump_if_equal (index,
69107307
AO
4293 convert_modes
4294 (mode, imode,
4295 expand_expr (node->right->low, NULL_RTX,
4296 VOIDmode, 0),
4297 unsignedp),
28d81abb
RK
4298 label_rtx (node->right->code_label), unsignedp);
4299 }
4300
4301 else if (node->right == 0 && node->left != 0)
4302 {
4303 /* Just one subtree, on the left. */
4381f7c2 4304 if (node->left->left || node->left->right
28d81abb
RK
4305 || !tree_int_cst_equal (node->left->low, node->left->high))
4306 {
4307 if (!node_has_high_bound (node, index_type))
4308 {
69107307
AO
4309 emit_cmp_and_jump_insns (index,
4310 convert_modes
4311 (mode, imode,
4312 expand_expr (node->high, NULL_RTX,
4313 VOIDmode, 0),
4314 unsignedp),
d43e0b7d 4315 GT, NULL_RTX, mode, unsignedp,
c5d5d461 4316 default_label);
28d81abb
RK
4317 }
4318
4319 emit_case_nodes (index, node->left, default_label, index_type);
4320 }
4321 else
4322 /* We cannot process node->left normally
4323 since we haven't ruled out the numbers less than
4324 this node's value. So handle node->left explicitly. */
4325 do_jump_if_equal (index,
69107307
AO
4326 convert_modes
4327 (mode, imode,
4328 expand_expr (node->left->low, NULL_RTX,
4329 VOIDmode, 0),
4330 unsignedp),
28d81abb
RK
4331 label_rtx (node->left->code_label), unsignedp);
4332 }
4333 }
4334 else
4335 {
4336 /* Node is a range. These cases are very similar to those for a single
4337 value, except that we do not start by testing whether this node
4338 is the one to branch to. */
4339
4340 if (node->right != 0 && node->left != 0)
4341 {
4342 /* Node has subtrees on both sides.
4343 If the right-hand subtree is bounded,
4344 test for it first, since we can go straight there.
4345 Otherwise, we need to make a branch in the control structure,
4346 then handle the two subtrees. */
4347 tree test_label = 0;
4348
28d81abb
RK
4349 if (node_is_bounded (node->right, index_type))
4350 /* Right hand node is fully bounded so we can eliminate any
4351 testing and branch directly to the target code. */
69107307
AO
4352 emit_cmp_and_jump_insns (index,
4353 convert_modes
4354 (mode, imode,
4355 expand_expr (node->high, NULL_RTX,
4356 VOIDmode, 0),
4357 unsignedp),
d43e0b7d 4358 GT, NULL_RTX, mode, unsignedp,
c5d5d461 4359 label_rtx (node->right->code_label));
28d81abb
RK
4360 else
4361 {
4362 /* Right hand node requires testing.
4363 Branch to a label where we will handle it later. */
4364
4365 test_label = build_decl (LABEL_DECL, NULL_TREE, NULL_TREE);
4381f7c2 4366 emit_cmp_and_jump_insns (index,
69107307
AO
4367 convert_modes
4368 (mode, imode,
4369 expand_expr (node->high, NULL_RTX,
4370 VOIDmode, 0),
4371 unsignedp),
d43e0b7d 4372 GT, NULL_RTX, mode, unsignedp,
c5d5d461 4373 label_rtx (test_label));
28d81abb
RK
4374 }
4375
4376 /* Value belongs to this node or to the left-hand subtree. */
4377
69107307
AO
4378 emit_cmp_and_jump_insns (index,
4379 convert_modes
4380 (mode, imode,
4381 expand_expr (node->low, NULL_RTX,
4382 VOIDmode, 0),
4383 unsignedp),
d43e0b7d 4384 GE, NULL_RTX, mode, unsignedp,
c5d5d461 4385 label_rtx (node->code_label));
28d81abb
RK
4386
4387 /* Handle the left-hand subtree. */
4388 emit_case_nodes (index, node->left, default_label, index_type);
4389
4390 /* If right node had to be handled later, do that now. */
4391
4392 if (test_label)
4393 {
4394 /* If the left-hand subtree fell through,
4395 don't let it fall into the right-hand subtree. */
4396 emit_jump_if_reachable (default_label);
4397
4398 expand_label (test_label);
4399 emit_case_nodes (index, node->right, default_label, index_type);
4400 }
4401 }
4402
4403 else if (node->right != 0 && node->left == 0)
4404 {
4405 /* Deal with values to the left of this node,
4406 if they are possible. */
4407 if (!node_has_low_bound (node, index_type))
4408 {
4381f7c2 4409 emit_cmp_and_jump_insns (index,
69107307
AO
4410 convert_modes
4411 (mode, imode,
4412 expand_expr (node->low, NULL_RTX,
4413 VOIDmode, 0),
4414 unsignedp),
d43e0b7d 4415 LT, NULL_RTX, mode, unsignedp,
c5d5d461 4416 default_label);
28d81abb
RK
4417 }
4418
4419 /* Value belongs to this node or to the right-hand subtree. */
4420
69107307
AO
4421 emit_cmp_and_jump_insns (index,
4422 convert_modes
4423 (mode, imode,
4424 expand_expr (node->high, NULL_RTX,
4425 VOIDmode, 0),
4426 unsignedp),
d43e0b7d 4427 LE, NULL_RTX, mode, unsignedp,
c5d5d461 4428 label_rtx (node->code_label));
28d81abb
RK
4429
4430 emit_case_nodes (index, node->right, default_label, index_type);
4431 }
4432
4433 else if (node->right == 0 && node->left != 0)
4434 {
4435 /* Deal with values to the right of this node,
4436 if they are possible. */
4437 if (!node_has_high_bound (node, index_type))
4438 {
4381f7c2 4439 emit_cmp_and_jump_insns (index,
69107307
AO
4440 convert_modes
4441 (mode, imode,
4442 expand_expr (node->high, NULL_RTX,
4443 VOIDmode, 0),
4444 unsignedp),
d43e0b7d 4445 GT, NULL_RTX, mode, unsignedp,
c5d5d461 4446 default_label);
28d81abb
RK
4447 }
4448
4449 /* Value belongs to this node or to the left-hand subtree. */
4450
4381f7c2 4451 emit_cmp_and_jump_insns (index,
69107307
AO
4452 convert_modes
4453 (mode, imode,
4454 expand_expr (node->low, NULL_RTX,
4455 VOIDmode, 0),
4456 unsignedp),
d43e0b7d 4457 GE, NULL_RTX, mode, unsignedp,
c5d5d461 4458 label_rtx (node->code_label));
28d81abb
RK
4459
4460 emit_case_nodes (index, node->left, default_label, index_type);
4461 }
4462
4463 else
4464 {
4465 /* Node has no children so we check low and high bounds to remove
4466 redundant tests. Only one of the bounds can exist,
4467 since otherwise this node is bounded--a case tested already. */
923cbdc3
JH
4468 int high_bound = node_has_high_bound (node, index_type);
4469 int low_bound = node_has_low_bound (node, index_type);
28d81abb 4470
923cbdc3 4471 if (!high_bound && low_bound)
28d81abb 4472 {
4381f7c2 4473 emit_cmp_and_jump_insns (index,
69107307
AO
4474 convert_modes
4475 (mode, imode,
4476 expand_expr (node->high, NULL_RTX,
4477 VOIDmode, 0),
4478 unsignedp),
d43e0b7d 4479 GT, NULL_RTX, mode, unsignedp,
c5d5d461 4480 default_label);
28d81abb
RK
4481 }
4482
923cbdc3 4483 else if (!low_bound && high_bound)
28d81abb 4484 {
4381f7c2 4485 emit_cmp_and_jump_insns (index,
69107307
AO
4486 convert_modes
4487 (mode, imode,
4488 expand_expr (node->low, NULL_RTX,
4489 VOIDmode, 0),
4490 unsignedp),
d43e0b7d 4491 LT, NULL_RTX, mode, unsignedp,
c5d5d461 4492 default_label);
28d81abb 4493 }
923cbdc3
JH
4494 else if (!low_bound && !high_bound)
4495 {
9312aecc 4496 /* Widen LOW and HIGH to the same width as INDEX. */
ae2bcd98 4497 tree type = lang_hooks.types.type_for_mode (mode, unsignedp);
9312aecc
JDA
4498 tree low = build1 (CONVERT_EXPR, type, node->low);
4499 tree high = build1 (CONVERT_EXPR, type, node->high);
ef89d648 4500 rtx low_rtx, new_index, new_bound;
9312aecc
JDA
4501
4502 /* Instead of doing two branches, emit one unsigned branch for
4503 (index-low) > (high-low). */
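              /* Example (illustration only; not in the original source):
                 for a node covering 10..20 this emits the single unsigned
                 test (index - 10) > 10.  An index of 5 wraps around to a
                 huge unsigned value and an index of 25 yields 15, so both
                 branch to DEFAULT_LABEL, while 10..20 map to 0..10 and
                 fall through to the jump to this node's label below.  */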
ef89d648
ZW
4504 low_rtx = expand_expr (low, NULL_RTX, mode, 0);
4505 new_index = expand_simple_binop (mode, MINUS, index, low_rtx,
4506 NULL_RTX, unsignedp,
4507 OPTAB_WIDEN);
9312aecc
JDA
4508 new_bound = expand_expr (fold (build (MINUS_EXPR, type,
4509 high, low)),
4510 NULL_RTX, mode, 0);
786de7eb 4511
9312aecc 4512 emit_cmp_and_jump_insns (new_index, new_bound, GT, NULL_RTX,
d43e0b7d 4513 mode, 1, default_label);
923cbdc3 4514 }
28d81abb
RK
4515
4516 emit_jump (label_rtx (node->code_label));
4517 }
4518 }
4519}
4520
4521#include "gt-stmt.h"