/* [review] Removed web-scrape residue (gitweb page header for
   thirdparty/gcc.git gcc/stmt.c); the real file begins with the GCC
   copyright header below.  */
1 /* Expands front end tree to back end RTL for GCC
2 Copyright (C) 1987, 1988, 1989, 1992, 1993, 1994, 1995, 1996, 1997,
3 1998, 1999, 2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009,
4 2010 Free Software Foundation, Inc.
5
6 This file is part of GCC.
7
8 GCC is free software; you can redistribute it and/or modify it under
9 the terms of the GNU General Public License as published by the Free
10 Software Foundation; either version 3, or (at your option) any later
11 version.
12
13 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
14 WARRANTY; without even the implied warranty of MERCHANTABILITY or
15 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
16 for more details.
17
18 You should have received a copy of the GNU General Public License
19 along with GCC; see the file COPYING3. If not see
20 <http://www.gnu.org/licenses/>. */
21
22 /* This file handles the generation of rtl code from tree structure
23 above the level of expressions, using subroutines in exp*.c and emit-rtl.c.
24 The functions whose names start with `expand_' are called by the
25 expander to generate RTL instructions for various kinds of constructs. */
26
27 #include "config.h"
28 #include "system.h"
29 #include "coretypes.h"
30 #include "tm.h"
31
32 #include "rtl.h"
33 #include "hard-reg-set.h"
34 #include "tree.h"
35 #include "tm_p.h"
36 #include "flags.h"
37 #include "except.h"
38 #include "function.h"
39 #include "insn-config.h"
40 #include "expr.h"
41 #include "libfuncs.h"
42 #include "recog.h"
43 #include "machmode.h"
44 #include "diagnostic-core.h"
45 #include "toplev.h"
46 #include "output.h"
47 #include "ggc.h"
48 #include "langhooks.h"
49 #include "predict.h"
50 #include "optabs.h"
51 #include "target.h"
52 #include "gimple.h"
53 #include "regs.h"
54 #include "alloc-pool.h"
55 #include "pretty-print.h"
56 #include "bitmap.h"
57
58 \f
59 /* Functions and data structures for expanding case statements. */
60
61 /* Case label structure, used to hold info on labels within case
62 statements. We handle "range" labels; for a single-value label
63 as in C, the high and low limits are the same.
64
65 We start with a vector of case nodes sorted in ascending order, and
66 the default label as the last element in the vector. Before expanding
67 to RTL, we transform this vector into a list linked via the RIGHT
68 fields in the case_node struct. Nodes with higher case values are
69 later in the list.
70
71 Switch statements can be output in three forms. A branch table is
72 used if there are more than a few labels and the labels are dense
73 within the range between the smallest and largest case value. If a
74 branch table is used, no further manipulations are done with the case
75 node chain.
76
77 The alternative to the use of a branch table is to generate a series
78 of compare and jump insns. When that is done, we use the LEFT, RIGHT,
79 and PARENT fields to hold a binary tree. Initially the tree is
80 totally unbalanced, with everything on the right. We balance the tree
81 with nodes on the left having lower case values than the parent
82 and nodes on the right having higher values. We then output the tree
83 in order.
84
85 For very small, suitable switch statements, we can generate a series
86 of simple bit test and branches instead. */
87
/* One case label (or, for range labels, one label range) of a switch
   statement.  See the overview comment above for how the LEFT/RIGHT/
   PARENT fields are used first as a chain and later as a binary tree.  */
struct case_node
{
  struct case_node *left;   /* Left son in binary tree */
  struct case_node *right;  /* Right son in binary tree; also node chain */
  struct case_node *parent; /* Parent of node in binary tree */
  tree low;                 /* Lowest index value for this label */
  tree high;                /* Highest index value for this label;
                               equal to LOW for a single-value label */
  tree code_label;          /* Label to jump to when node matches */
};

typedef struct case_node case_node;
typedef struct case_node *case_node_ptr;
100
/* These are used by estimate_case_costs and balance_case_nodes.  */

/* This must be a signed type, and non-ANSI compilers lack signed char.  */
static short cost_table_[129];
/* Nonzero when cost_table_ should be consulted — presumably set by
   estimate_case_costs (not in view); verify against its definition.  */
static int use_cost_table;
/* Nonzero once cost_table_ has been filled in, so initialization
   happens only once.  */
static int cost_table_initialized;

/* Special care is needed because we allow -1, but TREE_INT_CST_LOW
   is unsigned: the +1 bias maps the index range [-1, 127] onto the
   array range [0, 128].  */
#define COST_TABLE(I)  cost_table_[(unsigned HOST_WIDE_INT) ((I) + 1)]
111 \f
112 static int n_occurrences (int, const char *);
113 static bool tree_conflicts_with_clobbers_p (tree, HARD_REG_SET *);
114 static void expand_nl_goto_receiver (void);
115 static bool check_operand_nalternatives (tree, tree);
116 static bool check_unique_operand_names (tree, tree, tree);
117 static char *resolve_operand_name_1 (char *, tree, tree, tree);
118 static void expand_null_return_1 (void);
119 static void expand_value_return (rtx);
120 static int estimate_case_costs (case_node_ptr);
121 static bool lshift_cheap_p (void);
122 static int case_bit_test_cmp (const void *, const void *);
123 static void emit_case_bit_tests (tree, tree, tree, tree, case_node_ptr, rtx);
124 static void balance_case_nodes (case_node_ptr *, case_node_ptr);
125 static int node_has_low_bound (case_node_ptr, tree);
126 static int node_has_high_bound (case_node_ptr, tree);
127 static int node_is_bounded (case_node_ptr, tree);
128 static void emit_case_nodes (rtx, case_node_ptr, rtx, tree);
129 static struct case_node *add_case_node (struct case_node *, tree,
130 tree, tree, tree, alloc_pool);
131
132 \f
133 /* Return the rtx-label that corresponds to a LABEL_DECL,
134 creating it if necessary. */
135
136 rtx
137 label_rtx (tree label)
138 {
139 gcc_assert (TREE_CODE (label) == LABEL_DECL);
140
141 if (!DECL_RTL_SET_P (label))
142 {
143 rtx r = gen_label_rtx ();
144 SET_DECL_RTL (label, r);
145 if (FORCED_LABEL (label) || DECL_NONLOCAL (label))
146 LABEL_PRESERVE_P (r) = 1;
147 }
148
149 return DECL_RTL (label);
150 }
151
152 /* As above, but also put it on the forced-reference list of the
153 function that contains it. */
154 rtx
155 force_label_rtx (tree label)
156 {
157 rtx ref = label_rtx (label);
158 tree function = decl_function_context (label);
159
160 gcc_assert (function);
161
162 forced_labels = gen_rtx_EXPR_LIST (VOIDmode, ref, forced_labels);
163 return ref;
164 }
165
/* Add an unconditional jump to LABEL as the next sequential instruction.  */

void
emit_jump (rtx label)
{
  /* Flush any pending stack adjustment first, so the code at the
     jump target sees the stack in its expected state.  */
  do_pending_stack_adjust ();
  emit_jump_insn (gen_jump (label));
  /* Code after an unconditional jump is unreachable; mark that with
     a barrier.  */
  emit_barrier ();
}
175
176 /* Emit code to jump to the address
177 specified by the pointer expression EXP. */
178
179 void
180 expand_computed_goto (tree exp)
181 {
182 rtx x = expand_normal (exp);
183
184 x = convert_memory_address (Pmode, x);
185
186 do_pending_stack_adjust ();
187 emit_indirect_jump (x);
188 }
189 \f
190 /* Handle goto statements and the labels that they can go to. */
191
192 /* Specify the location in the RTL code of a label LABEL,
193 which is a LABEL_DECL tree node.
194
195 This is used for the kind of label that the user can jump to with a
196 goto statement, and for alternatives of a switch or case statement.
197 RTL labels generated for loops and conditionals don't go through here;
198 they are generated directly at the RTL level, by other functions below.
199
200 Note that this has nothing to do with defining label *names*.
201 Languages vary in how they do that and what that even means. */
202
203 void
204 expand_label (tree label)
205 {
206 rtx label_r = label_rtx (label);
207
208 do_pending_stack_adjust ();
209 emit_label (label_r);
210 if (DECL_NAME (label))
211 LABEL_NAME (DECL_RTL (label)) = IDENTIFIER_POINTER (DECL_NAME (label));
212
213 if (DECL_NONLOCAL (label))
214 {
215 expand_nl_goto_receiver ();
216 nonlocal_goto_handler_labels
217 = gen_rtx_EXPR_LIST (VOIDmode, label_r,
218 nonlocal_goto_handler_labels);
219 }
220
221 if (FORCED_LABEL (label))
222 forced_labels = gen_rtx_EXPR_LIST (VOIDmode, label_r, forced_labels);
223
224 if (DECL_NONLOCAL (label) || FORCED_LABEL (label))
225 maybe_set_first_label_num (label_r);
226 }
227
/* Generate RTL code for a `goto' statement with target label LABEL.
   LABEL should be a LABEL_DECL tree node that was or will later be
   defined with `expand_label'.  */

void
expand_goto (tree label)
{
#ifdef ENABLE_CHECKING
  /* Check for a nonlocal goto to a containing function.  Should have
     gotten translated to __builtin_nonlocal_goto.  */
  tree context = decl_function_context (label);
  gcc_assert (!context || context == current_function_decl);
#endif

  /* Only local gotos remain at this point; a plain jump suffices.  */
  emit_jump (label_rtx (label));
}
244 \f
/* Return the number of times character C occurs in string S.  */
static int
n_occurrences (int c, const char *s)
{
  int count = 0;
  const char *p;

  for (p = s; *p != '\0'; p++)
    if (*p == c)
      count++;

  return count;
}
254 \f
255 /* Generate RTL for an asm statement (explicit assembler code).
256 STRING is a STRING_CST node containing the assembler code text,
257 or an ADDR_EXPR containing a STRING_CST. VOL nonzero means the
258 insn is volatile; don't optimize it. */
259
260 static void
261 expand_asm_loc (tree string, int vol, location_t locus)
262 {
263 rtx body;
264
265 if (TREE_CODE (string) == ADDR_EXPR)
266 string = TREE_OPERAND (string, 0);
267
268 body = gen_rtx_ASM_INPUT_loc (VOIDmode,
269 ggc_strdup (TREE_STRING_POINTER (string)),
270 locus);
271
272 MEM_VOLATILE_P (body) = vol;
273
274 emit_insn (body);
275 }
276
/* Parse the output constraint pointed to by *CONSTRAINT_P.  It is the
   OPERAND_NUMth output operand, indexed from zero.  There are NINPUTS
   inputs and NOUTPUTS outputs to this extended-asm.  Upon return,
   *ALLOWS_MEM will be TRUE iff the constraint allows the use of a
   memory operand.  Similarly, *ALLOWS_REG will be TRUE iff the
   constraint allows the use of a register operand.  And, *IS_INOUT
   will be true if the operand is read-write, i.e., if it is used as
   an input as well as an output.  If *CONSTRAINT_P is not in
   canonical form, it will be made canonical.  (Note that `+' will be
   replaced with `=' as part of this process.)

   Returns TRUE if all went well; FALSE if an error occurred.  */

bool
parse_output_constraint (const char **constraint_p, int operand_num,
			 int ninputs, int noutputs, bool *allows_mem,
			 bool *allows_reg, bool *is_inout)
{
  const char *constraint = *constraint_p;
  const char *p;

  /* Assume the constraint doesn't allow the use of either a register
     or memory.  */
  *allows_mem = false;
  *allows_reg = false;

  /* Allow the `=' or `+' to not be at the beginning of the string,
     since it wasn't explicitly documented that way, and there is a
     large body of code that puts it last.  Swap the character to
     the front, so as not to uglify any place else.  */
  p = strchr (constraint, '=');
  if (!p)
    p = strchr (constraint, '+');

  /* If the string doesn't contain an `=', issue an error
     message.  */
  if (!p)
    {
      error ("output operand constraint lacks %<=%>");
      return false;
    }

  /* If the constraint begins with `+', then the operand is both read
     from and written to.  */
  *is_inout = (*p == '+');

  /* Canonicalize the output constraint so that it begins with `='.  */
  if (p != constraint || *is_inout)
    {
      char *buf;
      size_t c_len = strlen (constraint);

      if (p != constraint)
	warning (0, "output constraint %qc for operand %d "
		 "is not at the beginning",
		 *p, operand_num);

      /* Make a copy of the constraint.  */
      buf = XALLOCAVEC (char, c_len + 1);
      strcpy (buf, constraint);
      /* Swap the first character and the `=' or `+'.  */
      buf[p - constraint] = buf[0];
      /* Make sure the first character is an `='.  (Until we do this,
	 it might be a `+'.)  */
      buf[0] = '=';
      /* Replace the constraint with the canonicalized string.  */
      *constraint_p = ggc_alloc_string (buf, c_len);
      constraint = *constraint_p;
    }

  /* Loop through the rest of the constraint string, classifying each
     constraint letter.  CONSTRAINT_LEN copes with multi-character
     constraints.  */
  for (p = constraint + 1; *p; p += CONSTRAINT_LEN (*p, p))
    switch (*p)
      {
      case '+':
      case '=':
	/* `=' and `+' are only meaningful at the front (handled and
	   canonicalized above).  */
	error ("operand constraint contains incorrectly positioned "
	       "%<+%> or %<=%>");
	return false;

      case '%':
	/* `%' declares this operand commutative with the next one, so
	   there must be a following operand.  */
	if (operand_num + 1 == ninputs + noutputs)
	  {
	    error ("%<%%%> constraint used with last operand");
	    return false;
	  }
	break;

      case 'V':  case TARGET_MEM_CONSTRAINT:  case 'o':
	/* Explicitly memory constraints.  */
	*allows_mem = true;
	break;

      case '?':  case '!':  case '*':  case '&':  case '#':
      case 'E':  case 'F':  case 'G':  case 'H':
      case 's':  case 'i':  case 'n':
      case 'I':  case 'J':  case 'K':  case 'L':  case 'M':
      case 'N':  case 'O':  case 'P':  case ',':
	/* Modifiers and constant constraints say nothing about whether
	   the output may live in a register or in memory.  */
	break;

      case '0':  case '1':  case '2':  case '3':  case '4':
      case '5':  case '6':  case '7':  case '8':  case '9':
      case '[':
	error ("matching constraint not valid in output operand");
	return false;

      case '<':  case '>':
	/* ??? Before flow, auto inc/dec insns are not supposed to exist,
	   excepting those that expand_call created.  So match memory
	   and hope.  */
	*allows_mem = true;
	break;

      case 'g':  case 'X':
	*allows_reg = true;
	*allows_mem = true;
	break;

      case 'p': case 'r':
	*allows_reg = true;
	break;

      default:
	/* Machine-specific constraint letters.  Non-alphabetic junk is
	   silently ignored here (the input-constraint parser diagnoses
	   it).  */
	if (!ISALPHA (*p))
	  break;
	if (REG_CLASS_FROM_CONSTRAINT (*p, p) != NO_REGS)
	  *allows_reg = true;
#ifdef EXTRA_CONSTRAINT_STR
	else if (EXTRA_ADDRESS_CONSTRAINT (*p, p))
	  *allows_reg = true;
	else if (EXTRA_MEMORY_CONSTRAINT (*p, p))
	  *allows_mem = true;
	else
	  {
	    /* Otherwise we can't assume anything about the nature of
	       the constraint except that it isn't purely registers.
	       Treat it like "g" and hope for the best.  */
	    *allows_reg = true;
	    *allows_mem = true;
	  }
#endif
	break;
      }

  return true;
}
422
/* Similar, but for input constraints.  Parse the INPUT_NUMth input
   constraint *CONSTRAINT_P; NINPUTS, NOUTPUTS and NINOUT describe the
   asm as for parse_output_constraint, and CONSTRAINTS is the full
   array of constraint strings (needed to chase matching constraints).
   Sets *ALLOWS_MEM / *ALLOWS_REG; returns TRUE on success, FALSE
   after emitting an error.  Note that for a matching constraint,
   *CONSTRAINT_P may be replaced by the matched output constraint.  */

bool
parse_input_constraint (const char **constraint_p, int input_num,
			int ninputs, int noutputs, int ninout,
			const char * const * constraints,
			bool *allows_mem, bool *allows_reg)
{
  const char *constraint = *constraint_p;
  const char *orig_constraint = constraint;
  size_t c_len = strlen (constraint);
  size_t j;
  bool saw_match = false;

  /* Assume the constraint doesn't allow the use of either
     a register or memory.  */
  *allows_mem = false;
  *allows_reg = false;

  /* Make sure constraint has neither `=', `+', nor '&'.  */

  for (j = 0; j < c_len; j += CONSTRAINT_LEN (constraint[j], constraint+j))
    switch (constraint[j])
      {
      case '+':  case '=':  case '&':
	/* Only diagnose these in the user's original constraint; after
	   following a matching constraint we may legitimately be
	   scanning an output constraint that starts with `='.  */
	if (constraint == orig_constraint)
	  {
	    error ("input operand constraint contains %qc", constraint[j]);
	    return false;
	  }
	break;

      case '%':
	if (constraint == orig_constraint
	    && input_num + 1 == ninputs - ninout)
	  {
	    error ("%<%%%> constraint used with last operand");
	    return false;
	  }
	break;

      case 'V':  case TARGET_MEM_CONSTRAINT:  case 'o':
	*allows_mem = true;
	break;

      case '<':  case '>':
      case '?':  case '!':  case '*':  case '#':
      case 'E':  case 'F':  case 'G':  case 'H':
      case 's':  case 'i':  case 'n':
      case 'I':  case 'J':  case 'K':  case 'L':  case 'M':
      case 'N':  case 'O':  case 'P':  case ',':
	break;

	/* Whether or not a numeric constraint allows a register is
	   decided by the matching constraint, and so there is no need
	   to do anything special with them.  We must handle them in
	   the default case, so that we don't unnecessarily force
	   operands to memory.  */
      case '0':  case '1':  case '2':  case '3':  case '4':
      case '5':  case '6':  case '7':  case '8':  case '9':
	{
	  char *end;
	  unsigned long match;

	  saw_match = true;

	  match = strtoul (constraint + j, &end, 10);
	  if (match >= (unsigned long) noutputs)
	    {
	      error ("matching constraint references invalid operand number");
	      return false;
	    }

	  /* Try and find the real constraint for this dup.  Only do this
	     if the matching constraint is the only alternative.  */
	  if (*end == '\0'
	      && (j == 0 || (j == 1 && constraint[0] == '%')))
	    {
	      /* Restart the scan on the matched output constraint.  */
	      constraint = constraints[match];
	      *constraint_p = constraint;
	      c_len = strlen (constraint);
	      j = 0;
	      /* ??? At the end of the loop, we will skip the first part of
		 the matched constraint.  This assumes not only that the
		 other constraint is an output constraint, but also that
		 the '=' or '+' come first.  */
	      break;
	    }
	  else
	    /* Skip past the digits just consumed by strtoul.  */
	    j = end - constraint;
	  /* Anticipate increment at end of loop.  */
	  j--;
	}
	/* Fall through.  */

      case 'p': case 'r':
	*allows_reg = true;
	break;

      case 'g': case 'X':
	*allows_reg = true;
	*allows_mem = true;
	break;

      default:
	if (! ISALPHA (constraint[j]))
	  {
	    error ("invalid punctuation %qc in constraint", constraint[j]);
	    return false;
	  }
	if (REG_CLASS_FROM_CONSTRAINT (constraint[j], constraint + j)
	    != NO_REGS)
	  *allows_reg = true;
#ifdef EXTRA_CONSTRAINT_STR
	else if (EXTRA_ADDRESS_CONSTRAINT (constraint[j], constraint + j))
	  *allows_reg = true;
	else if (EXTRA_MEMORY_CONSTRAINT (constraint[j], constraint + j))
	  *allows_mem = true;
	else
	  {
	    /* Otherwise we can't assume anything about the nature of
	       the constraint except that it isn't purely registers.
	       Treat it like "g" and hope for the best.  */
	    *allows_reg = true;
	    *allows_mem = true;
	  }
#endif
	break;
      }

  if (saw_match && !*allows_reg)
    warning (0, "matching constraint does not allow a register");

  return true;
}
558
559 /* Return DECL iff there's an overlap between *REGS and DECL, where DECL
560 can be an asm-declared register. Called via walk_tree. */
561
562 static tree
563 decl_overlaps_hard_reg_set_p (tree *declp, int *walk_subtrees ATTRIBUTE_UNUSED,
564 void *data)
565 {
566 tree decl = *declp;
567 const HARD_REG_SET *const regs = (const HARD_REG_SET *) data;
568
569 if (TREE_CODE (decl) == VAR_DECL)
570 {
571 if (DECL_HARD_REGISTER (decl)
572 && REG_P (DECL_RTL (decl))
573 && REGNO (DECL_RTL (decl)) < FIRST_PSEUDO_REGISTER)
574 {
575 rtx reg = DECL_RTL (decl);
576
577 if (overlaps_hard_reg_set_p (*regs, GET_MODE (reg), REGNO (reg)))
578 return decl;
579 }
580 walk_subtrees = 0;
581 }
582 else if (TYPE_P (decl) || TREE_CODE (decl) == PARM_DECL)
583 walk_subtrees = 0;
584 return NULL_TREE;
585 }
586
/* If there is an overlap between *REGS and DECL, return the first overlap
   found.  Returns NULL_TREE when there is no overlap.  */
tree
tree_overlaps_hard_reg_set (tree decl, HARD_REG_SET *regs)
{
  /* walk_tree stops and propagates the first non-NULL value returned
     by the callback, i.e. the first overlapping decl.  */
  return walk_tree (&decl, decl_overlaps_hard_reg_set_p, regs, NULL);
}
594
/* Check for overlap between registers marked in CLOBBERED_REGS and
   anything inappropriate in T.  Emit an error and return true for a
   conflict, false for ok.  (The old comment claimed a tree return;
   the function actually returns a bool.)  */

static bool
tree_conflicts_with_clobbers_p (tree t, HARD_REG_SET *clobbered_regs)
{
  /* Conflicts between asm-declared register variables and the clobber
     list are not allowed.  */
  tree overlap = tree_overlaps_hard_reg_set (t, clobbered_regs);

  if (overlap)
    {
      error ("asm-specifier for variable %qE conflicts with asm clobber list",
	     DECL_NAME (overlap));

      /* Reset registerness to stop multiple errors emitted for a single
	 variable.  */
      DECL_REGISTER (overlap) = 0;
      return true;
    }

  return false;
}
619
620 /* Generate RTL for an asm statement with arguments.
621 STRING is the instruction template.
622 OUTPUTS is a list of output arguments (lvalues); INPUTS a list of inputs.
623 Each output or input has an expression in the TREE_VALUE and
624 a tree list in TREE_PURPOSE which in turn contains a constraint
625 name in TREE_VALUE (or NULL_TREE) and a constraint string
626 in TREE_PURPOSE.
627 CLOBBERS is a list of STRING_CST nodes each naming a hard register
628 that is clobbered by this insn.
629
630 Not all kinds of lvalue that may appear in OUTPUTS can be stored directly.
631 Some elements of OUTPUTS may be replaced with trees representing temporary
632 values. The caller should copy those temporary values to the originally
633 specified lvalues.
634
635 VOL nonzero means the insn is volatile; don't optimize it. */
636
637 static void
638 expand_asm_operands (tree string, tree outputs, tree inputs,
639 tree clobbers, tree labels, int vol, location_t locus)
640 {
641 rtvec argvec, constraintvec, labelvec;
642 rtx body;
643 int ninputs = list_length (inputs);
644 int noutputs = list_length (outputs);
645 int nlabels = list_length (labels);
646 int ninout;
647 int nclobbers;
648 HARD_REG_SET clobbered_regs;
649 int clobber_conflict_found = 0;
650 tree tail;
651 tree t;
652 int i;
653 /* Vector of RTX's of evaluated output operands. */
654 rtx *output_rtx = XALLOCAVEC (rtx, noutputs);
655 int *inout_opnum = XALLOCAVEC (int, noutputs);
656 rtx *real_output_rtx = XALLOCAVEC (rtx, noutputs);
657 enum machine_mode *inout_mode = XALLOCAVEC (enum machine_mode, noutputs);
658 const char **constraints = XALLOCAVEC (const char *, noutputs + ninputs);
659 int old_generating_concat_p = generating_concat_p;
660
661 /* An ASM with no outputs needs to be treated as volatile, for now. */
662 if (noutputs == 0)
663 vol = 1;
664
665 if (! check_operand_nalternatives (outputs, inputs))
666 return;
667
668 string = resolve_asm_operand_names (string, outputs, inputs, labels);
669
670 /* Collect constraints. */
671 i = 0;
672 for (t = outputs; t ; t = TREE_CHAIN (t), i++)
673 constraints[i] = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (t)));
674 for (t = inputs; t ; t = TREE_CHAIN (t), i++)
675 constraints[i] = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (t)));
676
677 /* Sometimes we wish to automatically clobber registers across an asm.
678 Case in point is when the i386 backend moved from cc0 to a hard reg --
679 maintaining source-level compatibility means automatically clobbering
680 the flags register. */
681 clobbers = targetm.md_asm_clobbers (outputs, inputs, clobbers);
682
683 /* Count the number of meaningful clobbered registers, ignoring what
684 we would ignore later. */
685 nclobbers = 0;
686 CLEAR_HARD_REG_SET (clobbered_regs);
687 for (tail = clobbers; tail; tail = TREE_CHAIN (tail))
688 {
689 const char *regname;
690
691 if (TREE_VALUE (tail) == error_mark_node)
692 return;
693 regname = TREE_STRING_POINTER (TREE_VALUE (tail));
694
695 i = decode_reg_name (regname);
696 if (i >= 0 || i == -4)
697 ++nclobbers;
698 else if (i == -2)
699 error ("unknown register name %qs in %<asm%>", regname);
700
701 /* Mark clobbered registers. */
702 if (i >= 0)
703 {
704 /* Clobbering the PIC register is an error. */
705 if (i == (int) PIC_OFFSET_TABLE_REGNUM)
706 {
707 error ("PIC register %qs clobbered in %<asm%>", regname);
708 return;
709 }
710
711 SET_HARD_REG_BIT (clobbered_regs, i);
712 }
713 }
714
715 /* First pass over inputs and outputs checks validity and sets
716 mark_addressable if needed. */
717
718 ninout = 0;
719 for (i = 0, tail = outputs; tail; tail = TREE_CHAIN (tail), i++)
720 {
721 tree val = TREE_VALUE (tail);
722 tree type = TREE_TYPE (val);
723 const char *constraint;
724 bool is_inout;
725 bool allows_reg;
726 bool allows_mem;
727
728 /* If there's an erroneous arg, emit no insn. */
729 if (type == error_mark_node)
730 return;
731
732 /* Try to parse the output constraint. If that fails, there's
733 no point in going further. */
734 constraint = constraints[i];
735 if (!parse_output_constraint (&constraint, i, ninputs, noutputs,
736 &allows_mem, &allows_reg, &is_inout))
737 return;
738
739 if (! allows_reg
740 && (allows_mem
741 || is_inout
742 || (DECL_P (val)
743 && REG_P (DECL_RTL (val))
744 && GET_MODE (DECL_RTL (val)) != TYPE_MODE (type))))
745 mark_addressable (val);
746
747 if (is_inout)
748 ninout++;
749 }
750
751 ninputs += ninout;
752 if (ninputs + noutputs > MAX_RECOG_OPERANDS)
753 {
754 error ("more than %d operands in %<asm%>", MAX_RECOG_OPERANDS);
755 return;
756 }
757
758 for (i = 0, tail = inputs; tail; i++, tail = TREE_CHAIN (tail))
759 {
760 bool allows_reg, allows_mem;
761 const char *constraint;
762
763 /* If there's an erroneous arg, emit no insn, because the ASM_INPUT
764 would get VOIDmode and that could cause a crash in reload. */
765 if (TREE_TYPE (TREE_VALUE (tail)) == error_mark_node)
766 return;
767
768 constraint = constraints[i + noutputs];
769 if (! parse_input_constraint (&constraint, i, ninputs, noutputs, ninout,
770 constraints, &allows_mem, &allows_reg))
771 return;
772
773 if (! allows_reg && allows_mem)
774 mark_addressable (TREE_VALUE (tail));
775 }
776
777 /* Second pass evaluates arguments. */
778
779 ninout = 0;
780 for (i = 0, tail = outputs; tail; tail = TREE_CHAIN (tail), i++)
781 {
782 tree val = TREE_VALUE (tail);
783 tree type = TREE_TYPE (val);
784 bool is_inout;
785 bool allows_reg;
786 bool allows_mem;
787 rtx op;
788 bool ok;
789
790 ok = parse_output_constraint (&constraints[i], i, ninputs,
791 noutputs, &allows_mem, &allows_reg,
792 &is_inout);
793 gcc_assert (ok);
794
795 /* If an output operand is not a decl or indirect ref and our constraint
796 allows a register, make a temporary to act as an intermediate.
797 Make the asm insn write into that, then our caller will copy it to
798 the real output operand. Likewise for promoted variables. */
799
800 generating_concat_p = 0;
801
802 real_output_rtx[i] = NULL_RTX;
803 if ((TREE_CODE (val) == INDIRECT_REF
804 && allows_mem)
805 || (DECL_P (val)
806 && (allows_mem || REG_P (DECL_RTL (val)))
807 && ! (REG_P (DECL_RTL (val))
808 && GET_MODE (DECL_RTL (val)) != TYPE_MODE (type)))
809 || ! allows_reg
810 || is_inout)
811 {
812 op = expand_expr (val, NULL_RTX, VOIDmode, EXPAND_WRITE);
813 if (MEM_P (op))
814 op = validize_mem (op);
815
816 if (! allows_reg && !MEM_P (op))
817 error ("output number %d not directly addressable", i);
818 if ((! allows_mem && MEM_P (op))
819 || GET_CODE (op) == CONCAT)
820 {
821 real_output_rtx[i] = op;
822 op = gen_reg_rtx (GET_MODE (op));
823 if (is_inout)
824 emit_move_insn (op, real_output_rtx[i]);
825 }
826 }
827 else
828 {
829 op = assign_temp (type, 0, 0, 1);
830 op = validize_mem (op);
831 if (!MEM_P (op) && TREE_CODE (TREE_VALUE (tail)) == SSA_NAME)
832 set_reg_attrs_for_decl_rtl (SSA_NAME_VAR (TREE_VALUE (tail)), op);
833 TREE_VALUE (tail) = make_tree (type, op);
834 }
835 output_rtx[i] = op;
836
837 generating_concat_p = old_generating_concat_p;
838
839 if (is_inout)
840 {
841 inout_mode[ninout] = TYPE_MODE (type);
842 inout_opnum[ninout++] = i;
843 }
844
845 if (tree_conflicts_with_clobbers_p (val, &clobbered_regs))
846 clobber_conflict_found = 1;
847 }
848
849 /* Make vectors for the expression-rtx, constraint strings,
850 and named operands. */
851
852 argvec = rtvec_alloc (ninputs);
853 constraintvec = rtvec_alloc (ninputs);
854 labelvec = rtvec_alloc (nlabels);
855
856 body = gen_rtx_ASM_OPERANDS ((noutputs == 0 ? VOIDmode
857 : GET_MODE (output_rtx[0])),
858 ggc_strdup (TREE_STRING_POINTER (string)),
859 empty_string, 0, argvec, constraintvec,
860 labelvec, locus);
861
862 MEM_VOLATILE_P (body) = vol;
863
864 /* Eval the inputs and put them into ARGVEC.
865 Put their constraints into ASM_INPUTs and store in CONSTRAINTS. */
866
867 for (i = 0, tail = inputs; tail; tail = TREE_CHAIN (tail), ++i)
868 {
869 bool allows_reg, allows_mem;
870 const char *constraint;
871 tree val, type;
872 rtx op;
873 bool ok;
874
875 constraint = constraints[i + noutputs];
876 ok = parse_input_constraint (&constraint, i, ninputs, noutputs, ninout,
877 constraints, &allows_mem, &allows_reg);
878 gcc_assert (ok);
879
880 generating_concat_p = 0;
881
882 val = TREE_VALUE (tail);
883 type = TREE_TYPE (val);
884 /* EXPAND_INITIALIZER will not generate code for valid initializer
885 constants, but will still generate code for other types of operand.
886 This is the behavior we want for constant constraints. */
887 op = expand_expr (val, NULL_RTX, VOIDmode,
888 allows_reg ? EXPAND_NORMAL
889 : allows_mem ? EXPAND_MEMORY
890 : EXPAND_INITIALIZER);
891
892 /* Never pass a CONCAT to an ASM. */
893 if (GET_CODE (op) == CONCAT)
894 op = force_reg (GET_MODE (op), op);
895 else if (MEM_P (op))
896 op = validize_mem (op);
897
898 if (asm_operand_ok (op, constraint, NULL) <= 0)
899 {
900 if (allows_reg && TYPE_MODE (type) != BLKmode)
901 op = force_reg (TYPE_MODE (type), op);
902 else if (!allows_mem)
903 warning (0, "asm operand %d probably doesn%'t match constraints",
904 i + noutputs);
905 else if (MEM_P (op))
906 {
907 /* We won't recognize either volatile memory or memory
908 with a queued address as available a memory_operand
909 at this point. Ignore it: clearly this *is* a memory. */
910 }
911 else
912 {
913 warning (0, "use of memory input without lvalue in "
914 "asm operand %d is deprecated", i + noutputs);
915
916 if (CONSTANT_P (op))
917 {
918 rtx mem = force_const_mem (TYPE_MODE (type), op);
919 if (mem)
920 op = validize_mem (mem);
921 else
922 op = force_reg (TYPE_MODE (type), op);
923 }
924 if (REG_P (op)
925 || GET_CODE (op) == SUBREG
926 || GET_CODE (op) == CONCAT)
927 {
928 tree qual_type = build_qualified_type (type,
929 (TYPE_QUALS (type)
930 | TYPE_QUAL_CONST));
931 rtx memloc = assign_temp (qual_type, 1, 1, 1);
932 memloc = validize_mem (memloc);
933 emit_move_insn (memloc, op);
934 op = memloc;
935 }
936 }
937 }
938
939 generating_concat_p = old_generating_concat_p;
940 ASM_OPERANDS_INPUT (body, i) = op;
941
942 ASM_OPERANDS_INPUT_CONSTRAINT_EXP (body, i)
943 = gen_rtx_ASM_INPUT (TYPE_MODE (type),
944 ggc_strdup (constraints[i + noutputs]));
945
946 if (tree_conflicts_with_clobbers_p (val, &clobbered_regs))
947 clobber_conflict_found = 1;
948 }
949
950 /* Protect all the operands from the queue now that they have all been
951 evaluated. */
952
953 generating_concat_p = 0;
954
955 /* For in-out operands, copy output rtx to input rtx. */
956 for (i = 0; i < ninout; i++)
957 {
958 int j = inout_opnum[i];
959 char buffer[16];
960
961 ASM_OPERANDS_INPUT (body, ninputs - ninout + i)
962 = output_rtx[j];
963
964 sprintf (buffer, "%d", j);
965 ASM_OPERANDS_INPUT_CONSTRAINT_EXP (body, ninputs - ninout + i)
966 = gen_rtx_ASM_INPUT (inout_mode[i], ggc_strdup (buffer));
967 }
968
969 /* Copy labels to the vector. */
970 for (i = 0, tail = labels; i < nlabels; ++i, tail = TREE_CHAIN (tail))
971 ASM_OPERANDS_LABEL (body, i)
972 = gen_rtx_LABEL_REF (Pmode, label_rtx (TREE_VALUE (tail)));
973
974 generating_concat_p = old_generating_concat_p;
975
976 /* Now, for each output, construct an rtx
977 (set OUTPUT (asm_operands INSN OUTPUTCONSTRAINT OUTPUTNUMBER
978 ARGVEC CONSTRAINTS OPNAMES))
979 If there is more than one, put them inside a PARALLEL. */
980
981 if (nlabels > 0 && nclobbers == 0)
982 {
983 gcc_assert (noutputs == 0);
984 emit_jump_insn (body);
985 }
986 else if (noutputs == 0 && nclobbers == 0)
987 {
988 /* No output operands: put in a raw ASM_OPERANDS rtx. */
989 emit_insn (body);
990 }
991 else if (noutputs == 1 && nclobbers == 0)
992 {
993 ASM_OPERANDS_OUTPUT_CONSTRAINT (body) = ggc_strdup (constraints[0]);
994 emit_insn (gen_rtx_SET (VOIDmode, output_rtx[0], body));
995 }
996 else
997 {
998 rtx obody = body;
999 int num = noutputs;
1000
1001 if (num == 0)
1002 num = 1;
1003
1004 body = gen_rtx_PARALLEL (VOIDmode, rtvec_alloc (num + nclobbers));
1005
1006 /* For each output operand, store a SET. */
1007 for (i = 0, tail = outputs; tail; tail = TREE_CHAIN (tail), i++)
1008 {
1009 XVECEXP (body, 0, i)
1010 = gen_rtx_SET (VOIDmode,
1011 output_rtx[i],
1012 gen_rtx_ASM_OPERANDS
1013 (GET_MODE (output_rtx[i]),
1014 ggc_strdup (TREE_STRING_POINTER (string)),
1015 ggc_strdup (constraints[i]),
1016 i, argvec, constraintvec, labelvec, locus));
1017
1018 MEM_VOLATILE_P (SET_SRC (XVECEXP (body, 0, i))) = vol;
1019 }
1020
1021 /* If there are no outputs (but there are some clobbers)
1022 store the bare ASM_OPERANDS into the PARALLEL. */
1023
1024 if (i == 0)
1025 XVECEXP (body, 0, i++) = obody;
1026
1027 /* Store (clobber REG) for each clobbered register specified. */
1028
1029 for (tail = clobbers; tail; tail = TREE_CHAIN (tail))
1030 {
1031 const char *regname = TREE_STRING_POINTER (TREE_VALUE (tail));
1032 int j = decode_reg_name (regname);
1033 rtx clobbered_reg;
1034
1035 if (j < 0)
1036 {
1037 if (j == -3) /* `cc', which is not a register */
1038 continue;
1039
1040 if (j == -4) /* `memory', don't cache memory across asm */
1041 {
1042 XVECEXP (body, 0, i++)
1043 = gen_rtx_CLOBBER (VOIDmode,
1044 gen_rtx_MEM
1045 (BLKmode,
1046 gen_rtx_SCRATCH (VOIDmode)));
1047 continue;
1048 }
1049
1050 /* Ignore unknown register, error already signaled. */
1051 continue;
1052 }
1053
1054 /* Use QImode since that's guaranteed to clobber just one reg. */
1055 clobbered_reg = gen_rtx_REG (QImode, j);
1056
1057 /* Do sanity check for overlap between clobbers and respectively
1058 input and outputs that hasn't been handled. Such overlap
1059 should have been detected and reported above. */
1060 if (!clobber_conflict_found)
1061 {
1062 int opno;
1063
1064 /* We test the old body (obody) contents to avoid tripping
1065 over the under-construction body. */
1066 for (opno = 0; opno < noutputs; opno++)
1067 if (reg_overlap_mentioned_p (clobbered_reg, output_rtx[opno]))
1068 internal_error ("asm clobber conflict with output operand");
1069
1070 for (opno = 0; opno < ninputs - ninout; opno++)
1071 if (reg_overlap_mentioned_p (clobbered_reg,
1072 ASM_OPERANDS_INPUT (obody, opno)))
1073 internal_error ("asm clobber conflict with input operand");
1074 }
1075
1076 XVECEXP (body, 0, i++)
1077 = gen_rtx_CLOBBER (VOIDmode, clobbered_reg);
1078 }
1079
1080 if (nlabels > 0)
1081 emit_jump_insn (body);
1082 else
1083 emit_insn (body);
1084 }
1085
1086 /* For any outputs that needed reloading into registers, spill them
1087 back to where they belong. */
1088 for (i = 0; i < noutputs; ++i)
1089 if (real_output_rtx[i])
1090 emit_move_insn (real_output_rtx[i], output_rtx[i]);
1091
1092 crtl->has_asm_statement = 1;
1093 free_temp_slots ();
1094 }
1095
/* Expand the GIMPLE_ASM statement STMT into RTL.

   The gimple statement stores its operands in flat vectors; this
   routine rebuilds the TREE_LIST chains the older expanders expect,
   dispatches a no-operand ("simple") asm directly, and otherwise
   calls expand_asm_operands, afterwards copying any replacement
   output locations back into the user-visible output expressions.  */

void
expand_asm_stmt (gimple stmt)
{
  int noutputs;
  tree outputs, tail, t;
  tree *o;
  size_t i, n;
  const char *s;
  tree str, out, in, cl, labels;
  location_t locus = gimple_location (stmt);

  /* Meh... convert the gimple asm operands into real tree lists.
     Eventually we should make all routines work on the vectors instead
     of relying on TREE_CHAIN.  */
  out = NULL_TREE;
  n = gimple_asm_noutputs (stmt);
  if (n > 0)
    {
      t = out = gimple_asm_output_op (stmt, 0);
      for (i = 1; i < n; i++)
	t = TREE_CHAIN (t) = gimple_asm_output_op (stmt, i);
    }

  in = NULL_TREE;
  n = gimple_asm_ninputs (stmt);
  if (n > 0)
    {
      t = in = gimple_asm_input_op (stmt, 0);
      for (i = 1; i < n; i++)
	t = TREE_CHAIN (t) = gimple_asm_input_op (stmt, i);
    }

  cl = NULL_TREE;
  n = gimple_asm_nclobbers (stmt);
  if (n > 0)
    {
      t = cl = gimple_asm_clobber_op (stmt, 0);
      for (i = 1; i < n; i++)
	t = TREE_CHAIN (t) = gimple_asm_clobber_op (stmt, i);
    }

  labels = NULL_TREE;
  n = gimple_asm_nlabels (stmt);
  if (n > 0)
    {
      t = labels = gimple_asm_label_op (stmt, 0);
      for (i = 1; i < n; i++)
	t = TREE_CHAIN (t) = gimple_asm_label_op (stmt, i);
    }

  s = gimple_asm_string (stmt);
  str = build_string (strlen (s), s);

  /* An asm with no operand list needs none of the bookkeeping below.  */
  if (gimple_asm_input_p (stmt))
    {
      expand_asm_loc (str, gimple_asm_volatile_p (stmt), locus);
      return;
    }

  outputs = out;
  noutputs = gimple_asm_noutputs (stmt);
  /* o[I] is the place that output number I should be written.  */
  o = (tree *) alloca (noutputs * sizeof (tree));

  /* Record the contents of OUTPUTS before it is modified.  */
  for (i = 0, tail = outputs; tail; tail = TREE_CHAIN (tail), i++)
    o[i] = TREE_VALUE (tail);

  /* Generate the ASM_OPERANDS insn; store into the TREE_VALUEs of
     OUTPUTS some trees for where the values were actually stored.  */
  expand_asm_operands (str, outputs, in, cl, labels,
		       gimple_asm_volatile_p (stmt), locus);

  /* Copy all the intermediate outputs into the specified outputs.  */
  for (i = 0, tail = outputs; tail; tail = TREE_CHAIN (tail), i++)
    {
      if (o[i] != TREE_VALUE (tail))
	{
	  expand_assignment (o[i], TREE_VALUE (tail), false);
	  free_temp_slots ();

	  /* Restore the original value so that it's correct the next
	     time we expand this function.  */
	  TREE_VALUE (tail) = o[i];
	}
    }
}
1183
1184 /* A subroutine of expand_asm_operands. Check that all operands have
1185 the same number of alternatives. Return true if so. */
1186
1187 static bool
1188 check_operand_nalternatives (tree outputs, tree inputs)
1189 {
1190 if (outputs || inputs)
1191 {
1192 tree tmp = TREE_PURPOSE (outputs ? outputs : inputs);
1193 int nalternatives
1194 = n_occurrences (',', TREE_STRING_POINTER (TREE_VALUE (tmp)));
1195 tree next = inputs;
1196
1197 if (nalternatives + 1 > MAX_RECOG_ALTERNATIVES)
1198 {
1199 error ("too many alternatives in %<asm%>");
1200 return false;
1201 }
1202
1203 tmp = outputs;
1204 while (tmp)
1205 {
1206 const char *constraint
1207 = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (tmp)));
1208
1209 if (n_occurrences (',', constraint) != nalternatives)
1210 {
1211 error ("operand constraints for %<asm%> differ "
1212 "in number of alternatives");
1213 return false;
1214 }
1215
1216 if (TREE_CHAIN (tmp))
1217 tmp = TREE_CHAIN (tmp);
1218 else
1219 tmp = next, next = 0;
1220 }
1221 }
1222
1223 return true;
1224 }
1225
1226 /* A subroutine of expand_asm_operands. Check that all operand names
1227 are unique. Return true if so. We rely on the fact that these names
1228 are identifiers, and so have been canonicalized by get_identifier,
1229 so all we need are pointer comparisons. */
1230
1231 static bool
1232 check_unique_operand_names (tree outputs, tree inputs, tree labels)
1233 {
1234 tree i, j;
1235
1236 for (i = outputs; i ; i = TREE_CHAIN (i))
1237 {
1238 tree i_name = TREE_PURPOSE (TREE_PURPOSE (i));
1239 if (! i_name)
1240 continue;
1241
1242 for (j = TREE_CHAIN (i); j ; j = TREE_CHAIN (j))
1243 if (simple_cst_equal (i_name, TREE_PURPOSE (TREE_PURPOSE (j))))
1244 goto failure;
1245 }
1246
1247 for (i = inputs; i ; i = TREE_CHAIN (i))
1248 {
1249 tree i_name = TREE_PURPOSE (TREE_PURPOSE (i));
1250 if (! i_name)
1251 continue;
1252
1253 for (j = TREE_CHAIN (i); j ; j = TREE_CHAIN (j))
1254 if (simple_cst_equal (i_name, TREE_PURPOSE (TREE_PURPOSE (j))))
1255 goto failure;
1256 for (j = outputs; j ; j = TREE_CHAIN (j))
1257 if (simple_cst_equal (i_name, TREE_PURPOSE (TREE_PURPOSE (j))))
1258 goto failure;
1259 }
1260
1261 for (i = labels; i ; i = TREE_CHAIN (i))
1262 {
1263 tree i_name = TREE_PURPOSE (i);
1264 if (! i_name)
1265 continue;
1266
1267 for (j = TREE_CHAIN (i); j ; j = TREE_CHAIN (j))
1268 if (simple_cst_equal (i_name, TREE_PURPOSE (j)))
1269 goto failure;
1270 for (j = inputs; j ; j = TREE_CHAIN (j))
1271 if (simple_cst_equal (i_name, TREE_PURPOSE (TREE_PURPOSE (j))))
1272 goto failure;
1273 }
1274
1275 return true;
1276
1277 failure:
1278 error ("duplicate asm operand name %qs",
1279 TREE_STRING_POINTER (TREE_PURPOSE (TREE_PURPOSE (i))));
1280 return false;
1281 }
1282
/* A subroutine of expand_asm_operands.  Resolve the names of the operands
   in *POUTPUTS and *PINPUTS to numbers, and replace the name expansions in
   STRING and in the constraints to those numbers.  Returns STRING, or a
   newly built string when the template needed substitutions.  */

tree
resolve_asm_operand_names (tree string, tree outputs, tree inputs, tree labels)
{
  char *buffer;
  char *p;
  const char *c;
  tree t;

  /* Diagnose duplicate operand names; errors are reported, then we
     carry on resolving whatever we can.  */
  check_unique_operand_names (outputs, inputs, labels);

  /* Substitute [<name>] in input constraint strings.  There should be no
     named operands in output constraints.  */
  for (t = inputs; t ; t = TREE_CHAIN (t))
    {
      c = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (t)));
      if (strchr (c, '[') != NULL)
	{
	  /* Rewrite every [<name>] in place in a writable copy, then
	     install the copy as the new constraint string.  */
	  p = buffer = xstrdup (c);
	  while ((p = strchr (p, '[')) != NULL)
	    p = resolve_operand_name_1 (p, outputs, inputs, NULL);
	  TREE_VALUE (TREE_PURPOSE (t))
	    = build_string (strlen (buffer), buffer);
	  free (buffer);
	}
    }

  /* Now check for any needed substitutions in the template: either
     "%[name]" or "%<letter>[name]".  */
  c = TREE_STRING_POINTER (string);
  while ((c = strchr (c, '%')) != NULL)
    {
      if (c[1] == '[')
	break;
      else if (ISALPHA (c[1]) && c[2] == '[')
	break;
      else
	{
	  /* Skip over '%'; "%%" consumes two characters.  */
	  c += 1 + (c[1] == '%');
	  continue;
	}
    }

  if (c)
    {
      /* OK, we need to make a copy so we can perform the substitutions.
	 Assume that we will not need extra space--we get to remove '['
	 and ']', which means we cannot have a problem until we have more
	 than 999 operands.  */
      buffer = xstrdup (TREE_STRING_POINTER (string));
      p = buffer + (c - TREE_STRING_POINTER (string));

      while ((p = strchr (p, '%')) != NULL)
	{
	  if (p[1] == '[')
	    p += 1;
	  else if (ISALPHA (p[1]) && p[2] == '[')
	    p += 2;
	  else
	    {
	      p += 1 + (p[1] == '%');
	      continue;
	    }

	  p = resolve_operand_name_1 (p, outputs, inputs, labels);
	}

      string = build_string (strlen (buffer), buffer);
      free (buffer);
    }

  return string;
}
1358
/* A subroutine of resolve_operand_names.  P points to the '[' for a
   potential named operand of the form [<name>].  In place, replace
   the name and brackets with a number.  Return a pointer to the
   balance of the string after substitution.  */

static char *
resolve_operand_name_1 (char *p, tree outputs, tree inputs, tree labels)
{
  char *q;
  int op;
  tree t;

  /* Collect the operand name.  */
  q = strchr (++p, ']');
  if (!q)
    {
      error ("missing close brace for named operand");
      return strchr (p, '\0');
    }
  /* Temporarily NUL-terminate the name so the strcmps below work; the
     terminator (and the rest of the brackets) is removed by the
     memmove at the end.  */
  *q = '\0';

  /* Resolve the name to a number.  Operands are numbered outputs
     first, then inputs, then labels, matching asm %N numbering.  */
  for (op = 0, t = outputs; t ; t = TREE_CHAIN (t), op++)
    {
      tree name = TREE_PURPOSE (TREE_PURPOSE (t));
      if (name && strcmp (TREE_STRING_POINTER (name), p) == 0)
	goto found;
    }
  for (t = inputs; t ; t = TREE_CHAIN (t), op++)
    {
      tree name = TREE_PURPOSE (TREE_PURPOSE (t));
      if (name && strcmp (TREE_STRING_POINTER (name), p) == 0)
	goto found;
    }
  for (t = labels; t ; t = TREE_CHAIN (t), op++)
    {
      tree name = TREE_PURPOSE (t);
      if (name && strcmp (TREE_STRING_POINTER (name), p) == 0)
	goto found;
    }

  /* Unknown name: report it and substitute operand 0 so expansion can
     continue after the error.  */
  error ("undefined named operand %qs", identifier_to_locale (p));
  op = 0;

 found:
  /* Replace the name with the number.  Unfortunately, not all libraries
     get the return value of sprintf correct, so search for the end of the
     generated string by hand.  */
  sprintf (--p, "%d", op);
  p = strchr (p, '\0');

  /* Verify the no extra buffer space assumption.  */
  gcc_assert (p <= q);

  /* Shift the rest of the buffer down to fill the gap.  */
  memmove (p, q + 1, strlen (q + 1) + 1);

  return p;
}
1418 \f
1419 /* Generate RTL to evaluate the expression EXP. */
1420
1421 void
1422 expand_expr_stmt (tree exp)
1423 {
1424 rtx value;
1425 tree type;
1426
1427 value = expand_expr (exp, const0_rtx, VOIDmode, EXPAND_NORMAL);
1428 type = TREE_TYPE (exp);
1429
1430 /* If all we do is reference a volatile value in memory,
1431 copy it to a register to be sure it is actually touched. */
1432 if (value && MEM_P (value) && TREE_THIS_VOLATILE (exp))
1433 {
1434 if (TYPE_MODE (type) == VOIDmode)
1435 ;
1436 else if (TYPE_MODE (type) != BLKmode)
1437 value = copy_to_reg (value);
1438 else
1439 {
1440 rtx lab = gen_label_rtx ();
1441
1442 /* Compare the value with itself to reference it. */
1443 emit_cmp_and_jump_insns (value, value, EQ,
1444 expand_normal (TYPE_SIZE (type)),
1445 BLKmode, 0, lab);
1446 emit_label (lab);
1447 }
1448 }
1449
1450 /* Free any temporaries used to evaluate this expression. */
1451 free_temp_slots ();
1452 }
1453
/* Warn if EXP contains any computations whose results are not used.
   Return 1 if a warning is printed; 0 otherwise.  LOCUS is the
   (potential) location of the expression.  */

int
warn_if_unused_value (const_tree exp, location_t locus)
{
 restart:
  /* Expressions already marked used, or explicitly silenced, never
     warn.  */
  if (TREE_USED (exp) || TREE_NO_WARNING (exp))
    return 0;

  /* Don't warn about void constructs.  This includes casting to void,
     void function calls, and statement expressions with a final cast
     to void.  */
  if (VOID_TYPE_P (TREE_TYPE (exp)))
    return 0;

  /* Prefer the expression's own location over the caller-supplied
     one when it has one.  */
  if (EXPR_HAS_LOCATION (exp))
    locus = EXPR_LOCATION (exp);

  switch (TREE_CODE (exp))
    {
    /* All of these have side effects of their own, so an unused value
       is not suspicious.  */
    case PREINCREMENT_EXPR:
    case POSTINCREMENT_EXPR:
    case PREDECREMENT_EXPR:
    case POSTDECREMENT_EXPR:
    case MODIFY_EXPR:
    case INIT_EXPR:
    case TARGET_EXPR:
    case CALL_EXPR:
    case TRY_CATCH_EXPR:
    case WITH_CLEANUP_EXPR:
    case EXIT_EXPR:
    case VA_ARG_EXPR:
      return 0;

    case BIND_EXPR:
      /* For a binding, warn if no side effect within it.  */
      exp = BIND_EXPR_BODY (exp);
      goto restart;

    case SAVE_EXPR:
    case NON_LVALUE_EXPR:
      /* Look through wrappers that do not change the value.  */
      exp = TREE_OPERAND (exp, 0);
      goto restart;

    case TRUTH_ORIF_EXPR:
    case TRUTH_ANDIF_EXPR:
      /* In && or ||, warn if 2nd operand has no side effect.  */
      exp = TREE_OPERAND (exp, 1);
      goto restart;

    case COMPOUND_EXPR:
      if (warn_if_unused_value (TREE_OPERAND (exp, 0), locus))
	return 1;
      /* Let people do `(foo (), 0)' without a warning.  */
      if (TREE_CONSTANT (TREE_OPERAND (exp, 1)))
	return 0;
      exp = TREE_OPERAND (exp, 1);
      goto restart;

    case COND_EXPR:
      /* If this is an expression with side effects, don't warn; this
	 case commonly appears in macro expansions.  */
      if (TREE_SIDE_EFFECTS (exp))
	return 0;
      goto warn;

    case INDIRECT_REF:
      /* Don't warn about automatic dereferencing of references, since
	 the user cannot control it.  */
      if (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == REFERENCE_TYPE)
	{
	  exp = TREE_OPERAND (exp, 0);
	  goto restart;
	}
      /* Fall through.  */

    default:
      /* Referencing a volatile value is a side effect, so don't warn.  */
      if ((DECL_P (exp) || REFERENCE_CLASS_P (exp))
	  && TREE_THIS_VOLATILE (exp))
	return 0;

      /* If this is an expression which has no operands, there is no value
	 to be unused.  There are no such language-independent codes,
	 but front ends may define such.  */
      if (EXPRESSION_CLASS_P (exp) && TREE_OPERAND_LENGTH (exp) == 0)
	return 0;

    warn:
      warning_at (locus, OPT_Wunused_value, "value computed is not used");
      return 1;
    }
}
1549
1550 \f
/* Generate RTL to return from the current function, with no value.
   (That is, we do not do anything about returning any value.)
   The actual stack adjustment and jump to return_label are emitted
   by expand_null_return_1, which is shared with expand_value_return.  */

void
expand_null_return (void)
{
  /* If this function was declared to return a value, but we
     didn't, clobber the return registers so that they are not
     propagated live to the rest of the function.  */
  clobber_return_register ();

  expand_null_return_1 ();
}
1564
1565 /* Generate RTL to return directly from the current function.
1566 (That is, we bypass any return value.) */
1567
1568 void
1569 expand_naked_return (void)
1570 {
1571 rtx end_label;
1572
1573 clear_pending_stack_adjust ();
1574 do_pending_stack_adjust ();
1575
1576 end_label = naked_return_label;
1577 if (end_label == 0)
1578 end_label = naked_return_label = gen_label_rtx ();
1579
1580 emit_jump (end_label);
1581 }
1582
1583 /* Generate RTL to return from the current function, with value VAL. */
1584
1585 static void
1586 expand_value_return (rtx val)
1587 {
1588 /* Copy the value to the return location unless it's already there. */
1589
1590 tree decl = DECL_RESULT (current_function_decl);
1591 rtx return_reg = DECL_RTL (decl);
1592 if (return_reg != val)
1593 {
1594 tree funtype = TREE_TYPE (current_function_decl);
1595 tree type = TREE_TYPE (decl);
1596 int unsignedp = TYPE_UNSIGNED (type);
1597 enum machine_mode old_mode = DECL_MODE (decl);
1598 enum machine_mode mode;
1599 if (DECL_BY_REFERENCE (decl))
1600 mode = promote_function_mode (type, old_mode, &unsignedp, funtype, 2);
1601 else
1602 mode = promote_function_mode (type, old_mode, &unsignedp, funtype, 1);
1603
1604 if (mode != old_mode)
1605 val = convert_modes (mode, old_mode, val, unsignedp);
1606
1607 if (GET_CODE (return_reg) == PARALLEL)
1608 emit_group_load (return_reg, val, type, int_size_in_bytes (type));
1609 else
1610 emit_move_insn (return_reg, val);
1611 }
1612
1613 expand_null_return_1 ();
1614 }
1615
/* Output a return with no value.  Flushes any pending stack
   adjustment and jumps to the function's common return_label; this is
   the shared tail of expand_null_return and expand_value_return.  */

static void
expand_null_return_1 (void)
{
  clear_pending_stack_adjust ();
  do_pending_stack_adjust ();
  emit_jump (return_label);
}
1625 \f
/* Generate RTL to evaluate the expression RETVAL and return it
   from the current function.  */

void
expand_return (tree retval)
{
  rtx result_rtl;
  rtx val = 0;
  tree retval_rhs;

  /* If function wants no value, give it none.  */
  if (TREE_CODE (TREE_TYPE (TREE_TYPE (current_function_decl))) == VOID_TYPE)
    {
      /* Still evaluate RETVAL for its side effects.  */
      expand_normal (retval);
      expand_null_return ();
      return;
    }

  if (retval == error_mark_node)
    {
      /* Treat this like a return of no value from a function that
	 returns a value.  */
      expand_null_return ();
      return;
    }
  else if ((TREE_CODE (retval) == MODIFY_EXPR
	    || TREE_CODE (retval) == INIT_EXPR)
	   && TREE_CODE (TREE_OPERAND (retval, 0)) == RESULT_DECL)
    /* Strip a "RESULT_DECL = rhs" wrapper; the rhs is what must be
       computed.  */
    retval_rhs = TREE_OPERAND (retval, 1);
  else
    retval_rhs = retval;

  result_rtl = DECL_RTL (DECL_RESULT (current_function_decl));

  /* If we are returning the RESULT_DECL, then the value has already
     been stored into it, so we don't have to do anything special.  */
  if (TREE_CODE (retval_rhs) == RESULT_DECL)
    expand_value_return (result_rtl);

  /* If the result is an aggregate that is being returned in one (or more)
     registers, load the registers here.  The compiler currently can't handle
     copying a BLKmode value into registers.  We could put this code in a
     more general area (for use by everyone instead of just function
     call/return), but until this feature is generally usable it is kept here
     (and in expand_call).  */

  else if (retval_rhs != 0
	   && TYPE_MODE (TREE_TYPE (retval_rhs)) == BLKmode
	   && REG_P (result_rtl))
    {
      int i;
      unsigned HOST_WIDE_INT bitpos, xbitpos;
      unsigned HOST_WIDE_INT padding_correction = 0;
      unsigned HOST_WIDE_INT bytes
	= int_size_in_bytes (TREE_TYPE (retval_rhs));
      int n_regs = (bytes + UNITS_PER_WORD - 1) / UNITS_PER_WORD;
      unsigned int bitsize
	= MIN (TYPE_ALIGN (TREE_TYPE (retval_rhs)), BITS_PER_WORD);
      rtx *result_pseudos = XALLOCAVEC (rtx, n_regs);
      rtx result_reg, src = NULL_RTX, dst = NULL_RTX;
      rtx result_val = expand_normal (retval_rhs);
      enum machine_mode tmpmode, result_reg_mode;

      /* An empty aggregate degenerates to a plain no-value return.  */
      if (bytes == 0)
	{
	  expand_null_return ();
	  return;
	}

      /* If the structure doesn't take up a whole number of words, see
	 whether the register value should be padded on the left or on
	 the right.  Set PADDING_CORRECTION to the number of padding
	 bits needed on the left side.

	 In most ABIs, the structure will be returned at the least end of
	 the register, which translates to right padding on little-endian
	 targets and left padding on big-endian targets.  The opposite
	 holds if the structure is returned at the most significant
	 end of the register.  */
      if (bytes % UNITS_PER_WORD != 0
	  && (targetm.calls.return_in_msb (TREE_TYPE (retval_rhs))
	      ? !BYTES_BIG_ENDIAN
	      : BYTES_BIG_ENDIAN))
	padding_correction = (BITS_PER_WORD - ((bytes % UNITS_PER_WORD)
					       * BITS_PER_UNIT));

      /* Copy the structure BITSIZE bits at a time.  */
      for (bitpos = 0, xbitpos = padding_correction;
	   bitpos < bytes * BITS_PER_UNIT;
	   bitpos += bitsize, xbitpos += bitsize)
	{
	  /* We need a new destination pseudo each time xbitpos is
	     on a word boundary and when xbitpos == padding_correction
	     (the first time through).  */
	  if (xbitpos % BITS_PER_WORD == 0
	      || xbitpos == padding_correction)
	    {
	      /* Generate an appropriate register.  */
	      dst = gen_reg_rtx (word_mode);
	      result_pseudos[xbitpos / BITS_PER_WORD] = dst;

	      /* Clear the destination before we move anything into it.  */
	      emit_move_insn (dst, CONST0_RTX (GET_MODE (dst)));
	    }

	  /* We need a new source operand each time bitpos is on a word
	     boundary.  */
	  if (bitpos % BITS_PER_WORD == 0)
	    src = operand_subword_force (result_val,
					 bitpos / BITS_PER_WORD,
					 BLKmode);

	  /* Use bitpos for the source extraction (left justified) and
	     xbitpos for the destination store (right justified).  */
	  store_bit_field (dst, bitsize, xbitpos % BITS_PER_WORD, word_mode,
			   extract_bit_field (src, bitsize,
					      bitpos % BITS_PER_WORD, 1,
					      NULL_RTX, word_mode, word_mode));
	}

      tmpmode = GET_MODE (result_rtl);
      if (tmpmode == BLKmode)
	{
	  /* Find the smallest integer mode large enough to hold the
	     entire structure and use that mode instead of BLKmode
	     on the USE insn for the return register.  */
	  for (tmpmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
	       tmpmode != VOIDmode;
	       tmpmode = GET_MODE_WIDER_MODE (tmpmode))
	    /* Have we found a large enough mode?  */
	    if (GET_MODE_SIZE (tmpmode) >= bytes)
	      break;

	  /* A suitable mode should have been found.  */
	  gcc_assert (tmpmode != VOIDmode);

	  PUT_MODE (result_rtl, tmpmode);
	}

      if (GET_MODE_SIZE (tmpmode) < GET_MODE_SIZE (word_mode))
	result_reg_mode = word_mode;
      else
	result_reg_mode = tmpmode;
      result_reg = gen_reg_rtx (result_reg_mode);

      /* Assemble the word-sized pseudos into the final return value.  */
      for (i = 0; i < n_regs; i++)
	emit_move_insn (operand_subword (result_reg, i, 0, result_reg_mode),
			result_pseudos[i]);

      if (tmpmode != result_reg_mode)
	result_reg = gen_lowpart (tmpmode, result_reg);

      expand_value_return (result_reg);
    }
  else if (retval_rhs != 0
	   && !VOID_TYPE_P (TREE_TYPE (retval_rhs))
	   && (REG_P (result_rtl)
	       || (GET_CODE (result_rtl) == PARALLEL)))
    {
      /* Calculate the return value into a temporary (usually a pseudo
	 reg).  */
      tree ot = TREE_TYPE (DECL_RESULT (current_function_decl));
      tree nt = build_qualified_type (ot, TYPE_QUALS (ot) | TYPE_QUAL_CONST);

      val = assign_temp (nt, 0, 0, 1);
      val = expand_expr (retval_rhs, val, GET_MODE (val), EXPAND_NORMAL);
      val = force_not_mem (val);
      /* Return the calculated value.  */
      expand_value_return (val);
    }
  else
    {
      /* No hard reg used; calculate value into hard return reg.  */
      expand_expr (retval, const0_rtx, VOIDmode, EXPAND_NORMAL);
      expand_value_return (result_rtl);
    }
}
1803 \f
/* Emit code to restore vital registers at the beginning of a nonlocal goto
   handler.  */
static void
expand_nl_goto_receiver (void)
{
  rtx chain;

  /* Clobber the FP when we get here, so we have to make sure it's
     marked as used by this function.  */
  emit_use (hard_frame_pointer_rtx);

  /* Mark the static chain as clobbered here so life information
     doesn't get messed up for it.  */
  chain = targetm.calls.static_chain (current_function_decl, true);
  if (chain && REG_P (chain))
    emit_clobber (chain);

#ifdef HAVE_nonlocal_goto
  if (! HAVE_nonlocal_goto)
#endif
    /* First adjust our frame pointer to its actual value.  It was
       previously set to the start of the virtual area corresponding to
       the stacked variables when we branched here and now needs to be
       adjusted to the actual hardware fp value.

       Assignments to virtual registers are converted by
       instantiate_virtual_regs into the corresponding assignment
       to the underlying register (fp in this case) that makes
       the original assignment true.
       So the following insn will actually be
       decrementing fp by STARTING_FRAME_OFFSET.  */
    emit_move_insn (virtual_stack_vars_rtx, hard_frame_pointer_rtx);

#if ARG_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
  if (fixed_regs[ARG_POINTER_REGNUM])
    {
#ifdef ELIMINABLE_REGS
      /* If the argument pointer can be eliminated in favor of the
	 frame pointer, we don't need to restore it.  We assume here
	 that if such an elimination is present, it can always be used.
	 This is the case on all known machines; if we don't make this
	 assumption, we do unnecessary saving on many machines.  */
      static const struct elims {const int from, to;} elim_regs[] = ELIMINABLE_REGS;
      size_t i;

      for (i = 0; i < ARRAY_SIZE (elim_regs); i++)
	if (elim_regs[i].from == ARG_POINTER_REGNUM
	    && elim_regs[i].to == HARD_FRAME_POINTER_REGNUM)
	  break;

      if (i == ARRAY_SIZE (elim_regs))
#endif
	{
	  /* Now restore our arg pointer from the address at which it
	     was saved in our stack frame.  */
	  emit_move_insn (crtl->args.internal_arg_pointer,
			  copy_to_reg (get_arg_pointer_save_area ()));
	}
    }
#endif

#ifdef HAVE_nonlocal_goto_receiver
  /* Let the target emit any additional receiver setup it defines.  */
  if (HAVE_nonlocal_goto_receiver)
    emit_insn (gen_nonlocal_goto_receiver ());
#endif

  /* We must not allow the code we just generated to be reordered by
     scheduling.  Specifically, the update of the frame pointer must
     happen immediately, not later.  */
  emit_insn (gen_blockage ());
}
1875 \f
/* Generate RTL for the automatic variable declaration DECL.
   (Other kinds of declarations are simply ignored if seen here.)  */

void
expand_decl (tree decl)
{
  tree type;

  type = TREE_TYPE (decl);

  /* For a CONST_DECL, set mode, alignment, and sizes from those of the
     type in case this node is used in a reference.  */
  if (TREE_CODE (decl) == CONST_DECL)
    {
      DECL_MODE (decl) = TYPE_MODE (type);
      DECL_ALIGN (decl) = TYPE_ALIGN (type);
      DECL_SIZE (decl) = TYPE_SIZE (type);
      DECL_SIZE_UNIT (decl) = TYPE_SIZE_UNIT (type);
      return;
    }

  /* Otherwise, only automatic variables need any expansion done.  Static and
     external variables, and external functions, will be handled by
     `assemble_variable' (called from finish_decl).  TYPE_DECL requires
     nothing.  PARM_DECLs are handled in `assign_parms'.  */
  if (TREE_CODE (decl) != VAR_DECL)
    return;

  if (TREE_STATIC (decl) || DECL_EXTERNAL (decl))
    return;

  /* Create the RTL representation for the variable.  */

  if (type == error_mark_node)
    /* Error recovery: give the variable a harmless (mem (const0)).  */
    SET_DECL_RTL (decl, gen_rtx_MEM (BLKmode, const0_rtx));

  else if (DECL_SIZE (decl) == 0)
    {
      /* Variable with incomplete type.  */
      rtx x;
      if (DECL_INITIAL (decl) == 0)
	/* Error message was already done; now avoid a crash.  */
	x = gen_rtx_MEM (BLKmode, const0_rtx);
      else
	/* An initializer is going to decide the size of this array.
	   Until we know the size, represent its address with a reg.  */
	x = gen_rtx_MEM (BLKmode, gen_reg_rtx (Pmode));

      set_mem_attributes (x, decl, 1);
      SET_DECL_RTL (decl, x);
    }
  else if (use_register_for_decl (decl))
    {
      /* Automatic variable that can go in a register.  */
      enum machine_mode reg_mode = promote_decl_mode (decl, NULL);

      SET_DECL_RTL (decl, gen_reg_rtx (reg_mode));

      /* Note if the object is a user variable.  */
      if (!DECL_ARTIFICIAL (decl))
	mark_user_reg (DECL_RTL (decl));

      /* Record the alignment of the pointed-to type so later passes
	 can exploit it.  */
      if (POINTER_TYPE_P (type))
	mark_reg_pointer (DECL_RTL (decl),
			  TYPE_ALIGN (TREE_TYPE (TREE_TYPE (decl))));
    }

  else
    {
      rtx oldaddr = 0;
      rtx addr;
      rtx x;

      /* Variable-sized decls are dealt with in the gimplifier.  */
      gcc_assert (TREE_CODE (DECL_SIZE_UNIT (decl)) == INTEGER_CST);

      /* If we previously made RTL for this decl, it must be an array
	 whose size was determined by the initializer.
	 The old address was a register; set that register now
	 to the proper address.  */
      if (DECL_RTL_SET_P (decl))
	{
	  gcc_assert (MEM_P (DECL_RTL (decl)));
	  gcc_assert (REG_P (XEXP (DECL_RTL (decl), 0)));
	  oldaddr = XEXP (DECL_RTL (decl), 0);
	}

      /* Set alignment we actually gave this decl.  */
      DECL_ALIGN (decl) = (DECL_MODE (decl) == BLKmode ? BIGGEST_ALIGNMENT
			   : GET_MODE_BITSIZE (DECL_MODE (decl)));
      DECL_USER_ALIGN (decl) = 0;

      /* Allocate a stack slot and attach the decl's memory attributes.  */
      x = assign_temp (decl, 1, 1, 1);
      set_mem_attributes (x, decl, 1);
      SET_DECL_RTL (decl, x);

      if (oldaddr)
	{
	  /* Point the previously-issued address register at the new
	     slot.  */
	  addr = force_operand (XEXP (DECL_RTL (decl), 0), oldaddr);
	  if (addr != oldaddr)
	    emit_move_insn (oldaddr, addr);
	}
    }
}
1980 \f
1981 /* Emit code to save the current value of stack. */
1982 rtx
1983 expand_stack_save (void)
1984 {
1985 rtx ret = NULL_RTX;
1986
1987 do_pending_stack_adjust ();
1988 emit_stack_save (SAVE_BLOCK, &ret, NULL_RTX);
1989 return ret;
1990 }
1991
1992 /* Emit code to restore the current value of stack. */
1993 void
1994 expand_stack_restore (tree var)
1995 {
1996 rtx sa = expand_normal (var);
1997
1998 sa = convert_memory_address (Pmode, sa);
1999 emit_stack_restore (SAVE_BLOCK, sa, NULL_RTX);
2000 }
2001 \f
/* Do the insertion of a case label into case_list.  The labels are
   fed to us in descending order from the sorted vector of case labels used
   in the tree part of the middle end.  So the list we construct is
   sorted in ascending order.  The bounds on the case range, LOW and HIGH,
   are converted to case's index type TYPE.  Returns the new list head
   (HEAD itself when the case is unreachable and therefore dropped).  */

static struct case_node *
add_case_node (struct case_node *head, tree type, tree low, tree high,
	       tree label, alloc_pool case_node_pool)
{
  tree min_value, max_value;
  struct case_node *r;

  gcc_assert (TREE_CODE (low) == INTEGER_CST);
  gcc_assert (!high || TREE_CODE (high) == INTEGER_CST);

  min_value = TYPE_MIN_VALUE (type);
  max_value = TYPE_MAX_VALUE (type);

  /* If there's no HIGH value, then this is not a case range; it's
     just a simple case label.  But that's just a degenerate case
     range.
     If the bounds are equal, turn this into the one-value case.  */
  if (!high || tree_int_cst_equal (low, high))
    {
      /* If the simple case value is unreachable, ignore it.  */
      if ((TREE_CODE (min_value) == INTEGER_CST
	   && tree_int_cst_compare (low, min_value) < 0)
	  || (TREE_CODE (max_value) == INTEGER_CST
	      && tree_int_cst_compare (low, max_value) > 0))
	return head;
      low = fold_convert (type, low);
      high = low;
    }
  else
    {
      /* If the entire case range is unreachable, ignore it.  */
      if ((TREE_CODE (min_value) == INTEGER_CST
	   && tree_int_cst_compare (high, min_value) < 0)
	  || (TREE_CODE (max_value) == INTEGER_CST
	      && tree_int_cst_compare (low, max_value) > 0))
	return head;

      /* If the lower bound is less than the index type's minimum
	 value, truncate the range bounds.  */
      if (TREE_CODE (min_value) == INTEGER_CST
	  && tree_int_cst_compare (low, min_value) < 0)
	low = min_value;
      low = fold_convert (type, low);

      /* If the upper bound is greater than the index type's maximum
	 value, truncate the range bounds.  */
      if (TREE_CODE (max_value) == INTEGER_CST
	  && tree_int_cst_compare (high, max_value) > 0)
	high = max_value;
      high = fold_convert (type, high);
    }


  /* Add this label to the chain.  Make sure to drop overflow flags.  */
  r = (struct case_node *) pool_alloc (case_node_pool);
  r->low = build_int_cst_wide (TREE_TYPE (low), TREE_INT_CST_LOW (low),
			       TREE_INT_CST_HIGH (low));
  r->high = build_int_cst_wide (TREE_TYPE (high), TREE_INT_CST_LOW (high),
				TREE_INT_CST_HIGH (high));
  r->code_label = label;
  r->parent = r->left = NULL;
  r->right = head;
  return r;
}
2072 \f
/* Maximum number of case bit tests.  */
#define MAX_CASE_BIT_TESTS  3

/* By default, enable case bit tests on targets with ashlsi3.  */
#ifndef CASE_USE_BIT_TESTS
#define CASE_USE_BIT_TESTS  (optab_handler (ashl_optab, word_mode) \
			     != CODE_FOR_nothing)
#endif


/* A case_bit_test represents a set of case nodes that may be
   selected from using a bit-wise comparison.  HI and LO hold
   the integer to be tested against, LABEL contains the label
   to jump to upon success and BITS counts the number of case
   nodes handled by this test, typically the number of bits
   set in HI:LO.  */

struct case_bit_test
{
  HOST_WIDE_INT hi;	/* High half of the HI:LO constant bit mask.  */
  HOST_WIDE_INT lo;	/* Low half of the constant bit mask.  */
  rtx label;		/* Target to jump to when a mask bit matches.  */
  int bits;		/* Number of case nodes covered by this test.  */
};
2097
2098 /* Determine whether "1 << x" is relatively cheap in word_mode. */
2099
2100 static
2101 bool lshift_cheap_p (void)
2102 {
2103 static bool init = false;
2104 static bool cheap = true;
2105
2106 if (!init)
2107 {
2108 rtx reg = gen_rtx_REG (word_mode, 10000);
2109 int cost = rtx_cost (gen_rtx_ASHIFT (word_mode, const1_rtx, reg), SET,
2110 optimize_insn_for_speed_p ());
2111 cheap = cost < COSTS_N_INSNS (3);
2112 init = true;
2113 }
2114
2115 return cheap;
2116 }
2117
2118 /* Comparison function for qsort to order bit tests by decreasing
2119 number of case nodes, i.e. the node with the most cases gets
2120 tested first. */
2121
2122 static int
2123 case_bit_test_cmp (const void *p1, const void *p2)
2124 {
2125 const struct case_bit_test *const d1 = (const struct case_bit_test *) p1;
2126 const struct case_bit_test *const d2 = (const struct case_bit_test *) p2;
2127
2128 if (d2->bits != d1->bits)
2129 return d2->bits - d1->bits;
2130
2131 /* Stabilize the sort. */
2132 return CODE_LABEL_NUMBER (d2->label) - CODE_LABEL_NUMBER (d1->label);
2133 }
2134
2135 /* Expand a switch statement by a short sequence of bit-wise
2136 comparisons. "switch(x)" is effectively converted into
2137 "if ((1 << (x-MINVAL)) & CST)" where CST and MINVAL are
2138 integer constants.
2139
2140 INDEX_EXPR is the value being switched on, which is of
2141 type INDEX_TYPE. MINVAL is the lowest case value of in
2142 the case nodes, of INDEX_TYPE type, and RANGE is highest
2143 value minus MINVAL, also of type INDEX_TYPE. NODES is
2144 the set of case nodes, and DEFAULT_LABEL is the label to
2145 branch to should none of the cases match.
2146
2147 There *MUST* be MAX_CASE_BIT_TESTS or less unique case
2148 node targets. */
2149
2150 static void
2151 emit_case_bit_tests (tree index_type, tree index_expr, tree minval,
2152 tree range, case_node_ptr nodes, rtx default_label)
2153 {
2154 struct case_bit_test test[MAX_CASE_BIT_TESTS];
2155 enum machine_mode mode;
2156 rtx expr, index, label;
2157 unsigned int i,j,lo,hi;
2158 struct case_node *n;
2159 unsigned int count;
2160
2161 count = 0;
2162 for (n = nodes; n; n = n->right)
2163 {
2164 label = label_rtx (n->code_label);
2165 for (i = 0; i < count; i++)
2166 if (label == test[i].label)
2167 break;
2168
2169 if (i == count)
2170 {
2171 gcc_assert (count < MAX_CASE_BIT_TESTS);
2172 test[i].hi = 0;
2173 test[i].lo = 0;
2174 test[i].label = label;
2175 test[i].bits = 1;
2176 count++;
2177 }
2178 else
2179 test[i].bits++;
2180
2181 lo = tree_low_cst (fold_build2 (MINUS_EXPR, index_type,
2182 n->low, minval), 1);
2183 hi = tree_low_cst (fold_build2 (MINUS_EXPR, index_type,
2184 n->high, minval), 1);
2185 for (j = lo; j <= hi; j++)
2186 if (j >= HOST_BITS_PER_WIDE_INT)
2187 test[i].hi |= (HOST_WIDE_INT) 1 << (j - HOST_BITS_PER_INT);
2188 else
2189 test[i].lo |= (HOST_WIDE_INT) 1 << j;
2190 }
2191
2192 qsort (test, count, sizeof(*test), case_bit_test_cmp);
2193
2194 index_expr = fold_build2 (MINUS_EXPR, index_type,
2195 fold_convert (index_type, index_expr),
2196 fold_convert (index_type, minval));
2197 index = expand_normal (index_expr);
2198 do_pending_stack_adjust ();
2199
2200 mode = TYPE_MODE (index_type);
2201 expr = expand_normal (range);
2202 if (default_label)
2203 emit_cmp_and_jump_insns (index, expr, GTU, NULL_RTX, mode, 1,
2204 default_label);
2205
2206 index = convert_to_mode (word_mode, index, 0);
2207 index = expand_binop (word_mode, ashl_optab, const1_rtx,
2208 index, NULL_RTX, 1, OPTAB_WIDEN);
2209
2210 for (i = 0; i < count; i++)
2211 {
2212 expr = immed_double_const (test[i].lo, test[i].hi, word_mode);
2213 expr = expand_binop (word_mode, and_optab, index, expr,
2214 NULL_RTX, 1, OPTAB_WIDEN);
2215 emit_cmp_and_jump_insns (expr, const0_rtx, NE, NULL_RTX,
2216 word_mode, 1, test[i].label);
2217 }
2218
2219 if (default_label)
2220 emit_jump (default_label);
2221 }
2222
/* Provide zero fallbacks so the dispatch-strategy checks below compile
   on targets that define neither instruction pattern; a zero value
   simply disables that strategy.  */

#ifndef HAVE_casesi
#define HAVE_casesi 0
#endif

#ifndef HAVE_tablejump
#define HAVE_tablejump 0
#endif
2230
/* Terminate a case (Pascal/Ada) or switch (C) statement.  STMT is the
   GIMPLE_SWITCH being expanded; its index expression is the value
   being tested and its labels (optionally preceded by the default
   label) supply the case values.  Generate the code to test the index
   and jump to the right place, choosing between a sequence of bit
   tests, a binary decision tree, and a jump table.

   (The reference to ORIG_INDEX/ORIG_TYPE in the historical comment no
   longer matches the signature; everything is taken from STMT.)  */

void
expand_case (gimple stmt)
{
  tree minval = NULL_TREE, maxval = NULL_TREE, range = NULL_TREE;
  rtx default_label = 0;
  struct case_node *n;
  unsigned int count, uniq;
  rtx index;
  rtx table_label;
  int ncases;
  rtx *labelvec;
  int i;
  rtx before_case, end, lab;

  tree index_expr = gimple_switch_index (stmt);
  tree index_type = TREE_TYPE (index_expr);
  int unsignedp = TYPE_UNSIGNED (index_type);

  /* The insn after which the case dispatch should finally
     be emitted.  Zero for a dummy.  */
  rtx start;

  /* A list of case labels; it is first built as a list and it may then
     be rearranged into a nearly balanced binary tree.  */
  struct case_node *case_list = 0;

  /* Label to jump to if no case matches.  */
  tree default_label_decl = NULL_TREE;

  /* Pool from which all case_node entries below are allocated; freed
     wholesale on every exit path.  */
  alloc_pool case_node_pool = create_alloc_pool ("struct case_node pool",
						 sizeof (struct case_node),
						 100);

  do_pending_stack_adjust ();

  /* An ERROR_MARK occurs for various reasons including invalid data type.  */
  if (index_type != error_mark_node)
    {
      tree elt;
      bitmap label_bitmap;
      int stopi = 0;

      /* cleanup_tree_cfg removes all SWITCH_EXPR with their index
	 expressions being INTEGER_CST.  */
      gcc_assert (TREE_CODE (index_expr) != INTEGER_CST);

      /* The default case, if ever taken, is the first element.  A
	 label with neither CASE_LOW nor CASE_HIGH is the default.  */
      elt = gimple_switch_label (stmt, 0);
      if (!CASE_LOW (elt) && !CASE_HIGH (elt))
	{
	  default_label_decl = CASE_LABEL (elt);
	  stopi = 1;
	}

      /* Walk the labels from last to first so add_case_node builds an
	 ascending list.  */
      for (i = gimple_switch_num_labels (stmt) - 1; i >= stopi; --i)
	{
	  tree low, high;
	  elt = gimple_switch_label (stmt, i);

	  low = CASE_LOW (elt);
	  gcc_assert (low);
	  high = CASE_HIGH (elt);

	  /* Discard empty ranges.  */
	  if (high && tree_int_cst_lt (high, low))
	    continue;

	  case_list = add_case_node (case_list, index_type, low, high,
				     CASE_LABEL (elt), case_node_pool);
	}


      before_case = start = get_last_insn ();
      if (default_label_decl)
	default_label = label_rtx (default_label_decl);

      /* Get upper and lower bounds of case values.  */

      uniq = 0;
      count = 0;
      label_bitmap = BITMAP_ALLOC (NULL);
      for (n = case_list; n; n = n->right)
	{
	  /* Count the elements and track the largest and smallest
	     of them (treating them as signed even if they are not).  */
	  if (count++ == 0)
	    {
	      minval = n->low;
	      maxval = n->high;
	    }
	  else
	    {
	      if (tree_int_cst_lt (n->low, minval))
		minval = n->low;
	      if (tree_int_cst_lt (maxval, n->high))
		maxval = n->high;
	    }
	  /* A range counts double, since it requires two compares.  */
	  if (! tree_int_cst_equal (n->low, n->high))
	    count++;

	  /* If we have not seen this label yet, then increase the
	     number of unique case node targets seen.  The bitmap is
	     keyed by CODE_LABEL number; bitmap_set_bit returns true
	     only on first insertion.  */
	  lab = label_rtx (n->code_label);
	  if (bitmap_set_bit (label_bitmap, CODE_LABEL_NUMBER (lab)))
	    uniq++;
	}

      BITMAP_FREE (label_bitmap);

      /* cleanup_tree_cfg removes all SWITCH_EXPR with a single
	 destination, such as one with a default case only.  However,
	 it doesn't remove cases that are out of range for the switch
	 type, so we may still get a zero here.  */
      if (count == 0)
	{
	  if (default_label)
	    emit_jump (default_label);
	  free_alloc_pool (case_node_pool);
	  return;
	}

      /* Compute span of values.  */
      range = fold_build2 (MINUS_EXPR, index_type, maxval, minval);

      /* Try implementing this switch statement by a short sequence of
	 bit-wise comparisons.  However, we let the binary-tree case
	 below handle constant index expressions.  The uniq/count
	 thresholds bound the number of masks (at most
	 MAX_CASE_BIT_TESTS) against the comparisons saved.  */
      if (CASE_USE_BIT_TESTS
	  && ! TREE_CONSTANT (index_expr)
	  && compare_tree_int (range, GET_MODE_BITSIZE (word_mode)) < 0
	  && compare_tree_int (range, 0) > 0
	  && lshift_cheap_p ()
	  && ((uniq == 1 && count >= 3)
	      || (uniq == 2 && count >= 5)
	      || (uniq == 3 && count >= 6)))
	{
	  /* Optimize the case where all the case values fit in a
	     word without having to subtract MINVAL.  In this case,
	     we can optimize away the subtraction.  */
	  if (compare_tree_int (minval, 0) > 0
	      && compare_tree_int (maxval, GET_MODE_BITSIZE (word_mode)) < 0)
	    {
	      minval = build_int_cst (index_type, 0);
	      range = maxval;
	    }
	  emit_case_bit_tests (index_type, index_expr, minval, range,
			       case_list, default_label);
	}

      /* If range of values is much bigger than number of values,
	 make a sequence of conditional branches instead of a dispatch.
	 If the switch-index is a constant, do it this way
	 because we can optimize it.  */

      else if (count < targetm.case_values_threshold ()
	       || compare_tree_int (range,
				    (optimize_insn_for_size_p () ? 3 : 10) * count) > 0
	       /* RANGE may be signed, and really large ranges will show up
		  as negative numbers.  */
	       || compare_tree_int (range, 0) < 0
#ifndef ASM_OUTPUT_ADDR_DIFF_ELT
	       || flag_pic
#endif
	       || !flag_jump_tables
	       || TREE_CONSTANT (index_expr)
	       /* If neither casesi or tablejump is available, we can
		  only go this way.  */
	       || (!HAVE_casesi && !HAVE_tablejump))
	{
	  index = expand_normal (index_expr);

	  /* If the index is a short or char that we do not have
	     an insn to handle comparisons directly, convert it to
	     a full integer now, rather than letting each comparison
	     generate the conversion.  */

	  if (GET_MODE_CLASS (GET_MODE (index)) == MODE_INT
	      && ! have_insn_for (COMPARE, GET_MODE (index)))
	    {
	      enum machine_mode wider_mode;
	      for (wider_mode = GET_MODE (index); wider_mode != VOIDmode;
		   wider_mode = GET_MODE_WIDER_MODE (wider_mode))
		if (have_insn_for (COMPARE, wider_mode))
		  {
		    index = convert_to_mode (wider_mode, index, unsignedp);
		    break;
		  }
	    }

	  do_pending_stack_adjust ();

	  if (MEM_P (index))
	    index = copy_to_reg (index);

	  /* We generate a binary decision tree to select the
	     appropriate target code.  This is done as follows:

	     The list of cases is rearranged into a binary tree,
	     nearly optimal assuming equal probability for each case.

	     The tree is transformed into RTL, eliminating
	     redundant test conditions at the same time.

	     If program flow could reach the end of the
	     decision tree an unconditional jump to the
	     default code is emitted.  */

	  use_cost_table = estimate_case_costs (case_list);
	  balance_case_nodes (&case_list, NULL);
	  emit_case_nodes (index, case_list, default_label, index_type);
	  if (default_label)
	    emit_jump (default_label);
	}
      else
	{
	  /* Dispatch via casesi or an explicit jump table.  */
	  rtx fallback_label = label_rtx (case_list->code_label);
	  table_label = gen_label_rtx ();
	  if (! try_casesi (index_type, index_expr, minval, range,
			    table_label, default_label, fallback_label))
	    {
	      bool ok;

	      /* Index jumptables from zero for suitable values of
                 minval to avoid a subtraction.  */
	      if (optimize_insn_for_speed_p ()
		  && compare_tree_int (minval, 0) > 0
		  && compare_tree_int (minval, 3) < 0)
		{
		  minval = build_int_cst (index_type, 0);
		  range = maxval;
		}

	      ok = try_tablejump (index_type, index_expr, minval, range,
				  table_label, default_label);
	      gcc_assert (ok);
	    }

	  /* Get table of labels to jump to, in order of case index.  */

	  ncases = tree_low_cst (range, 0) + 1;
	  labelvec = XALLOCAVEC (rtx, ncases);
	  memset (labelvec, 0, ncases * sizeof (rtx));

	  for (n = case_list; n; n = n->right)
	    {
	      /* Compute the low and high bounds relative to the minimum
		 value since that should fit in a HOST_WIDE_INT while the
		 actual values may not.  */
	      HOST_WIDE_INT i_low
		= tree_low_cst (fold_build2 (MINUS_EXPR, index_type,
					     n->low, minval), 1);
	      HOST_WIDE_INT i_high
		= tree_low_cst (fold_build2 (MINUS_EXPR, index_type,
					     n->high, minval), 1);
	      HOST_WIDE_INT i;

	      for (i = i_low; i <= i_high; i ++)
		labelvec[i]
		  = gen_rtx_LABEL_REF (Pmode, label_rtx (n->code_label));
	    }

	  /* Fill in the gaps with the default.  We may have gaps at
	     the beginning if we tried to avoid the minval subtraction,
	     so substitute some label even if the default label was
	     deemed unreachable.  */
	  if (!default_label)
	    default_label = fallback_label;
	  for (i = 0; i < ncases; i++)
	    if (labelvec[i] == 0)
	      labelvec[i] = gen_rtx_LABEL_REF (Pmode, default_label);

	  /* Output the table.  */
	  emit_label (table_label);

	  if (CASE_VECTOR_PC_RELATIVE || flag_pic)
	    emit_jump_insn (gen_rtx_ADDR_DIFF_VEC (CASE_VECTOR_MODE,
						   gen_rtx_LABEL_REF (Pmode, table_label),
						   gen_rtvec_v (ncases, labelvec),
						   const0_rtx, const0_rtx));
	  else
	    emit_jump_insn (gen_rtx_ADDR_VEC (CASE_VECTOR_MODE,
					      gen_rtvec_v (ncases, labelvec)));

	  /* Record no drop-through after the table.  */
	  emit_barrier ();
	}

      /* Move the dispatch code just emitted to sit after START, the
	 insn that was last before any of it was generated.  */
      before_case = NEXT_INSN (before_case);
      end = get_last_insn ();
      reorder_insns (before_case, end, start);
    }

  free_temp_slots ();
  free_alloc_pool (case_node_pool);
}
2533
/* Generate code to jump to LABEL if OP0 and OP1 are equal in mode MODE.
   UNSIGNEDP says whether the comparison is unsigned; it is passed
   straight through to do_compare_rtx_and_jump along with EQ as the
   comparison code (no probability information, hence -1).  */

static void
do_jump_if_equal (enum machine_mode mode, rtx op0, rtx op1, rtx label,
		  int unsignedp)
{
  do_compare_rtx_and_jump (op0, op1, EQ, unsignedp, mode,
			   NULL_RTX, NULL_RTX, label, -1);
}
2543 \f
2544 /* Not all case values are encountered equally. This function
2545 uses a heuristic to weight case labels, in cases where that
2546 looks like a reasonable thing to do.
2547
2548 Right now, all we try to guess is text, and we establish the
2549 following weights:
2550
2551 chars above space: 16
2552 digits: 16
2553 default: 12
2554 space, punct: 8
2555 tab: 4
2556 newline: 2
2557 other "\" chars: 1
2558 remaining chars: 0
2559
2560 If we find any cases in the switch that are not either -1 or in the range
2561 of valid ASCII characters, or are control characters other than those
2562 commonly used with "\", don't treat this switch scanning text.
2563
2564 Return 1 if these nodes are suitable for cost estimation, otherwise
2565 return 0. */
2566
2567 static int
2568 estimate_case_costs (case_node_ptr node)
2569 {
2570 tree min_ascii = integer_minus_one_node;
2571 tree max_ascii = build_int_cst (TREE_TYPE (node->high), 127);
2572 case_node_ptr n;
2573 int i;
2574
2575 /* If we haven't already made the cost table, make it now. Note that the
2576 lower bound of the table is -1, not zero. */
2577
2578 if (! cost_table_initialized)
2579 {
2580 cost_table_initialized = 1;
2581
2582 for (i = 0; i < 128; i++)
2583 {
2584 if (ISALNUM (i))
2585 COST_TABLE (i) = 16;
2586 else if (ISPUNCT (i))
2587 COST_TABLE (i) = 8;
2588 else if (ISCNTRL (i))
2589 COST_TABLE (i) = -1;
2590 }
2591
2592 COST_TABLE (' ') = 8;
2593 COST_TABLE ('\t') = 4;
2594 COST_TABLE ('\0') = 4;
2595 COST_TABLE ('\n') = 2;
2596 COST_TABLE ('\f') = 1;
2597 COST_TABLE ('\v') = 1;
2598 COST_TABLE ('\b') = 1;
2599 }
2600
2601 /* See if all the case expressions look like text. It is text if the
2602 constant is >= -1 and the highest constant is <= 127. Do all comparisons
2603 as signed arithmetic since we don't want to ever access cost_table with a
2604 value less than -1. Also check that none of the constants in a range
2605 are strange control characters. */
2606
2607 for (n = node; n; n = n->right)
2608 {
2609 if (tree_int_cst_lt (n->low, min_ascii)
2610 || tree_int_cst_lt (max_ascii, n->high))
2611 return 0;
2612
2613 for (i = (HOST_WIDE_INT) TREE_INT_CST_LOW (n->low);
2614 i <= (HOST_WIDE_INT) TREE_INT_CST_LOW (n->high); i++)
2615 if (COST_TABLE (i) < 0)
2616 return 0;
2617 }
2618
2619 /* All interesting values are within the range of interesting
2620 ASCII characters. */
2621 return 1;
2622 }
2623
/* Take an ordered list of case nodes
   and transform them into a near optimal binary tree,
   on the assumption that any target code selection value is as
   likely as any other.

   The transformation is performed by splitting the ordered
   list into two equal sections plus a pivot.  The parts are
   then attached to the pivot as left and right branches.  Each
   branch is then transformed recursively.

   HEAD points at the link holding the list to balance; PARENT is the
   node the resulting subtree hangs below (NULL at the root).  */

static void
balance_case_nodes (case_node_ptr *head, case_node_ptr parent)
{
  case_node_ptr np;

  np = *head;
  if (np)
    {
      int cost = 0;
      int i = 0;
      int ranges = 0;
      case_node_ptr *npp;
      case_node_ptr left;

      /* Count the number of entries on branch.  Also count the ranges.
	 When use_cost_table is set, COST is the summed table weight of
	 all bounds (ranges contribute both endpoints).  */

      while (np)
	{
	  if (!tree_int_cst_equal (np->low, np->high))
	    {
	      ranges++;
	      if (use_cost_table)
		cost += COST_TABLE (TREE_INT_CST_LOW (np->high));
	    }

	  if (use_cost_table)
	    cost += COST_TABLE (TREE_INT_CST_LOW (np->low));

	  i++;
	  np = np->right;
	}

      if (i > 2)
	{
	  /* Split this list if it is long enough for that to help.  */
	  npp = head;
	  left = *npp;
	  if (use_cost_table)
	    {
	      /* Find the place in the list that bisects the list's total cost,
		 Here I gets half the total cost.  */
	      int n_moved = 0;
	      i = (cost + 1) / 2;
	      while (1)
		{
		  /* Skip nodes while their cost does not reach that amount.  */
		  if (!tree_int_cst_equal ((*npp)->low, (*npp)->high))
		    i -= COST_TABLE (TREE_INT_CST_LOW ((*npp)->high));
		  i -= COST_TABLE (TREE_INT_CST_LOW ((*npp)->low));
		  if (i <= 0)
		    break;
		  npp = &(*npp)->right;
		  n_moved += 1;
		}
	      if (n_moved == 0)
		{
		  /* The very first node already holds at least half the
		     total cost, so there is no useful split point.
		     Leave this branch lopsided, but optimize left-hand
		     side and fill in `parent' fields for right-hand side.  */
		  np = *head;
		  np->parent = parent;
		  balance_case_nodes (&np->left, np);
		  for (; np->right; np = np->right)
		    np->right->parent = np;
		  return;
		}
	    }
	  /* If there are just three nodes, split at the middle one.  */
	  else if (i == 3)
	    npp = &(*npp)->right;
	  else
	    {
	      /* Find the place in the list that bisects the list's total cost,
		 where ranges count as 2.
		 Here I gets half the total cost.  */
	      i = (i + ranges + 1) / 2;
	      while (1)
		{
		  /* Skip nodes while their cost does not reach that amount.  */
		  if (!tree_int_cst_equal ((*npp)->low, (*npp)->high))
		    i--;
		  i--;
		  if (i <= 0)
		    break;
		  npp = &(*npp)->right;
		}
	    }
	  /* *NPP is the pivot.  Detach the prefix (it stays reachable
	     through LEFT, terminated by zeroing *NPP) and make it the
	     pivot's left branch; the suffix remains the right branch.  */
	  *head = np = *npp;
	  *npp = 0;
	  np->parent = parent;
	  np->left = left;

	  /* Optimize each of the two split parts.  */
	  balance_case_nodes (&np->left, np);
	  balance_case_nodes (&np->right, np);
	}
      else
	{
	  /* Else leave this branch as one level,
	     but fill in `parent' fields.  */
	  np = *head;
	  np->parent = parent;
	  for (; np->right; np = np->right)
	    np->right->parent = np;
	}
    }
}
2740 \f
2741 /* Search the parent sections of the case node tree
2742 to see if a test for the lower bound of NODE would be redundant.
2743 INDEX_TYPE is the type of the index expression.
2744
2745 The instructions to generate the case decision tree are
2746 output in the same order as nodes are processed so it is
2747 known that if a parent node checks the range of the current
2748 node minus one that the current node is bounded at its lower
2749 span. Thus the test would be redundant. */
2750
2751 static int
2752 node_has_low_bound (case_node_ptr node, tree index_type)
2753 {
2754 tree low_minus_one;
2755 case_node_ptr pnode;
2756
2757 /* If the lower bound of this node is the lowest value in the index type,
2758 we need not test it. */
2759
2760 if (tree_int_cst_equal (node->low, TYPE_MIN_VALUE (index_type)))
2761 return 1;
2762
2763 /* If this node has a left branch, the value at the left must be less
2764 than that at this node, so it cannot be bounded at the bottom and
2765 we need not bother testing any further. */
2766
2767 if (node->left)
2768 return 0;
2769
2770 low_minus_one = fold_build2 (MINUS_EXPR, TREE_TYPE (node->low),
2771 node->low,
2772 build_int_cst (TREE_TYPE (node->low), 1));
2773
2774 /* If the subtraction above overflowed, we can't verify anything.
2775 Otherwise, look for a parent that tests our value - 1. */
2776
2777 if (! tree_int_cst_lt (low_minus_one, node->low))
2778 return 0;
2779
2780 for (pnode = node->parent; pnode; pnode = pnode->parent)
2781 if (tree_int_cst_equal (low_minus_one, pnode->high))
2782 return 1;
2783
2784 return 0;
2785 }
2786
2787 /* Search the parent sections of the case node tree
2788 to see if a test for the upper bound of NODE would be redundant.
2789 INDEX_TYPE is the type of the index expression.
2790
2791 The instructions to generate the case decision tree are
2792 output in the same order as nodes are processed so it is
2793 known that if a parent node checks the range of the current
2794 node plus one that the current node is bounded at its upper
2795 span. Thus the test would be redundant. */
2796
2797 static int
2798 node_has_high_bound (case_node_ptr node, tree index_type)
2799 {
2800 tree high_plus_one;
2801 case_node_ptr pnode;
2802
2803 /* If there is no upper bound, obviously no test is needed. */
2804
2805 if (TYPE_MAX_VALUE (index_type) == NULL)
2806 return 1;
2807
2808 /* If the upper bound of this node is the highest value in the type
2809 of the index expression, we need not test against it. */
2810
2811 if (tree_int_cst_equal (node->high, TYPE_MAX_VALUE (index_type)))
2812 return 1;
2813
2814 /* If this node has a right branch, the value at the right must be greater
2815 than that at this node, so it cannot be bounded at the top and
2816 we need not bother testing any further. */
2817
2818 if (node->right)
2819 return 0;
2820
2821 high_plus_one = fold_build2 (PLUS_EXPR, TREE_TYPE (node->high),
2822 node->high,
2823 build_int_cst (TREE_TYPE (node->high), 1));
2824
2825 /* If the addition above overflowed, we can't verify anything.
2826 Otherwise, look for a parent that tests our value + 1. */
2827
2828 if (! tree_int_cst_lt (node->high, high_plus_one))
2829 return 0;
2830
2831 for (pnode = node->parent; pnode; pnode = pnode->parent)
2832 if (tree_int_cst_equal (high_plus_one, pnode->low))
2833 return 1;
2834
2835 return 0;
2836 }
2837
2838 /* Search the parent sections of the
2839 case node tree to see if both tests for the upper and lower
2840 bounds of NODE would be redundant. */
2841
2842 static int
2843 node_is_bounded (case_node_ptr node, tree index_type)
2844 {
2845 return (node_has_low_bound (node, index_type)
2846 && node_has_high_bound (node, index_type));
2847 }
2848 \f
2849 /* Emit step-by-step code to select a case for the value of INDEX.
2850 The thus generated decision tree follows the form of the
2851 case-node binary tree NODE, whose nodes represent test conditions.
2852 INDEX_TYPE is the type of the index of the switch.
2853
2854 Care is taken to prune redundant tests from the decision tree
2855 by detecting any boundary conditions already checked by
2856 emitted rtx. (See node_has_high_bound, node_has_low_bound
2857 and node_is_bounded, above.)
2858
2859 Where the test conditions can be shown to be redundant we emit
2860 an unconditional jump to the target code. As a further
2861 optimization, the subordinates of a tree node are examined to
2862 check for bounded nodes. In this case conditional and/or
2863 unconditional jumps as a result of the boundary check for the
2864 current node are arranged to target the subordinates associated
2865 code for out of bound conditions on the current node.
2866
2867 We can assume that when control reaches the code generated here,
2868 the index value has already been compared with the parents
2869 of this node, and determined to be on the same side of each parent
2870 as this node is. Thus, if this node tests for the value 51,
2871 and a parent tested for 52, we don't need to consider
2872 the possibility of a value greater than 51. If another parent
2873 tests for the value 50, then this node need not test anything. */
2874
2875 static void
2876 emit_case_nodes (rtx index, case_node_ptr node, rtx default_label,
2877 tree index_type)
2878 {
2879 /* If INDEX has an unsigned type, we must make unsigned branches. */
2880 int unsignedp = TYPE_UNSIGNED (index_type);
2881 enum machine_mode mode = GET_MODE (index);
2882 enum machine_mode imode = TYPE_MODE (index_type);
2883
2884 /* Handle indices detected as constant during RTL expansion. */
2885 if (mode == VOIDmode)
2886 mode = imode;
2887
2888 /* See if our parents have already tested everything for us.
2889 If they have, emit an unconditional jump for this node. */
2890 if (node_is_bounded (node, index_type))
2891 emit_jump (label_rtx (node->code_label));
2892
2893 else if (tree_int_cst_equal (node->low, node->high))
2894 {
2895 /* Node is single valued. First see if the index expression matches
2896 this node and then check our children, if any. */
2897
2898 do_jump_if_equal (mode, index,
2899 convert_modes (mode, imode,
2900 expand_normal (node->low),
2901 unsignedp),
2902 label_rtx (node->code_label), unsignedp);
2903
2904 if (node->right != 0 && node->left != 0)
2905 {
2906 /* This node has children on both sides.
2907 Dispatch to one side or the other
2908 by comparing the index value with this node's value.
2909 If one subtree is bounded, check that one first,
2910 so we can avoid real branches in the tree. */
2911
2912 if (node_is_bounded (node->right, index_type))
2913 {
2914 emit_cmp_and_jump_insns (index,
2915 convert_modes
2916 (mode, imode,
2917 expand_normal (node->high),
2918 unsignedp),
2919 GT, NULL_RTX, mode, unsignedp,
2920 label_rtx (node->right->code_label));
2921 emit_case_nodes (index, node->left, default_label, index_type);
2922 }
2923
2924 else if (node_is_bounded (node->left, index_type))
2925 {
2926 emit_cmp_and_jump_insns (index,
2927 convert_modes
2928 (mode, imode,
2929 expand_normal (node->high),
2930 unsignedp),
2931 LT, NULL_RTX, mode, unsignedp,
2932 label_rtx (node->left->code_label));
2933 emit_case_nodes (index, node->right, default_label, index_type);
2934 }
2935
2936 /* If both children are single-valued cases with no
2937 children, finish up all the work. This way, we can save
2938 one ordered comparison. */
2939 else if (tree_int_cst_equal (node->right->low, node->right->high)
2940 && node->right->left == 0
2941 && node->right->right == 0
2942 && tree_int_cst_equal (node->left->low, node->left->high)
2943 && node->left->left == 0
2944 && node->left->right == 0)
2945 {
2946 /* Neither node is bounded. First distinguish the two sides;
2947 then emit the code for one side at a time. */
2948
2949 /* See if the value matches what the right hand side
2950 wants. */
2951 do_jump_if_equal (mode, index,
2952 convert_modes (mode, imode,
2953 expand_normal (node->right->low),
2954 unsignedp),
2955 label_rtx (node->right->code_label),
2956 unsignedp);
2957
2958 /* See if the value matches what the left hand side
2959 wants. */
2960 do_jump_if_equal (mode, index,
2961 convert_modes (mode, imode,
2962 expand_normal (node->left->low),
2963 unsignedp),
2964 label_rtx (node->left->code_label),
2965 unsignedp);
2966 }
2967
2968 else
2969 {
2970 /* Neither node is bounded. First distinguish the two sides;
2971 then emit the code for one side at a time. */
2972
2973 tree test_label
2974 = build_decl (CURR_INSN_LOCATION,
2975 LABEL_DECL, NULL_TREE, NULL_TREE);
2976
2977 /* See if the value is on the right. */
2978 emit_cmp_and_jump_insns (index,
2979 convert_modes
2980 (mode, imode,
2981 expand_normal (node->high),
2982 unsignedp),
2983 GT, NULL_RTX, mode, unsignedp,
/* NOTE(review): this excerpt is the tail of emit_case_nodes.  The function
   header, the locals used throughout (index, mode, imode, unsignedp) and the
   start of the single-valued-node handling sit above this view; the first
   line below completes an emit_cmp_and_jump_insns call begun there.  */
2984 label_rtx (test_label));
2985
2986 /* Value must be on the left.
2987 Handle the left-hand subtree. */
2988 emit_case_nodes (index, node->left, default_label, index_type);
2989 /* If left-hand subtree does nothing,
2990 go to default. */
2991 if (default_label)
2992 emit_jump (default_label);
2993
2994 /* Code branches here for the right-hand subtree. */
2995 expand_label (test_label);
2996 emit_case_nodes (index, node->right, default_label, index_type);
2997 }
2998 }
2999
3000 else if (node->right != 0 && node->left == 0)
3001 {
3002 /* Here we have a right child but no left so we issue a conditional
3003 branch to default and process the right child.
3004
3005 Omit the conditional branch to default if the right child
3006 does not have any children and is single valued; it would
3007 cost too much space to save so little time. */
3008
3009 if (node->right->right || node->right->left
3010 || !tree_int_cst_equal (node->right->low, node->right->high))
3011 {
/* Only emit the index < node->high -> default branch when lower values
   have not already been excluded by an ancestor's comparison.  */
3012 if (!node_has_low_bound (node, index_type))
3013 {
3014 emit_cmp_and_jump_insns (index,
3015 convert_modes
3016 (mode, imode,
3017 expand_normal (node->high),
3018 unsignedp),
3019 LT, NULL_RTX, mode, unsignedp,
3020 default_label);
3021 }
3022
3023 emit_case_nodes (index, node->right, default_label, index_type);
3024 }
3025 else
3026 /* We cannot process node->right normally
3027 since we haven't ruled out the numbers less than
3028 this node's value. So handle node->right explicitly. */
3029 do_jump_if_equal (mode, index,
3030 convert_modes
3031 (mode, imode,
3032 expand_normal (node->right->low),
3033 unsignedp),
3034 label_rtx (node->right->code_label), unsignedp);
3035 }
3036
3037 else if (node->right == 0 && node->left != 0)
3038 {
3039 /* Just one subtree, on the left. */
3040 if (node->left->left || node->left->right
3041 || !tree_int_cst_equal (node->left->low, node->left->high))
3042 {
3043 if (!node_has_high_bound (node, index_type))
3044 {
3045 emit_cmp_and_jump_insns (index,
3046 convert_modes
3047 (mode, imode,
3048 expand_normal (node->high),
3049 unsignedp),
3050 GT, NULL_RTX, mode, unsignedp,
3051 default_label);
3052 }
3053
3054 emit_case_nodes (index, node->left, default_label, index_type);
3055 }
3056 else
3057 /* We cannot process node->left normally
3058 since we haven't ruled out the numbers greater than
3059 this node's value. So handle node->left explicitly. */
3060 do_jump_if_equal (mode, index,
3061 convert_modes
3062 (mode, imode,
3063 expand_normal (node->left->low),
3064 unsignedp),
3065 label_rtx (node->left->code_label), unsignedp);
3066 }
3067 }
3068 else
3069 {
3070 /* Node is a range. These cases are very similar to those for a single
3071 value, except that we do not start by testing whether this node
3072 is the one to branch to. */
3073
3074 if (node->right != 0 && node->left != 0)
3075 {
3076 /* Node has subtrees on both sides.
3077 If the right-hand subtree is bounded,
3078 test for it first, since we can go straight there.
3079 Otherwise, we need to make a branch in the control structure,
3080 then handle the two subtrees. */
/* Nonzero iff handling of the right subtree was deferred to a label
   (emitted further down once the left side is finished).  */
3081 tree test_label = 0;
3082
3083 if (node_is_bounded (node->right, index_type))
3084 /* Right hand node is fully bounded so we can eliminate any
3085 testing and branch directly to the target code. */
3086 emit_cmp_and_jump_insns (index,
3087 convert_modes
3088 (mode, imode,
3089 expand_normal (node->high),
3090 unsignedp),
3091 GT, NULL_RTX, mode, unsignedp,
3092 label_rtx (node->right->code_label));
3093 else
3094 {
3095 /* Right hand node requires testing.
3096 Branch to a label where we will handle it later. */
3097
3098 test_label = build_decl (CURR_INSN_LOCATION,
3099 LABEL_DECL, NULL_TREE, NULL_TREE);
3100 emit_cmp_and_jump_insns (index,
3101 convert_modes
3102 (mode, imode,
3103 expand_normal (node->high),
3104 unsignedp),
3105 GT, NULL_RTX, mode, unsignedp,
3106 label_rtx (test_label));
3107 }
3108
3109 /* Value belongs to this node or to the left-hand subtree. */
3110
3111 emit_cmp_and_jump_insns (index,
3112 convert_modes
3113 (mode, imode,
3114 expand_normal (node->low),
3115 unsignedp),
3116 GE, NULL_RTX, mode, unsignedp,
3117 label_rtx (node->code_label));
3118
3119 /* Handle the left-hand subtree. */
3120 emit_case_nodes (index, node->left, default_label, index_type);
3121
3122 /* If right node had to be handled later, do that now. */
3123
3124 if (test_label)
3125 {
3126 /* If the left-hand subtree fell through,
3127 don't let it fall into the right-hand subtree. */
3128 if (default_label)
3129 emit_jump (default_label);
3130
3131 expand_label (test_label);
3132 emit_case_nodes (index, node->right, default_label, index_type);
3133 }
3134 }
3135
3136 else if (node->right != 0 && node->left == 0)
3137 {
3138 /* Deal with values to the left of this node,
3139 if they are possible. */
3140 if (!node_has_low_bound (node, index_type))
3141 {
3142 emit_cmp_and_jump_insns (index,
3143 convert_modes
3144 (mode, imode,
3145 expand_normal (node->low),
3146 unsignedp),
3147 LT, NULL_RTX, mode, unsignedp,
3148 default_label);
3149 }
3150
3151 /* Value belongs to this node or to the right-hand subtree. */
3152
3153 emit_cmp_and_jump_insns (index,
3154 convert_modes
3155 (mode, imode,
3156 expand_normal (node->high),
3157 unsignedp),
3158 LE, NULL_RTX, mode, unsignedp,
3159 label_rtx (node->code_label));
3160
3161 emit_case_nodes (index, node->right, default_label, index_type);
3162 }
3163
3164 else if (node->right == 0 && node->left != 0)
3165 {
3166 /* Deal with values to the right of this node,
3167 if they are possible. */
3168 if (!node_has_high_bound (node, index_type))
3169 {
3170 emit_cmp_and_jump_insns (index,
3171 convert_modes
3172 (mode, imode,
3173 expand_normal (node->high),
3174 unsignedp),
3175 GT, NULL_RTX, mode, unsignedp,
3176 default_label);
3177 }
3178
3179 /* Value belongs to this node or to the left-hand subtree. */
3180
3181 emit_cmp_and_jump_insns (index,
3182 convert_modes
3183 (mode, imode,
3184 expand_normal (node->low),
3185 unsignedp),
3186 GE, NULL_RTX, mode, unsignedp,
3187 label_rtx (node->code_label));
3188
3189 emit_case_nodes (index, node->left, default_label, index_type);
3190 }
3191
3192 else
3193 {
3194 /* Node has no children so we check low and high bounds to remove
3195 redundant tests. Only one of the bounds can exist,
3196 since otherwise this node is bounded--a case tested already. */
3197 int high_bound = node_has_high_bound (node, index_type);
3198 int low_bound = node_has_low_bound (node, index_type);
3199
3200 if (!high_bound && low_bound)
3201 {
3202 emit_cmp_and_jump_insns (index,
3203 convert_modes
3204 (mode, imode,
3205 expand_normal (node->high),
3206 unsignedp),
3207 GT, NULL_RTX, mode, unsignedp,
3208 default_label);
3209 }
3210
3211 else if (!low_bound && high_bound)
3212 {
3213 emit_cmp_and_jump_insns (index,
3214 convert_modes
3215 (mode, imode,
3216 expand_normal (node->low),
3217 unsignedp),
3218 LT, NULL_RTX, mode, unsignedp,
3219 default_label);
3220 }
3221 else if (!low_bound && !high_bound)
3222 {
3223 /* Widen LOW and HIGH to the same width as INDEX. */
3224 tree type = lang_hooks.types.type_for_mode (mode, unsignedp);
3225 tree low = build1 (CONVERT_EXPR, type, node->low);
3226 tree high = build1 (CONVERT_EXPR, type, node->high);
3227 rtx low_rtx, new_index, new_bound;
3228
3229 /* Instead of doing two branches, emit one unsigned branch for
3230 (index-low) > (high-low). */
/* The subtraction biases INDEX so that the in-range values become
   0 .. high-low; a single UNSIGNED compare (note the literal 1 for
   unsignedp below) then catches both out-of-range directions.  */
3231 low_rtx = expand_expr (low, NULL_RTX, mode, EXPAND_NORMAL);
3232 new_index = expand_simple_binop (mode, MINUS, index, low_rtx,
3233 NULL_RTX, unsignedp,
3234 OPTAB_WIDEN);
3235 new_bound = expand_expr (fold_build2 (MINUS_EXPR, type,
3236 high, low),
3237 NULL_RTX, mode, EXPAND_NORMAL);
3238
3239 emit_cmp_and_jump_insns (new_index, new_bound, GT, NULL_RTX,
3240 mode, 1, default_label);
3241 }
3242
/* Any out-of-range value was dispatched to default by the comparisons
   above (or none were needed when the node is fully bounded); whatever
   falls through belongs to this node's range.  */
3243 emit_jump (label_rtx (node->code_label));
3244 }
3245 }
3246 }