gcc/stmt.c
1 /* Expands front end tree to back end RTL for GCC
2 Copyright (C) 1987-2013 Free Software Foundation, Inc.
3
4 This file is part of GCC.
5
6 GCC is free software; you can redistribute it and/or modify it under
7 the terms of the GNU General Public License as published by the Free
8 Software Foundation; either version 3, or (at your option) any later
9 version.
10
11 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
12 WARRANTY; without even the implied warranty of MERCHANTABILITY or
13 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
14 for more details.
15
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING3. If not see
18 <http://www.gnu.org/licenses/>. */
19
20 /* This file handles the generation of rtl code from tree structure
21 above the level of expressions, using subroutines in exp*.c and emit-rtl.c.
22 The functions whose names start with `expand_' are called by the
23 expander to generate RTL instructions for various kinds of constructs. */
24
25 #include "config.h"
26 #include "system.h"
27 #include "coretypes.h"
28 #include "tm.h"
29
30 #include "rtl.h"
31 #include "hard-reg-set.h"
32 #include "tree.h"
33 #include "tm_p.h"
34 #include "flags.h"
35 #include "except.h"
36 #include "function.h"
37 #include "insn-config.h"
38 #include "expr.h"
39 #include "libfuncs.h"
40 #include "recog.h"
41 #include "machmode.h"
42 #include "diagnostic-core.h"
43 #include "output.h"
44 #include "ggc.h"
45 #include "langhooks.h"
46 #include "predict.h"
47 #include "optabs.h"
48 #include "target.h"
49 #include "gimple.h"
50 #include "regs.h"
51 #include "alloc-pool.h"
52 #include "pretty-print.h"
53 #include "pointer-set.h"
54 #include "params.h"
55 #include "dumpfile.h"
56
57 \f
58 /* Functions and data structures for expanding case statements. */
59
60 /* Case label structure, used to hold info on labels within case
61 statements. We handle "range" labels; for a single-value label
62 as in C, the high and low limits are the same.
63
64 We start with a vector of case nodes sorted in ascending order, and
65 the default label as the last element in the vector. Before expanding
66 to RTL, we transform this vector into a list linked via the RIGHT
67 fields in the case_node struct. Nodes with higher case values are
68 later in the list.
69
70 Switch statements can be output in three forms. A branch table is
71 used if there are more than a few labels and the labels are dense
72 within the range between the smallest and largest case value. If a
73 branch table is used, no further manipulations are done with the case
74 node chain.
75
76 The alternative to the use of a branch table is to generate a series
77 of compare and jump insns. When that is done, we use the LEFT, RIGHT,
78 and PARENT fields to hold a binary tree. Initially the tree is
79 totally unbalanced, with everything on the right. We balance the tree
80 with nodes on the left having lower case values than the parent
81 and nodes on the right having higher values. We then output the tree
82 in order.
83
84 For very small, suitable switch statements, we can generate a series
85 of simple bit test and branches instead. */
86
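/* A purely illustrative sketch of the choice described above: for a
   dense switch such as

     switch (x) { case 1: case 2: case 3: case 4: ... }

   the values fill the range between minimum and maximum, so a branch
   table indexed by x is normally emitted.  For a sparse switch such as

     switch (x) { case 1: case 100: case 10000: ... }

   the range is far larger than the number of cases, so the case nodes
   are balanced into a binary tree of compare-and-jump insns instead.
   The actual heuristic lives in expand_switch_as_decision_tree_p
   below.  */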
87 struct case_node
88 {
89 struct case_node *left; /* Left son in binary tree */
90 struct case_node *right; /* Right son in binary tree; also node chain */
91 struct case_node *parent; /* Parent of node in binary tree */
92 tree low; /* Lowest index value for this label */
93 tree high; /* Highest index value for this label */
94 tree code_label; /* Label to jump to when node matches */
95 int prob; /* Probability of taking this case. */
96 /* Probability of reaching subtree rooted at this node */
97 int subtree_prob;
98 };
99
100 typedef struct case_node case_node;
101 typedef struct case_node *case_node_ptr;
102
103 extern basic_block label_to_block_fn (struct function *, tree);
104 \f
105 static int n_occurrences (int, const char *);
106 static bool tree_conflicts_with_clobbers_p (tree, HARD_REG_SET *);
107 static bool check_operand_nalternatives (tree, tree);
108 static bool check_unique_operand_names (tree, tree, tree);
109 static char *resolve_operand_name_1 (char *, tree, tree, tree);
110 static void expand_null_return_1 (void);
111 static void expand_value_return (rtx);
112 static void balance_case_nodes (case_node_ptr *, case_node_ptr);
113 static int node_has_low_bound (case_node_ptr, tree);
114 static int node_has_high_bound (case_node_ptr, tree);
115 static int node_is_bounded (case_node_ptr, tree);
116 static void emit_case_nodes (rtx, case_node_ptr, rtx, int, tree);
117 \f
118 /* Return the rtx-label that corresponds to a LABEL_DECL,
119 creating it if necessary. */
120
121 rtx
122 label_rtx (tree label)
123 {
124 gcc_assert (TREE_CODE (label) == LABEL_DECL);
125
126 if (!DECL_RTL_SET_P (label))
127 {
128 rtx r = gen_label_rtx ();
129 SET_DECL_RTL (label, r);
130 if (FORCED_LABEL (label) || DECL_NONLOCAL (label))
131 LABEL_PRESERVE_P (r) = 1;
132 }
133
134 return DECL_RTL (label);
135 }
136
137 /* As above, but also put it on the forced-reference list of the
138 function that contains it. */
139 rtx
140 force_label_rtx (tree label)
141 {
142 rtx ref = label_rtx (label);
143 tree function = decl_function_context (label);
144
145 gcc_assert (function);
146
147 forced_labels = gen_rtx_EXPR_LIST (VOIDmode, ref, forced_labels);
148 return ref;
149 }
150
151 /* Add an unconditional jump to LABEL as the next sequential instruction. */
152
153 void
154 emit_jump (rtx label)
155 {
156 do_pending_stack_adjust ();
157 emit_jump_insn (gen_jump (label));
158 emit_barrier ();
159 }
160
161 /* Emit code to jump to the address
162 specified by the pointer expression EXP. */
163
164 void
165 expand_computed_goto (tree exp)
166 {
167 rtx x = expand_normal (exp);
168
169 x = convert_memory_address (Pmode, x);
170
171 do_pending_stack_adjust ();
172 emit_indirect_jump (x);
173 }
174 \f
175 /* Handle goto statements and the labels that they can go to. */
176
177 /* Specify the location in the RTL code of a label LABEL,
178 which is a LABEL_DECL tree node.
179
180 This is used for the kind of label that the user can jump to with a
181 goto statement, and for alternatives of a switch or case statement.
182 RTL labels generated for loops and conditionals don't go through here;
183 they are generated directly at the RTL level, by other functions below.
184
185 Note that this has nothing to do with defining label *names*.
186 Languages vary in how they do that and what that even means. */
187
188 void
189 expand_label (tree label)
190 {
191 rtx label_r = label_rtx (label);
192
193 do_pending_stack_adjust ();
194 emit_label (label_r);
195 if (DECL_NAME (label))
196 LABEL_NAME (DECL_RTL (label)) = IDENTIFIER_POINTER (DECL_NAME (label));
197
198 if (DECL_NONLOCAL (label))
199 {
200 expand_builtin_setjmp_receiver (NULL);
201 nonlocal_goto_handler_labels
202 = gen_rtx_EXPR_LIST (VOIDmode, label_r,
203 nonlocal_goto_handler_labels);
204 }
205
206 if (FORCED_LABEL (label))
207 forced_labels = gen_rtx_EXPR_LIST (VOIDmode, label_r, forced_labels);
208
209 if (DECL_NONLOCAL (label) || FORCED_LABEL (label))
210 maybe_set_first_label_num (label_r);
211 }
212
213 /* Generate RTL code for a `goto' statement with target label LABEL.
214 LABEL should be a LABEL_DECL tree node that was or will later be
215 defined with `expand_label'. */
216
217 void
218 expand_goto (tree label)
219 {
220 #ifdef ENABLE_CHECKING
221 /* Check for a nonlocal goto to a containing function. Should have
222 gotten translated to __builtin_nonlocal_goto. */
223 tree context = decl_function_context (label);
224 gcc_assert (!context || context == current_function_decl);
225 #endif
226
227 emit_jump (label_rtx (label));
228 }
229 \f
230 /* Return the number of times character C occurs in string S. */
231 static int
232 n_occurrences (int c, const char *s)
233 {
234 int n = 0;
235 while (*s)
236 n += (*s++ == c);
237 return n;
238 }
239 \f
240 /* Generate RTL for an asm statement (explicit assembler code).
241 STRING is a STRING_CST node containing the assembler code text,
242 or an ADDR_EXPR containing a STRING_CST. VOL nonzero means the
243 insn is volatile; don't optimize it. */
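/* Illustrative only: a basic asm with no operands, e.g.

     asm volatile ("nop");

   is expanded through this function; the whole template string becomes
   a single ASM_INPUT rtx and no constraints are parsed.  */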
244
245 static void
246 expand_asm_loc (tree string, int vol, location_t locus)
247 {
248 rtx body;
249
250 if (TREE_CODE (string) == ADDR_EXPR)
251 string = TREE_OPERAND (string, 0);
252
253 body = gen_rtx_ASM_INPUT_loc (VOIDmode,
254 ggc_strdup (TREE_STRING_POINTER (string)),
255 locus);
256
257 MEM_VOLATILE_P (body) = vol;
258
259 emit_insn (body);
260 }
261
262 /* Parse the output constraint pointed to by *CONSTRAINT_P. It is the
263 OPERAND_NUMth output operand, indexed from zero. There are NINPUTS
264 inputs and NOUTPUTS outputs to this extended-asm. Upon return,
265 *ALLOWS_MEM will be TRUE iff the constraint allows the use of a
266 memory operand. Similarly, *ALLOWS_REG will be TRUE iff the
267 constraint allows the use of a register operand. And, *IS_INOUT
268 will be true if the operand is read-write, i.e., if it is used as
269 an input as well as an output. If *CONSTRAINT_P is not in
270 canonical form, it will be made canonical. (Note that `+' will be
271 replaced with `=' as part of this process.)
272
273 Returns TRUE if all went well; FALSE if an error occurred. */
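/* Illustrative examples of the behavior documented above: "=r" sets
   *ALLOWS_REG; "+m" sets *ALLOWS_MEM and *IS_INOUT and is rewritten to
   "=m"; a legacy "r=" is rewritten to "=r" and draws the
   "not at the beginning" warning below.  */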
274
275 bool
276 parse_output_constraint (const char **constraint_p, int operand_num,
277 int ninputs, int noutputs, bool *allows_mem,
278 bool *allows_reg, bool *is_inout)
279 {
280 const char *constraint = *constraint_p;
281 const char *p;
282
283 /* Assume the constraint doesn't allow the use of either a register
284 or memory. */
285 *allows_mem = false;
286 *allows_reg = false;
287
288 /* Allow the `=' or `+' to not be at the beginning of the string,
289 since it wasn't explicitly documented that way, and there is a
290 large body of code that puts it last. Swap the character to
291 the front, so as not to uglify any place else. */
292 p = strchr (constraint, '=');
293 if (!p)
294 p = strchr (constraint, '+');
295
296 /* If the string contains neither `=' nor `+', issue an error
297 message. */
298 if (!p)
299 {
300 error ("output operand constraint lacks %<=%>");
301 return false;
302 }
303
304 /* If the constraint begins with `+', then the operand is both read
305 from and written to. */
306 *is_inout = (*p == '+');
307
308 /* Canonicalize the output constraint so that it begins with `='. */
309 if (p != constraint || *is_inout)
310 {
311 char *buf;
312 size_t c_len = strlen (constraint);
313
314 if (p != constraint)
315 warning (0, "output constraint %qc for operand %d "
316 "is not at the beginning",
317 *p, operand_num);
318
319 /* Make a copy of the constraint. */
320 buf = XALLOCAVEC (char, c_len + 1);
321 strcpy (buf, constraint);
322 /* Swap the first character and the `=' or `+'. */
323 buf[p - constraint] = buf[0];
324 /* Make sure the first character is an `='. (Until we do this,
325 it might be a `+'.) */
326 buf[0] = '=';
327 /* Replace the constraint with the canonicalized string. */
328 *constraint_p = ggc_alloc_string (buf, c_len);
329 constraint = *constraint_p;
330 }
331
332 /* Loop through the constraint string. */
333 for (p = constraint + 1; *p; p += CONSTRAINT_LEN (*p, p))
334 switch (*p)
335 {
336 case '+':
337 case '=':
338 error ("operand constraint contains incorrectly positioned "
339 "%<+%> or %<=%>");
340 return false;
341
342 case '%':
343 if (operand_num + 1 == ninputs + noutputs)
344 {
345 error ("%<%%%> constraint used with last operand");
346 return false;
347 }
348 break;
349
350 case 'V': case TARGET_MEM_CONSTRAINT: case 'o':
351 *allows_mem = true;
352 break;
353
354 case '?': case '!': case '*': case '&': case '#':
355 case 'E': case 'F': case 'G': case 'H':
356 case 's': case 'i': case 'n':
357 case 'I': case 'J': case 'K': case 'L': case 'M':
358 case 'N': case 'O': case 'P': case ',':
359 break;
360
361 case '0': case '1': case '2': case '3': case '4':
362 case '5': case '6': case '7': case '8': case '9':
363 case '[':
364 error ("matching constraint not valid in output operand");
365 return false;
366
367 case '<': case '>':
368 /* ??? Before flow, auto inc/dec insns are not supposed to exist,
369 excepting those that expand_call created. So match memory
370 and hope. */
371 *allows_mem = true;
372 break;
373
374 case 'g': case 'X':
375 *allows_reg = true;
376 *allows_mem = true;
377 break;
378
379 case 'p': case 'r':
380 *allows_reg = true;
381 break;
382
383 default:
384 if (!ISALPHA (*p))
385 break;
386 if (REG_CLASS_FROM_CONSTRAINT (*p, p) != NO_REGS)
387 *allows_reg = true;
388 #ifdef EXTRA_CONSTRAINT_STR
389 else if (EXTRA_ADDRESS_CONSTRAINT (*p, p))
390 *allows_reg = true;
391 else if (EXTRA_MEMORY_CONSTRAINT (*p, p))
392 *allows_mem = true;
393 else
394 {
395 /* Otherwise we can't assume anything about the nature of
396 the constraint except that it isn't purely registers.
397 Treat it like "g" and hope for the best. */
398 *allows_reg = true;
399 *allows_mem = true;
400 }
401 #endif
402 break;
403 }
404
405 return true;
406 }
407
408 /* Similar, but for input constraints. */
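/* Illustrative examples: "m" sets *ALLOWS_MEM, "ri" sets *ALLOWS_REG,
   and in the simplest case a matching constraint such as "0" makes the
   answer come from output operand 0's constraint instead.  */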
409
410 bool
411 parse_input_constraint (const char **constraint_p, int input_num,
412 int ninputs, int noutputs, int ninout,
413 const char * const * constraints,
414 bool *allows_mem, bool *allows_reg)
415 {
416 const char *constraint = *constraint_p;
417 const char *orig_constraint = constraint;
418 size_t c_len = strlen (constraint);
419 size_t j;
420 bool saw_match = false;
421
422 /* Assume the constraint doesn't allow the use of either
423 a register or memory. */
424 *allows_mem = false;
425 *allows_reg = false;
426
427 /* Make sure constraint has neither `=', `+', nor '&'. */
428
429 for (j = 0; j < c_len; j += CONSTRAINT_LEN (constraint[j], constraint+j))
430 switch (constraint[j])
431 {
432 case '+': case '=': case '&':
433 if (constraint == orig_constraint)
434 {
435 error ("input operand constraint contains %qc", constraint[j]);
436 return false;
437 }
438 break;
439
440 case '%':
441 if (constraint == orig_constraint
442 && input_num + 1 == ninputs - ninout)
443 {
444 error ("%<%%%> constraint used with last operand");
445 return false;
446 }
447 break;
448
449 case 'V': case TARGET_MEM_CONSTRAINT: case 'o':
450 *allows_mem = true;
451 break;
452
453 case '<': case '>':
454 case '?': case '!': case '*': case '#':
455 case 'E': case 'F': case 'G': case 'H':
456 case 's': case 'i': case 'n':
457 case 'I': case 'J': case 'K': case 'L': case 'M':
458 case 'N': case 'O': case 'P': case ',':
459 break;
460
461 /* Whether or not a numeric constraint allows a register is
462 decided by the matching constraint, and so there is no need
463 to do anything special with them. We must handle them in
464 the default case, so that we don't unnecessarily force
465 operands to memory. */
466 case '0': case '1': case '2': case '3': case '4':
467 case '5': case '6': case '7': case '8': case '9':
468 {
469 char *end;
470 unsigned long match;
471
472 saw_match = true;
473
474 match = strtoul (constraint + j, &end, 10);
475 if (match >= (unsigned long) noutputs)
476 {
477 error ("matching constraint references invalid operand number");
478 return false;
479 }
480
481 /* Try and find the real constraint for this dup. Only do this
482 if the matching constraint is the only alternative. */
483 if (*end == '\0'
484 && (j == 0 || (j == 1 && constraint[0] == '%')))
485 {
486 constraint = constraints[match];
487 *constraint_p = constraint;
488 c_len = strlen (constraint);
489 j = 0;
490 /* ??? At the end of the loop, we will skip the first part of
491 the matched constraint. This assumes not only that the
492 other constraint is an output constraint, but also that
493 the '=' or '+' come first. */
494 break;
495 }
496 else
497 j = end - constraint;
498 /* Anticipate increment at end of loop. */
499 j--;
500 }
501 /* Fall through. */
502
503 case 'p': case 'r':
504 *allows_reg = true;
505 break;
506
507 case 'g': case 'X':
508 *allows_reg = true;
509 *allows_mem = true;
510 break;
511
512 default:
513 if (! ISALPHA (constraint[j]))
514 {
515 error ("invalid punctuation %qc in constraint", constraint[j]);
516 return false;
517 }
518 if (REG_CLASS_FROM_CONSTRAINT (constraint[j], constraint + j)
519 != NO_REGS)
520 *allows_reg = true;
521 #ifdef EXTRA_CONSTRAINT_STR
522 else if (EXTRA_ADDRESS_CONSTRAINT (constraint[j], constraint + j))
523 *allows_reg = true;
524 else if (EXTRA_MEMORY_CONSTRAINT (constraint[j], constraint + j))
525 *allows_mem = true;
526 else
527 {
528 /* Otherwise we can't assume anything about the nature of
529 the constraint except that it isn't purely registers.
530 Treat it like "g" and hope for the best. */
531 *allows_reg = true;
532 *allows_mem = true;
533 }
534 #endif
535 break;
536 }
537
538 if (saw_match && !*allows_reg)
539 warning (0, "matching constraint does not allow a register");
540
541 return true;
542 }
543
544 /* Return DECL iff there's an overlap between *REGS and DECL, where DECL
545 can be an asm-declared register. Called via walk_tree. */
546
547 static tree
548 decl_overlaps_hard_reg_set_p (tree *declp, int *walk_subtrees,
549 void *data)
550 {
551 tree decl = *declp;
552 const HARD_REG_SET *const regs = (const HARD_REG_SET *) data;
553
554 if (TREE_CODE (decl) == VAR_DECL)
555 {
556 if (DECL_HARD_REGISTER (decl)
557 && REG_P (DECL_RTL (decl))
558 && REGNO (DECL_RTL (decl)) < FIRST_PSEUDO_REGISTER)
559 {
560 rtx reg = DECL_RTL (decl);
561
562 if (overlaps_hard_reg_set_p (*regs, GET_MODE (reg), REGNO (reg)))
563 return decl;
564 }
565 *walk_subtrees = 0;
566 }
567 else if (TYPE_P (decl) || TREE_CODE (decl) == PARM_DECL)
568 *walk_subtrees = 0;
569 return NULL_TREE;
570 }
571
572 /* If there is an overlap between *REGS and DECL, return the first overlap
573 found. */
574 tree
575 tree_overlaps_hard_reg_set (tree decl, HARD_REG_SET *regs)
576 {
577 return walk_tree (&decl, decl_overlaps_hard_reg_set_p, regs, NULL);
578 }
579
580 /* Check for overlap between registers marked in CLOBBERED_REGS and
581 anything inappropriate in T. Emit an error and return true if an
582 overlap is found, false otherwise. */
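/* Illustrative only: passing a variable declared as
   'register int r asm ("r1")' as an asm operand while also listing
   "r1" among the clobbers triggers the error below; DECL_REGISTER is
   then cleared so the error is reported only once.  */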
583
584 static bool
585 tree_conflicts_with_clobbers_p (tree t, HARD_REG_SET *clobbered_regs)
586 {
587 /* Conflicts between asm-declared register variables and the clobber
588 list are not allowed. */
589 tree overlap = tree_overlaps_hard_reg_set (t, clobbered_regs);
590
591 if (overlap)
592 {
593 error ("asm-specifier for variable %qE conflicts with asm clobber list",
594 DECL_NAME (overlap));
595
596 /* Reset registerness to stop multiple errors being emitted for a
597 single variable. */
598 DECL_REGISTER (overlap) = 0;
599 return true;
600 }
601
602 return false;
603 }
604
605 /* Generate RTL for an asm statement with arguments.
606 STRING is the instruction template.
607 OUTPUTS is a list of output arguments (lvalues); INPUTS a list of inputs.
608 Each output or input has an expression in the TREE_VALUE and
609 a tree list in TREE_PURPOSE which in turn contains a constraint
610 name in TREE_VALUE (or NULL_TREE) and a constraint string
611 in TREE_PURPOSE.
612 CLOBBERS is a list of STRING_CST nodes each naming a hard register
613 that is clobbered by this insn.
614
615 LABELS is a list of labels, and if LABELS is non-NULL, FALLTHRU_BB
616 should be the fallthru basic block of the asm goto.
617
618 Not all kinds of lvalue that may appear in OUTPUTS can be stored directly.
619 Some elements of OUTPUTS may be replaced with trees representing temporary
620 values. The caller should copy those temporary values to the originally
621 specified lvalues.
622
623 VOL nonzero means the insn is volatile; don't optimize it. */
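/* A purely illustrative example of the data this function receives:

     asm volatile ("..." : "=r" (x) : "r" (y) : "memory");

   arrives with OUTPUTS holding one lvalue whose constraint is "=r",
   INPUTS holding one rvalue whose constraint is "r", and CLOBBERS
   holding the STRING_CST "memory".  LABELS is non-empty only for
   asm goto.  */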
624
625 static void
626 expand_asm_operands (tree string, tree outputs, tree inputs,
627 tree clobbers, tree labels, basic_block fallthru_bb,
628 int vol, location_t locus)
629 {
630 rtvec argvec, constraintvec, labelvec;
631 rtx body;
632 int ninputs = list_length (inputs);
633 int noutputs = list_length (outputs);
634 int nlabels = list_length (labels);
635 int ninout;
636 int nclobbers;
637 HARD_REG_SET clobbered_regs;
638 int clobber_conflict_found = 0;
639 tree tail;
640 tree t;
641 int i;
642 /* Vector of RTX's of evaluated output operands. */
643 rtx *output_rtx = XALLOCAVEC (rtx, noutputs);
644 int *inout_opnum = XALLOCAVEC (int, noutputs);
645 rtx *real_output_rtx = XALLOCAVEC (rtx, noutputs);
646 enum machine_mode *inout_mode = XALLOCAVEC (enum machine_mode, noutputs);
647 const char **constraints = XALLOCAVEC (const char *, noutputs + ninputs);
648 int old_generating_concat_p = generating_concat_p;
649 rtx fallthru_label = NULL_RTX;
650
651 /* An ASM with no outputs needs to be treated as volatile, for now. */
652 if (noutputs == 0)
653 vol = 1;
654
655 if (! check_operand_nalternatives (outputs, inputs))
656 return;
657
658 string = resolve_asm_operand_names (string, outputs, inputs, labels);
659
660 /* Collect constraints. */
661 i = 0;
662 for (t = outputs; t ; t = TREE_CHAIN (t), i++)
663 constraints[i] = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (t)));
664 for (t = inputs; t ; t = TREE_CHAIN (t), i++)
665 constraints[i] = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (t)));
666
667 /* Sometimes we wish to automatically clobber registers across an asm.
668 Case in point is when the i386 backend moved from cc0 to a hard reg --
669 maintaining source-level compatibility means automatically clobbering
670 the flags register. */
671 clobbers = targetm.md_asm_clobbers (outputs, inputs, clobbers);
672
673 /* Count the number of meaningful clobbered registers, ignoring what
674 we would ignore later. */
675 nclobbers = 0;
676 CLEAR_HARD_REG_SET (clobbered_regs);
677 for (tail = clobbers; tail; tail = TREE_CHAIN (tail))
678 {
679 const char *regname;
680 int nregs;
681
682 if (TREE_VALUE (tail) == error_mark_node)
683 return;
684 regname = TREE_STRING_POINTER (TREE_VALUE (tail));
685
686 i = decode_reg_name_and_count (regname, &nregs);
687 if (i == -4)
688 ++nclobbers;
689 else if (i == -2)
690 error ("unknown register name %qs in %<asm%>", regname);
691
692 /* Mark clobbered registers. */
693 if (i >= 0)
694 {
695 int reg;
696
697 for (reg = i; reg < i + nregs; reg++)
698 {
699 ++nclobbers;
700
701 /* Clobbering the PIC register is an error. */
702 if (reg == (int) PIC_OFFSET_TABLE_REGNUM)
703 {
704 error ("PIC register clobbered by %qs in %<asm%>", regname);
705 return;
706 }
707
708 SET_HARD_REG_BIT (clobbered_regs, reg);
709 }
710 }
711 }
712
713 /* First pass over inputs and outputs checks validity and sets
714 mark_addressable if needed. */
715
716 ninout = 0;
717 for (i = 0, tail = outputs; tail; tail = TREE_CHAIN (tail), i++)
718 {
719 tree val = TREE_VALUE (tail);
720 tree type = TREE_TYPE (val);
721 const char *constraint;
722 bool is_inout;
723 bool allows_reg;
724 bool allows_mem;
725
726 /* If there's an erroneous arg, emit no insn. */
727 if (type == error_mark_node)
728 return;
729
730 /* Try to parse the output constraint. If that fails, there's
731 no point in going further. */
732 constraint = constraints[i];
733 if (!parse_output_constraint (&constraint, i, ninputs, noutputs,
734 &allows_mem, &allows_reg, &is_inout))
735 return;
736
737 if (! allows_reg
738 && (allows_mem
739 || is_inout
740 || (DECL_P (val)
741 && REG_P (DECL_RTL (val))
742 && GET_MODE (DECL_RTL (val)) != TYPE_MODE (type))))
743 mark_addressable (val);
744
745 if (is_inout)
746 ninout++;
747 }
748
749 ninputs += ninout;
750 if (ninputs + noutputs > MAX_RECOG_OPERANDS)
751 {
752 error ("more than %d operands in %<asm%>", MAX_RECOG_OPERANDS);
753 return;
754 }
755
756 for (i = 0, tail = inputs; tail; i++, tail = TREE_CHAIN (tail))
757 {
758 bool allows_reg, allows_mem;
759 const char *constraint;
760
761 /* If there's an erroneous arg, emit no insn, because the ASM_INPUT
762 would get VOIDmode and that could cause a crash in reload. */
763 if (TREE_TYPE (TREE_VALUE (tail)) == error_mark_node)
764 return;
765
766 constraint = constraints[i + noutputs];
767 if (! parse_input_constraint (&constraint, i, ninputs, noutputs, ninout,
768 constraints, &allows_mem, &allows_reg))
769 return;
770
771 if (! allows_reg && allows_mem)
772 mark_addressable (TREE_VALUE (tail));
773 }
774
775 /* Second pass evaluates arguments. */
776
777 /* Make sure stack is consistent for asm goto. */
778 if (nlabels > 0)
779 do_pending_stack_adjust ();
780
781 ninout = 0;
782 for (i = 0, tail = outputs; tail; tail = TREE_CHAIN (tail), i++)
783 {
784 tree val = TREE_VALUE (tail);
785 tree type = TREE_TYPE (val);
786 bool is_inout;
787 bool allows_reg;
788 bool allows_mem;
789 rtx op;
790 bool ok;
791
792 ok = parse_output_constraint (&constraints[i], i, ninputs,
793 noutputs, &allows_mem, &allows_reg,
794 &is_inout);
795 gcc_assert (ok);
796
797 /* If an output operand is not a decl or indirect ref and our constraint
798 allows a register, make a temporary to act as an intermediate.
799 Make the asm insn write into that, then our caller will copy it to
800 the real output operand. Likewise for promoted variables. */
801
802 generating_concat_p = 0;
803
804 real_output_rtx[i] = NULL_RTX;
805 if ((TREE_CODE (val) == INDIRECT_REF
806 && allows_mem)
807 || (DECL_P (val)
808 && (allows_mem || REG_P (DECL_RTL (val)))
809 && ! (REG_P (DECL_RTL (val))
810 && GET_MODE (DECL_RTL (val)) != TYPE_MODE (type)))
811 || ! allows_reg
812 || is_inout)
813 {
814 op = expand_expr (val, NULL_RTX, VOIDmode,
815 !allows_reg ? EXPAND_MEMORY : EXPAND_WRITE);
816 if (MEM_P (op))
817 op = validize_mem (op);
818
819 if (! allows_reg && !MEM_P (op))
820 error ("output number %d not directly addressable", i);
821 if ((! allows_mem && MEM_P (op))
822 || GET_CODE (op) == CONCAT)
823 {
824 real_output_rtx[i] = op;
825 op = gen_reg_rtx (GET_MODE (op));
826 if (is_inout)
827 emit_move_insn (op, real_output_rtx[i]);
828 }
829 }
830 else
831 {
832 op = assign_temp (type, 0, 1);
833 op = validize_mem (op);
834 if (!MEM_P (op) && TREE_CODE (TREE_VALUE (tail)) == SSA_NAME)
835 set_reg_attrs_for_decl_rtl (SSA_NAME_VAR (TREE_VALUE (tail)), op);
836 TREE_VALUE (tail) = make_tree (type, op);
837 }
838 output_rtx[i] = op;
839
840 generating_concat_p = old_generating_concat_p;
841
842 if (is_inout)
843 {
844 inout_mode[ninout] = TYPE_MODE (type);
845 inout_opnum[ninout++] = i;
846 }
847
848 if (tree_conflicts_with_clobbers_p (val, &clobbered_regs))
849 clobber_conflict_found = 1;
850 }
851
852 /* Make vectors for the expression-rtx, constraint strings,
853 and named operands. */
854
855 argvec = rtvec_alloc (ninputs);
856 constraintvec = rtvec_alloc (ninputs);
857 labelvec = rtvec_alloc (nlabels);
858
859 body = gen_rtx_ASM_OPERANDS ((noutputs == 0 ? VOIDmode
860 : GET_MODE (output_rtx[0])),
861 ggc_strdup (TREE_STRING_POINTER (string)),
862 empty_string, 0, argvec, constraintvec,
863 labelvec, locus);
864
865 MEM_VOLATILE_P (body) = vol;
866
867 /* Eval the inputs and put them into ARGVEC.
868 Put their constraints into ASM_INPUTs and store in CONSTRAINTS. */
869
870 for (i = 0, tail = inputs; tail; tail = TREE_CHAIN (tail), ++i)
871 {
872 bool allows_reg, allows_mem;
873 const char *constraint;
874 tree val, type;
875 rtx op;
876 bool ok;
877
878 constraint = constraints[i + noutputs];
879 ok = parse_input_constraint (&constraint, i, ninputs, noutputs, ninout,
880 constraints, &allows_mem, &allows_reg);
881 gcc_assert (ok);
882
883 generating_concat_p = 0;
884
885 val = TREE_VALUE (tail);
886 type = TREE_TYPE (val);
887 /* EXPAND_INITIALIZER will not generate code for valid initializer
888 constants, but will still generate code for other types of operand.
889 This is the behavior we want for constant constraints. */
890 op = expand_expr (val, NULL_RTX, VOIDmode,
891 allows_reg ? EXPAND_NORMAL
892 : allows_mem ? EXPAND_MEMORY
893 : EXPAND_INITIALIZER);
894
895 /* Never pass a CONCAT to an ASM. */
896 if (GET_CODE (op) == CONCAT)
897 op = force_reg (GET_MODE (op), op);
898 else if (MEM_P (op))
899 op = validize_mem (op);
900
901 if (asm_operand_ok (op, constraint, NULL) <= 0)
902 {
903 if (allows_reg && TYPE_MODE (type) != BLKmode)
904 op = force_reg (TYPE_MODE (type), op);
905 else if (!allows_mem)
906 warning (0, "asm operand %d probably doesn%'t match constraints",
907 i + noutputs);
908 else if (MEM_P (op))
909 {
910 /* We won't recognize either volatile memory or memory
911 with a queued address as a valid memory_operand
912 at this point. Ignore it: clearly this *is* a memory. */
913 }
914 else
915 gcc_unreachable ();
916 }
917
918 generating_concat_p = old_generating_concat_p;
919 ASM_OPERANDS_INPUT (body, i) = op;
920
921 ASM_OPERANDS_INPUT_CONSTRAINT_EXP (body, i)
922 = gen_rtx_ASM_INPUT (TYPE_MODE (type),
923 ggc_strdup (constraints[i + noutputs]));
924
925 if (tree_conflicts_with_clobbers_p (val, &clobbered_regs))
926 clobber_conflict_found = 1;
927 }
928
929 /* Protect all the operands from the queue now that they have all been
930 evaluated. */
931
932 generating_concat_p = 0;
933
934 /* For in-out operands, copy output rtx to input rtx. */
935 for (i = 0; i < ninout; i++)
936 {
937 int j = inout_opnum[i];
938 char buffer[16];
939
940 ASM_OPERANDS_INPUT (body, ninputs - ninout + i)
941 = output_rtx[j];
942
943 sprintf (buffer, "%d", j);
944 ASM_OPERANDS_INPUT_CONSTRAINT_EXP (body, ninputs - ninout + i)
945 = gen_rtx_ASM_INPUT (inout_mode[i], ggc_strdup (buffer));
946 }
947
948 /* Copy labels to the vector. */
949 for (i = 0, tail = labels; i < nlabels; ++i, tail = TREE_CHAIN (tail))
950 {
951 rtx r;
952 /* If asm goto has any labels in the fallthru basic block, use
953 a label that we emit immediately after the asm goto. Expansion
954 may insert further instructions into the same basic block after
955 asm goto and if we don't do this, insertion of instructions on
956 the fallthru edge might misbehave. See PR58670. */
957 if (fallthru_bb
958 && label_to_block_fn (cfun, TREE_VALUE (tail)) == fallthru_bb)
959 {
960 if (fallthru_label == NULL_RTX)
961 fallthru_label = gen_label_rtx ();
962 r = fallthru_label;
963 }
964 else
965 r = label_rtx (TREE_VALUE (tail));
966 ASM_OPERANDS_LABEL (body, i) = gen_rtx_LABEL_REF (Pmode, r);
967 }
968
969 generating_concat_p = old_generating_concat_p;
970
971 /* Now, for each output, construct an rtx
972 (set OUTPUT (asm_operands INSN OUTPUTCONSTRAINT OUTPUTNUMBER
973 ARGVEC CONSTRAINTS OPNAMES))
974 If there is more than one, put them inside a PARALLEL. */
975
976 if (nlabels > 0 && nclobbers == 0)
977 {
978 gcc_assert (noutputs == 0);
979 emit_jump_insn (body);
980 }
981 else if (noutputs == 0 && nclobbers == 0)
982 {
983 /* No output operands: put in a raw ASM_OPERANDS rtx. */
984 emit_insn (body);
985 }
986 else if (noutputs == 1 && nclobbers == 0)
987 {
988 ASM_OPERANDS_OUTPUT_CONSTRAINT (body) = ggc_strdup (constraints[0]);
989 emit_insn (gen_rtx_SET (VOIDmode, output_rtx[0], body));
990 }
991 else
992 {
993 rtx obody = body;
994 int num = noutputs;
995
996 if (num == 0)
997 num = 1;
998
999 body = gen_rtx_PARALLEL (VOIDmode, rtvec_alloc (num + nclobbers));
1000
1001 /* For each output operand, store a SET. */
1002 for (i = 0, tail = outputs; tail; tail = TREE_CHAIN (tail), i++)
1003 {
1004 XVECEXP (body, 0, i)
1005 = gen_rtx_SET (VOIDmode,
1006 output_rtx[i],
1007 gen_rtx_ASM_OPERANDS
1008 (GET_MODE (output_rtx[i]),
1009 ggc_strdup (TREE_STRING_POINTER (string)),
1010 ggc_strdup (constraints[i]),
1011 i, argvec, constraintvec, labelvec, locus));
1012
1013 MEM_VOLATILE_P (SET_SRC (XVECEXP (body, 0, i))) = vol;
1014 }
1015
1016 /* If there are no outputs (but there are some clobbers)
1017 store the bare ASM_OPERANDS into the PARALLEL. */
1018
1019 if (i == 0)
1020 XVECEXP (body, 0, i++) = obody;
1021
1022 /* Store (clobber REG) for each clobbered register specified. */
1023
1024 for (tail = clobbers; tail; tail = TREE_CHAIN (tail))
1025 {
1026 const char *regname = TREE_STRING_POINTER (TREE_VALUE (tail));
1027 int reg, nregs;
1028 int j = decode_reg_name_and_count (regname, &nregs);
1029 rtx clobbered_reg;
1030
1031 if (j < 0)
1032 {
1033 if (j == -3) /* `cc', which is not a register */
1034 continue;
1035
1036 if (j == -4) /* `memory', don't cache memory across asm */
1037 {
1038 XVECEXP (body, 0, i++)
1039 = gen_rtx_CLOBBER (VOIDmode,
1040 gen_rtx_MEM
1041 (BLKmode,
1042 gen_rtx_SCRATCH (VOIDmode)));
1043 continue;
1044 }
1045
1046 /* Ignore unknown register, error already signaled. */
1047 continue;
1048 }
1049
1050 for (reg = j; reg < j + nregs; reg++)
1051 {
1052 /* Use QImode since that's guaranteed to clobber just
1053 one reg. */
1054 clobbered_reg = gen_rtx_REG (QImode, reg);
1055
1056 /* Do sanity check for overlap between clobbers and
1057 respectively input and outputs that hasn't been
1058 handled. Such overlap should have been detected and
1059 reported above. */
1060 if (!clobber_conflict_found)
1061 {
1062 int opno;
1063
1064 /* We test the old body (obody) contents to avoid
1065 tripping over the under-construction body. */
1066 for (opno = 0; opno < noutputs; opno++)
1067 if (reg_overlap_mentioned_p (clobbered_reg,
1068 output_rtx[opno]))
1069 internal_error
1070 ("asm clobber conflict with output operand");
1071
1072 for (opno = 0; opno < ninputs - ninout; opno++)
1073 if (reg_overlap_mentioned_p (clobbered_reg,
1074 ASM_OPERANDS_INPUT (obody,
1075 opno)))
1076 internal_error
1077 ("asm clobber conflict with input operand");
1078 }
1079
1080 XVECEXP (body, 0, i++)
1081 = gen_rtx_CLOBBER (VOIDmode, clobbered_reg);
1082 }
1083 }
1084
1085 if (nlabels > 0)
1086 emit_jump_insn (body);
1087 else
1088 emit_insn (body);
1089 }
1090
1091 if (fallthru_label)
1092 emit_label (fallthru_label);
1093
1094 /* For any outputs that needed reloading into registers, spill them
1095 back to where they belong. */
1096 for (i = 0; i < noutputs; ++i)
1097 if (real_output_rtx[i])
1098 emit_move_insn (real_output_rtx[i], output_rtx[i]);
1099
1100 crtl->has_asm_statement = 1;
1101 free_temp_slots ();
1102 }
1103
1104 void
1105 expand_asm_stmt (gimple stmt)
1106 {
1107 int noutputs;
1108 tree outputs, tail, t;
1109 tree *o;
1110 size_t i, n;
1111 const char *s;
1112 tree str, out, in, cl, labels;
1113 location_t locus = gimple_location (stmt);
1114 basic_block fallthru_bb = NULL;
1115
1116 /* Meh... convert the gimple asm operands into real tree lists.
1117 Eventually we should make all routines work on the vectors instead
1118 of relying on TREE_CHAIN. */
1119 out = NULL_TREE;
1120 n = gimple_asm_noutputs (stmt);
1121 if (n > 0)
1122 {
1123 t = out = gimple_asm_output_op (stmt, 0);
1124 for (i = 1; i < n; i++)
1125 t = TREE_CHAIN (t) = gimple_asm_output_op (stmt, i);
1126 }
1127
1128 in = NULL_TREE;
1129 n = gimple_asm_ninputs (stmt);
1130 if (n > 0)
1131 {
1132 t = in = gimple_asm_input_op (stmt, 0);
1133 for (i = 1; i < n; i++)
1134 t = TREE_CHAIN (t) = gimple_asm_input_op (stmt, i);
1135 }
1136
1137 cl = NULL_TREE;
1138 n = gimple_asm_nclobbers (stmt);
1139 if (n > 0)
1140 {
1141 t = cl = gimple_asm_clobber_op (stmt, 0);
1142 for (i = 1; i < n; i++)
1143 t = TREE_CHAIN (t) = gimple_asm_clobber_op (stmt, i);
1144 }
1145
1146 labels = NULL_TREE;
1147 n = gimple_asm_nlabels (stmt);
1148 if (n > 0)
1149 {
1150 edge fallthru = find_fallthru_edge (gimple_bb (stmt)->succs);
1151 if (fallthru)
1152 fallthru_bb = fallthru->dest;
1153 t = labels = gimple_asm_label_op (stmt, 0);
1154 for (i = 1; i < n; i++)
1155 t = TREE_CHAIN (t) = gimple_asm_label_op (stmt, i);
1156 }
1157
1158 s = gimple_asm_string (stmt);
1159 str = build_string (strlen (s), s);
1160
1161 if (gimple_asm_input_p (stmt))
1162 {
1163 expand_asm_loc (str, gimple_asm_volatile_p (stmt), locus);
1164 return;
1165 }
1166
1167 outputs = out;
1168 noutputs = gimple_asm_noutputs (stmt);
1169 /* o[I] is the place that output number I should be written. */
1170 o = (tree *) alloca (noutputs * sizeof (tree));
1171
1172 /* Record the contents of OUTPUTS before it is modified. */
1173 for (i = 0, tail = outputs; tail; tail = TREE_CHAIN (tail), i++)
1174 o[i] = TREE_VALUE (tail);
1175
1176 /* Generate the ASM_OPERANDS insn; store into the TREE_VALUEs of
1177 OUTPUTS some trees for where the values were actually stored. */
1178 expand_asm_operands (str, outputs, in, cl, labels, fallthru_bb,
1179 gimple_asm_volatile_p (stmt), locus);
1180
1181 /* Copy all the intermediate outputs into the specified outputs. */
1182 for (i = 0, tail = outputs; tail; tail = TREE_CHAIN (tail), i++)
1183 {
1184 if (o[i] != TREE_VALUE (tail))
1185 {
1186 expand_assignment (o[i], TREE_VALUE (tail), false);
1187 free_temp_slots ();
1188
1189 /* Restore the original value so that it's correct the next
1190 time we expand this function. */
1191 TREE_VALUE (tail) = o[i];
1192 }
1193 }
1194 }
1195
1196 /* A subroutine of expand_asm_operands. Check that all operands have
1197 the same number of alternatives. Return true if so. */
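/* Illustrative only: the constraints "=r,m" and "ri,g" both describe
   two alternatives and pass this check together, whereas mixing
   "=r,m" with a plain "r" is rejected because the comma counts
   differ.  */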
1198
1199 static bool
1200 check_operand_nalternatives (tree outputs, tree inputs)
1201 {
1202 if (outputs || inputs)
1203 {
1204 tree tmp = TREE_PURPOSE (outputs ? outputs : inputs);
1205 int nalternatives
1206 = n_occurrences (',', TREE_STRING_POINTER (TREE_VALUE (tmp)));
1207 tree next = inputs;
1208
1209 if (nalternatives + 1 > MAX_RECOG_ALTERNATIVES)
1210 {
1211 error ("too many alternatives in %<asm%>");
1212 return false;
1213 }
1214
1215 tmp = outputs;
1216 while (tmp)
1217 {
1218 const char *constraint
1219 = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (tmp)));
1220
1221 if (n_occurrences (',', constraint) != nalternatives)
1222 {
1223 error ("operand constraints for %<asm%> differ "
1224 "in number of alternatives");
1225 return false;
1226 }
1227
1228 if (TREE_CHAIN (tmp))
1229 tmp = TREE_CHAIN (tmp);
1230 else
1231 tmp = next, next = 0;
1232 }
1233 }
1234
1235 return true;
1236 }
1237
1238 /* A subroutine of expand_asm_operands. Check that all operand names
1239 are unique. Return true if so. We rely on the fact that these names
1240 are identifiers, and so have been canonicalized by get_identifier,
1241 so all we need are pointer comparisons. */
1242
1243 static bool
1244 check_unique_operand_names (tree outputs, tree inputs, tree labels)
1245 {
1246 tree i, j, i_name = NULL_TREE;
1247
1248 for (i = outputs; i ; i = TREE_CHAIN (i))
1249 {
1250 i_name = TREE_PURPOSE (TREE_PURPOSE (i));
1251 if (! i_name)
1252 continue;
1253
1254 for (j = TREE_CHAIN (i); j ; j = TREE_CHAIN (j))
1255 if (simple_cst_equal (i_name, TREE_PURPOSE (TREE_PURPOSE (j))))
1256 goto failure;
1257 }
1258
1259 for (i = inputs; i ; i = TREE_CHAIN (i))
1260 {
1261 i_name = TREE_PURPOSE (TREE_PURPOSE (i));
1262 if (! i_name)
1263 continue;
1264
1265 for (j = TREE_CHAIN (i); j ; j = TREE_CHAIN (j))
1266 if (simple_cst_equal (i_name, TREE_PURPOSE (TREE_PURPOSE (j))))
1267 goto failure;
1268 for (j = outputs; j ; j = TREE_CHAIN (j))
1269 if (simple_cst_equal (i_name, TREE_PURPOSE (TREE_PURPOSE (j))))
1270 goto failure;
1271 }
1272
1273 for (i = labels; i ; i = TREE_CHAIN (i))
1274 {
1275 i_name = TREE_PURPOSE (i);
1276 if (! i_name)
1277 continue;
1278
1279 for (j = TREE_CHAIN (i); j ; j = TREE_CHAIN (j))
1280 if (simple_cst_equal (i_name, TREE_PURPOSE (j)))
1281 goto failure;
1282 for (j = inputs; j ; j = TREE_CHAIN (j))
1283 if (simple_cst_equal (i_name, TREE_PURPOSE (TREE_PURPOSE (j))))
1284 goto failure;
1285 }
1286
1287 return true;
1288
1289 failure:
1290 error ("duplicate asm operand name %qs", TREE_STRING_POINTER (i_name));
1291 return false;
1292 }
1293
1294 /* A subroutine of expand_asm_operands. Resolve the names of the operands
1295 in *POUTPUTS and *PINPUTS to numbers, and replace the name expansions in
1296 STRING and in the constraints to those numbers. */
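/* Illustrative only: for

     asm ("mov %[in], %[out]" : [out] "=r" (dst) : [in] "r" (src));

   outputs are numbered before inputs, so %[out] becomes %0, %[in]
   becomes %1, and the template is rewritten to "mov %1, %0".  */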
1297
1298 tree
1299 resolve_asm_operand_names (tree string, tree outputs, tree inputs, tree labels)
1300 {
1301 char *buffer;
1302 char *p;
1303 const char *c;
1304 tree t;
1305
1306 check_unique_operand_names (outputs, inputs, labels);
1307
1308 /* Substitute [<name>] in input constraint strings. There should be no
1309 named operands in output constraints. */
1310 for (t = inputs; t ; t = TREE_CHAIN (t))
1311 {
1312 c = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (t)));
1313 if (strchr (c, '[') != NULL)
1314 {
1315 p = buffer = xstrdup (c);
1316 while ((p = strchr (p, '[')) != NULL)
1317 p = resolve_operand_name_1 (p, outputs, inputs, NULL);
1318 TREE_VALUE (TREE_PURPOSE (t))
1319 = build_string (strlen (buffer), buffer);
1320 free (buffer);
1321 }
1322 }
1323
1324 /* Now check for any needed substitutions in the template. */
1325 c = TREE_STRING_POINTER (string);
1326 while ((c = strchr (c, '%')) != NULL)
1327 {
1328 if (c[1] == '[')
1329 break;
1330 else if (ISALPHA (c[1]) && c[2] == '[')
1331 break;
1332 else
1333 {
1334 c += 1 + (c[1] == '%');
1335 continue;
1336 }
1337 }
1338
1339 if (c)
1340 {
1341 /* OK, we need to make a copy so we can perform the substitutions.
1342 Assume that we will not need extra space--we get to remove '['
1343 and ']', which means we cannot have a problem until we have more
1344 than 999 operands. */
1345 buffer = xstrdup (TREE_STRING_POINTER (string));
1346 p = buffer + (c - TREE_STRING_POINTER (string));
1347
1348 while ((p = strchr (p, '%')) != NULL)
1349 {
1350 if (p[1] == '[')
1351 p += 1;
1352 else if (ISALPHA (p[1]) && p[2] == '[')
1353 p += 2;
1354 else
1355 {
1356 p += 1 + (p[1] == '%');
1357 continue;
1358 }
1359
1360 p = resolve_operand_name_1 (p, outputs, inputs, labels);
1361 }
1362
1363 string = build_string (strlen (buffer), buffer);
1364 free (buffer);
1365 }
1366
1367 return string;
1368 }
1369
1370 /* A subroutine of resolve_operand_names. P points to the '[' for a
1371 potential named operand of the form [<name>]. In place, replace
1372 the name and brackets with a number. Return a pointer to the
1373 balance of the string after substitution. */
1374
1375 static char *
1376 resolve_operand_name_1 (char *p, tree outputs, tree inputs, tree labels)
1377 {
1378 char *q;
1379 int op;
1380 tree t;
1381
1382 /* Collect the operand name. */
1383 q = strchr (++p, ']');
1384 if (!q)
1385 {
1386 error ("missing close brace for named operand");
1387 return strchr (p, '\0');
1388 }
1389 *q = '\0';
1390
1391 /* Resolve the name to a number. */
1392 for (op = 0, t = outputs; t ; t = TREE_CHAIN (t), op++)
1393 {
1394 tree name = TREE_PURPOSE (TREE_PURPOSE (t));
1395 if (name && strcmp (TREE_STRING_POINTER (name), p) == 0)
1396 goto found;
1397 }
1398 for (t = inputs; t ; t = TREE_CHAIN (t), op++)
1399 {
1400 tree name = TREE_PURPOSE (TREE_PURPOSE (t));
1401 if (name && strcmp (TREE_STRING_POINTER (name), p) == 0)
1402 goto found;
1403 }
1404 for (t = labels; t ; t = TREE_CHAIN (t), op++)
1405 {
1406 tree name = TREE_PURPOSE (t);
1407 if (name && strcmp (TREE_STRING_POINTER (name), p) == 0)
1408 goto found;
1409 }
1410
1411 error ("undefined named operand %qs", identifier_to_locale (p));
1412 op = 0;
1413
1414 found:
1415 /* Replace the name with the number. Unfortunately, not all libraries
1416 get the return value of sprintf correct, so search for the end of the
1417 generated string by hand. */
1418 sprintf (--p, "%d", op);
1419 p = strchr (p, '\0');
1420
1421 /* Verify the no extra buffer space assumption. */
1422 gcc_assert (p <= q);
1423
1424 /* Shift the rest of the buffer down to fill the gap. */
1425 memmove (p, q + 1, strlen (q + 1) + 1);
1426
1427 return p;
1428 }
1429 \f
1430 /* Generate RTL to return from the current function, with no value.
1431 (That is, we do not do anything about returning any value.) */
1432
1433 void
1434 expand_null_return (void)
1435 {
1436 /* If this function was declared to return a value, but we
1437 didn't, clobber the return registers so that they are not
1438 propagated live to the rest of the function. */
1439 clobber_return_register ();
1440
1441 expand_null_return_1 ();
1442 }
1443
1444 /* Generate RTL to return directly from the current function.
1445 (That is, we bypass any return value.) */
1446
1447 void
1448 expand_naked_return (void)
1449 {
1450 rtx end_label;
1451
1452 clear_pending_stack_adjust ();
1453 do_pending_stack_adjust ();
1454
1455 end_label = naked_return_label;
1456 if (end_label == 0)
1457 end_label = naked_return_label = gen_label_rtx ();
1458
1459 emit_jump (end_label);
1460 }
1461
1462 /* Generate RTL to return from the current function, with value VAL. */
1463
1464 static void
1465 expand_value_return (rtx val)
1466 {
1467 /* Copy the value to the return location unless it's already there. */
1468
1469 tree decl = DECL_RESULT (current_function_decl);
1470 rtx return_reg = DECL_RTL (decl);
1471 if (return_reg != val)
1472 {
1473 tree funtype = TREE_TYPE (current_function_decl);
1474 tree type = TREE_TYPE (decl);
1475 int unsignedp = TYPE_UNSIGNED (type);
1476 enum machine_mode old_mode = DECL_MODE (decl);
1477 enum machine_mode mode;
1478 if (DECL_BY_REFERENCE (decl))
1479 mode = promote_function_mode (type, old_mode, &unsignedp, funtype, 2);
1480 else
1481 mode = promote_function_mode (type, old_mode, &unsignedp, funtype, 1);
1482
1483 if (mode != old_mode)
1484 val = convert_modes (mode, old_mode, val, unsignedp);
1485
1486 if (GET_CODE (return_reg) == PARALLEL)
1487 emit_group_load (return_reg, val, type, int_size_in_bytes (type));
1488 else
1489 emit_move_insn (return_reg, val);
1490 }
1491
1492 expand_null_return_1 ();
1493 }
1494
1495 /* Output a return with no value. */
1496
1497 static void
1498 expand_null_return_1 (void)
1499 {
1500 clear_pending_stack_adjust ();
1501 do_pending_stack_adjust ();
1502 emit_jump (return_label);
1503 }
1504 \f
1505 /* Generate RTL to evaluate the expression RETVAL and return it
1506 from the current function. */
1507
1508 void
1509 expand_return (tree retval)
1510 {
1511 rtx result_rtl;
1512 rtx val = 0;
1513 tree retval_rhs;
1514
1515 /* If function wants no value, give it none. */
1516 if (TREE_CODE (TREE_TYPE (TREE_TYPE (current_function_decl))) == VOID_TYPE)
1517 {
1518 expand_normal (retval);
1519 expand_null_return ();
1520 return;
1521 }
1522
1523 if (retval == error_mark_node)
1524 {
1525 /* Treat this like a return of no value from a function that
1526 returns a value. */
1527 expand_null_return ();
1528 return;
1529 }
1530 else if ((TREE_CODE (retval) == MODIFY_EXPR
1531 || TREE_CODE (retval) == INIT_EXPR)
1532 && TREE_CODE (TREE_OPERAND (retval, 0)) == RESULT_DECL)
1533 retval_rhs = TREE_OPERAND (retval, 1);
1534 else
1535 retval_rhs = retval;
1536
1537 result_rtl = DECL_RTL (DECL_RESULT (current_function_decl));
1538
1539 /* If we are returning the RESULT_DECL, then the value has already
1540 been stored into it, so we don't have to do anything special. */
1541 if (TREE_CODE (retval_rhs) == RESULT_DECL)
1542 expand_value_return (result_rtl);
1543
1544 /* If the result is an aggregate that is being returned in one (or more)
1545 registers, load the registers here. */
1546
1547 else if (retval_rhs != 0
1548 && TYPE_MODE (TREE_TYPE (retval_rhs)) == BLKmode
1549 && REG_P (result_rtl))
1550 {
1551 val = copy_blkmode_to_reg (GET_MODE (result_rtl), retval_rhs);
1552 if (val)
1553 {
1554 /* Use the mode of the result value on the return register. */
1555 PUT_MODE (result_rtl, GET_MODE (val));
1556 expand_value_return (val);
1557 }
1558 else
1559 expand_null_return ();
1560 }
1561 else if (retval_rhs != 0
1562 && !VOID_TYPE_P (TREE_TYPE (retval_rhs))
1563 && (REG_P (result_rtl)
1564 || (GET_CODE (result_rtl) == PARALLEL)))
1565 {
1566 /* Calculate the return value into a temporary (usually a pseudo
1567 reg). */
1568 tree ot = TREE_TYPE (DECL_RESULT (current_function_decl));
1569 tree nt = build_qualified_type (ot, TYPE_QUALS (ot) | TYPE_QUAL_CONST);
1570
1571 val = assign_temp (nt, 0, 1);
1572 val = expand_expr (retval_rhs, val, GET_MODE (val), EXPAND_NORMAL);
1573 val = force_not_mem (val);
1574 /* Return the calculated value. */
1575 expand_value_return (val);
1576 }
1577 else
1578 {
1579 /* No hard reg used; calculate value into hard return reg. */
1580 expand_expr (retval, const0_rtx, VOIDmode, EXPAND_NORMAL);
1581 expand_value_return (result_rtl);
1582 }
1583 }
1584 \f
1585 \f
1586 /* Emit code to save the current value of stack. */
1587 rtx
1588 expand_stack_save (void)
1589 {
1590 rtx ret = NULL_RTX;
1591
1592 do_pending_stack_adjust ();
1593 emit_stack_save (SAVE_BLOCK, &ret);
1594 return ret;
1595 }
1596
1597 /* Emit code to restore the current value of stack. */
1598 void
1599 expand_stack_restore (tree var)
1600 {
1601 rtx prev, sa = expand_normal (var);
1602
1603 sa = convert_memory_address (Pmode, sa);
1604
1605 prev = get_last_insn ();
1606 emit_stack_restore (SAVE_BLOCK, sa);
1607 fixup_args_size_notes (prev, get_last_insn (), 0);
1608 }
1609
1610 /* Generate code to jump to LABEL if OP0 and OP1 are equal in mode MODE. PROB
1611 is the probability of jumping to LABEL. */
1612 static void
1613 do_jump_if_equal (enum machine_mode mode, rtx op0, rtx op1, rtx label,
1614 int unsignedp, int prob)
1615 {
1616 gcc_assert (prob <= REG_BR_PROB_BASE);
1617 do_compare_rtx_and_jump (op0, op1, EQ, unsignedp, mode,
1618 NULL_RTX, NULL_RTX, label, prob);
1619 }
1620 \f
1621 /* Do the insertion of a case label into case_list. The labels are
1622 fed to us in descending order from the sorted vector of case labels used
1623 in the tree part of the middle end. So the list we construct is
1624 sorted in ascending order.
1625
1626 LABEL is the case label to be inserted. LOW and HIGH are the bounds
1627 against which the index is compared to jump to LABEL and PROB is the
1628 estimated probability LABEL is reached from the switch statement. */
1629
1630 static struct case_node *
1631 add_case_node (struct case_node *head, tree low, tree high,
1632 tree label, int prob, alloc_pool case_node_pool)
1633 {
1634 struct case_node *r;
1635
1636 gcc_checking_assert (low);
1637 gcc_checking_assert (high && (TREE_TYPE (low) == TREE_TYPE (high)));
1638
1639 /* Add this label to the chain. */
1640 r = (struct case_node *) pool_alloc (case_node_pool);
1641 r->low = low;
1642 r->high = high;
1643 r->code_label = label;
1644 r->parent = r->left = NULL;
1645 r->prob = prob;
1646 r->subtree_prob = prob;
1647 r->right = head;
1648 return r;
1649 }
1650 \f
1651 /* Dump ROOT, a list or tree of case nodes, to file F. */
1652
1653 static void
1654 dump_case_nodes (FILE *f, struct case_node *root,
1655 int indent_step, int indent_level)
1656 {
1657 HOST_WIDE_INT low, high;
1658
1659 if (root == 0)
1660 return;
1661 indent_level++;
1662
1663 dump_case_nodes (f, root->left, indent_step, indent_level);
1664
1665 low = tree_to_shwi (root->low);
1666 high = tree_to_shwi (root->high);
1667
1668 fputs (";; ", f);
1669 if (high == low)
1670 fprintf (f, "%*s" HOST_WIDE_INT_PRINT_DEC,
1671 indent_step * indent_level, "", low);
1672 else
1673 fprintf (f, "%*s" HOST_WIDE_INT_PRINT_DEC " ... " HOST_WIDE_INT_PRINT_DEC,
1674 indent_step * indent_level, "", low, high);
1675 fputs ("\n", f);
1676
1677 dump_case_nodes (f, root->right, indent_step, indent_level);
1678 }
1679 \f
1680 #ifndef HAVE_casesi
1681 #define HAVE_casesi 0
1682 #endif
1683
1684 #ifndef HAVE_tablejump
1685 #define HAVE_tablejump 0
1686 #endif
1687
1688 /* Return the smallest number of different values for which it is best to use a
1689 jump-table instead of a tree of conditional branches. */
1690
1691 static unsigned int
1692 case_values_threshold (void)
1693 {
1694 unsigned int threshold = PARAM_VALUE (PARAM_CASE_VALUES_THRESHOLD);
1695
1696 if (threshold == 0)
1697 threshold = targetm.case_values_threshold ();
1698
1699 return threshold;
1700 }
1701
1702 /* Return true if a switch should be expanded as a decision tree.
1703 RANGE is the difference between highest and lowest case.
1704 UNIQ is number of unique case node targets, not counting the default case.
1705 COUNT is the number of comparisons needed, not counting the default case. */
1706
1707 static bool
1708 expand_switch_as_decision_tree_p (tree range,
1709 unsigned int uniq ATTRIBUTE_UNUSED,
1710 unsigned int count)
1711 {
1712 int max_ratio;
1713
1714 /* If neither casesi nor tablejump is available, or flag_jump_tables
1715 overruled us, we really have no choice. */
1716 if (!HAVE_casesi && !HAVE_tablejump)
1717 return true;
1718 if (!flag_jump_tables)
1719 return true;
1720 #ifndef ASM_OUTPUT_ADDR_DIFF_ELT
1721 if (flag_pic)
1722 return true;
1723 #endif
1724
1725 /* If the switch is relatively small, such that the cost of one
1726 indirect jump on the target is higher than the cost of a
1727 decision tree, go with the decision tree.
1728
1729 If range of values is much bigger than number of values,
1730 or if it is too large to represent in a HOST_WIDE_INT,
1731 make a sequence of conditional branches instead of a dispatch.
1732
1733 The definition of "much bigger" depends on whether we are
1734 optimizing for size or for speed. If the former, the maximum
1735 ratio range/count = 3, because this was found to be the optimal
1736 ratio for size on i686-pc-linux-gnu, see PR11823. The ratio
1737 10 is much older, and was probably selected after an extensive
1738 benchmarking investigation on numerous platforms. Or maybe it
1739 just made sense to someone at some point in the history of GCC,
1740 who knows... */
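  /* A worked example with purely illustrative numbers: when optimizing
     for speed and COUNT is not below case_values_threshold (), a switch
     needing 20 comparisons spread over a range of 1000 values gives
     1000 > 10 * 20 and therefore a decision tree, whereas the same 20
     cases packed into a range of 32 values would take the jump-table
     path.  */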
1741 max_ratio = optimize_insn_for_size_p () ? 3 : 10;
1742 if (count < case_values_threshold ()
1743 || ! tree_fits_uhwi_p (range)
1744 || compare_tree_int (range, max_ratio * count) > 0)
1745 return true;
1746
1747 return false;
1748 }
1749
1750 /* Generate a decision tree, switching on INDEX_EXPR and jumping to
1751 one of the labels in CASE_LIST or to the DEFAULT_LABEL.
1752 DEFAULT_PROB is the estimated probability that it jumps to
1753 DEFAULT_LABEL.
1754
1755 We generate a binary decision tree to select the appropriate target
1756 code. This is done as follows:
1757
1758 If the index is a short or char for which we do not have
1759 an insn to handle comparisons directly, convert it to
1760 a full integer now, rather than letting each comparison
1761 generate the conversion.
1762
1763 Load the index into a register.
1764
1765 The list of cases is rearranged into a binary tree,
1766 nearly optimal assuming equal probability for each case.
1767
1768 The tree is transformed into RTL, eliminating redundant
1769 test conditions at the same time.
1770
1771 If program flow could reach the end of the decision tree
1772 an unconditional jump to the default code is emitted.
1773
1774 The above process is unaware of the CFG. The caller has to fix up
1775 the CFG itself. This is done in cfgexpand.c. */
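/* Roughly, and for illustration only: given case values 1, 2, 3, 5
   and 7, balance_case_nodes tends to make 3 the root with {1, 2} on
   the left and {5, 7} on the right, and emit_case_nodes then walks
   that tree emitting compare-and-branch insns.  */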
1776
1777 static void
1778 emit_case_decision_tree (tree index_expr, tree index_type,
1779 struct case_node *case_list, rtx default_label,
1780 int default_prob)
1781 {
1782 rtx index = expand_normal (index_expr);
1783
1784 if (GET_MODE_CLASS (GET_MODE (index)) == MODE_INT
1785 && ! have_insn_for (COMPARE, GET_MODE (index)))
1786 {
1787 int unsignedp = TYPE_UNSIGNED (index_type);
1788 enum machine_mode wider_mode;
1789 for (wider_mode = GET_MODE (index); wider_mode != VOIDmode;
1790 wider_mode = GET_MODE_WIDER_MODE (wider_mode))
1791 if (have_insn_for (COMPARE, wider_mode))
1792 {
1793 index = convert_to_mode (wider_mode, index, unsignedp);
1794 break;
1795 }
1796 }
1797
1798 do_pending_stack_adjust ();
1799
1800 if (MEM_P (index))
1801 {
1802 index = copy_to_reg (index);
1803 if (TREE_CODE (index_expr) == SSA_NAME)
1804 set_reg_attrs_for_decl_rtl (SSA_NAME_VAR (index_expr), index);
1805 }
1806
1807 balance_case_nodes (&case_list, NULL);
1808
1809 if (dump_file && (dump_flags & TDF_DETAILS))
1810 {
1811 int indent_step = ceil_log2 (TYPE_PRECISION (index_type)) + 2;
1812 fprintf (dump_file, ";; Expanding GIMPLE switch as decision tree:\n");
1813 dump_case_nodes (dump_file, case_list, indent_step, 0);
1814 }
1815
1816 emit_case_nodes (index, case_list, default_label, default_prob, index_type);
1817 if (default_label)
1818 emit_jump (default_label);
1819 }
1820
1821 /* Return the sum of probabilities of outgoing edges of basic block BB. */
1822
1823 static int
1824 get_outgoing_edge_probs (basic_block bb)
1825 {
1826 edge e;
1827 edge_iterator ei;
1828 int prob_sum = 0;
1829 if (!bb)
1830 return 0;
1831 FOR_EACH_EDGE (e, ei, bb->succs)
1832 prob_sum += e->probability;
1833 return prob_sum;
1834 }
1835
1836 /* Computes the conditional probability of jumping to a target if the branch
1837 instruction is executed.
1838 TARGET_PROB is the estimated probability of jumping to a target relative
1839 to some basic block BB.
1840 BASE_PROB is the probability of reaching the branch instruction relative
1841 to the same basic block BB. */
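/* Illustrative numbers: with TARGET_PROB == 2500 and BASE_PROB == 5000
   the target is taken half the times the branch executes, so the
   result is REG_BR_PROB_BASE / 2; a nonpositive BASE_PROB yields -1. */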
1842
1843 static inline int
1844 conditional_probability (int target_prob, int base_prob)
1845 {
1846 if (base_prob > 0)
1847 {
1848 gcc_assert (target_prob >= 0);
1849 gcc_assert (target_prob <= base_prob);
1850 return GCOV_COMPUTE_SCALE (target_prob, base_prob);
1851 }
1852 return -1;
1853 }
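/* Worked example (illustrative only, assuming REG_BR_PROB_BASE is 10000):
   if the target is reached with TARGET_PROB == 2000 and the branch insn
   itself with BASE_PROB == 8000, relative to the same block, then the
   branch is taken one time in four and GCOV_COMPUTE_SCALE (2000, 8000)
   yields 2500, i.e. 25% of REG_BR_PROB_BASE.  A non-positive BASE_PROB
   yields the sentinel value -1.  */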
1854
1855 /* Generate a dispatch table, switching on INDEX_EXPR and jumping to
1856 one of the labels in CASE_LIST or to the DEFAULT_LABEL.
1857 MINVAL, MAXVAL, and RANGE are the extrema and range of the case
1858 labels in CASE_LIST. STMT_BB is the basic block containing the statement.
1859
1860 First, a jump insn is emitted: we first try "casesi", and if that
1861 fails, "tablejump". A target *must* have one of them (or both).
1862
1863 Then, a table with the target labels is emitted.
1864
1865 The process is unaware of the CFG. The caller has to fix up
1866 the CFG itself. This is done in cfgexpand.c. */
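/* Illustrative sketch of the output (not from the original comment): for
   "switch (x) { case 0: ... case 3: ... }" this emits a casesi insn, or a
   bounds check plus tablejump, then TABLE_LABEL followed by a four-entry
   jump table.  When the table is PC-relative (CASE_VECTOR_PC_RELATIVE or
   when generating PIC), an ADDR_DIFF_VEC of label differences is emitted
   instead of an ADDR_VEC of absolute label addresses; see the end of this
   function.  */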
1867
1868 static void
1869 emit_case_dispatch_table (tree index_expr, tree index_type,
1870 struct case_node *case_list, rtx default_label,
1871 tree minval, tree maxval, tree range,
1872 basic_block stmt_bb)
1873 {
1874 int i, ncases;
1875 struct case_node *n;
1876 rtx *labelvec;
1877 rtx fallback_label = label_rtx (case_list->code_label);
1878 rtx table_label = gen_label_rtx ();
1879 bool has_gaps = false;
1880 edge default_edge = stmt_bb ? EDGE_SUCC (stmt_bb, 0) : NULL;
1881 int default_prob = default_edge ? default_edge->probability : 0;
1882 int base = get_outgoing_edge_probs (stmt_bb);
1883 bool try_with_tablejump = false;
1884
1885 int new_default_prob = conditional_probability (default_prob,
1886 base);
1887
1888 if (! try_casesi (index_type, index_expr, minval, range,
1889 table_label, default_label, fallback_label,
1890 new_default_prob))
1891 {
1892 /* Index jumptables from zero for suitable values of minval to avoid
1893 a subtraction. For the rationale see:
1894 "http://gcc.gnu.org/ml/gcc-patches/2001-10/msg01234.html". */
1895 if (optimize_insn_for_speed_p ()
1896 && compare_tree_int (minval, 0) > 0
1897 && compare_tree_int (minval, 3) < 0)
1898 {
1899 minval = build_int_cst (index_type, 0);
1900 range = maxval;
1901 has_gaps = true;
1902 }
1903 try_with_tablejump = true;
1904 }
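  /* Illustrative example of the reindexing above: for case values
     {2, 3, 4, 5} we have minval == 2, and since 0 < minval < 3 we reset
     minval to 0 and range to maxval (5).  The table then gets six
     entries, entries 0 and 1 become gaps that are filled with the
     default label below, and the "index - minval" subtraction before
     the table jump is avoided.  */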
1905
1906 /* Get table of labels to jump to, in order of case index. */
1907
1908 ncases = tree_to_shwi (range) + 1;
1909 labelvec = XALLOCAVEC (rtx, ncases);
1910 memset (labelvec, 0, ncases * sizeof (rtx));
1911
1912 for (n = case_list; n; n = n->right)
1913 {
1914 /* Compute the low and high bounds relative to the minimum
1915 value since that should fit in a HOST_WIDE_INT while the
1916 actual values may not. */
1917 HOST_WIDE_INT i_low
1918 = tree_to_uhwi (fold_build2 (MINUS_EXPR, index_type,
1919 n->low, minval));
1920 HOST_WIDE_INT i_high
1921 = tree_to_uhwi (fold_build2 (MINUS_EXPR, index_type,
1922 n->high, minval));
1923 HOST_WIDE_INT i;
1924
1925 for (i = i_low; i <= i_high; i ++)
1926 labelvec[i]
1927 = gen_rtx_LABEL_REF (Pmode, label_rtx (n->code_label));
1928 }
1929
1930 /* Fill in the gaps with the default. We may have gaps at
1931 the beginning if we tried to avoid the minval subtraction,
1932 so substitute some label even if the default label was
1933 deemed unreachable. */
1934 if (!default_label)
1935 default_label = fallback_label;
1936 for (i = 0; i < ncases; i++)
1937 if (labelvec[i] == 0)
1938 {
1939 has_gaps = true;
1940 labelvec[i] = gen_rtx_LABEL_REF (Pmode, default_label);
1941 }
1942
1943 if (has_gaps)
1944 {
1945 /* There is at least one entry in the jump table that jumps
1946 to the default label. The default label can either be reached
1947 through the indirect jump or the direct conditional jump
1948 before that. Split the probability of reaching the
1949 default label among these two jumps. */
1950 new_default_prob = conditional_probability (default_prob/2,
1951 base);
1952 default_prob /= 2;
1953 base -= default_prob;
1954 }
1955 else
1956 {
1957 base -= default_prob;
1958 default_prob = 0;
1959 }
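  /* Worked example (illustrative only, assuming REG_BR_PROB_BASE is
     10000): with DEFAULT_PROB == 2000 and BASE == 10000, a table with
     gaps keeps 1000 on the default edge (for the gap entries) and
     attributes the other 1000 to the conditional jump to the default
     label, leaving BASE == 9000; the rescaling loop below then scales
     each outgoing edge by 10000/9000 so the probabilities again sum to
     roughly REG_BR_PROB_BASE.  */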
1960
1961 if (default_edge)
1962 default_edge->probability = default_prob;
1963
1964 /* We have altered the probability of the default edge. So the probabilities
1965 of all other edges need to be adjusted so that they sum up to
1966 REG_BR_PROB_BASE. */
1967 if (base)
1968 {
1969 edge e;
1970 edge_iterator ei;
1971 FOR_EACH_EDGE (e, ei, stmt_bb->succs)
1972 e->probability = GCOV_COMPUTE_SCALE (e->probability, base);
1973 }
1974
1975 if (try_with_tablejump)
1976 {
1977 bool ok = try_tablejump (index_type, index_expr, minval, range,
1978 table_label, default_label, new_default_prob);
1979 gcc_assert (ok);
1980 }
1981 /* Output the table. */
1982 emit_label (table_label);
1983
1984 if (CASE_VECTOR_PC_RELATIVE || flag_pic)
1985 emit_jump_table_data (gen_rtx_ADDR_DIFF_VEC (CASE_VECTOR_MODE,
1986 gen_rtx_LABEL_REF (Pmode,
1987 table_label),
1988 gen_rtvec_v (ncases, labelvec),
1989 const0_rtx, const0_rtx));
1990 else
1991 emit_jump_table_data (gen_rtx_ADDR_VEC (CASE_VECTOR_MODE,
1992 gen_rtvec_v (ncases, labelvec)));
1993
1994 /* Record no drop-through after the table. */
1995 emit_barrier ();
1996 }
1997
1998 /* Reset the aux field of all outgoing edges of basic block BB. */
1999
2000 static inline void
2001 reset_out_edges_aux (basic_block bb)
2002 {
2003 edge e;
2004 edge_iterator ei;
2005 FOR_EACH_EDGE (e, ei, bb->succs)
2006 e->aux = (void *)0;
2007 }
2008
2009 /* Compute the number of case labels that correspond to each outgoing edge of
2010 STMT. Record this information in the aux field of the edge. */
2011
2012 static inline void
2013 compute_cases_per_edge (gimple stmt)
2014 {
2015 basic_block bb = gimple_bb (stmt);
2016 reset_out_edges_aux (bb);
2017 int ncases = gimple_switch_num_labels (stmt);
2018 for (int i = ncases - 1; i >= 1; --i)
2019 {
2020 tree elt = gimple_switch_label (stmt, i);
2021 tree lab = CASE_LABEL (elt);
2022 basic_block case_bb = label_to_block_fn (cfun, lab);
2023 edge case_edge = find_edge (bb, case_bb);
2024 case_edge->aux = (void *)((intptr_t)(case_edge->aux) + 1);
2025 }
2026 }
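/* For example (illustrative): in "switch (x) { case 1: case 2: foo (); }"
   both case labels lead to the same successor block, so that edge ends up
   with aux == 2 and expand_case below divides the edge probability by two
   to obtain a per-case-node probability.  */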
2027
2028 /* Expand a case (Pascal/Ada) or switch (C) statement, given as the
2029 GIMPLE_SWITCH statement STMT. The expression to be tested is the
2030 switch index, gimple_switch_index (STMT); its original source type
2031 may have been lost to promotion during gimplification.
2032 Generate the code to test it and jump to the right place. */
2033
2034 void
2035 expand_case (gimple stmt)
2036 {
2037 tree minval = NULL_TREE, maxval = NULL_TREE, range = NULL_TREE;
2038 rtx default_label = NULL_RTX;
2039 unsigned int count, uniq;
2040 int i;
2041 int ncases = gimple_switch_num_labels (stmt);
2042 tree index_expr = gimple_switch_index (stmt);
2043 tree index_type = TREE_TYPE (index_expr);
2044 tree elt;
2045 basic_block bb = gimple_bb (stmt);
2046
2047 /* A list of case labels; it is first built as a flat list and may
2048 then be rearranged into a nearly balanced binary tree. */
2049 struct case_node *case_list = 0;
2050
2051 /* A pool for case nodes. */
2052 alloc_pool case_node_pool;
2053
2054 /* An ERROR_MARK occurs for various reasons including invalid data type.
2055 ??? Can this still happen, with GIMPLE and all? */
2056 if (index_type == error_mark_node)
2057 return;
2058
2059 /* cleanup_tree_cfg removes all SWITCH_EXPRs whose index
2060 expression is an INTEGER_CST. */
2061 gcc_assert (TREE_CODE (index_expr) != INTEGER_CST);
2062
2063 case_node_pool = create_alloc_pool ("struct case_node pool",
2064 sizeof (struct case_node),
2065 100);
2066
2067 do_pending_stack_adjust ();
2068
2069 /* Find the default case target label. */
2070 default_label = label_rtx (CASE_LABEL (gimple_switch_default_label (stmt)));
2071 edge default_edge = EDGE_SUCC (bb, 0);
2072 int default_prob = default_edge->probability;
2073
2074 /* Get upper and lower bounds of case values. */
2075 elt = gimple_switch_label (stmt, 1);
2076 minval = fold_convert (index_type, CASE_LOW (elt));
2077 elt = gimple_switch_label (stmt, ncases - 1);
2078 if (CASE_HIGH (elt))
2079 maxval = fold_convert (index_type, CASE_HIGH (elt));
2080 else
2081 maxval = fold_convert (index_type, CASE_LOW (elt));
2082
2083 /* Compute span of values. */
2084 range = fold_build2 (MINUS_EXPR, index_type, maxval, minval);
2085
2086 /* Listify the labels queue and gather some numbers to decide
2087 how to expand this switch(). */
2088 uniq = 0;
2089 count = 0;
2090 struct pointer_set_t *seen_labels = pointer_set_create ();
2091 compute_cases_per_edge (stmt);
2092
2093 for (i = ncases - 1; i >= 1; --i)
2094 {
2095 elt = gimple_switch_label (stmt, i);
2096 tree low = CASE_LOW (elt);
2097 gcc_assert (low);
2098 tree high = CASE_HIGH (elt);
2099 gcc_assert (! high || tree_int_cst_lt (low, high));
2100 tree lab = CASE_LABEL (elt);
2101
2102 /* Count the elements.
2103 A range counts double, since it requires two compares. */
2104 count++;
2105 if (high)
2106 count++;
2107
2108 /* If we have not seen this label yet, then increase the
2109 number of unique case node targets seen. */
2110 if (!pointer_set_insert (seen_labels, lab))
2111 uniq++;
2112
2113 /* The bounds on the case range, LOW and HIGH, have to be converted
2114 to the index type of the switch. Note that the original type of the
2115 case index in the source code is usually "lost" during
2116 gimplification due to type promotion, but the case labels retain the
2117 original type. Make sure to drop overflow flags. */
2118 low = fold_convert (index_type, low);
2119 if (TREE_OVERFLOW (low))
2120 low = wide_int_to_tree (index_type, low);
2121
2122 /* The canonical form of a case label in GIMPLE is that a simple case
2123 has an empty CASE_HIGH. For the casesi and tablejump expanders,
2124 the back ends want simple cases to have high == low. */
2125 if (! high)
2126 high = low;
2127 high = fold_convert (index_type, high);
2128 if (TREE_OVERFLOW (high))
2129 high = wide_int_to_tree (index_type, high);
2130
2131 basic_block case_bb = label_to_block_fn (cfun, lab);
2132 edge case_edge = find_edge (bb, case_bb);
2133 case_list = add_case_node (
2134 case_list, low, high, lab,
2135 case_edge->probability / (intptr_t)(case_edge->aux),
2136 case_node_pool);
2137 }
2138 pointer_set_destroy (seen_labels);
2139 reset_out_edges_aux (bb);
2140
2141 /* cleanup_tree_cfg removes all SWITCH_EXPRs with a single
2142 destination, such as one with a default case only.
2143 It also removes cases that are out of range for the switch
2144 type, so we should never get a zero here. */
2145 gcc_assert (count > 0);
2146
2147 rtx before_case = get_last_insn ();
2148
2149 /* Decide how to expand this switch.
2150 The two options at this point are a dispatch table (casesi or
2151 tablejump) or a decision tree. */
2152
2153 if (expand_switch_as_decision_tree_p (range, uniq, count))
2154 emit_case_decision_tree (index_expr, index_type,
2155 case_list, default_label,
2156 default_prob);
2157 else
2158 emit_case_dispatch_table (index_expr, index_type,
2159 case_list, default_label,
2160 minval, maxval, range, bb);
2161
2162 reorder_insns (NEXT_INSN (before_case), get_last_insn (), before_case);
2163
2164 free_temp_slots ();
2165 free_alloc_pool (case_node_pool);
2166 }
2167
2168 /* Expand the dispatch to a short decrement chain if there are few cases
2169 to dispatch to. Likewise if neither casesi nor tablejump is available,
2170 or if flag_jump_tables is not set. Otherwise, expand as a casesi or a
2171 tablejump. The index mode is always the mode of integer_type_node.
2172 Trap if no case matches the index.
2173
2174 DISPATCH_INDEX is the index expression to switch on. It should be a
2175 memory or register operand.
2176
2177 DISPATCH_TABLE is a set of case labels. The set should be sorted in
2178 ascending order, be contiguous, starting with value 0, and contain only
2179 single-valued case labels. */
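/* Usage note (an observation, not part of the original comment): the
   setjmp/longjmp exception handling code in except.c builds dispatch
   tables of this shape, i.e. single-valued, contiguous case labels
   starting at value 0, and expands them through this routine.  */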
2180
2181 void
2182 expand_sjlj_dispatch_table (rtx dispatch_index,
2183 vec<tree> dispatch_table)
2184 {
2185 tree index_type = integer_type_node;
2186 enum machine_mode index_mode = TYPE_MODE (index_type);
2187
2188 int ncases = dispatch_table.length ();
2189
2190 do_pending_stack_adjust ();
2191 rtx before_case = get_last_insn ();
2192
2193 /* Expand as a decrement-chain if there are 5 or fewer dispatch
2194 labels. This covers more than 98% of the cases in libjava,
2195 and seems to be a reasonable compromise between the "old way"
2196 of expanding as a decision tree or dispatch table and the "new
2197 way" with a decrement chain or dispatch table. */
2198 if (dispatch_table.length () <= 5
2199 || (!HAVE_casesi && !HAVE_tablejump)
2200 || !flag_jump_tables)
2201 {
2202 /* Expand the dispatch as a decrement chain:
2203
2204 "switch(index) {case 0: do_0; case 1: do_1; ...; case N: do_N;}"
2205
2206 ==>
2207
2208 if (index == 0) do_0; else index--;
2209 if (index == 0) do_1; else index--;
2210 ...
2211 if (index == 0) do_N; else index--;
2212
2213 This is more efficient than a dispatch table on most machines.
2214 The last "index--" is redundant but the code is trivially dead
2215 and will be cleaned up by later passes. */
2216 rtx index = copy_to_mode_reg (index_mode, dispatch_index);
2217 rtx zero = CONST0_RTX (index_mode);
2218 for (int i = 0; i < ncases; i++)
2219 {
2220 tree elt = dispatch_table[i];
2221 rtx lab = label_rtx (CASE_LABEL (elt));
2222 do_jump_if_equal (index_mode, index, zero, lab, 0, -1);
2223 force_expand_binop (index_mode, sub_optab,
2224 index, CONST1_RTX (index_mode),
2225 index, 0, OPTAB_DIRECT);
2226 }
2227 }
2228 else
2229 {
2230 /* Similar to expand_case, but much simpler. */
2231 struct case_node *case_list = 0;
2232 alloc_pool case_node_pool = create_alloc_pool ("struct sjlj_case pool",
2233 sizeof (struct case_node),
2234 ncases);
2235 tree index_expr = make_tree (index_type, dispatch_index);
2236 tree minval = build_int_cst (index_type, 0);
2237 tree maxval = CASE_LOW (dispatch_table.last ());
2238 tree range = maxval;
2239 rtx default_label = gen_label_rtx ();
2240
2241 for (int i = ncases - 1; i >= 0; --i)
2242 {
2243 tree elt = dispatch_table[i];
2244 tree low = CASE_LOW (elt);
2245 tree lab = CASE_LABEL (elt);
2246 case_list = add_case_node (case_list, low, low, lab, 0, case_node_pool);
2247 }
2248
2249 emit_case_dispatch_table (index_expr, index_type,
2250 case_list, default_label,
2251 minval, maxval, range,
2252 BLOCK_FOR_INSN (before_case));
2253 emit_label (default_label);
2254 free_alloc_pool (case_node_pool);
2255 }
2256
2257 /* Dispatching something not handled? Trap! */
2258 expand_builtin_trap ();
2259
2260 reorder_insns (NEXT_INSN (before_case), get_last_insn (), before_case);
2261
2262 free_temp_slots ();
2263 }
2264
2265 \f
2266 /* Take an ordered list of case nodes
2267 and transform it into a near-optimal binary tree,
2268 on the assumption that any target code selection value is as
2269 likely as any other.
2270
2271 The transformation is performed by splitting the ordered
2272 list into two equal sections plus a pivot. The parts are
2273 then attached to the pivot as left and right branches. Each
2274 branch is then transformed recursively. */
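/* For example (illustrative): the ordered chain 1 -> 3 -> 5 -> 7 -> 9
   (single values only, so each node costs 1) is split at 5: node 5
   becomes the root, {1, 3} the left branch and {7, 9} the right branch.
   Each branch is then balanced recursively; chains of one or two nodes
   are left as they are, only their parent links and subtree
   probabilities are filled in.  */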
2275
2276 static void
2277 balance_case_nodes (case_node_ptr *head, case_node_ptr parent)
2278 {
2279 case_node_ptr np;
2280
2281 np = *head;
2282 if (np)
2283 {
2284 int i = 0;
2285 int ranges = 0;
2286 case_node_ptr *npp;
2287 case_node_ptr left;
2288
2289 /* Count the number of entries on branch. Also count the ranges. */
2290
2291 while (np)
2292 {
2293 if (!tree_int_cst_equal (np->low, np->high))
2294 ranges++;
2295
2296 i++;
2297 np = np->right;
2298 }
2299
2300 if (i > 2)
2301 {
2302 /* Split this list if it is long enough for that to help. */
2303 npp = head;
2304 left = *npp;
2305
2306 /* If there are just three nodes, split at the middle one. */
2307 if (i == 3)
2308 npp = &(*npp)->right;
2309 else
2310 {
2311 /* Find the place in the list that bisects the list's total cost,
2312 where ranges count as 2.
2313 Here I is set to half the total cost. */
2314 i = (i + ranges + 1) / 2;
2315 while (1)
2316 {
2317 /* Skip nodes while their cost does not reach that amount. */
2318 if (!tree_int_cst_equal ((*npp)->low, (*npp)->high))
2319 i--;
2320 i--;
2321 if (i <= 0)
2322 break;
2323 npp = &(*npp)->right;
2324 }
2325 }
2326 *head = np = *npp;
2327 *npp = 0;
2328 np->parent = parent;
2329 np->left = left;
2330
2331 /* Optimize each of the two split parts. */
2332 balance_case_nodes (&np->left, np);
2333 balance_case_nodes (&np->right, np);
2334 np->subtree_prob = np->prob;
2335 np->subtree_prob += np->left->subtree_prob;
2336 np->subtree_prob += np->right->subtree_prob;
2337 }
2338 else
2339 {
2340 /* Else leave this branch as one level,
2341 but fill in `parent' fields. */
2342 np = *head;
2343 np->parent = parent;
2344 np->subtree_prob = np->prob;
2345 for (; np->right; np = np->right)
2346 {
2347 np->right->parent = np;
2348 (*head)->subtree_prob += np->right->subtree_prob;
2349 }
2350 }
2351 }
2352 }
2353 \f
2354 /* Search the parent sections of the case node tree
2355 to see if a test for the lower bound of NODE would be redundant.
2356 INDEX_TYPE is the type of the index expression.
2357
2358 The instructions to generate the case decision tree are
2359 output in the same order as nodes are processed, so it is
2360 known that if a parent node checks the value one below the
2361 current node's lower bound, then the current node is already
2362 bounded at its lower end and the test would be redundant. */
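/* For example (illustrative): if some parent node covers values up to 9
   (its HIGH is 9) and the decision code branched here for the
   greater-than side, then a node whose LOW is 10 has LOW - 1 == 9 equal
   to that parent's HIGH, so no lower-bound test is needed here.
   node_has_high_bound below is the mirror image: it looks for a parent
   whose LOW equals this node's HIGH + 1.  */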
2363
2364 static int
2365 node_has_low_bound (case_node_ptr node, tree index_type)
2366 {
2367 tree low_minus_one;
2368 case_node_ptr pnode;
2369
2370 /* If the lower bound of this node is the lowest value in the index type,
2371 we need not test it. */
2372
2373 if (tree_int_cst_equal (node->low, TYPE_MIN_VALUE (index_type)))
2374 return 1;
2375
2376 /* If this node has a left branch, the value at the left must be less
2377 than that at this node, so it cannot be bounded at the bottom and
2378 we need not bother testing any further. */
2379
2380 if (node->left)
2381 return 0;
2382
2383 low_minus_one = fold_build2 (MINUS_EXPR, TREE_TYPE (node->low),
2384 node->low,
2385 build_int_cst (TREE_TYPE (node->low), 1));
2386
2387 /* If the subtraction above overflowed, we can't verify anything.
2388 Otherwise, look for a parent that tests our value - 1. */
2389
2390 if (! tree_int_cst_lt (low_minus_one, node->low))
2391 return 0;
2392
2393 for (pnode = node->parent; pnode; pnode = pnode->parent)
2394 if (tree_int_cst_equal (low_minus_one, pnode->high))
2395 return 1;
2396
2397 return 0;
2398 }
2399
2400 /* Search the parent sections of the case node tree
2401 to see if a test for the upper bound of NODE would be redundant.
2402 INDEX_TYPE is the type of the index expression.
2403
2404 The instructions to generate the case decision tree are
2405 output in the same order as nodes are processed, so it is
2406 known that if a parent node checks the value one above the
2407 current node's upper bound, then the current node is already
2408 bounded at its upper end and the test would be redundant. */
2409
2410 static int
2411 node_has_high_bound (case_node_ptr node, tree index_type)
2412 {
2413 tree high_plus_one;
2414 case_node_ptr pnode;
2415
2416 /* If there is no upper bound, obviously no test is needed. */
2417
2418 if (TYPE_MAX_VALUE (index_type) == NULL)
2419 return 1;
2420
2421 /* If the upper bound of this node is the highest value in the type
2422 of the index expression, we need not test against it. */
2423
2424 if (tree_int_cst_equal (node->high, TYPE_MAX_VALUE (index_type)))
2425 return 1;
2426
2427 /* If this node has a right branch, the value at the right must be greater
2428 than that at this node, so it cannot be bounded at the top and
2429 we need not bother testing any further. */
2430
2431 if (node->right)
2432 return 0;
2433
2434 high_plus_one = fold_build2 (PLUS_EXPR, TREE_TYPE (node->high),
2435 node->high,
2436 build_int_cst (TREE_TYPE (node->high), 1));
2437
2438 /* If the addition above overflowed, we can't verify anything.
2439 Otherwise, look for a parent that tests our value + 1. */
2440
2441 if (! tree_int_cst_lt (node->high, high_plus_one))
2442 return 0;
2443
2444 for (pnode = node->parent; pnode; pnode = pnode->parent)
2445 if (tree_int_cst_equal (high_plus_one, pnode->low))
2446 return 1;
2447
2448 return 0;
2449 }
2450
2451 /* Search the parent sections of the
2452 case node tree to see if both tests for the upper and lower
2453 bounds of NODE would be redundant. */
2454
2455 static int
2456 node_is_bounded (case_node_ptr node, tree index_type)
2457 {
2458 return (node_has_low_bound (node, index_type)
2459 && node_has_high_bound (node, index_type));
2460 }
2461 \f
2462
2463 /* Emit step-by-step code to select a case for the value of INDEX.
2464 The thus generated decision tree follows the form of the
2465 case-node binary tree NODE, whose nodes represent test conditions.
2466 INDEX_TYPE is the type of the index of the switch.
2467
2468 Care is taken to prune redundant tests from the decision tree
2469 by detecting any boundary conditions already checked by
2470 emitted rtx. (See node_has_high_bound, node_has_low_bound
2471 and node_is_bounded, above.)
2472
2473 Where the test conditions can be shown to be redundant we emit
2474 an unconditional jump to the target code. As a further
2475 optimization, the subordinates of a tree node are examined to
2476 check for bounded nodes. In this case conditional and/or
2477 unconditional jumps as a result of the boundary check for the
2478 current node are arranged to target the subordinate's associated
2479 code for out-of-bound conditions on the current node.
2480
2481 We can assume that when control reaches the code generated here,
2482 the index value has already been compared with the parents
2483 of this node, and determined to be on the same side of each parent
2484 as this node is. Thus, if this node tests for the value 51,
2485 and a parent tested for 52, we don't need to consider
2486 the possibility of a value greater than 51. If another parent
2487 tests for the value 50, then this node need not test anything. */
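/* Small illustrative example: for the case values {1, 3, 5}, balanced
   with 3 at the root and the single-valued leaves 1 and 5 as children,
   the "both children are single-valued leaves" shortcut below emits just
   three equality tests and no ordered comparison:

       if (index == 3) goto L3;
       if (index == 5) goto L5;
       if (index == 1) goto L1;
       goto default;

   (the final jump to the default label is emitted by the caller,
   emit_case_decision_tree).  */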
2488
2489 static void
2490 emit_case_nodes (rtx index, case_node_ptr node, rtx default_label,
2491 int default_prob, tree index_type)
2492 {
2493 /* If INDEX has an unsigned type, we must make unsigned branches. */
2494 int unsignedp = TYPE_UNSIGNED (index_type);
2495 int probability;
2496 int prob = node->prob, subtree_prob = node->subtree_prob;
2497 enum machine_mode mode = GET_MODE (index);
2498 enum machine_mode imode = TYPE_MODE (index_type);
2499
2500 /* Handle indices detected as constant during RTL expansion. */
2501 if (mode == VOIDmode)
2502 mode = imode;
2503
2504 /* See if our parents have already tested everything for us.
2505 If they have, emit an unconditional jump for this node. */
2506 if (node_is_bounded (node, index_type))
2507 emit_jump (label_rtx (node->code_label));
2508
2509 else if (tree_int_cst_equal (node->low, node->high))
2510 {
2511 probability = conditional_probability (prob, subtree_prob + default_prob);
2512 /* Node is single valued. First see if the index expression matches
2513 this node and then check our children, if any. */
2514 do_jump_if_equal (mode, index,
2515 convert_modes (mode, imode,
2516 expand_normal (node->low),
2517 unsignedp),
2518 label_rtx (node->code_label), unsignedp, probability);
2519 /* The equality jump for this case has now been emitted, so its
2520 probability no longer contributes to the remaining SUBTREE_PROB. */
2521 subtree_prob -= prob;
2522 if (node->right != 0 && node->left != 0)
2523 {
2524 /* This node has children on both sides.
2525 Dispatch to one side or the other
2526 by comparing the index value with this node's value.
2527 If one subtree is bounded, check that one first,
2528 so we can avoid real branches in the tree. */
2529
2530 if (node_is_bounded (node->right, index_type))
2531 {
2532 probability = conditional_probability (
2533 node->right->prob,
2534 subtree_prob + default_prob);
2535 emit_cmp_and_jump_insns (index,
2536 convert_modes
2537 (mode, imode,
2538 expand_normal (node->high),
2539 unsignedp),
2540 GT, NULL_RTX, mode, unsignedp,
2541 label_rtx (node->right->code_label),
2542 probability);
2543 emit_case_nodes (index, node->left, default_label, default_prob,
2544 index_type);
2545 }
2546
2547 else if (node_is_bounded (node->left, index_type))
2548 {
2549 probability = conditional_probability (
2550 node->left->prob,
2551 subtree_prob + default_prob);
2552 emit_cmp_and_jump_insns (index,
2553 convert_modes
2554 (mode, imode,
2555 expand_normal (node->high),
2556 unsignedp),
2557 LT, NULL_RTX, mode, unsignedp,
2558 label_rtx (node->left->code_label),
2559 probability);
2560 emit_case_nodes (index, node->right, default_label, default_prob, index_type);
2561 }
2562
2563 /* If both children are single-valued cases with no
2564 children, finish up all the work. This way, we can save
2565 one ordered comparison. */
2566 else if (tree_int_cst_equal (node->right->low, node->right->high)
2567 && node->right->left == 0
2568 && node->right->right == 0
2569 && tree_int_cst_equal (node->left->low, node->left->high)
2570 && node->left->left == 0
2571 && node->left->right == 0)
2572 {
2573 /* Neither child is bounded, but both are single-valued leaves,
2574 so just test each value directly, right side first. */
2575
2576 /* See if the value matches what the right hand side
2577 wants. */
2578 probability = conditional_probability (
2579 node->right->prob,
2580 subtree_prob + default_prob);
2581 do_jump_if_equal (mode, index,
2582 convert_modes (mode, imode,
2583 expand_normal (node->right->low),
2584 unsignedp),
2585 label_rtx (node->right->code_label),
2586 unsignedp, probability);
2587
2588 /* See if the value matches what the left hand side
2589 wants. */
2590 probability = conditional_probability (
2591 node->left->prob,
2592 subtree_prob + default_prob);
2593 do_jump_if_equal (mode, index,
2594 convert_modes (mode, imode,
2595 expand_normal (node->left->low),
2596 unsignedp),
2597 label_rtx (node->left->code_label),
2598 unsignedp, probability);
2599 }
2600
2601 else
2602 {
2603 /* Neither node is bounded. First distinguish the two sides;
2604 then emit the code for one side at a time. */
2605
2606 tree test_label
2607 = build_decl (curr_insn_location (),
2608 LABEL_DECL, NULL_TREE, NULL_TREE);
2609
2610 /* The default label could be reached either through the right
2611 subtree or the left subtree. Divide the probability
2612 equally. */
2613 probability = conditional_probability (
2614 node->right->subtree_prob + default_prob/2,
2615 subtree_prob + default_prob);
2616 /* See if the value is on the right. */
2617 emit_cmp_and_jump_insns (index,
2618 convert_modes
2619 (mode, imode,
2620 expand_normal (node->high),
2621 unsignedp),
2622 GT, NULL_RTX, mode, unsignedp,
2623 label_rtx (test_label),
2624 probability);
2625 default_prob /= 2;
2626
2627 /* Value must be on the left.
2628 Handle the left-hand subtree. */
2629 emit_case_nodes (index, node->left, default_label, default_prob, index_type);
2630 /* If left-hand subtree does nothing,
2631 go to default. */
2632 if (default_label)
2633 emit_jump (default_label);
2634
2635 /* Code branches here for the right-hand subtree. */
2636 expand_label (test_label);
2637 emit_case_nodes (index, node->right, default_label, default_prob, index_type);
2638 }
2639 }
2640
2641 else if (node->right != 0 && node->left == 0)
2642 {
2643 /* Here we have a right child but no left so we issue a conditional
2644 branch to default and process the right child.
2645
2646 Omit the conditional branch to default if the right child
2647 does not have any children and is single valued; it would
2648 cost too much space to save so little time. */
2649
2650 if (node->right->right || node->right->left
2651 || !tree_int_cst_equal (node->right->low, node->right->high))
2652 {
2653 if (!node_has_low_bound (node, index_type))
2654 {
2655 probability = conditional_probability (
2656 default_prob/2,
2657 subtree_prob + default_prob);
2658 emit_cmp_and_jump_insns (index,
2659 convert_modes
2660 (mode, imode,
2661 expand_normal (node->high),
2662 unsignedp),
2663 LT, NULL_RTX, mode, unsignedp,
2664 default_label,
2665 probability);
2666 default_prob /= 2;
2667 }
2668
2669 emit_case_nodes (index, node->right, default_label, default_prob, index_type);
2670 }
2671 else
2672 {
2673 probability = conditional_probability (
2674 node->right->subtree_prob,
2675 subtree_prob + default_prob);
2676 /* We cannot process node->right normally
2677 since we haven't ruled out the numbers less than
2678 this node's value. So handle node->right explicitly. */
2679 do_jump_if_equal (mode, index,
2680 convert_modes
2681 (mode, imode,
2682 expand_normal (node->right->low),
2683 unsignedp),
2684 label_rtx (node->right->code_label), unsignedp, probability);
2685 }
2686 }
2687
2688 else if (node->right == 0 && node->left != 0)
2689 {
2690 /* Just one subtree, on the left. */
2691 if (node->left->left || node->left->right
2692 || !tree_int_cst_equal (node->left->low, node->left->high))
2693 {
2694 if (!node_has_high_bound (node, index_type))
2695 {
2696 probability = conditional_probability (
2697 default_prob/2,
2698 subtree_prob + default_prob);
2699 emit_cmp_and_jump_insns (index,
2700 convert_modes
2701 (mode, imode,
2702 expand_normal (node->high),
2703 unsignedp),
2704 GT, NULL_RTX, mode, unsignedp,
2705 default_label,
2706 probability);
2707 default_prob /= 2;
2708 }
2709
2710 emit_case_nodes (index, node->left, default_label,
2711 default_prob, index_type);
2712 }
2713 else
2714 {
2715 probability = conditional_probability (
2716 node->left->subtree_prob,
2717 subtree_prob + default_prob);
2718 /* We cannot process node->left normally
2719 since we haven't ruled out the numbers greater than
2720 this node's value. So handle node->left explicitly. */
2721 do_jump_if_equal (mode, index,
2722 convert_modes
2723 (mode, imode,
2724 expand_normal (node->left->low),
2725 unsignedp),
2726 label_rtx (node->left->code_label), unsignedp, probability);
2727 }
2728 }
2729 }
2730 else
2731 {
2732 /* Node is a range. These cases are very similar to those for a single
2733 value, except that we do not start by testing whether this node
2734 is the one to branch to. */
2735
2736 if (node->right != 0 && node->left != 0)
2737 {
2738 /* Node has subtrees on both sides.
2739 If the right-hand subtree is bounded,
2740 test for it first, since we can go straight there.
2741 Otherwise, we need to make a branch in the control structure,
2742 then handle the two subtrees. */
2743 tree test_label = 0;
2744
2745 if (node_is_bounded (node->right, index_type))
2746 {
2747 /* Right hand node is fully bounded so we can eliminate any
2748 testing and branch directly to the target code. */
2749 probability = conditional_probability (
2750 node->right->subtree_prob,
2751 subtree_prob + default_prob);
2752 emit_cmp_and_jump_insns (index,
2753 convert_modes
2754 (mode, imode,
2755 expand_normal (node->high),
2756 unsignedp),
2757 GT, NULL_RTX, mode, unsignedp,
2758 label_rtx (node->right->code_label),
2759 probability);
2760 }
2761 else
2762 {
2763 /* Right hand node requires testing.
2764 Branch to a label where we will handle it later. */
2765
2766 test_label = build_decl (curr_insn_location (),
2767 LABEL_DECL, NULL_TREE, NULL_TREE);
2768 probability = conditional_probability (
2769 node->right->subtree_prob + default_prob/2,
2770 subtree_prob + default_prob);
2771 emit_cmp_and_jump_insns (index,
2772 convert_modes
2773 (mode, imode,
2774 expand_normal (node->high),
2775 unsignedp),
2776 GT, NULL_RTX, mode, unsignedp,
2777 label_rtx (test_label),
2778 probability);
2779 default_prob /= 2;
2780 }
2781
2782 /* Value belongs to this node or to the left-hand subtree. */
2783
2784 probability = conditional_probability (
2785 prob,
2786 subtree_prob + default_prob);
2787 emit_cmp_and_jump_insns (index,
2788 convert_modes
2789 (mode, imode,
2790 expand_normal (node->low),
2791 unsignedp),
2792 GE, NULL_RTX, mode, unsignedp,
2793 label_rtx (node->code_label),
2794 probability);
2795
2796 /* Handle the left-hand subtree. */
2797 emit_case_nodes (index, node->left, default_label, default_prob, index_type);
2798
2799 /* If right node had to be handled later, do that now. */
2800
2801 if (test_label)
2802 {
2803 /* If the left-hand subtree fell through,
2804 don't let it fall into the right-hand subtree. */
2805 if (default_label)
2806 emit_jump (default_label);
2807
2808 expand_label (test_label);
2809 emit_case_nodes (index, node->right, default_label, default_prob, index_type);
2810 }
2811 }
2812
2813 else if (node->right != 0 && node->left == 0)
2814 {
2815 /* Deal with values to the left of this node,
2816 if they are possible. */
2817 if (!node_has_low_bound (node, index_type))
2818 {
2819 probability = conditional_probability (
2820 default_prob/2,
2821 subtree_prob + default_prob);
2822 emit_cmp_and_jump_insns (index,
2823 convert_modes
2824 (mode, imode,
2825 expand_normal (node->low),
2826 unsignedp),
2827 LT, NULL_RTX, mode, unsignedp,
2828 default_label,
2829 probability);
2830 default_prob /= 2;
2831 }
2832
2833 /* Value belongs to this node or to the right-hand subtree. */
2834
2835 probability = conditional_probability (
2836 prob,
2837 subtree_prob + default_prob);
2838 emit_cmp_and_jump_insns (index,
2839 convert_modes
2840 (mode, imode,
2841 expand_normal (node->high),
2842 unsignedp),
2843 LE, NULL_RTX, mode, unsignedp,
2844 label_rtx (node->code_label),
2845 probability);
2846
2847 emit_case_nodes (index, node->right, default_label, default_prob, index_type);
2848 }
2849
2850 else if (node->right == 0 && node->left != 0)
2851 {
2852 /* Deal with values to the right of this node,
2853 if they are possible. */
2854 if (!node_has_high_bound (node, index_type))
2855 {
2856 probability = conditional_probability (
2857 default_prob/2,
2858 subtree_prob + default_prob);
2859 emit_cmp_and_jump_insns (index,
2860 convert_modes
2861 (mode, imode,
2862 expand_normal (node->high),
2863 unsignedp),
2864 GT, NULL_RTX, mode, unsignedp,
2865 default_label,
2866 probability);
2867 default_prob /= 2;
2868 }
2869
2870 /* Value belongs to this node or to the left-hand subtree. */
2871
2872 probability = conditional_probability (
2873 prob,
2874 subtree_prob + default_prob);
2875 emit_cmp_and_jump_insns (index,
2876 convert_modes
2877 (mode, imode,
2878 expand_normal (node->low),
2879 unsignedp),
2880 GE, NULL_RTX, mode, unsignedp,
2881 label_rtx (node->code_label),
2882 probability);
2883
2884 emit_case_nodes (index, node->left, default_label, default_prob, index_type);
2885 }
2886
2887 else
2888 {
2889 /* Node has no children so we check low and high bounds to remove
2890 redundant tests. At most one of the bounds can hold,
2891 since otherwise this node would be bounded, a case handled above. */
2892 int high_bound = node_has_high_bound (node, index_type);
2893 int low_bound = node_has_low_bound (node, index_type);
2894
2895 if (!high_bound && low_bound)
2896 {
2897 probability = conditional_probability (
2898 default_prob,
2899 subtree_prob + default_prob);
2900 emit_cmp_and_jump_insns (index,
2901 convert_modes
2902 (mode, imode,
2903 expand_normal (node->high),
2904 unsignedp),
2905 GT, NULL_RTX, mode, unsignedp,
2906 default_label,
2907 probability);
2908 }
2909
2910 else if (!low_bound && high_bound)
2911 {
2912 probability = conditional_probability (
2913 default_prob,
2914 subtree_prob + default_prob);
2915 emit_cmp_and_jump_insns (index,
2916 convert_modes
2917 (mode, imode,
2918 expand_normal (node->low),
2919 unsignedp),
2920 LT, NULL_RTX, mode, unsignedp,
2921 default_label,
2922 probability);
2923 }
2924 else if (!low_bound && !high_bound)
2925 {
2926 /* Widen LOW and HIGH to the same width as INDEX. */
2927 tree type = lang_hooks.types.type_for_mode (mode, unsignedp);
2928 tree low = build1 (CONVERT_EXPR, type, node->low);
2929 tree high = build1 (CONVERT_EXPR, type, node->high);
2930 rtx low_rtx, new_index, new_bound;
2931
2932 /* Instead of doing two branches, emit one unsigned branch for
2933 (index-low) > (high-low). */
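              /* Worked example (illustrative only): for the range
                 [10, 20], index 15 gives (unsigned) (15 - 10) == 5 <= 10
                 and falls through to the case label, while index 7 gives
                 (unsigned) (7 - 10), which wraps to a huge value > 10,
                 so the single unsigned comparison also sends values
                 below the range to the default label.  */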
2934 low_rtx = expand_expr (low, NULL_RTX, mode, EXPAND_NORMAL);
2935 new_index = expand_simple_binop (mode, MINUS, index, low_rtx,
2936 NULL_RTX, unsignedp,
2937 OPTAB_WIDEN);
2938 new_bound = expand_expr (fold_build2 (MINUS_EXPR, type,
2939 high, low),
2940 NULL_RTX, mode, EXPAND_NORMAL);
2941
2942 probability = conditional_probability (
2943 default_prob,
2944 subtree_prob + default_prob);
2945 emit_cmp_and_jump_insns (new_index, new_bound, GT, NULL_RTX,
2946 mode, 1, default_label, probability);
2947 }
2948
2949 emit_jump (label_rtx (node->code_label));
2950 }
2951 }
2952 }