/* Convert tree expression to rtl instructions, for GNU compiler.
   Copyright (C) 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
   2000, 2001, 2002, 2003, 2004 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 2, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING.  If not, write to the Free
Software Foundation, 59 Temple Place - Suite 330, Boston, MA
02111-1307, USA.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "rtl.h"
#include "tree.h"
#include "flags.h"
#include "function.h"
#include "insn-config.h"
#include "insn-attr.h"
/* Include expr.h after insn-config.h so we get HAVE_conditional_move.  */
#include "expr.h"
#include "optabs.h"
#include "langhooks.h"
#include "ggc.h"

static bool prefer_and_bit_test (enum machine_mode, int);
static void do_jump_by_parts_greater (tree, int, rtx, rtx);
static void do_jump_by_parts_equality (tree, rtx, rtx);
static void do_compare_and_jump (tree, enum rtx_code, enum rtx_code, rtx,
                                 rtx);

/* At the start of a function, record that we have no previously-pushed
   arguments waiting to be popped.  */

void
init_pending_stack_adjust (void)
{
  pending_stack_adjust = 0;
}

/* When exiting from function, if safe, clear out any pending stack adjust
   so the adjustment won't get done.

   Note, if the current function calls alloca, then it must have a
   frame pointer regardless of the value of flag_omit_frame_pointer.  */

void
clear_pending_stack_adjust (void)
{
  if (optimize > 0
      && (! flag_omit_frame_pointer || current_function_calls_alloca)
      && EXIT_IGNORE_STACK
      && ! (DECL_INLINE (current_function_decl) && ! flag_no_inline)
      && ! flag_inline_functions)
    {
      stack_pointer_delta -= pending_stack_adjust,
      pending_stack_adjust = 0;
    }
}

/* Pop any previously-pushed arguments that have not been popped yet.  */

void
do_pending_stack_adjust (void)
{
  if (inhibit_defer_pop == 0)
    {
      if (pending_stack_adjust != 0)
        adjust_stack (GEN_INT (pending_stack_adjust));
      pending_stack_adjust = 0;
    }
}
\f
/* Expand conditional expressions.  */

/* Generate code to evaluate EXP and jump to LABEL if the value is zero.
   LABEL is an rtx of code CODE_LABEL, in this function and all the
   functions here.  */

void
jumpifnot (tree exp, rtx label)
{
  do_jump (exp, label, NULL_RTX);
}

/* Generate code to evaluate EXP and jump to LABEL if the value is nonzero.  */

void
jumpif (tree exp, rtx label)
{
  do_jump (exp, NULL_RTX, label);
}

/* Used internally by prefer_and_bit_test.  */

static GTY(()) rtx and_reg;
static GTY(()) rtx and_test;
static GTY(()) rtx shift_test;

/* Compare the relative costs of "(X & (1 << BITNUM))" and "(X >> BITNUM) & 1",
   where X is an arbitrary register of mode MODE.  Return true if the former
   is preferred.  */

static bool
prefer_and_bit_test (enum machine_mode mode, int bitnum)
{
  if (and_test == 0)
    {
      /* Set up rtxes for the two variations.  Use NULL as a placeholder
         for the BITNUM-based constants.  */
      and_reg = gen_rtx_REG (mode, FIRST_PSEUDO_REGISTER);
      and_test = gen_rtx_AND (mode, and_reg, NULL);
      shift_test = gen_rtx_AND (mode, gen_rtx_ASHIFTRT (mode, and_reg, NULL),
                                const1_rtx);
    }
  else
    {
      /* Change the mode of the previously-created rtxes.  */
      PUT_MODE (and_reg, mode);
      PUT_MODE (and_test, mode);
      PUT_MODE (shift_test, mode);
      PUT_MODE (XEXP (shift_test, 0), mode);
    }

  /* Fill in the integers.  */
  XEXP (and_test, 1) = GEN_INT ((unsigned HOST_WIDE_INT) 1 << bitnum);
  XEXP (XEXP (shift_test, 0), 1) = GEN_INT (bitnum);

  return (rtx_cost (and_test, IF_THEN_ELSE)
          <= rtx_cost (shift_test, IF_THEN_ELSE));
}
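
/* For example, with MODE == SImode and BITNUM == 3 the two rtxes costed
   above are roughly (and:SI (reg:SI) (const_int 8)) and
   (and:SI (ashiftrt:SI (reg:SI) (const_int 3)) (const_int 1)); when the
   masked form is no more expensive, do_jump below restores it in place of
   fold_single_bit_test's shifted form.  */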

/* Generate code to evaluate EXP and jump to IF_FALSE_LABEL if
   the result is zero, or IF_TRUE_LABEL if the result is one.
   Either of IF_FALSE_LABEL and IF_TRUE_LABEL may be zero,
   meaning fall through in that case.

   do_jump always does any pending stack adjust except when it does not
   actually perform a jump.  An example where there is no jump
   is when EXP is `(foo (), 0)' and IF_FALSE_LABEL is null.

   This function is responsible for optimizing cases such as
   &&, || and comparison operators in EXP.  */

void
do_jump (tree exp, rtx if_false_label, rtx if_true_label)
{
  enum tree_code code = TREE_CODE (exp);
  /* Some cases need to create a label to jump to
     in order to properly fall through.
     These cases set DROP_THROUGH_LABEL nonzero.  */
  rtx drop_through_label = 0;
  rtx temp;
  int i;
  tree type;
  enum machine_mode mode;

  switch (code)
    {
    case ERROR_MARK:
      break;

    case INTEGER_CST:
      temp = integer_zerop (exp) ? if_false_label : if_true_label;
      if (temp)
        emit_jump (temp);
      break;

#if 0
      /* This is not true with #pragma weak  */
    case ADDR_EXPR:
      /* The address of something can never be zero.  */
      if (if_true_label)
        emit_jump (if_true_label);
      break;
#endif

    case UNSAVE_EXPR:
      do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
      TREE_OPERAND (exp, 0)
        = lang_hooks.unsave_expr_now (TREE_OPERAND (exp, 0));
      break;

    case NOP_EXPR:
      if (TREE_CODE (TREE_OPERAND (exp, 0)) == COMPONENT_REF
          || TREE_CODE (TREE_OPERAND (exp, 0)) == BIT_FIELD_REF
          || TREE_CODE (TREE_OPERAND (exp, 0)) == ARRAY_REF
          || TREE_CODE (TREE_OPERAND (exp, 0)) == ARRAY_RANGE_REF)
        goto normal;
    case CONVERT_EXPR:
      /* If we are narrowing the operand, we have to do the compare in the
         narrower mode.  */
      if ((TYPE_PRECISION (TREE_TYPE (exp))
           < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0)))))
        goto normal;
    case NON_LVALUE_EXPR:
    case ABS_EXPR:
    case NEGATE_EXPR:
    case LROTATE_EXPR:
    case RROTATE_EXPR:
      /* These cannot change zero->nonzero or vice versa.  */
      do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
      break;

#if 0
      /* This is never less insns than evaluating the PLUS_EXPR followed by
         a test and can be longer if the test is eliminated.  */
    case PLUS_EXPR:
      /* Reduce to minus.  */
      exp = build (MINUS_EXPR, TREE_TYPE (exp),
                   TREE_OPERAND (exp, 0),
                   fold (build1 (NEGATE_EXPR, TREE_TYPE (TREE_OPERAND (exp, 1)),
                                 TREE_OPERAND (exp, 1))));
      /* Process as MINUS.  */
#endif

    case MINUS_EXPR:
      /* Nonzero iff operands of minus differ.  */
      do_compare_and_jump (build (NE_EXPR, TREE_TYPE (exp),
                                  TREE_OPERAND (exp, 0),
                                  TREE_OPERAND (exp, 1)),
                           NE, NE, if_false_label, if_true_label);
      break;

    case BIT_AND_EXPR:
      /* fold_single_bit_test() converts (X & (1 << C)) into (X >> C) & 1.
         See if the former is preferred for jump tests and restore it
         if so.  */
      if (TREE_CODE (TREE_OPERAND (exp, 0)) == RSHIFT_EXPR
          && integer_onep (TREE_OPERAND (exp, 1)))
        {
          tree arg = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
          tree shift = TREE_OPERAND (TREE_OPERAND (exp, 0), 1);
          tree one = TREE_OPERAND (exp, 1);
          tree argtype = TREE_TYPE (arg);
          if (TREE_CODE (shift) == INTEGER_CST
              && compare_tree_int (shift, 0) > 0
              && compare_tree_int (shift, HOST_BITS_PER_WIDE_INT) < 0
              && prefer_and_bit_test (TYPE_MODE (argtype),
                                      TREE_INT_CST_LOW (shift)))
            {
              do_jump (build (BIT_AND_EXPR, argtype, arg,
                              fold (build (LSHIFT_EXPR, argtype, one, shift))),
                       if_false_label, if_true_label);
              break;
            }
        }

      /* If we are AND'ing with a small constant, do this comparison in the
         smallest type that fits.  If the machine doesn't have comparisons
         that small, it will be converted back to the wider comparison.
         This helps if we are testing the sign bit of a narrower object.
         combine can't do this for us because it can't know whether a
         ZERO_EXTRACT or a compare in a smaller mode exists, but we do.  */
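
      /* For example, a jump on `x & 0x80', with X a 32-bit int, can then
         be retried as an 8-bit comparison: tree_floor_log2 returns 7, so
         an 8-bit integer mode is requested, and the narrower compare is
         used whenever the target provides one.  */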

      if (! SLOW_BYTE_ACCESS
          && TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
          && TYPE_PRECISION (TREE_TYPE (exp)) <= HOST_BITS_PER_WIDE_INT
          && (i = tree_floor_log2 (TREE_OPERAND (exp, 1))) >= 0
          && (mode = mode_for_size (i + 1, MODE_INT, 0)) != BLKmode
          && (type = lang_hooks.types.type_for_mode (mode, 1)) != 0
          && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (exp))
          && (cmp_optab->handlers[(int) TYPE_MODE (type)].insn_code
              != CODE_FOR_nothing))
        {
          do_jump (convert (type, exp), if_false_label, if_true_label);
          break;
        }
      goto normal;

    case TRUTH_NOT_EXPR:
      do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);
      break;

    case TRUTH_ANDIF_EXPR:
      if (if_false_label == 0)
        if_false_label = drop_through_label = gen_label_rtx ();
      do_jump (TREE_OPERAND (exp, 0), if_false_label, NULL_RTX);
      do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
      break;

    case TRUTH_ORIF_EXPR:
      if (if_true_label == 0)
        if_true_label = drop_through_label = gen_label_rtx ();
      do_jump (TREE_OPERAND (exp, 0), NULL_RTX, if_true_label);
      do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
      break;

    case COMPOUND_EXPR:
      push_temp_slots ();
      expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
      preserve_temp_slots (NULL_RTX);
      free_temp_slots ();
      pop_temp_slots ();
      do_pending_stack_adjust ();
      do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
      break;

    case COMPONENT_REF:
    case BIT_FIELD_REF:
    case ARRAY_REF:
    case ARRAY_RANGE_REF:
      {
        HOST_WIDE_INT bitsize, bitpos;
        int unsignedp;
        enum machine_mode mode;
        tree type;
        tree offset;
        int volatilep = 0;

        /* Get description of this reference.  We don't actually care
           about the underlying object here.  */
        get_inner_reference (exp, &bitsize, &bitpos, &offset, &mode,
                             &unsignedp, &volatilep);

        type = lang_hooks.types.type_for_size (bitsize, unsignedp);
        if (! SLOW_BYTE_ACCESS
            && type != 0 && bitsize >= 0
            && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (exp))
            && (cmp_optab->handlers[(int) TYPE_MODE (type)].insn_code
                != CODE_FOR_nothing))
          {
            do_jump (convert (type, exp), if_false_label, if_true_label);
            break;
          }
        goto normal;
      }

    case COND_EXPR:
      /* Do (a ? 1 : 0) and (a ? 0 : 1) as special cases.  */
      if (integer_onep (TREE_OPERAND (exp, 1))
          && integer_zerop (TREE_OPERAND (exp, 2)))
        do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);

      else if (integer_zerop (TREE_OPERAND (exp, 1))
               && integer_onep (TREE_OPERAND (exp, 2)))
        do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);

      else
        {
          rtx label1 = gen_label_rtx ();
          drop_through_label = gen_label_rtx ();

          do_jump (TREE_OPERAND (exp, 0), label1, NULL_RTX);

          /* Now the THEN-expression.  */
          do_jump (TREE_OPERAND (exp, 1),
                   if_false_label ? if_false_label : drop_through_label,
                   if_true_label ? if_true_label : drop_through_label);
          /* In case the do_jump just above never jumps.  */
          do_pending_stack_adjust ();
          emit_label (label1);

          /* Now the ELSE-expression.  */
          do_jump (TREE_OPERAND (exp, 2),
                   if_false_label ? if_false_label : drop_through_label,
                   if_true_label ? if_true_label : drop_through_label);
        }
      break;

    case EQ_EXPR:
      {
        tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));

        if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_COMPLEX_FLOAT
            || GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_COMPLEX_INT)
          {
            tree exp0 = save_expr (TREE_OPERAND (exp, 0));
            tree exp1 = save_expr (TREE_OPERAND (exp, 1));
            do_jump
              (fold
               (build (TRUTH_ANDIF_EXPR, TREE_TYPE (exp),
                       fold (build (EQ_EXPR, TREE_TYPE (exp),
                                    fold (build1 (REALPART_EXPR,
                                                  TREE_TYPE (inner_type),
                                                  exp0)),
                                    fold (build1 (REALPART_EXPR,
                                                  TREE_TYPE (inner_type),
                                                  exp1)))),
                       fold (build (EQ_EXPR, TREE_TYPE (exp),
                                    fold (build1 (IMAGPART_EXPR,
                                                  TREE_TYPE (inner_type),
                                                  exp0)),
                                    fold (build1 (IMAGPART_EXPR,
                                                  TREE_TYPE (inner_type),
                                                  exp1)))))),
               if_false_label, if_true_label);
          }

        else if (integer_zerop (TREE_OPERAND (exp, 1)))
          do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);

        else if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_INT
                 && !can_compare_p (EQ, TYPE_MODE (inner_type), ccp_jump))
          do_jump_by_parts_equality (exp, if_false_label, if_true_label);
        else
          do_compare_and_jump (exp, EQ, EQ, if_false_label, if_true_label);
        break;
      }

    case NE_EXPR:
      {
        tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));

        if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_COMPLEX_FLOAT
            || GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_COMPLEX_INT)
          {
            tree exp0 = save_expr (TREE_OPERAND (exp, 0));
            tree exp1 = save_expr (TREE_OPERAND (exp, 1));
            do_jump
              (fold
               (build (TRUTH_ORIF_EXPR, TREE_TYPE (exp),
                       fold (build (NE_EXPR, TREE_TYPE (exp),
                                    fold (build1 (REALPART_EXPR,
                                                  TREE_TYPE (inner_type),
                                                  exp0)),
                                    fold (build1 (REALPART_EXPR,
                                                  TREE_TYPE (inner_type),
                                                  exp1)))),
                       fold (build (NE_EXPR, TREE_TYPE (exp),
                                    fold (build1 (IMAGPART_EXPR,
                                                  TREE_TYPE (inner_type),
                                                  exp0)),
                                    fold (build1 (IMAGPART_EXPR,
                                                  TREE_TYPE (inner_type),
                                                  exp1)))))),
               if_false_label, if_true_label);
          }

        else if (integer_zerop (TREE_OPERAND (exp, 1)))
          do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);

        else if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_INT
                 && !can_compare_p (NE, TYPE_MODE (inner_type), ccp_jump))
          do_jump_by_parts_equality (exp, if_true_label, if_false_label);
        else
          do_compare_and_jump (exp, NE, NE, if_false_label, if_true_label);
        break;
      }

    case LT_EXPR:
      mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
      if (GET_MODE_CLASS (mode) == MODE_INT
          && ! can_compare_p (LT, mode, ccp_jump))
        do_jump_by_parts_greater (exp, 1, if_false_label, if_true_label);
      else
        do_compare_and_jump (exp, LT, LTU, if_false_label, if_true_label);
      break;

    case LE_EXPR:
      mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
      if (GET_MODE_CLASS (mode) == MODE_INT
          && ! can_compare_p (LE, mode, ccp_jump))
        do_jump_by_parts_greater (exp, 0, if_true_label, if_false_label);
      else
        do_compare_and_jump (exp, LE, LEU, if_false_label, if_true_label);
      break;

    case GT_EXPR:
      mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
      if (GET_MODE_CLASS (mode) == MODE_INT
          && ! can_compare_p (GT, mode, ccp_jump))
        do_jump_by_parts_greater (exp, 0, if_false_label, if_true_label);
      else
        do_compare_and_jump (exp, GT, GTU, if_false_label, if_true_label);
      break;

    case GE_EXPR:
      mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
      if (GET_MODE_CLASS (mode) == MODE_INT
          && ! can_compare_p (GE, mode, ccp_jump))
        do_jump_by_parts_greater (exp, 1, if_true_label, if_false_label);
      else
        do_compare_and_jump (exp, GE, GEU, if_false_label, if_true_label);
      break;

    case UNORDERED_EXPR:
    case ORDERED_EXPR:
      {
        enum rtx_code cmp, rcmp;
        int do_rev;

        if (code == UNORDERED_EXPR)
          cmp = UNORDERED, rcmp = ORDERED;
        else
          cmp = ORDERED, rcmp = UNORDERED;
        mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));

        do_rev = 0;
        if (! can_compare_p (cmp, mode, ccp_jump)
            && (can_compare_p (rcmp, mode, ccp_jump)
                /* If the target doesn't provide either UNORDERED or ORDERED
                   comparisons, canonicalize on UNORDERED for the library.  */
                || rcmp == UNORDERED))
          do_rev = 1;

        if (! do_rev)
          do_compare_and_jump (exp, cmp, cmp, if_false_label, if_true_label);
        else
          do_compare_and_jump (exp, rcmp, rcmp, if_true_label, if_false_label);
      }
      break;

    {
      enum rtx_code rcode1;
      enum tree_code tcode1, tcode2;

      case UNLT_EXPR:
        rcode1 = UNLT;
        tcode1 = UNORDERED_EXPR;
        tcode2 = LT_EXPR;
        goto unordered_bcc;
      case UNLE_EXPR:
        rcode1 = UNLE;
        tcode1 = UNORDERED_EXPR;
        tcode2 = LE_EXPR;
        goto unordered_bcc;
      case UNGT_EXPR:
        rcode1 = UNGT;
        tcode1 = UNORDERED_EXPR;
        tcode2 = GT_EXPR;
        goto unordered_bcc;
      case UNGE_EXPR:
        rcode1 = UNGE;
        tcode1 = UNORDERED_EXPR;
        tcode2 = GE_EXPR;
        goto unordered_bcc;
      case UNEQ_EXPR:
        rcode1 = UNEQ;
        tcode1 = UNORDERED_EXPR;
        tcode2 = EQ_EXPR;
        goto unordered_bcc;
      case LTGT_EXPR:
        /* It is ok for LTGT_EXPR to trap when the result is unordered,
           so expand to (a < b) || (a > b).  */
        rcode1 = LTGT;
        tcode1 = LT_EXPR;
        tcode2 = GT_EXPR;
        goto unordered_bcc;

      unordered_bcc:
        mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
        if (can_compare_p (rcode1, mode, ccp_jump))
          do_compare_and_jump (exp, rcode1, rcode1, if_false_label,
                               if_true_label);
        else
          {
            tree op0 = save_expr (TREE_OPERAND (exp, 0));
            tree op1 = save_expr (TREE_OPERAND (exp, 1));
            tree cmp0, cmp1;

            /* If the target doesn't support combined unordered
               compares, decompose into two comparisons.  */
            cmp0 = fold (build (tcode1, TREE_TYPE (exp), op0, op1));
            cmp1 = fold (build (tcode2, TREE_TYPE (exp), op0, op1));
            exp = build (TRUTH_ORIF_EXPR, TREE_TYPE (exp), cmp0, cmp1);
            do_jump (exp, if_false_label, if_true_label);
          }
      }
      break;

      /* Special case:
         __builtin_expect (<test>, 0) and
         __builtin_expect (<test>, 1)

         We need to do this here, so that <test> is not converted to a SCC
         operation on machines that use condition code registers and COMPARE
         like the PowerPC, and then the jump is done based on whether the SCC
         operation produced a 1 or 0.  */
    case CALL_EXPR:
      /* Check for a built-in function.  */
      {
        tree fndecl = get_callee_fndecl (exp);
        tree arglist = TREE_OPERAND (exp, 1);

        if (fndecl
            && DECL_BUILT_IN (fndecl)
            && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_EXPECT
            && arglist != NULL_TREE
            && TREE_CHAIN (arglist) != NULL_TREE)
          {
            rtx seq = expand_builtin_expect_jump (exp, if_false_label,
                                                  if_true_label);

            if (seq != NULL_RTX)
              {
                emit_insn (seq);
                return;
              }
          }
      }
      /* Fall through and generate the normal code.  */

    default:
    normal:
      temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);
#if 0
      /* This is not needed any more and causes poor code since it causes
         comparisons and tests from non-SI objects to have different code
         sequences.  */
      /* Copy to register to avoid generating bad insns by cse
         from (set (mem ...) (arithop))  (set (cc0) (mem ...)).  */
      if (!cse_not_expected && MEM_P (temp))
        temp = copy_to_reg (temp);
#endif
      do_pending_stack_adjust ();

      if (GET_CODE (temp) == CONST_INT
          || (GET_CODE (temp) == CONST_DOUBLE && GET_MODE (temp) == VOIDmode)
          || GET_CODE (temp) == LABEL_REF)
        {
          rtx target = temp == const0_rtx ? if_false_label : if_true_label;
          if (target)
            emit_jump (target);
        }
      else if (GET_MODE_CLASS (GET_MODE (temp)) == MODE_INT
               && ! can_compare_p (NE, GET_MODE (temp), ccp_jump))
        /* Note swapping the labels gives us not-equal.  */
        do_jump_by_parts_equality_rtx (temp, if_true_label, if_false_label);
      else if (GET_MODE (temp) != VOIDmode)
        {
          /* The RTL optimizers prefer comparisons against pseudos.  */
          if (GET_CODE (temp) == SUBREG)
            {
              /* Compare promoted variables in their promoted mode.  */
              if (SUBREG_PROMOTED_VAR_P (temp)
                  && REG_P (XEXP (temp, 0)))
                temp = XEXP (temp, 0);
              else
                temp = copy_to_reg (temp);
            }
          do_compare_rtx_and_jump (temp, CONST0_RTX (GET_MODE (temp)),
                                   NE, TYPE_UNSIGNED (TREE_TYPE (exp)),
                                   GET_MODE (temp), NULL_RTX,
                                   if_false_label, if_true_label);
        }
      else
        abort ();
    }

  if (drop_through_label)
    {
      /* If do_jump produces code that might be jumped around,
         do any stack adjusts from that code, before the place
         where control merges in.  */
      do_pending_stack_adjust ();
      emit_label (drop_through_label);
    }
}
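
/* For illustration, do_jump on `a && b' with only IF_FALSE_LABEL set
   emits roughly

       <evaluate a; branch to IF_FALSE_LABEL if zero>
       <evaluate b; branch to IF_FALSE_LABEL if zero>
       ... fall through when both are nonzero ...

   via the TRUTH_ANDIF_EXPR case above; `a || b' is the mirror image,
   handled by the TRUTH_ORIF_EXPR case.  */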
\f
/* Given a comparison expression EXP for values too wide to be compared
   with one insn, test the comparison and jump to the appropriate label.
   The code of EXP is ignored; we always test GT if SWAP is 0,
   and LT if SWAP is 1.  */

static void
do_jump_by_parts_greater (tree exp, int swap, rtx if_false_label,
                          rtx if_true_label)
{
  rtx op0 = expand_expr (TREE_OPERAND (exp, swap), NULL_RTX, VOIDmode, 0);
  rtx op1 = expand_expr (TREE_OPERAND (exp, !swap), NULL_RTX, VOIDmode, 0);
  enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
  int unsignedp = TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0)));

  do_jump_by_parts_greater_rtx (mode, unsignedp, op0, op1, if_false_label,
                                if_true_label);
}

/* Compare OP0 with OP1, word at a time, in mode MODE.
   UNSIGNEDP says to do unsigned comparison.
   Jump to IF_TRUE_LABEL if OP0 is greater, IF_FALSE_LABEL otherwise.  */

void
do_jump_by_parts_greater_rtx (enum machine_mode mode, int unsignedp, rtx op0,
                              rtx op1, rtx if_false_label, rtx if_true_label)
{
  int nwords = (GET_MODE_SIZE (mode) / UNITS_PER_WORD);
  rtx drop_through_label = 0;
  int i;

  if (! if_true_label || ! if_false_label)
    drop_through_label = gen_label_rtx ();
  if (! if_true_label)
    if_true_label = drop_through_label;
  if (! if_false_label)
    if_false_label = drop_through_label;

  /* Compare a word at a time, high order first.  */
  for (i = 0; i < nwords; i++)
    {
      rtx op0_word, op1_word;

      if (WORDS_BIG_ENDIAN)
        {
          op0_word = operand_subword_force (op0, i, mode);
          op1_word = operand_subword_force (op1, i, mode);
        }
      else
        {
          op0_word = operand_subword_force (op0, nwords - 1 - i, mode);
          op1_word = operand_subword_force (op1, nwords - 1 - i, mode);
        }

      /* All but high-order word must be compared as unsigned.  */
      do_compare_rtx_and_jump (op0_word, op1_word, GT,
                               (unsignedp || i > 0), word_mode, NULL_RTX,
                               NULL_RTX, if_true_label);

      /* Consider lower words only if these are equal.  */
      do_compare_rtx_and_jump (op0_word, op1_word, NE, unsignedp, word_mode,
                               NULL_RTX, NULL_RTX, if_false_label);
    }

  if (if_false_label)
    emit_jump (if_false_label);
  if (drop_through_label)
    emit_label (drop_through_label);
}
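
/* Schematically, a GT comparison of two DImode values on a 32-bit target
   (nwords == 2) expands, high-order word first, into

       if (op0.high >  op1.high) goto if_true_label;   [signedness as given]
       if (op0.high != op1.high) goto if_false_label;
       if (op0.low  >  op1.low)  goto if_true_label;   [low word is unsigned]
       goto if_false_label;

   where .high and .low stand for the word_mode subwords used above.  */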

/* Given an EQ_EXPR expression EXP for values too wide to be compared
   with one insn, test the comparison and jump to the appropriate label.  */

static void
do_jump_by_parts_equality (tree exp, rtx if_false_label, rtx if_true_label)
{
  rtx op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
  rtx op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
  enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
  int nwords = (GET_MODE_SIZE (mode) / UNITS_PER_WORD);
  int i;
  rtx drop_through_label = 0;

  if (! if_false_label)
    drop_through_label = if_false_label = gen_label_rtx ();

  for (i = 0; i < nwords; i++)
    do_compare_rtx_and_jump (operand_subword_force (op0, i, mode),
                             operand_subword_force (op1, i, mode),
                             EQ, TYPE_UNSIGNED (TREE_TYPE (exp)),
                             word_mode, NULL_RTX, if_false_label, NULL_RTX);

  if (if_true_label)
    emit_jump (if_true_label);
  if (drop_through_label)
    emit_label (drop_through_label);
}
\f
/* Jump according to whether OP0 is 0.
   We assume that OP0 has an integer mode that is too wide
   for the available compare insns.  */

void
do_jump_by_parts_equality_rtx (rtx op0, rtx if_false_label, rtx if_true_label)
{
  int nwords = GET_MODE_SIZE (GET_MODE (op0)) / UNITS_PER_WORD;
  rtx part;
  int i;
  rtx drop_through_label = 0;

  /* The fastest way of doing this comparison on almost any machine is to
     "or" all the words and compare the result.  If all have to be loaded
     from memory and this is a very wide item, it's possible this may
     be slower, but that's highly unlikely.  */

  part = gen_reg_rtx (word_mode);
  emit_move_insn (part, operand_subword_force (op0, 0, GET_MODE (op0)));
  for (i = 1; i < nwords && part != 0; i++)
    part = expand_binop (word_mode, ior_optab, part,
                         operand_subword_force (op0, i, GET_MODE (op0)),
                         part, 1, OPTAB_WIDEN);

  if (part != 0)
    {
      do_compare_rtx_and_jump (part, const0_rtx, EQ, 1, word_mode,
                               NULL_RTX, if_false_label, if_true_label);

      return;
    }

  /* If we couldn't do the "or" simply, do this with a series of compares.  */
  if (! if_false_label)
    drop_through_label = if_false_label = gen_label_rtx ();

  for (i = 0; i < nwords; i++)
    do_compare_rtx_and_jump (operand_subword_force (op0, i, GET_MODE (op0)),
                             const0_rtx, EQ, 1, word_mode, NULL_RTX,
                             if_false_label, NULL_RTX);

  if (if_true_label)
    emit_jump (if_true_label);

  if (drop_through_label)
    emit_label (drop_through_label);
}
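
/* Schematically, for a DImode OP0 on a 32-bit target the fast path above
   ORs the two word_mode subwords into one pseudo and then does

       if (part == 0) goto if_true_label; else goto if_false_label;

   falling back to one compare per subword only when the IOR cannot be
   expanded.  */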
\f
/* Generate code for a comparison of OP0 and OP1 with rtx code CODE.
   (including code to compute the values to be compared)
   and set (CC0) according to the result.
   The decision as to signed or unsigned comparison must be made by the caller.

   We force a stack adjustment unless there are currently
   things pushed on the stack that aren't yet used.

   If MODE is BLKmode, SIZE is an RTX giving the size of the objects being
   compared.  */

rtx
compare_from_rtx (rtx op0, rtx op1, enum rtx_code code, int unsignedp,
                  enum machine_mode mode, rtx size)
{
  rtx tem;

  /* If one operand is constant, make it the second one.  Only do this
     if the other operand is not constant as well.  */

  if (swap_commutative_operands_p (op0, op1))
    {
      tem = op0;
      op0 = op1;
      op1 = tem;
      code = swap_condition (code);
    }

  if (flag_force_mem)
    {
      op0 = force_not_mem (op0);
      op1 = force_not_mem (op1);
    }

  do_pending_stack_adjust ();

  code = unsignedp ? unsigned_condition (code) : code;
  if (0 != (tem = simplify_relational_operation (code, mode, VOIDmode,
                                                 op0, op1)))
    {
      if (CONSTANT_P (tem))
        return tem;

      code = GET_CODE (tem);
      mode = GET_MODE (tem);
      op0 = XEXP (tem, 0);
      op1 = XEXP (tem, 1);
      unsignedp = (code == GTU || code == LTU || code == GEU || code == LEU);
    }

  emit_cmp_insn (op0, op1, code, size, mode, unsignedp);

#if HAVE_cc0
  return gen_rtx_fmt_ee (code, VOIDmode, cc0_rtx, const0_rtx);
#else
  return gen_rtx_fmt_ee (code, VOIDmode, op0, op1);
#endif
}
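
/* For example, on a cc0 target compare_from_rtx (a, b, GT, 0, SImode,
   NULL_RTX) emits the cc0-setting compare and returns
   (gt (cc0) (const_int 0)); on other targets it returns (gt a b), unless
   the comparison simplifies to a constant first.  */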

/* Like do_compare_and_jump but expects the values to compare as two rtx's.
   The decision as to signed or unsigned comparison must be made by the caller.

   If MODE is BLKmode, SIZE is an RTX giving the size of the objects being
   compared.  */

void
do_compare_rtx_and_jump (rtx op0, rtx op1, enum rtx_code code, int unsignedp,
                         enum machine_mode mode, rtx size, rtx if_false_label,
                         rtx if_true_label)
{
  rtx tem;
  int dummy_true_label = 0;

  /* Reverse the comparison if that is safe and we want to jump if it is
     false.  */
  if (! if_true_label && ! FLOAT_MODE_P (mode))
    {
      if_true_label = if_false_label;
      if_false_label = 0;
      code = reverse_condition (code);
    }

  /* If one operand is constant, make it the second one.  Only do this
     if the other operand is not constant as well.  */

  if (swap_commutative_operands_p (op0, op1))
    {
      tem = op0;
      op0 = op1;
      op1 = tem;
      code = swap_condition (code);
    }

  if (flag_force_mem)
    {
      op0 = force_not_mem (op0);
      op1 = force_not_mem (op1);
    }

  do_pending_stack_adjust ();

  code = unsignedp ? unsigned_condition (code) : code;
  if (0 != (tem = simplify_relational_operation (code, mode, VOIDmode,
                                                 op0, op1)))
    {
      if (CONSTANT_P (tem))
        {
          rtx label = (tem == const0_rtx || tem == CONST0_RTX (mode))
                      ? if_false_label : if_true_label;
          if (label)
            emit_jump (label);
          return;
        }

      code = GET_CODE (tem);
      mode = GET_MODE (tem);
      op0 = XEXP (tem, 0);
      op1 = XEXP (tem, 1);
      unsignedp = (code == GTU || code == LTU || code == GEU || code == LEU);
    }

  if (! if_true_label)
    {
      dummy_true_label = 1;
      if_true_label = gen_label_rtx ();
    }

  emit_cmp_and_jump_insns (op0, op1, code, size, mode, unsignedp,
                           if_true_label);

  if (if_false_label)
    emit_jump (if_false_label);
  if (dummy_true_label)
    emit_label (if_true_label);
}
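
/* For instance, calling do_compare_rtx_and_jump with CODE == EQ, an
   integer MODE and only IF_FALSE_LABEL set is rewritten above into a
   single NE branch to that label, so control falls through when the
   operands are equal.  */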

/* Generate code for a comparison expression EXP (including code to compute
   the values to be compared) and a conditional jump to IF_FALSE_LABEL and/or
   IF_TRUE_LABEL.  One of the labels can be NULL_RTX, in which case the
   generated code will drop through.
   SIGNED_CODE should be the rtx operation for this comparison for
   signed data; UNSIGNED_CODE, likewise for use if data is unsigned.

   We force a stack adjustment unless there are currently
   things pushed on the stack that aren't yet used.  */

static void
do_compare_and_jump (tree exp, enum rtx_code signed_code,
                     enum rtx_code unsigned_code, rtx if_false_label,
                     rtx if_true_label)
{
  rtx op0, op1;
  tree type;
  enum machine_mode mode;
  int unsignedp;
  enum rtx_code code;

  /* Don't crash if the comparison was erroneous.  */
  op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
  if (TREE_CODE (TREE_OPERAND (exp, 0)) == ERROR_MARK)
    return;

  op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
  if (TREE_CODE (TREE_OPERAND (exp, 1)) == ERROR_MARK)
    return;

  type = TREE_TYPE (TREE_OPERAND (exp, 0));
  mode = TYPE_MODE (type);
  if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST
      && (TREE_CODE (TREE_OPERAND (exp, 1)) != INTEGER_CST
          || (GET_MODE_BITSIZE (mode)
              > GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp,
                                                                      1)))))))
    {
      /* op0 might have been replaced by promoted constant, in which
         case the type of second argument should be used.  */
      type = TREE_TYPE (TREE_OPERAND (exp, 1));
      mode = TYPE_MODE (type);
    }
  unsignedp = TYPE_UNSIGNED (type);
  code = unsignedp ? unsigned_code : signed_code;

#ifdef HAVE_canonicalize_funcptr_for_compare
  /* If function pointers need to be "canonicalized" before they can
     be reliably compared, then canonicalize them.  */
  if (HAVE_canonicalize_funcptr_for_compare
      && TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == POINTER_TYPE
      && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))))
          == FUNCTION_TYPE))
    {
      rtx new_op0 = gen_reg_rtx (mode);

      emit_insn (gen_canonicalize_funcptr_for_compare (new_op0, op0));
      op0 = new_op0;
    }

  if (HAVE_canonicalize_funcptr_for_compare
      && TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 1))) == POINTER_TYPE
      && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 1))))
          == FUNCTION_TYPE))
    {
      rtx new_op1 = gen_reg_rtx (mode);

      emit_insn (gen_canonicalize_funcptr_for_compare (new_op1, op1));
      op1 = new_op1;
    }
#endif

  do_compare_rtx_and_jump (op0, op1, code, unsignedp, mode,
                           ((mode == BLKmode)
                            ? expr_size (TREE_OPERAND (exp, 0)) : NULL_RTX),
                           if_false_label, if_true_label);
}

#include "gt-dojump.h"