]> git.ipfire.org Git - thirdparty/gcc.git/blame - gcc/dojump.c
2003-03-13 Ansgar Esztermann <ansgar@thphy.uni-duesseldorf.de>
[thirdparty/gcc.git] / gcc / dojump.c
CommitLineData
48fefb59 1/* Convert tree expression to rtl instructions, for GNU compiler.
2 Copyright (C) 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
3 2000, 2001, 2002, 2003 Free Software Foundation, Inc.
4
5This file is part of GCC.
6
7GCC is free software; you can redistribute it and/or modify it under
8the terms of the GNU General Public License as published by the Free
9Software Foundation; either version 2, or (at your option) any later
10version.
11
12GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13WARRANTY; without even the implied warranty of MERCHANTABILITY or
14FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
15for more details.
16
17You should have received a copy of the GNU General Public License
18along with GCC; see the file COPYING. If not, write to the Free
19Software Foundation, 59 Temple Place - Suite 330, Boston, MA
2002111-1307, USA. */
21
22#include "config.h"
23#include "system.h"
24#include "coretypes.h"
25#include "tm.h"
26#include "rtl.h"
27#include "tree.h"
28#include "flags.h"
29#include "function.h"
30#include "insn-config.h"
31#include "insn-attr.h"
32/* Include expr.h after insn-config.h so we get HAVE_conditional_move. */
33#include "expr.h"
34#include "optabs.h"
35#include "langhooks.h"
36
37static void do_jump_by_parts_greater PARAMS ((tree, int, rtx, rtx));
38static void do_jump_by_parts_equality PARAMS ((tree, rtx, rtx));
39static void do_compare_and_jump PARAMS ((tree, enum rtx_code, enum rtx_code,
40 rtx, rtx));
41
42/* At the start of a function, record that we have no previously-pushed
43 arguments waiting to be popped. */
44
void
init_pending_stack_adjust ()
{
  /* No arguments have been pushed yet at function start, so there is
     nothing pending to pop.  */
  pending_stack_adjust = 0;
}
50
51/* When exiting from function, if safe, clear out any pending stack adjust
52 so the adjustment won't get done.
53
54 Note, if the current function calls alloca, then it must have a
55 frame pointer regardless of the value of flag_omit_frame_pointer. */
56
void
clear_pending_stack_adjust ()
{
#ifdef EXIT_IGNORE_STACK
  /* Only discard the pending adjustment when all of these hold:
     we are optimizing; a frame pointer exists (either it is not being
     omitted, or alloca forces one); the target ignores the stack
     pointer on exit; and this function is not a candidate for
     inlining (DECL_INLINE with inlining enabled, or -finline-functions
     in effect).  NOTE(review): presumably the inlining restriction is
     because inlined copies would still need the adjustment in their
     caller's frame -- confirm.  */
  if (optimize > 0
      && (! flag_omit_frame_pointer || current_function_calls_alloca)
      && EXIT_IGNORE_STACK
      && ! (DECL_INLINE (current_function_decl) && ! flag_no_inline)
      && ! flag_inline_functions)
    {
      /* Note the comma operator: both assignments form one statement.  */
      stack_pointer_delta -= pending_stack_adjust,
      pending_stack_adjust = 0;
    }
#endif
}
72
73/* Pop any previously-pushed arguments that have not been popped yet. */
74
75void
76do_pending_stack_adjust ()
77{
78 if (inhibit_defer_pop == 0)
79 {
80 if (pending_stack_adjust != 0)
81 adjust_stack (GEN_INT (pending_stack_adjust));
82 pending_stack_adjust = 0;
83 }
84}
85\f
86/* Expand conditional expressions. */
87
88/* Generate code to evaluate EXP and jump to LABEL if the value is zero.
89 LABEL is an rtx of code CODE_LABEL, in this function and all the
90 functions here. */
91
void
jumpifnot (exp, label)
     tree exp;
     rtx label;
{
  /* Jump to LABEL when EXP is zero; fall through when nonzero.  */
  do_jump (exp, label, NULL_RTX);
}
99
100/* Generate code to evaluate EXP and jump to LABEL if the value is nonzero. */
101
void
jumpif (exp, label)
     tree exp;
     rtx label;
{
  /* Jump to LABEL when EXP is nonzero; fall through when zero.  */
  do_jump (exp, NULL_RTX, label);
}
109
110/* Generate code to evaluate EXP and jump to IF_FALSE_LABEL if
111 the result is zero, or IF_TRUE_LABEL if the result is one.
112 Either of IF_FALSE_LABEL and IF_TRUE_LABEL may be zero,
113 meaning fall through in that case.
114
115 do_jump always does any pending stack adjust except when it does not
116 actually perform a jump. An example where there is no jump
117 is when EXP is `(foo (), 0)' and IF_FALSE_LABEL is null.
118
119 This function is responsible for optimizing cases such as
120 &&, || and comparison operators in EXP. */
121
void
do_jump (exp, if_false_label, if_true_label)
     tree exp;
     rtx if_false_label, if_true_label;
{
  enum tree_code code = TREE_CODE (exp);
  /* Some cases need to create a label to jump to
     in order to properly fall through.
     These cases set DROP_THROUGH_LABEL nonzero.  */
  rtx drop_through_label = 0;
  rtx temp;
  int i;
  tree type;
  enum machine_mode mode;

#ifdef MAX_INTEGER_COMPUTATION_MODE
  check_max_integer_computation_mode (exp);
#endif

  /* Flush any queued postincrements before emitting the test.  */
  emit_queue ();

  switch (code)
    {
    case ERROR_MARK:
      /* Erroneous expression: emit nothing.  */
      break;

    case INTEGER_CST:
      /* A constant condition jumps unconditionally (or falls through
         if the corresponding label is null).  */
      temp = integer_zerop (exp) ? if_false_label : if_true_label;
      if (temp)
        emit_jump (temp);
      break;

#if 0
      /* This is not true with #pragma weak  */
    case ADDR_EXPR:
      /* The address of something can never be zero.  */
      if (if_true_label)
        emit_jump (if_true_label);
      break;
#endif

    case NOP_EXPR:
      /* Conversions of reference-like operands must go through the
         normal path so the COMPONENT_REF narrowing below can apply.  */
      if (TREE_CODE (TREE_OPERAND (exp, 0)) == COMPONENT_REF
          || TREE_CODE (TREE_OPERAND (exp, 0)) == BIT_FIELD_REF
          || TREE_CODE (TREE_OPERAND (exp, 0)) == ARRAY_REF
          || TREE_CODE (TREE_OPERAND (exp, 0)) == ARRAY_RANGE_REF)
        goto normal;
      /* Fall through.  */
    case CONVERT_EXPR:
      /* If we are narrowing the operand, we have to do the compare in the
         narrower mode.  */
      if ((TYPE_PRECISION (TREE_TYPE (exp))
           < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0)))))
        goto normal;
      /* Fall through.  */
    case NON_LVALUE_EXPR:
    case REFERENCE_EXPR:
    case ABS_EXPR:
    case NEGATE_EXPR:
    case LROTATE_EXPR:
    case RROTATE_EXPR:
      /* These cannot change zero->nonzero or vice versa.  */
      do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
      break;

    case WITH_RECORD_EXPR:
      /* Put the object on the placeholder list, recurse through our first
         operand, and pop the list.  */
      placeholder_list = tree_cons (TREE_OPERAND (exp, 1), NULL_TREE,
                                    placeholder_list);
      do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
      placeholder_list = TREE_CHAIN (placeholder_list);
      break;

#if 0
      /* This is never less insns than evaluating the PLUS_EXPR followed by
         a test and can be longer if the test is eliminated.  */
    case PLUS_EXPR:
      /* Reduce to minus.  */
      exp = build (MINUS_EXPR, TREE_TYPE (exp),
                   TREE_OPERAND (exp, 0),
                   fold (build1 (NEGATE_EXPR, TREE_TYPE (TREE_OPERAND (exp, 1)),
                                 TREE_OPERAND (exp, 1))));
      /* Process as MINUS.  */
#endif

    case MINUS_EXPR:
      /* Nonzero iff operands of minus differ.  */
      do_compare_and_jump (build (NE_EXPR, TREE_TYPE (exp),
                                  TREE_OPERAND (exp, 0),
                                  TREE_OPERAND (exp, 1)),
                           NE, NE, if_false_label, if_true_label);
      break;

    case BIT_AND_EXPR:
      /* If we are AND'ing with a small constant, do this comparison in the
         smallest type that fits.  If the machine doesn't have comparisons
         that small, it will be converted back to the wider comparison.
         This helps if we are testing the sign bit of a narrower object.
         combine can't do this for us because it can't know whether a
         ZERO_EXTRACT or a compare in a smaller mode exists, but we do.  */

      /* Note that I and MODE/TYPE are assigned inside the condition; all
         of the subconditions must hold for the narrowing to be valid.  */
      if (! SLOW_BYTE_ACCESS
          && TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
          && TYPE_PRECISION (TREE_TYPE (exp)) <= HOST_BITS_PER_WIDE_INT
          && (i = tree_floor_log2 (TREE_OPERAND (exp, 1))) >= 0
          && (mode = mode_for_size (i + 1, MODE_INT, 0)) != BLKmode
          && (type = (*lang_hooks.types.type_for_mode) (mode, 1)) != 0
          && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (exp))
          && (cmp_optab->handlers[(int) TYPE_MODE (type)].insn_code
              != CODE_FOR_nothing))
        {
          do_jump (convert (type, exp), if_false_label, if_true_label);
          break;
        }
      goto normal;

    case TRUTH_NOT_EXPR:
      /* Logical negation: just swap the two target labels.  */
      do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);
      break;

    case TRUTH_ANDIF_EXPR:
      /* Short-circuit AND: a false first operand skips the second.  */
      if (if_false_label == 0)
        if_false_label = drop_through_label = gen_label_rtx ();
      do_jump (TREE_OPERAND (exp, 0), if_false_label, NULL_RTX);
      start_cleanup_deferral ();
      do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
      end_cleanup_deferral ();
      break;

    case TRUTH_ORIF_EXPR:
      /* Short-circuit OR: a true first operand skips the second.  */
      if (if_true_label == 0)
        if_true_label = drop_through_label = gen_label_rtx ();
      do_jump (TREE_OPERAND (exp, 0), NULL_RTX, if_true_label);
      start_cleanup_deferral ();
      do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
      end_cleanup_deferral ();
      break;

    case COMPOUND_EXPR:
      /* Evaluate the first operand for side effects only, then test
         the second.  */
      push_temp_slots ();
      expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
      preserve_temp_slots (NULL_RTX);
      free_temp_slots ();
      pop_temp_slots ();
      emit_queue ();
      do_pending_stack_adjust ();
      do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
      break;

    case COMPONENT_REF:
    case BIT_FIELD_REF:
    case ARRAY_REF:
    case ARRAY_RANGE_REF:
      {
        HOST_WIDE_INT bitsize, bitpos;
        int unsignedp;
        enum machine_mode mode;
        tree type;
        tree offset;
        int volatilep = 0;

        /* Get description of this reference.  We don't actually care
           about the underlying object here.  */
        get_inner_reference (exp, &bitsize, &bitpos, &offset, &mode,
                             &unsignedp, &volatilep);

        /* If a narrower comparison exists for the field's width,
           use it; otherwise fall through to the normal path.  */
        type = (*lang_hooks.types.type_for_size) (bitsize, unsignedp);
        if (! SLOW_BYTE_ACCESS
            && type != 0 && bitsize >= 0
            && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (exp))
            && (cmp_optab->handlers[(int) TYPE_MODE (type)].insn_code
                != CODE_FOR_nothing))
          {
            do_jump (convert (type, exp), if_false_label, if_true_label);
            break;
          }
        goto normal;
      }

    case COND_EXPR:
      /* Do (a ? 1 : 0) and (a ? 0 : 1) as special cases.  */
      if (integer_onep (TREE_OPERAND (exp, 1))
          && integer_zerop (TREE_OPERAND (exp, 2)))
        do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);

      else if (integer_zerop (TREE_OPERAND (exp, 1))
               && integer_onep (TREE_OPERAND (exp, 2)))
        do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);

      else
        {
          rtx label1 = gen_label_rtx ();
          drop_through_label = gen_label_rtx ();

          do_jump (TREE_OPERAND (exp, 0), label1, NULL_RTX);

          start_cleanup_deferral ();
          /* Now the THEN-expression.  */
          do_jump (TREE_OPERAND (exp, 1),
                   if_false_label ? if_false_label : drop_through_label,
                   if_true_label ? if_true_label : drop_through_label);
          /* In case the do_jump just above never jumps.  */
          do_pending_stack_adjust ();
          emit_label (label1);

          /* Now the ELSE-expression.  */
          do_jump (TREE_OPERAND (exp, 2),
                   if_false_label ? if_false_label : drop_through_label,
                   if_true_label ? if_true_label : drop_through_label);
          end_cleanup_deferral ();
        }
      break;

    case EQ_EXPR:
      {
        tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));

        /* Complex equality: true iff both real parts and both
           imaginary parts are equal, so rebuild as an ANDIF of two
           scalar comparisons.  save_expr keeps each operand from
           being evaluated twice.  */
        if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_COMPLEX_FLOAT
            || GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_COMPLEX_INT)
          {
            tree exp0 = save_expr (TREE_OPERAND (exp, 0));
            tree exp1 = save_expr (TREE_OPERAND (exp, 1));
            do_jump
              (fold
               (build (TRUTH_ANDIF_EXPR, TREE_TYPE (exp),
                       fold (build (EQ_EXPR, TREE_TYPE (exp),
                                    fold (build1 (REALPART_EXPR,
                                                  TREE_TYPE (inner_type),
                                                  exp0)),
                                    fold (build1 (REALPART_EXPR,
                                                  TREE_TYPE (inner_type),
                                                  exp1)))),
                       fold (build (EQ_EXPR, TREE_TYPE (exp),
                                    fold (build1 (IMAGPART_EXPR,
                                                  TREE_TYPE (inner_type),
                                                  exp0)),
                                    fold (build1 (IMAGPART_EXPR,
                                                  TREE_TYPE (inner_type),
                                                  exp1)))))),
               if_false_label, if_true_label);
          }

        /* x == 0 is !x: recurse with the labels swapped.  */
        else if (integer_zerop (TREE_OPERAND (exp, 1)))
          do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);

        else if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_INT
                 && !can_compare_p (EQ, TYPE_MODE (inner_type), ccp_jump))
          do_jump_by_parts_equality (exp, if_false_label, if_true_label);
        else
          do_compare_and_jump (exp, EQ, EQ, if_false_label, if_true_label);
        break;
      }

    case NE_EXPR:
      {
        tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));

        /* Complex inequality: true iff the real parts or the
           imaginary parts differ -- the ORIF dual of EQ_EXPR above.  */
        if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_COMPLEX_FLOAT
            || GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_COMPLEX_INT)
          {
            tree exp0 = save_expr (TREE_OPERAND (exp, 0));
            tree exp1 = save_expr (TREE_OPERAND (exp, 1));
            do_jump
              (fold
               (build (TRUTH_ORIF_EXPR, TREE_TYPE (exp),
                       fold (build (NE_EXPR, TREE_TYPE (exp),
                                    fold (build1 (REALPART_EXPR,
                                                  TREE_TYPE (inner_type),
                                                  exp0)),
                                    fold (build1 (REALPART_EXPR,
                                                  TREE_TYPE (inner_type),
                                                  exp1)))),
                       fold (build (NE_EXPR, TREE_TYPE (exp),
                                    fold (build1 (IMAGPART_EXPR,
                                                  TREE_TYPE (inner_type),
                                                  exp0)),
                                    fold (build1 (IMAGPART_EXPR,
                                                  TREE_TYPE (inner_type),
                                                  exp1)))))),
               if_false_label, if_true_label);
          }

        /* x != 0 is just x: recurse with the labels as given.  */
        else if (integer_zerop (TREE_OPERAND (exp, 1)))
          do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);

        else if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_INT
                 && !can_compare_p (NE, TYPE_MODE (inner_type), ccp_jump))
          do_jump_by_parts_equality (exp, if_true_label, if_false_label);
        else
          do_compare_and_jump (exp, NE, NE, if_false_label, if_true_label);
        break;
      }

    /* For the ordering comparisons, fall back to a word-by-word GT/LT
       test when the mode is integral but has no direct compare insn.
       The do_jump_by_parts_greater SWAP/label choices express <, <=,
       >= in terms of the canonical > test.  */
    case LT_EXPR:
      mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
      if (GET_MODE_CLASS (mode) == MODE_INT
          && ! can_compare_p (LT, mode, ccp_jump))
        do_jump_by_parts_greater (exp, 1, if_false_label, if_true_label);
      else
        do_compare_and_jump (exp, LT, LTU, if_false_label, if_true_label);
      break;

    case LE_EXPR:
      mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
      if (GET_MODE_CLASS (mode) == MODE_INT
          && ! can_compare_p (LE, mode, ccp_jump))
        do_jump_by_parts_greater (exp, 0, if_true_label, if_false_label);
      else
        do_compare_and_jump (exp, LE, LEU, if_false_label, if_true_label);
      break;

    case GT_EXPR:
      mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
      if (GET_MODE_CLASS (mode) == MODE_INT
          && ! can_compare_p (GT, mode, ccp_jump))
        do_jump_by_parts_greater (exp, 0, if_false_label, if_true_label);
      else
        do_compare_and_jump (exp, GT, GTU, if_false_label, if_true_label);
      break;

    case GE_EXPR:
      mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
      if (GET_MODE_CLASS (mode) == MODE_INT
          && ! can_compare_p (GE, mode, ccp_jump))
        do_jump_by_parts_greater (exp, 1, if_true_label, if_false_label);
      else
        do_compare_and_jump (exp, GE, GEU, if_false_label, if_true_label);
      break;

    case UNORDERED_EXPR:
    case ORDERED_EXPR:
      {
        enum rtx_code cmp, rcmp;
        int do_rev;

        if (code == UNORDERED_EXPR)
          cmp = UNORDERED, rcmp = ORDERED;
        else
          cmp = ORDERED, rcmp = UNORDERED;
        mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));

        /* Prefer the direct comparison; reverse (swapping the labels)
           when only the opposite test is available.  */
        do_rev = 0;
        if (! can_compare_p (cmp, mode, ccp_jump)
            && (can_compare_p (rcmp, mode, ccp_jump)
                /* If the target doesn't provide either UNORDERED or ORDERED
                   comparisons, canonicalize on UNORDERED for the library.  */
                || rcmp == UNORDERED))
          do_rev = 1;

        if (! do_rev)
          do_compare_and_jump (exp, cmp, cmp, if_false_label, if_true_label);
        else
          do_compare_and_jump (exp, rcmp, rcmp, if_true_label, if_false_label);
      }
      break;

    /* The unordered comparisons share two locals and a common tail,
       reached by goto from each case label inside this block.  */
    {
      enum rtx_code rcode1;
      enum tree_code tcode2;

      case UNLT_EXPR:
        rcode1 = UNLT;
        tcode2 = LT_EXPR;
        goto unordered_bcc;
      case UNLE_EXPR:
        rcode1 = UNLE;
        tcode2 = LE_EXPR;
        goto unordered_bcc;
      case UNGT_EXPR:
        rcode1 = UNGT;
        tcode2 = GT_EXPR;
        goto unordered_bcc;
      case UNGE_EXPR:
        rcode1 = UNGE;
        tcode2 = GE_EXPR;
        goto unordered_bcc;
      case UNEQ_EXPR:
        rcode1 = UNEQ;
        tcode2 = EQ_EXPR;
        goto unordered_bcc;

      unordered_bcc:
        mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
        if (can_compare_p (rcode1, mode, ccp_jump))
          do_compare_and_jump (exp, rcode1, rcode1, if_false_label,
                               if_true_label);
        else
          {
            tree op0 = save_expr (TREE_OPERAND (exp, 0));
            tree op1 = save_expr (TREE_OPERAND (exp, 1));
            tree cmp0, cmp1;

            /* If the target doesn't support combined unordered
               compares, decompose into UNORDERED + comparison.  */
            cmp0 = fold (build (UNORDERED_EXPR, TREE_TYPE (exp), op0, op1));
            cmp1 = fold (build (tcode2, TREE_TYPE (exp), op0, op1));
            exp = build (TRUTH_ORIF_EXPR, TREE_TYPE (exp), cmp0, cmp1);
            do_jump (exp, if_false_label, if_true_label);
          }
    }
    break;

    /* Special case:
       __builtin_expect (<test>, 0) and
       __builtin_expect (<test>, 1)

       We need to do this here, so that <test> is not converted to a SCC
       operation on machines that use condition code registers and COMPARE
       like the PowerPC, and then the jump is done based on whether the SCC
       operation produced a 1 or 0.  */
    case CALL_EXPR:
      /* Check for a built-in function.  */
      if (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR)
        {
          tree fndecl = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
          tree arglist = TREE_OPERAND (exp, 1);

          if (TREE_CODE (fndecl) == FUNCTION_DECL
              && DECL_BUILT_IN (fndecl)
              && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_EXPECT
              && arglist != NULL_TREE
              && TREE_CHAIN (arglist) != NULL_TREE)
            {
              rtx seq = expand_builtin_expect_jump (exp, if_false_label,
                                                    if_true_label);

              /* Early return: the builtin expander produced the whole
                 jump sequence, so skip the drop-through handling.  */
              if (seq != NULL_RTX)
                {
                  emit_insn (seq);
                  return;
                }
            }
        }
      /* fall through and generate the normal code.  */

    default:
    normal:
      /* General case: evaluate EXP and branch on its value.  */
      temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);
#if 0
      /* This is not needed any more and causes poor code since it causes
         comparisons and tests from non-SI objects to have different code
         sequences.  */
      /* Copy to register to avoid generating bad insns by cse
         from (set (mem ...) (arithop))  (set (cc0) (mem ...)).  */
      if (!cse_not_expected && GET_CODE (temp) == MEM)
        temp = copy_to_reg (temp);
#endif
      do_pending_stack_adjust ();
      /* Do any postincrements in the expression that was tested.  */
      emit_queue ();

      if (GET_CODE (temp) == CONST_INT
          || (GET_CODE (temp) == CONST_DOUBLE && GET_MODE (temp) == VOIDmode)
          || GET_CODE (temp) == LABEL_REF)
        {
          rtx target = temp == const0_rtx ? if_false_label : if_true_label;
          if (target)
            emit_jump (target);
        }
      else if (GET_MODE_CLASS (GET_MODE (temp)) == MODE_INT
               && ! can_compare_p (NE, GET_MODE (temp), ccp_jump))
        /* Note swapping the labels gives us not-equal.  */
        do_jump_by_parts_equality_rtx (temp, if_true_label, if_false_label);
      else if (GET_MODE (temp) != VOIDmode)
        do_compare_rtx_and_jump (temp, CONST0_RTX (GET_MODE (temp)),
                                 NE, TREE_UNSIGNED (TREE_TYPE (exp)),
                                 GET_MODE (temp), NULL_RTX,
                                 if_false_label, if_true_label);
      else
        abort ();
    }

  if (drop_through_label)
    {
      /* If do_jump produces code that might be jumped around,
         do any stack adjusts from that code, before the place
         where control merges in.  */
      do_pending_stack_adjust ();
      emit_label (drop_through_label);
    }
}
602\f
603/* Given a comparison expression EXP for values too wide to be compared
604 with one insn, test the comparison and jump to the appropriate label.
605 The code of EXP is ignored; we always test GT if SWAP is 0,
606 and LT if SWAP is 1. */
607
608static void
609do_jump_by_parts_greater (exp, swap, if_false_label, if_true_label)
610 tree exp;
611 int swap;
612 rtx if_false_label, if_true_label;
613{
614 rtx op0 = expand_expr (TREE_OPERAND (exp, swap), NULL_RTX, VOIDmode, 0);
615 rtx op1 = expand_expr (TREE_OPERAND (exp, !swap), NULL_RTX, VOIDmode, 0);
616 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
617 int unsignedp = TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0)));
618
619 do_jump_by_parts_greater_rtx (mode, unsignedp, op0, op1, if_false_label, if_true_label);
620}
621
622/* Compare OP0 with OP1, word at a time, in mode MODE.
623 UNSIGNEDP says to do unsigned comparison.
624 Jump to IF_TRUE_LABEL if OP0 is greater, IF_FALSE_LABEL otherwise. */
625
626void
627do_jump_by_parts_greater_rtx (mode, unsignedp, op0, op1, if_false_label, if_true_label)
628 enum machine_mode mode;
629 int unsignedp;
630 rtx op0, op1;
631 rtx if_false_label, if_true_label;
632{
633 int nwords = (GET_MODE_SIZE (mode) / UNITS_PER_WORD);
634 rtx drop_through_label = 0;
635 int i;
636
637 if (! if_true_label || ! if_false_label)
638 drop_through_label = gen_label_rtx ();
639 if (! if_true_label)
640 if_true_label = drop_through_label;
641 if (! if_false_label)
642 if_false_label = drop_through_label;
643
644 /* Compare a word at a time, high order first. */
645 for (i = 0; i < nwords; i++)
646 {
647 rtx op0_word, op1_word;
648
649 if (WORDS_BIG_ENDIAN)
650 {
651 op0_word = operand_subword_force (op0, i, mode);
652 op1_word = operand_subword_force (op1, i, mode);
653 }
654 else
655 {
656 op0_word = operand_subword_force (op0, nwords - 1 - i, mode);
657 op1_word = operand_subword_force (op1, nwords - 1 - i, mode);
658 }
659
660 /* All but high-order word must be compared as unsigned. */
661 do_compare_rtx_and_jump (op0_word, op1_word, GT,
662 (unsignedp || i > 0), word_mode, NULL_RTX,
663 NULL_RTX, if_true_label);
664
665 /* Consider lower words only if these are equal. */
666 do_compare_rtx_and_jump (op0_word, op1_word, NE, unsignedp, word_mode,
667 NULL_RTX, NULL_RTX, if_false_label);
668 }
669
670 if (if_false_label)
671 emit_jump (if_false_label);
672 if (drop_through_label)
673 emit_label (drop_through_label);
674}
675
676/* Given an EQ_EXPR expression EXP for values too wide to be compared
677 with one insn, test the comparison and jump to the appropriate label. */
678
679static void
680do_jump_by_parts_equality (exp, if_false_label, if_true_label)
681 tree exp;
682 rtx if_false_label, if_true_label;
683{
684 rtx op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
685 rtx op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
686 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
687 int nwords = (GET_MODE_SIZE (mode) / UNITS_PER_WORD);
688 int i;
689 rtx drop_through_label = 0;
690
691 if (! if_false_label)
692 drop_through_label = if_false_label = gen_label_rtx ();
693
694 for (i = 0; i < nwords; i++)
695 do_compare_rtx_and_jump (operand_subword_force (op0, i, mode),
696 operand_subword_force (op1, i, mode),
697 EQ, TREE_UNSIGNED (TREE_TYPE (exp)),
698 word_mode, NULL_RTX, if_false_label, NULL_RTX);
699
700 if (if_true_label)
701 emit_jump (if_true_label);
702 if (drop_through_label)
703 emit_label (drop_through_label);
704}
705\f
706/* Jump according to whether OP0 is 0.
707 We assume that OP0 has an integer mode that is too wide
708 for the available compare insns. */
709
710void
711do_jump_by_parts_equality_rtx (op0, if_false_label, if_true_label)
712 rtx op0;
713 rtx if_false_label, if_true_label;
714{
715 int nwords = GET_MODE_SIZE (GET_MODE (op0)) / UNITS_PER_WORD;
716 rtx part;
717 int i;
718 rtx drop_through_label = 0;
719
720 /* The fastest way of doing this comparison on almost any machine is to
721 "or" all the words and compare the result. If all have to be loaded
722 from memory and this is a very wide item, it's possible this may
723 be slower, but that's highly unlikely. */
724
725 part = gen_reg_rtx (word_mode);
726 emit_move_insn (part, operand_subword_force (op0, 0, GET_MODE (op0)));
727 for (i = 1; i < nwords && part != 0; i++)
728 part = expand_binop (word_mode, ior_optab, part,
729 operand_subword_force (op0, i, GET_MODE (op0)),
730 part, 1, OPTAB_WIDEN);
731
732 if (part != 0)
733 {
734 do_compare_rtx_and_jump (part, const0_rtx, EQ, 1, word_mode,
735 NULL_RTX, if_false_label, if_true_label);
736
737 return;
738 }
739
740 /* If we couldn't do the "or" simply, do this with a series of compares. */
741 if (! if_false_label)
742 drop_through_label = if_false_label = gen_label_rtx ();
743
744 for (i = 0; i < nwords; i++)
745 do_compare_rtx_and_jump (operand_subword_force (op0, i, GET_MODE (op0)),
746 const0_rtx, EQ, 1, word_mode, NULL_RTX,
747 if_false_label, NULL_RTX);
748
749 if (if_true_label)
750 emit_jump (if_true_label);
751
752 if (drop_through_label)
753 emit_label (drop_through_label);
754}
755\f
756/* Generate code for a comparison of OP0 and OP1 with rtx code CODE.
757 (including code to compute the values to be compared)
758 and set (CC0) according to the result.
759 The decision as to signed or unsigned comparison must be made by the caller.
760
761 We force a stack adjustment unless there are currently
762 things pushed on the stack that aren't yet used.
763
764 If MODE is BLKmode, SIZE is an RTX giving the size of the objects being
765 compared. */
766
rtx
compare_from_rtx (op0, op1, code, unsignedp, mode, size)
     rtx op0, op1;
     enum rtx_code code;
     int unsignedp;
     enum machine_mode mode;
     rtx size;
{
  enum rtx_code ucode;
  rtx tem;

  /* If one operand is constant, make it the second one.  Only do this
     if the other operand is not constant as well.  */

  if (swap_commutative_operands_p (op0, op1))
    {
      tem = op0;
      op0 = op1;
      op1 = tem;
      code = swap_condition (code);
    }

  if (flag_force_mem)
    {
      op0 = force_not_mem (op0);
      op1 = force_not_mem (op1);
    }

  do_pending_stack_adjust ();

  /* If the comparison simplifies at compile time, return the folded
     result without emitting any compare insn.  */
  ucode = unsignedp ? unsigned_condition (code) : code;
  if ((tem = simplify_relational_operation (ucode, mode, op0, op1)) != 0)
    return tem;

#if 0
  /* There's no need to do this now that combine.c can eliminate lots of
     sign extensions.  This can be less efficient in certain cases on other
     machines.  */

  /* If this is a signed equality comparison, we can do it as an
     unsigned comparison since zero-extension is cheaper than sign
     extension and comparisons with zero are done as unsigned.  This is
     the case even on machines that can do fast sign extension, since
     zero-extension is easier to combine with other operations than
     sign-extension is.  If we are comparing against a constant, we must
     convert it to what it would look like unsigned.  */
  if ((code == EQ || code == NE) && ! unsignedp
      && GET_MODE_BITSIZE (GET_MODE (op0)) <= HOST_BITS_PER_WIDE_INT)
    {
      if (GET_CODE (op1) == CONST_INT
          && (INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0))) != INTVAL (op1))
        op1 = GEN_INT (INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0)));
      unsignedp = 1;
    }
#endif

  emit_cmp_insn (op0, op1, code, size, mode, unsignedp);

  /* On cc0 targets the returned comparison tests the condition-code
     register; otherwise it is built directly on the operands.  */
#if HAVE_cc0
  return gen_rtx_fmt_ee (code, VOIDmode, cc0_rtx, const0_rtx);
#else
  return gen_rtx_fmt_ee (code, VOIDmode, op0, op1);
#endif
}
831
832/* Like do_compare_and_jump but expects the values to compare as two rtx's.
833 The decision as to signed or unsigned comparison must be made by the caller.
834
835 If MODE is BLKmode, SIZE is an RTX giving the size of the objects being
836 compared. */
837
void
do_compare_rtx_and_jump (op0, op1, code, unsignedp, mode, size,
                         if_false_label, if_true_label)
     rtx op0, op1;
     enum rtx_code code;
     int unsignedp;
     enum machine_mode mode;
     rtx size;
     rtx if_false_label, if_true_label;
{
  enum rtx_code ucode;
  rtx tem;
  int dummy_true_label = 0;

  /* Reverse the comparison if that is safe and we want to jump if it is
     false.  NOTE(review): reversal is skipped for float modes --
     presumably because the reversed condition is not equivalent in the
     presence of NaNs; confirm.  */
  if (! if_true_label && ! FLOAT_MODE_P (mode))
    {
      if_true_label = if_false_label;
      if_false_label = 0;
      code = reverse_condition (code);
    }

  /* If one operand is constant, make it the second one.  Only do this
     if the other operand is not constant as well.  */

  if (swap_commutative_operands_p (op0, op1))
    {
      tem = op0;
      op0 = op1;
      op1 = tem;
      code = swap_condition (code);
    }

  if (flag_force_mem)
    {
      op0 = force_not_mem (op0);
      op1 = force_not_mem (op1);
    }

  do_pending_stack_adjust ();

  /* If the comparison folds to a constant, emit an unconditional jump
     (or nothing, when the corresponding label is null) and return.  */
  ucode = unsignedp ? unsigned_condition (code) : code;
  if ((tem = simplify_relational_operation (ucode, mode, op0, op1)) != 0)
    {
      if (tem == const_true_rtx)
        {
          if (if_true_label)
            emit_jump (if_true_label);
        }
      else
        {
          if (if_false_label)
            emit_jump (if_false_label);
        }
      return;
    }

#if 0
  /* There's no need to do this now that combine.c can eliminate lots of
     sign extensions.  This can be less efficient in certain cases on other
     machines.  */

  /* If this is a signed equality comparison, we can do it as an
     unsigned comparison since zero-extension is cheaper than sign
     extension and comparisons with zero are done as unsigned.  This is
     the case even on machines that can do fast sign extension, since
     zero-extension is easier to combine with other operations than
     sign-extension is.  If we are comparing against a constant, we must
     convert it to what it would look like unsigned.  */
  if ((code == EQ || code == NE) && ! unsignedp
      && GET_MODE_BITSIZE (GET_MODE (op0)) <= HOST_BITS_PER_WIDE_INT)
    {
      if (GET_CODE (op1) == CONST_INT
          && (INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0))) != INTVAL (op1))
        op1 = GEN_INT (INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0)));
      unsignedp = 1;
    }
#endif

  /* emit_cmp_and_jump_insns needs a real target label; when the caller
     supplied none, synthesize one and place it right after the branch
     so a "true" outcome simply falls through.  */
  if (! if_true_label)
    {
      dummy_true_label = 1;
      if_true_label = gen_label_rtx ();
    }

  emit_cmp_and_jump_insns (op0, op1, code, size, mode, unsignedp,
                           if_true_label);

  if (if_false_label)
    emit_jump (if_false_label);
  if (dummy_true_label)
    emit_label (if_true_label);
}
932
933/* Generate code for a comparison expression EXP (including code to compute
934 the values to be compared) and a conditional jump to IF_FALSE_LABEL and/or
935 IF_TRUE_LABEL. One of the labels can be NULL_RTX, in which case the
936 generated code will drop through.
937 SIGNED_CODE should be the rtx operation for this comparison for
938 signed data; UNSIGNED_CODE, likewise for use if data is unsigned.
939
940 We force a stack adjustment unless there are currently
941 things pushed on the stack that aren't yet used. */
942
static void
do_compare_and_jump (exp, signed_code, unsigned_code, if_false_label,
                     if_true_label)
     tree exp;
     enum rtx_code signed_code, unsigned_code;
     rtx if_false_label, if_true_label;
{
  rtx op0, op1;
  tree type;
  enum machine_mode mode;
  int unsignedp;
  enum rtx_code code;

  /* Don't crash if the comparison was erroneous.  */
  op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
  if (TREE_CODE (TREE_OPERAND (exp, 0)) == ERROR_MARK)
    return;

  op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
  if (TREE_CODE (TREE_OPERAND (exp, 1)) == ERROR_MARK)
    return;

  /* The comparison mode and signedness default to those of the first
     operand's type.  */
  type = TREE_TYPE (TREE_OPERAND (exp, 0));
  mode = TYPE_MODE (type);
  if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST
      && (TREE_CODE (TREE_OPERAND (exp, 1)) != INTEGER_CST
          || (GET_MODE_BITSIZE (mode)
              > GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp,
                                                                      1)))))))
    {
      /* op0 might have been replaced by promoted constant, in which
         case the type of second argument should be used.  */
      type = TREE_TYPE (TREE_OPERAND (exp, 1));
      mode = TYPE_MODE (type);
    }
  /* Pick the signed or unsigned rtx comparison the caller provided,
     according to the operand type's signedness.  */
  unsignedp = TREE_UNSIGNED (type);
  code = unsignedp ? unsigned_code : signed_code;

#ifdef HAVE_canonicalize_funcptr_for_compare
  /* If function pointers need to be "canonicalized" before they can
     be reliably compared, then canonicalize them.  */
  if (HAVE_canonicalize_funcptr_for_compare
      && TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == POINTER_TYPE
      && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))))
          == FUNCTION_TYPE))
    {
      rtx new_op0 = gen_reg_rtx (mode);

      emit_insn (gen_canonicalize_funcptr_for_compare (new_op0, op0));
      op0 = new_op0;
    }

  if (HAVE_canonicalize_funcptr_for_compare
      && TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 1))) == POINTER_TYPE
      && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 1))))
          == FUNCTION_TYPE))
    {
      rtx new_op1 = gen_reg_rtx (mode);

      emit_insn (gen_canonicalize_funcptr_for_compare (new_op1, op1));
      op1 = new_op1;
    }
#endif

  /* Do any postincrements in the expression that was tested.  */
  emit_queue ();

  /* BLKmode comparisons also need the size of the compared objects.  */
  do_compare_rtx_and_jump (op0, op1, code, unsignedp, mode,
                           ((mode == BLKmode)
                            ? expr_size (TREE_OPERAND (exp, 0)) : NULL_RTX),
                           if_false_label, if_true_label);
}