/* Analyze RTL for GNU compiler.
   Copyright (C) 1987, 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998,
   1999, 2000, 2001, 2002, 2003, 2004, 2005, 2006 Free Software
   Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 2, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING.  If not, write to the Free
Software Foundation, 51 Franklin Street, Fifth Floor, Boston, MA
02110-1301, USA.  */


#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "toplev.h"
#include "rtl.h"
#include "hard-reg-set.h"
#include "insn-config.h"
#include "recog.h"
#include "target.h"
#include "output.h"
#include "tm_p.h"
#include "flags.h"
#include "real.h"
#include "regs.h"
#include "function.h"

/* Forward declarations */
static void set_of_1 (rtx, rtx, void *);
static bool covers_regno_p (rtx, unsigned int);
static bool covers_regno_no_parallel_p (rtx, unsigned int);
static int rtx_referenced_p_1 (rtx *, void *);
static int computed_jump_p_1 (rtx);
static void parms_set (rtx, rtx, void *);

static unsigned HOST_WIDE_INT cached_nonzero_bits (rtx, enum machine_mode,
                                                   rtx, enum machine_mode,
                                                   unsigned HOST_WIDE_INT);
static unsigned HOST_WIDE_INT nonzero_bits1 (rtx, enum machine_mode, rtx,
                                             enum machine_mode,
                                             unsigned HOST_WIDE_INT);
static unsigned int cached_num_sign_bit_copies (rtx, enum machine_mode, rtx,
                                                enum machine_mode,
                                                unsigned int);
static unsigned int num_sign_bit_copies1 (rtx, enum machine_mode, rtx,
                                          enum machine_mode, unsigned int);

/* Offset of the first 'e', 'E' or 'V' operand for each rtx code, or
   -1 if a code has no such operand.  */
static int non_rtx_starting_operands[NUM_RTX_CODE];

/* Bit flags that specify the machine subtype we are compiling for.
   Bits are tested using macros TARGET_... defined in the tm.h file
   and set by `-m...' switches.  Must be defined in rtlanal.c.  */

int target_flags;

/* Truncation narrows the mode from SOURCE mode to DESTINATION mode.
   If TARGET_MODE_REP_EXTENDED (DESTINATION, DESTINATION_REP) is
   SIGN_EXTEND then while narrowing we also have to enforce the
   representation and sign-extend the value to mode DESTINATION_REP.

   If the value is already sign-extended to DESTINATION_REP mode we
   can just switch to DESTINATION mode on it.  For each pair of
   integral modes SOURCE and DESTINATION, when truncating from SOURCE
   to DESTINATION, NUM_SIGN_BIT_COPIES_IN_REP[SOURCE][DESTINATION]
   contains the number of high-order bits in SOURCE that have to be
   copies of the sign-bit so that we can do this mode-switch to
   DESTINATION.  */

static unsigned int
num_sign_bit_copies_in_rep[MAX_MODE_INT + 1][MAX_MODE_INT + 1];
\f
/* Return 1 if the value of X is unstable
   (would be different at a different point in the program).
   The frame pointer, arg pointer, etc. are considered stable
   (within one function) and so is anything marked `unchanging'.  */

int
rtx_unstable_p (rtx x)
{
  RTX_CODE code = GET_CODE (x);
  int i;
  const char *fmt;

  switch (code)
    {
    case MEM:
      return !MEM_READONLY_P (x) || rtx_unstable_p (XEXP (x, 0));

    case CONST:
    case CONST_INT:
    case CONST_DOUBLE:
    case CONST_VECTOR:
    case SYMBOL_REF:
    case LABEL_REF:
      return 0;

    case REG:
      /* As in rtx_varies_p, we have to use the actual rtx, not reg number.  */
      if (x == frame_pointer_rtx || x == hard_frame_pointer_rtx
          /* The arg pointer varies if it is not a fixed register.  */
          || (x == arg_pointer_rtx && fixed_regs[ARG_POINTER_REGNUM]))
        return 0;
#ifndef PIC_OFFSET_TABLE_REG_CALL_CLOBBERED
      /* ??? When call-clobbered, the value is stable modulo the restore
         that must happen after a call.  This currently screws up local-alloc
         into believing that the restore is not needed.  */
      if (x == pic_offset_table_rtx)
        return 0;
#endif
      return 1;

    case ASM_OPERANDS:
      if (MEM_VOLATILE_P (x))
        return 1;

      /* Fall through.  */

    default:
      break;
    }

  fmt = GET_RTX_FORMAT (code);
  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    if (fmt[i] == 'e')
      {
        if (rtx_unstable_p (XEXP (x, i)))
          return 1;
      }
    else if (fmt[i] == 'E')
      {
        int j;
        for (j = 0; j < XVECLEN (x, i); j++)
          if (rtx_unstable_p (XVECEXP (x, i, j)))
            return 1;
      }

  return 0;
}

/* Return 1 if X has a value that can vary even between two
   executions of the program.  0 means X can be compared reliably
   against certain constants or near-constants.
   FOR_ALIAS is nonzero if we are called from alias analysis; if it is
   zero, we are slightly more conservative.
   The frame pointer and the arg pointer are considered constant.  */

int
rtx_varies_p (rtx x, int for_alias)
{
  RTX_CODE code;
  int i;
  const char *fmt;

  if (!x)
    return 0;

  code = GET_CODE (x);
  switch (code)
    {
    case MEM:
      return !MEM_READONLY_P (x) || rtx_varies_p (XEXP (x, 0), for_alias);

    case CONST:
    case CONST_INT:
    case CONST_DOUBLE:
    case CONST_VECTOR:
    case SYMBOL_REF:
    case LABEL_REF:
      return 0;

    case REG:
      /* Note that we have to test for the actual rtx used for the frame
         and arg pointers and not just the register number in case we have
         eliminated the frame and/or arg pointer and are using it
         for pseudos.  */
      if (x == frame_pointer_rtx || x == hard_frame_pointer_rtx
          /* The arg pointer varies if it is not a fixed register.  */
          || (x == arg_pointer_rtx && fixed_regs[ARG_POINTER_REGNUM]))
        return 0;
      if (x == pic_offset_table_rtx
#ifdef PIC_OFFSET_TABLE_REG_CALL_CLOBBERED
          /* ??? When call-clobbered, the value is stable modulo the restore
             that must happen after a call.  This currently screws up
             local-alloc into believing that the restore is not needed, so we
             must return 0 only if we are called from alias analysis.  */
          && for_alias
#endif
          )
        return 0;
      return 1;

    case LO_SUM:
      /* The operand 0 of a LO_SUM is considered constant
         (in fact it is related specifically to operand 1)
         during alias analysis.  */
      return (! for_alias && rtx_varies_p (XEXP (x, 0), for_alias))
             || rtx_varies_p (XEXP (x, 1), for_alias);

    case ASM_OPERANDS:
      if (MEM_VOLATILE_P (x))
        return 1;

      /* Fall through.  */

    default:
      break;
    }

  fmt = GET_RTX_FORMAT (code);
  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    if (fmt[i] == 'e')
      {
        if (rtx_varies_p (XEXP (x, i), for_alias))
          return 1;
      }
    else if (fmt[i] == 'E')
      {
        int j;
        for (j = 0; j < XVECLEN (x, i); j++)
          if (rtx_varies_p (XVECEXP (x, i, j), for_alias))
            return 1;
      }

  return 0;
}
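
/* As an illustrative note (the RTL forms below are hypothetical examples,
   not taken from any particular target): a pseudo such as (reg:SI 100) is
   both unstable and varying, since nothing ties its value down;
   (plus:SI frame_pointer_rtx (const_int 8)) is stable and non-varying,
   because the frame pointer is treated as constant within a function; and
   (mem:SI (symbol_ref "x")) is unstable unless the MEM is marked
   MEM_READONLY_P.  The two predicates differ mainly in how they treat the
   PIC register and LO_SUM.  */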

/* Return nonzero if the use of X as an address in a MEM can cause a trap.
   MODE is the mode of the MEM (not that of X) and UNALIGNED_MEMS controls
   whether nonzero is returned for unaligned memory accesses on strict
   alignment machines.  */

static int
rtx_addr_can_trap_p_1 (rtx x, enum machine_mode mode, bool unaligned_mems)
{
  enum rtx_code code = GET_CODE (x);

  switch (code)
    {
    case SYMBOL_REF:
      return SYMBOL_REF_WEAK (x);

    case LABEL_REF:
      return 0;

    case REG:
      /* As in rtx_varies_p, we have to use the actual rtx, not reg number.  */
      if (x == frame_pointer_rtx || x == hard_frame_pointer_rtx
          || x == stack_pointer_rtx
          /* The arg pointer varies if it is not a fixed register.  */
          || (x == arg_pointer_rtx && fixed_regs[ARG_POINTER_REGNUM]))
        return 0;
      /* All of the virtual frame registers are stack references.  */
      if (REGNO (x) >= FIRST_VIRTUAL_REGISTER
          && REGNO (x) <= LAST_VIRTUAL_REGISTER)
        return 0;
      return 1;

    case CONST:
      return rtx_addr_can_trap_p_1 (XEXP (x, 0), mode, unaligned_mems);

    case PLUS:
      /* An address is assumed not to trap if:
         - it is an address that can't trap plus a constant integer,
           with the proper remainder modulo the mode size if we are
           considering unaligned memory references.  */
      if (!rtx_addr_can_trap_p_1 (XEXP (x, 0), mode, unaligned_mems)
          && GET_CODE (XEXP (x, 1)) == CONST_INT)
        {
          HOST_WIDE_INT offset;

          if (!STRICT_ALIGNMENT
              || !unaligned_mems
              || GET_MODE_SIZE (mode) == 0)
            return 0;

          offset = INTVAL (XEXP (x, 1));

#ifdef SPARC_STACK_BOUNDARY_HACK
          /* ??? The SPARC port may claim a STACK_BOUNDARY higher than
             the real alignment of %sp.  However, when it does this, the
             alignment of %sp+STACK_POINTER_OFFSET is STACK_BOUNDARY.  */
          if (SPARC_STACK_BOUNDARY_HACK
              && (XEXP (x, 0) == stack_pointer_rtx
                  || XEXP (x, 0) == hard_frame_pointer_rtx))
            offset -= STACK_POINTER_OFFSET;
#endif

          return offset % GET_MODE_SIZE (mode) != 0;
        }

      /* - or it is the pic register plus a constant.  */
      if (XEXP (x, 0) == pic_offset_table_rtx && CONSTANT_P (XEXP (x, 1)))
        return 0;

      return 1;

    case LO_SUM:
    case PRE_MODIFY:
      return rtx_addr_can_trap_p_1 (XEXP (x, 1), mode, unaligned_mems);

    case PRE_DEC:
    case PRE_INC:
    case POST_DEC:
    case POST_INC:
    case POST_MODIFY:
      return rtx_addr_can_trap_p_1 (XEXP (x, 0), mode, unaligned_mems);

    default:
      break;
    }

  /* If it isn't one of the cases above, it can cause a trap.  */
  return 1;
}

/* Return nonzero if the use of X as an address in a MEM can cause a trap.  */

int
rtx_addr_can_trap_p (rtx x)
{
  return rtx_addr_can_trap_p_1 (x, VOIDmode, false);
}
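
/* For instance, given the address (plus (symbol_ref "a") (const_int 3))
   for an SImode MEM (a hypothetical example), rtx_addr_can_trap_p_1
   returns 0 on a non-strict-alignment target provided "a" is not weak,
   but with UNALIGNED_MEMS set on a strict-alignment machine it returns
   nonzero, because 3 is not a multiple of GET_MODE_SIZE (SImode).  */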

/* Return true if X is an address that is known to not be zero.  */

bool
nonzero_address_p (rtx x)
{
  enum rtx_code code = GET_CODE (x);

  switch (code)
    {
    case SYMBOL_REF:
      return !SYMBOL_REF_WEAK (x);

    case LABEL_REF:
      return true;

    case REG:
      /* As in rtx_varies_p, we have to use the actual rtx, not reg number.  */
      if (x == frame_pointer_rtx || x == hard_frame_pointer_rtx
          || x == stack_pointer_rtx
          || (x == arg_pointer_rtx && fixed_regs[ARG_POINTER_REGNUM]))
        return true;
      /* All of the virtual frame registers are stack references.  */
      if (REGNO (x) >= FIRST_VIRTUAL_REGISTER
          && REGNO (x) <= LAST_VIRTUAL_REGISTER)
        return true;
      return false;

    case CONST:
      return nonzero_address_p (XEXP (x, 0));

    case PLUS:
      if (GET_CODE (XEXP (x, 1)) == CONST_INT)
        return nonzero_address_p (XEXP (x, 0));
      /* Handle PIC references.  */
      else if (XEXP (x, 0) == pic_offset_table_rtx
               && CONSTANT_P (XEXP (x, 1)))
        return true;
      return false;

    case PRE_MODIFY:
      /* Similar to the above; allow positive offsets.  Further, since
         auto-inc is only allowed in memories, the register must be a
         pointer.  */
      if (GET_CODE (XEXP (x, 1)) == CONST_INT
          && INTVAL (XEXP (x, 1)) > 0)
        return true;
      return nonzero_address_p (XEXP (x, 0));

    case PRE_INC:
      /* Similarly.  Further, the offset is always positive.  */
      return true;

    case PRE_DEC:
    case POST_DEC:
    case POST_INC:
    case POST_MODIFY:
      return nonzero_address_p (XEXP (x, 0));

    case LO_SUM:
      return nonzero_address_p (XEXP (x, 1));

    default:
      break;
    }

  /* If it isn't one of the cases above, it might be zero.  */
  return false;
}

/* Return 1 if X refers to a memory location whose address
   cannot be compared reliably with constant addresses,
   or if X refers to a BLKmode memory object.
   FOR_ALIAS is nonzero if we are called from alias analysis; if it is
   zero, we are slightly more conservative.  */

int
rtx_addr_varies_p (rtx x, int for_alias)
{
  enum rtx_code code;
  int i;
  const char *fmt;

  if (x == 0)
    return 0;

  code = GET_CODE (x);
  if (code == MEM)
    return GET_MODE (x) == BLKmode || rtx_varies_p (XEXP (x, 0), for_alias);

  fmt = GET_RTX_FORMAT (code);
  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    if (fmt[i] == 'e')
      {
        if (rtx_addr_varies_p (XEXP (x, i), for_alias))
          return 1;
      }
    else if (fmt[i] == 'E')
      {
        int j;
        for (j = 0; j < XVECLEN (x, i); j++)
          if (rtx_addr_varies_p (XVECEXP (x, i, j), for_alias))
            return 1;
      }
  return 0;
}
\f
/* Return the value of the integer term in X, if one is apparent;
   otherwise return 0.
   Only obvious integer terms are detected.
   This is used in cse.c with the `related_value' field.  */

HOST_WIDE_INT
get_integer_term (rtx x)
{
  if (GET_CODE (x) == CONST)
    x = XEXP (x, 0);

  if (GET_CODE (x) == MINUS
      && GET_CODE (XEXP (x, 1)) == CONST_INT)
    return - INTVAL (XEXP (x, 1));
  if (GET_CODE (x) == PLUS
      && GET_CODE (XEXP (x, 1)) == CONST_INT)
    return INTVAL (XEXP (x, 1));
  return 0;
}

/* If X is a constant, return the value sans apparent integer term;
   otherwise return 0.
   Only obvious integer terms are detected.  */

rtx
get_related_value (rtx x)
{
  if (GET_CODE (x) != CONST)
    return 0;
  x = XEXP (x, 0);
  if (GET_CODE (x) == PLUS
      && GET_CODE (XEXP (x, 1)) == CONST_INT)
    return XEXP (x, 0);
  else if (GET_CODE (x) == MINUS
           && GET_CODE (XEXP (x, 1)) == CONST_INT)
    return XEXP (x, 0);
  return 0;
}
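
/* For example (hypothetical RTL, shown only to illustrate the pair of
   functions above): given

       x = (const (plus (symbol_ref "buf") (const_int 12)))

   get_integer_term (x) returns 12 and get_related_value (x) returns
   (symbol_ref "buf"); for (const (minus (symbol_ref "buf") (const_int 4)))
   the term is -4 with the same related value.  */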
\f
/* Return the number of places FIND appears within X.  If COUNT_DEST is
   zero, we do not count occurrences inside the destination of a SET.  */

int
count_occurrences (rtx x, rtx find, int count_dest)
{
  int i, j;
  enum rtx_code code;
  const char *format_ptr;
  int count;

  if (x == find)
    return 1;

  code = GET_CODE (x);

  switch (code)
    {
    case REG:
    case CONST_INT:
    case CONST_DOUBLE:
    case CONST_VECTOR:
    case SYMBOL_REF:
    case CODE_LABEL:
    case PC:
    case CC0:
      return 0;

    case MEM:
      if (MEM_P (find) && rtx_equal_p (x, find))
        return 1;
      break;

    case SET:
      if (SET_DEST (x) == find && ! count_dest)
        return count_occurrences (SET_SRC (x), find, count_dest);
      break;

    default:
      break;
    }

  format_ptr = GET_RTX_FORMAT (code);
  count = 0;

  for (i = 0; i < GET_RTX_LENGTH (code); i++)
    {
      switch (*format_ptr++)
        {
        case 'e':
          count += count_occurrences (XEXP (x, i), find, count_dest);
          break;

        case 'E':
          for (j = 0; j < XVECLEN (x, i); j++)
            count += count_occurrences (XVECEXP (x, i, j), find, count_dest);
          break;
        }
    }
  return count;
}
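
/* As an illustrative sketch (the names `insn' and `my_reg' are
   hypothetical, not part of this file): counting how many times a pseudo
   appears in an insn's sources without counting the store itself could
   look like

       int n = count_occurrences (PATTERN (insn), my_reg, 0);

   For a pattern such as (set (reg 100) (plus (reg 100) (reg 100))) this
   would yield 2 for the shared (reg 100) rtx, since the SET destination
   is skipped when COUNT_DEST is zero.  */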
\f
/* Nonzero if register REG appears somewhere within IN.
   Also works if REG is not a register; in this case it checks
   for a subexpression of IN that is Lisp "equal" to REG.  */

int
reg_mentioned_p (rtx reg, rtx in)
{
  const char *fmt;
  int i;
  enum rtx_code code;

  if (in == 0)
    return 0;

  if (reg == in)
    return 1;

  if (GET_CODE (in) == LABEL_REF)
    return reg == XEXP (in, 0);

  code = GET_CODE (in);

  switch (code)
    {
      /* Compare registers by number.  */
    case REG:
      return REG_P (reg) && REGNO (in) == REGNO (reg);

      /* These codes have no constituent expressions
         and are unique.  */
    case SCRATCH:
    case CC0:
    case PC:
      return 0;

    case CONST_INT:
    case CONST_VECTOR:
    case CONST_DOUBLE:
      /* These are kept unique for a given value.  */
      return 0;

    default:
      break;
    }

  if (GET_CODE (reg) == code && rtx_equal_p (reg, in))
    return 1;

  fmt = GET_RTX_FORMAT (code);

  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    {
      if (fmt[i] == 'E')
        {
          int j;
          for (j = XVECLEN (in, i) - 1; j >= 0; j--)
            if (reg_mentioned_p (reg, XVECEXP (in, i, j)))
              return 1;
        }
      else if (fmt[i] == 'e'
               && reg_mentioned_p (reg, XEXP (in, i)))
        return 1;
    }
  return 0;
}
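
/* Note that, because each pseudo register is a shared rtx object, the REG
   case above usually reduces to the pointer test at the top; the REGNO
   comparison matters mainly for hard registers.  For instance (a
   hypothetical example), reg_mentioned_p on two distinct (reg:SI 1) rtxes
   still returns 1, because they compare equal by register number.  */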
\f
/* Return 1 if in between BEG and END, exclusive of BEG and END, there is
   no CODE_LABEL insn.  */

int
no_labels_between_p (rtx beg, rtx end)
{
  rtx p;
  if (beg == end)
    return 0;
  for (p = NEXT_INSN (beg); p != end; p = NEXT_INSN (p))
    if (LABEL_P (p))
      return 0;
  return 1;
}

/* Nonzero if register REG is used in an insn between
   FROM_INSN and TO_INSN (exclusive of those two).  */

int
reg_used_between_p (rtx reg, rtx from_insn, rtx to_insn)
{
  rtx insn;

  if (from_insn == to_insn)
    return 0;

  for (insn = NEXT_INSN (from_insn); insn != to_insn; insn = NEXT_INSN (insn))
    if (INSN_P (insn)
        && (reg_overlap_mentioned_p (reg, PATTERN (insn))
            || (CALL_P (insn) && find_reg_fusage (insn, USE, reg))))
      return 1;
  return 0;
}
\f
/* Nonzero if the old value of X, a register, is referenced in BODY.  If X
   is entirely replaced by a new value and the only use is as a SET_DEST,
   we do not consider it a reference.  */

int
reg_referenced_p (rtx x, rtx body)
{
  int i;

  switch (GET_CODE (body))
    {
    case SET:
      if (reg_overlap_mentioned_p (x, SET_SRC (body)))
        return 1;

      /* If the destination is anything other than CC0, PC, a REG or a SUBREG
         of a REG that occupies all of the REG, the insn references X if
         it is mentioned in the destination.  */
      if (GET_CODE (SET_DEST (body)) != CC0
          && GET_CODE (SET_DEST (body)) != PC
          && !REG_P (SET_DEST (body))
          && ! (GET_CODE (SET_DEST (body)) == SUBREG
                && REG_P (SUBREG_REG (SET_DEST (body)))
                && (((GET_MODE_SIZE (GET_MODE (SUBREG_REG (SET_DEST (body))))
                      + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD)
                    == ((GET_MODE_SIZE (GET_MODE (SET_DEST (body)))
                         + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD)))
          && reg_overlap_mentioned_p (x, SET_DEST (body)))
        return 1;
      return 0;

    case ASM_OPERANDS:
      for (i = ASM_OPERANDS_INPUT_LENGTH (body) - 1; i >= 0; i--)
        if (reg_overlap_mentioned_p (x, ASM_OPERANDS_INPUT (body, i)))
          return 1;
      return 0;

    case CALL:
    case USE:
    case IF_THEN_ELSE:
      return reg_overlap_mentioned_p (x, body);

    case TRAP_IF:
      return reg_overlap_mentioned_p (x, TRAP_CONDITION (body));

    case PREFETCH:
      return reg_overlap_mentioned_p (x, XEXP (body, 0));

    case UNSPEC:
    case UNSPEC_VOLATILE:
      for (i = XVECLEN (body, 0) - 1; i >= 0; i--)
        if (reg_overlap_mentioned_p (x, XVECEXP (body, 0, i)))
          return 1;
      return 0;

    case PARALLEL:
      for (i = XVECLEN (body, 0) - 1; i >= 0; i--)
        if (reg_referenced_p (x, XVECEXP (body, 0, i)))
          return 1;
      return 0;

    case CLOBBER:
      if (MEM_P (XEXP (body, 0)))
        if (reg_overlap_mentioned_p (x, XEXP (XEXP (body, 0), 0)))
          return 1;
      return 0;

    case COND_EXEC:
      if (reg_overlap_mentioned_p (x, COND_EXEC_TEST (body)))
        return 1;
      return reg_referenced_p (x, COND_EXEC_CODE (body));

    default:
      return 0;
    }
}
\f
/* Nonzero if register REG is set or clobbered in an insn between
   FROM_INSN and TO_INSN (exclusive of those two).  */

int
reg_set_between_p (rtx reg, rtx from_insn, rtx to_insn)
{
  rtx insn;

  if (from_insn == to_insn)
    return 0;

  for (insn = NEXT_INSN (from_insn); insn != to_insn; insn = NEXT_INSN (insn))
    if (INSN_P (insn) && reg_set_p (reg, insn))
      return 1;
  return 0;
}

/* Internals of reg_set_between_p.  */
int
reg_set_p (rtx reg, rtx insn)
{
  /* We can be passed an insn or part of one.  If we are passed an insn,
     check if a side-effect of the insn clobbers REG.  */
  if (INSN_P (insn)
      && (FIND_REG_INC_NOTE (insn, reg)
          || (CALL_P (insn)
              && ((REG_P (reg)
                   && REGNO (reg) < FIRST_PSEUDO_REGISTER
                   && TEST_HARD_REG_BIT (regs_invalidated_by_call,
                                         REGNO (reg)))
                  || MEM_P (reg)
                  || find_reg_fusage (insn, CLOBBER, reg)))))
    return 1;

  return set_of (reg, insn) != NULL_RTX;
}
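
/* A short usage sketch (illustrative only; `head', `tail' and `r' are
   hypothetical locals, not names from this file):

       if (!reg_set_between_p (r, head, tail)
           && !reg_used_between_p (r, head, tail))
         ... r is neither set nor used strictly between head and tail ...

   Both walks exclude the endpoint insns themselves.  */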

/* Similar to reg_set_between_p, but check all registers in X.  Return 0
   only if none of them are modified between START and END.  Return 1 if
   X contains a MEM; this routine does use memory aliasing.  */

int
modified_between_p (rtx x, rtx start, rtx end)
{
  enum rtx_code code = GET_CODE (x);
  const char *fmt;
  int i, j;
  rtx insn;

  if (start == end)
    return 0;

  switch (code)
    {
    case CONST_INT:
    case CONST_DOUBLE:
    case CONST_VECTOR:
    case CONST:
    case SYMBOL_REF:
    case LABEL_REF:
      return 0;

    case PC:
    case CC0:
      return 1;

    case MEM:
      if (modified_between_p (XEXP (x, 0), start, end))
        return 1;
      if (MEM_READONLY_P (x))
        return 0;
      for (insn = NEXT_INSN (start); insn != end; insn = NEXT_INSN (insn))
        if (memory_modified_in_insn_p (x, insn))
          return 1;
      return 0;
      break;

    case REG:
      return reg_set_between_p (x, start, end);

    default:
      break;
    }

  fmt = GET_RTX_FORMAT (code);
  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    {
      if (fmt[i] == 'e' && modified_between_p (XEXP (x, i), start, end))
        return 1;

      else if (fmt[i] == 'E')
        for (j = XVECLEN (x, i) - 1; j >= 0; j--)
          if (modified_between_p (XVECEXP (x, i, j), start, end))
            return 1;
    }

  return 0;
}

/* Similar to reg_set_p, but check all registers in X.  Return 0 only if none
   of them are modified in INSN.  Return 1 if X contains a MEM; this routine
   does use memory aliasing.  */

int
modified_in_p (rtx x, rtx insn)
{
  enum rtx_code code = GET_CODE (x);
  const char *fmt;
  int i, j;

  switch (code)
    {
    case CONST_INT:
    case CONST_DOUBLE:
    case CONST_VECTOR:
    case CONST:
    case SYMBOL_REF:
    case LABEL_REF:
      return 0;

    case PC:
    case CC0:
      return 1;

    case MEM:
      if (modified_in_p (XEXP (x, 0), insn))
        return 1;
      if (MEM_READONLY_P (x))
        return 0;
      if (memory_modified_in_insn_p (x, insn))
        return 1;
      return 0;
      break;

    case REG:
      return reg_set_p (x, insn);

    default:
      break;
    }

  fmt = GET_RTX_FORMAT (code);
  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    {
      if (fmt[i] == 'e' && modified_in_p (XEXP (x, i), insn))
        return 1;

      else if (fmt[i] == 'E')
        for (j = XVECLEN (x, i) - 1; j >= 0; j--)
          if (modified_in_p (XVECEXP (x, i, j), insn))
            return 1;
    }

  return 0;
}
\f
/* Helper function for set_of.  */
struct set_of_data
  {
    rtx found;
    rtx pat;
  };

static void
set_of_1 (rtx x, rtx pat, void *data1)
{
  struct set_of_data *data = (struct set_of_data *) (data1);
  if (rtx_equal_p (x, data->pat)
      || (!MEM_P (x) && reg_overlap_mentioned_p (data->pat, x)))
    data->found = pat;
}

/* Given an INSN, return a SET or CLOBBER expression that does modify PAT
   (either directly or via STRICT_LOW_PART and similar modifiers).  */
rtx
set_of (rtx pat, rtx insn)
{
  struct set_of_data data;
  data.found = NULL_RTX;
  data.pat = pat;
  note_stores (INSN_P (insn) ? PATTERN (insn) : insn, set_of_1, &data);
  return data.found;
}
\f
/* Given an INSN, return a SET expression if this insn has only a single SET.
   It may also have CLOBBERs, USEs, or SETs whose output
   will not be used, which we ignore.  */

rtx
single_set_2 (rtx insn, rtx pat)
{
  rtx set = NULL;
  int set_verified = 1;
  int i;

  if (GET_CODE (pat) == PARALLEL)
    {
      for (i = 0; i < XVECLEN (pat, 0); i++)
        {
          rtx sub = XVECEXP (pat, 0, i);
          switch (GET_CODE (sub))
            {
            case USE:
            case CLOBBER:
              break;

            case SET:
              /* We can consider insns having multiple sets, where all
                 but one are dead, as single set insns.  In the common
                 case only a single set is present in the pattern, so we
                 want to avoid checking for REG_UNUSED notes unless
                 necessary.

                 When we reach a set for the first time, we just expect
                 it to be the single set we are looking for; only when
                 more sets are found in the insn do we check them.  */
              if (!set_verified)
                {
                  if (find_reg_note (insn, REG_UNUSED, SET_DEST (set))
                      && !side_effects_p (set))
                    set = NULL;
                  else
                    set_verified = 1;
                }
              if (!set)
                set = sub, set_verified = 0;
              else if (!find_reg_note (insn, REG_UNUSED, SET_DEST (sub))
                       || side_effects_p (sub))
                return NULL_RTX;
              break;

            default:
              return NULL_RTX;
            }
        }
    }
  return set;
}
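
/* Illustrative example (a hypothetical pattern, not from this file): for

       (parallel [(set (reg 100) (plus (reg 101) (reg 102)))
                  (clobber (reg:CC flags))])

   single_set_2 returns the inner SET, since the CLOBBER is ignored;
   adding a second live SET (one with no REG_UNUSED note) to the vector
   would make it return NULL_RTX.  */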

/* Given an INSN, return nonzero if it has more than one SET, else return
   zero.  */

int
multiple_sets (rtx insn)
{
  int found;
  int i;

  /* INSN must be an insn.  */
  if (! INSN_P (insn))
    return 0;

  /* Only a PARALLEL can have multiple SETs.  */
  if (GET_CODE (PATTERN (insn)) == PARALLEL)
    {
      for (i = 0, found = 0; i < XVECLEN (PATTERN (insn), 0); i++)
        if (GET_CODE (XVECEXP (PATTERN (insn), 0, i)) == SET)
          {
            /* If we have already found a SET, then return now.  */
            if (found)
              return 1;
            else
              found = 1;
          }
    }

  /* Either zero or one SET.  */
  return 0;
}
\f
/* Return nonzero if the destination of SET equals the source
   and there are no side effects.  */

int
set_noop_p (rtx set)
{
  rtx src = SET_SRC (set);
  rtx dst = SET_DEST (set);

  if (dst == pc_rtx && src == pc_rtx)
    return 1;

  if (MEM_P (dst) && MEM_P (src))
    return rtx_equal_p (dst, src) && !side_effects_p (dst);

  if (GET_CODE (dst) == ZERO_EXTRACT)
    return rtx_equal_p (XEXP (dst, 0), src)
           && ! BYTES_BIG_ENDIAN && XEXP (dst, 2) == const0_rtx
           && !side_effects_p (src);

  if (GET_CODE (dst) == STRICT_LOW_PART)
    dst = XEXP (dst, 0);

  if (GET_CODE (src) == SUBREG && GET_CODE (dst) == SUBREG)
    {
      if (SUBREG_BYTE (src) != SUBREG_BYTE (dst))
        return 0;
      src = SUBREG_REG (src);
      dst = SUBREG_REG (dst);
    }

  return (REG_P (src) && REG_P (dst)
          && REGNO (src) == REGNO (dst));
}
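
/* For instance (a made-up example), (set (reg:SI 100) (reg:SI 100)) is a
   no-op by the final REGNO test, and so is the same move wrapped in
   matching SUBREGs; but (set (mem:SI (reg 100)) (mem:SI (reg 100))) is a
   no-op only when the MEM has no side effects such as volatility or an
   auto-increment address.  */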
\f
/* Return nonzero if an insn consists only of SETs, each of which only sets a
   value to itself.  */

int
noop_move_p (rtx insn)
{
  rtx pat = PATTERN (insn);

  if (INSN_CODE (insn) == NOOP_MOVE_INSN_CODE)
    return 1;

  /* Insns carrying these notes are useful later on.  */
  if (find_reg_note (insn, REG_EQUAL, NULL_RTX))
    return 0;

  /* For now treat an insn with a REG_RETVAL note as a
     special insn which should not be considered a no-op.  */
  if (find_reg_note (insn, REG_RETVAL, NULL_RTX))
    return 0;

  if (GET_CODE (pat) == SET && set_noop_p (pat))
    return 1;

  if (GET_CODE (pat) == PARALLEL)
    {
      int i;
      /* If nothing but SETs of registers to themselves,
         this insn can also be deleted.  */
      for (i = 0; i < XVECLEN (pat, 0); i++)
        {
          rtx tem = XVECEXP (pat, 0, i);

          if (GET_CODE (tem) == USE
              || GET_CODE (tem) == CLOBBER)
            continue;

          if (GET_CODE (tem) != SET || ! set_noop_p (tem))
            return 0;
        }

      return 1;
    }
  return 0;
}
\f

/* Return the last thing that X was assigned from before *PINSN.  If VALID_TO
   is not NULL_RTX then verify that the object is not modified up to VALID_TO.
   If the object was modified, if we hit a partial assignment to X, or hit a
   CODE_LABEL first, return X.  If we found an assignment, update *PINSN to
   point to it.  ALLOW_HWREG is set to 1 if hardware registers are allowed to
   be the src.  */

rtx
find_last_value (rtx x, rtx *pinsn, rtx valid_to, int allow_hwreg)
{
  rtx p;

  for (p = PREV_INSN (*pinsn); p && !LABEL_P (p);
       p = PREV_INSN (p))
    if (INSN_P (p))
      {
        rtx set = single_set (p);
        rtx note = find_reg_note (p, REG_EQUAL, NULL_RTX);

        if (set && rtx_equal_p (x, SET_DEST (set)))
          {
            rtx src = SET_SRC (set);

            if (note && GET_CODE (XEXP (note, 0)) != EXPR_LIST)
              src = XEXP (note, 0);

            if ((valid_to == NULL_RTX
                 || ! modified_between_p (src, PREV_INSN (p), valid_to))
                /* Reject hard registers because we don't usually want
                   to use them; we'd rather use a pseudo.  */
                && (! (REG_P (src)
                       && REGNO (src) < FIRST_PSEUDO_REGISTER) || allow_hwreg))
              {
                *pinsn = p;
                return src;
              }
          }

        /* If set in non-simple way, we don't have a value.  */
        if (reg_set_p (x, p))
          break;
      }

  return x;
}
\f
/* Return nonzero if register in range [REGNO, ENDREGNO)
   appears either explicitly or implicitly in X
   other than being stored into.

   References contained within the substructure at LOC do not count.
   LOC may be zero, meaning don't ignore anything.  */

int
refers_to_regno_p (unsigned int regno, unsigned int endregno, rtx x,
                   rtx *loc)
{
  int i;
  unsigned int x_regno;
  RTX_CODE code;
  const char *fmt;

 repeat:
  /* The contents of a REG_NONNEG note is always zero, so we must come here
     upon repeat in case the last REG_NOTE is a REG_NONNEG note.  */
  if (x == 0)
    return 0;

  code = GET_CODE (x);

  switch (code)
    {
    case REG:
      x_regno = REGNO (x);

      /* If we are modifying the stack, frame, or argument pointer, it will
         clobber a virtual register.  In fact, we could be more precise,
         but it isn't worth it.  */
      if ((x_regno == STACK_POINTER_REGNUM
#if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
           || x_regno == ARG_POINTER_REGNUM
#endif
           || x_regno == FRAME_POINTER_REGNUM)
          && regno >= FIRST_VIRTUAL_REGISTER && regno <= LAST_VIRTUAL_REGISTER)
        return 1;

      return (endregno > x_regno
              && regno < x_regno + (x_regno < FIRST_PSEUDO_REGISTER
                                    ? hard_regno_nregs[x_regno][GET_MODE (x)]
                                    : 1));

    case SUBREG:
      /* If this is a SUBREG of a hard reg, we can see exactly which
         registers are being modified.  Otherwise, handle normally.  */
      if (REG_P (SUBREG_REG (x))
          && REGNO (SUBREG_REG (x)) < FIRST_PSEUDO_REGISTER)
        {
          unsigned int inner_regno = subreg_regno (x);
          unsigned int inner_endregno
            = inner_regno + (inner_regno < FIRST_PSEUDO_REGISTER
                             ? hard_regno_nregs[inner_regno][GET_MODE (x)] : 1);

          return endregno > inner_regno && regno < inner_endregno;
        }
      break;

    case CLOBBER:
    case SET:
      if (&SET_DEST (x) != loc
          /* Note setting a SUBREG counts as referring to the REG it is in for
             a pseudo but not for hard registers since we can
             treat each word individually.  */
          && ((GET_CODE (SET_DEST (x)) == SUBREG
               && loc != &SUBREG_REG (SET_DEST (x))
               && REG_P (SUBREG_REG (SET_DEST (x)))
               && REGNO (SUBREG_REG (SET_DEST (x))) >= FIRST_PSEUDO_REGISTER
               && refers_to_regno_p (regno, endregno,
                                     SUBREG_REG (SET_DEST (x)), loc))
              || (!REG_P (SET_DEST (x))
                  && refers_to_regno_p (regno, endregno, SET_DEST (x), loc))))
        return 1;

      if (code == CLOBBER || loc == &SET_SRC (x))
        return 0;
      x = SET_SRC (x);
      goto repeat;

    default:
      break;
    }

  /* X does not match, so try its subexpressions.  */

  fmt = GET_RTX_FORMAT (code);
  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    {
      if (fmt[i] == 'e' && loc != &XEXP (x, i))
        {
          if (i == 0)
            {
              x = XEXP (x, 0);
              goto repeat;
            }
          else
            if (refers_to_regno_p (regno, endregno, XEXP (x, i), loc))
              return 1;
        }
      else if (fmt[i] == 'E')
        {
          int j;
          for (j = XVECLEN (x, i) - 1; j >= 0; j--)
            if (loc != &XVECEXP (x, i, j)
                && refers_to_regno_p (regno, endregno, XVECEXP (x, i, j), loc))
              return 1;
        }
    }
  return 0;
}

/* Nonzero if modifying X will affect IN.  If X is a register or a SUBREG,
   we check if any register number in X conflicts with the relevant register
   numbers.  If X is a constant, return 0.  If X is a MEM, return 1 iff IN
   contains a MEM (we don't bother checking for memory addresses that can't
   conflict because we expect this to be a rare case).  */

int
reg_overlap_mentioned_p (rtx x, rtx in)
{
  unsigned int regno, endregno;

  /* If either argument is a constant, then modifying X can not
     affect IN.  Here we look at IN; we can profitably combine
     CONSTANT_P (x) with the switch statement below.  */
  if (CONSTANT_P (in))
    return 0;

 recurse:
  switch (GET_CODE (x))
    {
    case STRICT_LOW_PART:
    case ZERO_EXTRACT:
    case SIGN_EXTRACT:
      /* Overly conservative.  */
      x = XEXP (x, 0);
      goto recurse;

    case SUBREG:
      regno = REGNO (SUBREG_REG (x));
      if (regno < FIRST_PSEUDO_REGISTER)
        regno = subreg_regno (x);
      goto do_reg;

    case REG:
      regno = REGNO (x);
    do_reg:
      endregno = regno + (regno < FIRST_PSEUDO_REGISTER
                          ? hard_regno_nregs[regno][GET_MODE (x)] : 1);
      return refers_to_regno_p (regno, endregno, in, (rtx*) 0);

    case MEM:
      {
        const char *fmt;
        int i;

        if (MEM_P (in))
          return 1;

        fmt = GET_RTX_FORMAT (GET_CODE (in));
        for (i = GET_RTX_LENGTH (GET_CODE (in)) - 1; i >= 0; i--)
          if (fmt[i] == 'e')
            {
              if (reg_overlap_mentioned_p (x, XEXP (in, i)))
                return 1;
            }
          else if (fmt[i] == 'E')
            {
              int j;
              for (j = XVECLEN (in, i) - 1; j >= 0; --j)
                if (reg_overlap_mentioned_p (x, XVECEXP (in, i, j)))
                  return 1;
            }

        return 0;
      }

    case SCRATCH:
    case PC:
    case CC0:
      return reg_mentioned_p (x, in);

    case PARALLEL:
      {
        int i;

        /* If any register in here refers to it we return true.  */
        for (i = XVECLEN (x, 0) - 1; i >= 0; i--)
          if (XEXP (XVECEXP (x, 0, i), 0) != 0
              && reg_overlap_mentioned_p (XEXP (XVECEXP (x, 0, i), 0), in))
            return 1;
        return 0;
      }

    default:
      gcc_assert (CONSTANT_P (x));
      return 0;
    }
}
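
/* For example (hypothetical hard registers): on a target where (reg:DI 0)
   occupies hard registers 0 and 1, reg_overlap_mentioned_p of (reg:DI 0)
   against (reg:SI 1) returns 1 even though the two rtxes are not equal,
   because the register ranges [0, 2) and [1, 2) intersect.  */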
\f
/* Call FUN on each register or MEM that is stored into or clobbered by X.
   (X would be the pattern of an insn).
   FUN receives two arguments:
     the REG, MEM, CC0 or PC being stored in or clobbered,
     the SET or CLOBBER rtx that does the store.

  If the item being stored in or clobbered is a SUBREG of a hard register,
  the SUBREG will be passed.  */

void
note_stores (rtx x, void (*fun) (rtx, rtx, void *), void *data)
{
  int i;

  if (GET_CODE (x) == COND_EXEC)
    x = COND_EXEC_CODE (x);

  if (GET_CODE (x) == SET || GET_CODE (x) == CLOBBER)
    {
      rtx dest = SET_DEST (x);

      while ((GET_CODE (dest) == SUBREG
              && (!REG_P (SUBREG_REG (dest))
                  || REGNO (SUBREG_REG (dest)) >= FIRST_PSEUDO_REGISTER))
             || GET_CODE (dest) == ZERO_EXTRACT
             || GET_CODE (dest) == STRICT_LOW_PART)
        dest = XEXP (dest, 0);

      /* If we have a PARALLEL, SET_DEST is a list of EXPR_LIST expressions,
         each of whose first operand is a register.  */
      if (GET_CODE (dest) == PARALLEL)
        {
          for (i = XVECLEN (dest, 0) - 1; i >= 0; i--)
            if (XEXP (XVECEXP (dest, 0, i), 0) != 0)
              (*fun) (XEXP (XVECEXP (dest, 0, i), 0), x, data);
        }
      else
        (*fun) (dest, x, data);
    }

  else if (GET_CODE (x) == PARALLEL)
    for (i = XVECLEN (x, 0) - 1; i >= 0; i--)
      note_stores (XVECEXP (x, 0, i), fun, data);
}
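
/* A minimal usage sketch (the callback name `record_store' and the
   variable `my_data' are hypothetical; set_of_1 above is a real in-file
   example of the same pattern):

       static void
       record_store (rtx dest, rtx setter, void *data)
       {
         ... DEST is the REG or MEM stored into, SETTER the SET/CLOBBER ...
       }

       note_stores (PATTERN (insn), record_store, &my_data);
*/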
\f
/* Like note_stores, but call FUN for each expression that is being
   referenced in PBODY, a pointer to the PATTERN of an insn.  We only call
   FUN for each expression, not any interior subexpressions.  FUN receives a
   pointer to the expression and the DATA passed to this function.

   Note that this is not quite the same test as that done in reg_referenced_p
   since that considers something as being referenced if it is being
   partially set, while we do not.  */

void
note_uses (rtx *pbody, void (*fun) (rtx *, void *), void *data)
{
  rtx body = *pbody;
  int i;

  switch (GET_CODE (body))
    {
    case COND_EXEC:
      (*fun) (&COND_EXEC_TEST (body), data);
      note_uses (&COND_EXEC_CODE (body), fun, data);
      return;

    case PARALLEL:
      for (i = XVECLEN (body, 0) - 1; i >= 0; i--)
        note_uses (&XVECEXP (body, 0, i), fun, data);
      return;

    case SEQUENCE:
      for (i = XVECLEN (body, 0) - 1; i >= 0; i--)
        note_uses (&PATTERN (XVECEXP (body, 0, i)), fun, data);
      return;

    case USE:
      (*fun) (&XEXP (body, 0), data);
      return;

    case ASM_OPERANDS:
      for (i = ASM_OPERANDS_INPUT_LENGTH (body) - 1; i >= 0; i--)
        (*fun) (&ASM_OPERANDS_INPUT (body, i), data);
      return;

    case TRAP_IF:
      (*fun) (&TRAP_CONDITION (body), data);
      return;

    case PREFETCH:
      (*fun) (&XEXP (body, 0), data);
      return;

    case UNSPEC:
    case UNSPEC_VOLATILE:
      for (i = XVECLEN (body, 0) - 1; i >= 0; i--)
        (*fun) (&XVECEXP (body, 0, i), data);
      return;

    case CLOBBER:
      if (MEM_P (XEXP (body, 0)))
        (*fun) (&XEXP (XEXP (body, 0), 0), data);
      return;

    case SET:
      {
        rtx dest = SET_DEST (body);

        /* For sets we visit everything in the source, plus registers in
           the memory expression of a store and the operands of a
           ZERO_EXTRACT.  */
        (*fun) (&SET_SRC (body), data);

        if (GET_CODE (dest) == ZERO_EXTRACT)
          {
            (*fun) (&XEXP (dest, 1), data);
            (*fun) (&XEXP (dest, 2), data);
          }

        while (GET_CODE (dest) == SUBREG || GET_CODE (dest) == STRICT_LOW_PART)
          dest = XEXP (dest, 0);

        if (MEM_P (dest))
          (*fun) (&XEXP (dest, 0), data);
      }
      return;

    default:
      /* All the other possibilities never store.  */
      (*fun) (pbody, data);
      return;
    }
}
\f
/* Return nonzero if X's old contents don't survive after INSN.
   This will be true if X is (cc0) or if X is a register and
   X dies in INSN or INSN entirely sets X.

   "Entirely set" means set directly and not through a SUBREG or
   ZERO_EXTRACT, so no trace of the old contents remains.
   Likewise, REG_INC does not count.

   REG may be a hard or pseudo reg.  Renumbering is not taken into account,
   but for this use that makes no difference, since regs don't overlap
   during their lifetimes.  Therefore, this function may be used
   at any time after deaths have been computed (in flow.c).

   If REG is a hard reg that occupies multiple machine registers, this
   function will only return 1 if each of those registers will be replaced
   by INSN.  */

int
dead_or_set_p (rtx insn, rtx x)
{
  unsigned int regno, last_regno;
  unsigned int i;

  /* Can't use cc0_rtx below since this file is used by genattrtab.c.  */
  if (GET_CODE (x) == CC0)
    return 1;

  gcc_assert (REG_P (x));

  regno = REGNO (x);
  last_regno = (regno >= FIRST_PSEUDO_REGISTER ? regno
                : regno + hard_regno_nregs[regno][GET_MODE (x)] - 1);

  for (i = regno; i <= last_regno; i++)
    if (! dead_or_set_regno_p (insn, i))
      return 0;

  return 1;
}

/* Return TRUE iff DEST is a register or subreg of a register and
   doesn't change the number of words of the inner register, and any
   part of the register is TEST_REGNO.  */

static bool
covers_regno_no_parallel_p (rtx dest, unsigned int test_regno)
{
  unsigned int regno, endregno;

  if (GET_CODE (dest) == SUBREG
      && (((GET_MODE_SIZE (GET_MODE (dest))
            + UNITS_PER_WORD - 1) / UNITS_PER_WORD)
          == ((GET_MODE_SIZE (GET_MODE (SUBREG_REG (dest)))
               + UNITS_PER_WORD - 1) / UNITS_PER_WORD)))
    dest = SUBREG_REG (dest);

  if (!REG_P (dest))
    return false;

  regno = REGNO (dest);
  endregno = (regno >= FIRST_PSEUDO_REGISTER ? regno + 1
              : regno + hard_regno_nregs[regno][GET_MODE (dest)]);
  return (test_regno >= regno && test_regno < endregno);
}

/* Like covers_regno_no_parallel_p, but also handles PARALLELs where
   any member matches the covers_regno_no_parallel_p criteria.  */

static bool
covers_regno_p (rtx dest, unsigned int test_regno)
{
  if (GET_CODE (dest) == PARALLEL)
    {
      /* Some targets place small structures in registers for return
         values of functions, and those registers are wrapped in
         PARALLELs that we may see as the destination of a SET.  */
      int i;

      for (i = XVECLEN (dest, 0) - 1; i >= 0; i--)
        {
          rtx inner = XEXP (XVECEXP (dest, 0, i), 0);
          if (inner != NULL_RTX
              && covers_regno_no_parallel_p (inner, test_regno))
            return true;
        }

      return false;
    }
  else
    return covers_regno_no_parallel_p (dest, test_regno);
}

/* Utility function for dead_or_set_p to check an individual register.  Also
   called from flow.c.  */

int
dead_or_set_regno_p (rtx insn, unsigned int test_regno)
{
  rtx pattern;

  /* See if there is a death note for something that includes TEST_REGNO.  */
  if (find_regno_note (insn, REG_DEAD, test_regno))
    return 1;

  if (CALL_P (insn)
      && find_regno_fusage (insn, CLOBBER, test_regno))
    return 1;

  pattern = PATTERN (insn);

  if (GET_CODE (pattern) == COND_EXEC)
    pattern = COND_EXEC_CODE (pattern);

  if (GET_CODE (pattern) == SET)
    return covers_regno_p (SET_DEST (pattern), test_regno);
  else if (GET_CODE (pattern) == PARALLEL)
    {
      int i;

      for (i = XVECLEN (pattern, 0) - 1; i >= 0; i--)
        {
          rtx body = XVECEXP (pattern, 0, i);

          if (GET_CODE (body) == COND_EXEC)
            body = COND_EXEC_CODE (body);

          if ((GET_CODE (body) == SET || GET_CODE (body) == CLOBBER)
              && covers_regno_p (SET_DEST (body), test_regno))
            return 1;
        }
    }

  return 0;
}

/* Return the reg-note of kind KIND in insn INSN, if there is one.
   If DATUM is nonzero, look for one whose datum is DATUM.  */

rtx
find_reg_note (rtx insn, enum reg_note kind, rtx datum)
{
  rtx link;

  gcc_assert (insn);

  /* Ignore anything that is not an INSN, JUMP_INSN or CALL_INSN.  */
  if (! INSN_P (insn))
    return 0;
  if (datum == 0)
    {
      for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
        if (REG_NOTE_KIND (link) == kind)
          return link;
      return 0;
    }

  for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
    if (REG_NOTE_KIND (link) == kind && datum == XEXP (link, 0))
      return link;
  return 0;
}
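
/* A typical use (illustrative; `insn' is whatever insn the caller holds):

       rtx note = find_reg_note (insn, REG_EQUAL, NULL_RTX);

   returns the whole note link, so the attached value is XEXP (note, 0);
   passing a nonzero DATUM instead demands an exact rtx match on that
   datum.  */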

/* Return the reg-note of kind KIND in insn INSN which applies to register
   number REGNO, if any.  Return 0 if there is no such reg-note.  Note that
   the REGNO of this NOTE need not be REGNO if REGNO is a hard register;
   it might be the case that the note overlaps REGNO.  */

rtx
find_regno_note (rtx insn, enum reg_note kind, unsigned int regno)
{
  rtx link;

  /* Ignore anything that is not an INSN, JUMP_INSN or CALL_INSN.  */
  if (! INSN_P (insn))
    return 0;

  for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
    if (REG_NOTE_KIND (link) == kind
        /* Verify that it is a register, so that scratch and MEM won't cause a
           problem here.  */
        && REG_P (XEXP (link, 0))
        && REGNO (XEXP (link, 0)) <= regno
        && ((REGNO (XEXP (link, 0))
             + (REGNO (XEXP (link, 0)) >= FIRST_PSEUDO_REGISTER ? 1
                : hard_regno_nregs[REGNO (XEXP (link, 0))]
                                  [GET_MODE (XEXP (link, 0))]))
            > regno))
      return link;
  return 0;
}

/* Return a REG_EQUIV or REG_EQUAL note if insn has only a single set and
   has such a note.  */

rtx
find_reg_equal_equiv_note (rtx insn)
{
  rtx link;

  if (!INSN_P (insn))
    return 0;
  for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
    if (REG_NOTE_KIND (link) == REG_EQUAL
        || REG_NOTE_KIND (link) == REG_EQUIV)
      {
        if (single_set (insn) == 0)
          return 0;
        return link;
      }
  return NULL;
}

/* Return true if DATUM, or any overlap of DATUM, of kind CODE is found
   in the CALL_INSN_FUNCTION_USAGE information of INSN.  */

int
find_reg_fusage (rtx insn, enum rtx_code code, rtx datum)
{
  /* If it's not a CALL_INSN, it can't possibly have a
     CALL_INSN_FUNCTION_USAGE field, so don't bother checking.  */
  if (!CALL_P (insn))
    return 0;

  gcc_assert (datum);

  if (!REG_P (datum))
    {
      rtx link;

      for (link = CALL_INSN_FUNCTION_USAGE (insn);
           link;
           link = XEXP (link, 1))
        if (GET_CODE (XEXP (link, 0)) == code
            && rtx_equal_p (datum, XEXP (XEXP (link, 0), 0)))
          return 1;
    }
  else
    {
      unsigned int regno = REGNO (datum);

      /* CALL_INSN_FUNCTION_USAGE information cannot contain references
         to pseudo registers, so don't bother checking.  */

      if (regno < FIRST_PSEUDO_REGISTER)
        {
          unsigned int end_regno
            = regno + hard_regno_nregs[regno][GET_MODE (datum)];
          unsigned int i;

          for (i = regno; i < end_regno; i++)
            if (find_regno_fusage (insn, code, i))
              return 1;
        }
    }

  return 0;
}

/* Return true if REGNO, or any overlap of REGNO, of kind CODE is found
   in the CALL_INSN_FUNCTION_USAGE information of INSN.  */

int
find_regno_fusage (rtx insn, enum rtx_code code, unsigned int regno)
{
  rtx link;

  /* CALL_INSN_FUNCTION_USAGE information cannot contain references
     to pseudo registers, so don't bother checking.  */

  if (regno >= FIRST_PSEUDO_REGISTER
      || !CALL_P (insn) )
    return 0;

  for (link = CALL_INSN_FUNCTION_USAGE (insn); link; link = XEXP (link, 1))
    {
      unsigned int regnote;
      rtx op, reg;

      if (GET_CODE (op = XEXP (link, 0)) == code
          && REG_P (reg = XEXP (op, 0))
          && (regnote = REGNO (reg)) <= regno
          && regnote + hard_regno_nregs[regnote][GET_MODE (reg)] > regno)
        return 1;
    }

  return 0;
}

/* Return true if INSN is a call to a pure function.  */

int
pure_call_p (rtx insn)
{
  rtx link;

  if (!CALL_P (insn) || ! CONST_OR_PURE_CALL_P (insn))
    return 0;

  /* Look for the note that differentiates const and pure functions.  */
  for (link = CALL_INSN_FUNCTION_USAGE (insn); link; link = XEXP (link, 1))
    {
      rtx u, m;

      if (GET_CODE (u = XEXP (link, 0)) == USE
          && MEM_P (m = XEXP (u, 0)) && GET_MODE (m) == BLKmode
          && GET_CODE (XEXP (m, 0)) == SCRATCH)
        return 1;
    }

  return 0;
}
\f
/* Remove register note NOTE from the REG_NOTES of INSN.  */

void
remove_note (rtx insn, rtx note)
{
  rtx link;

  if (note == NULL_RTX)
    return;

  if (REG_NOTES (insn) == note)
    {
      REG_NOTES (insn) = XEXP (note, 1);
      return;
    }

  for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
    if (XEXP (link, 1) == note)
      {
        XEXP (link, 1) = XEXP (note, 1);
        return;
      }

  gcc_unreachable ();
}

/* Search LISTP (an EXPR_LIST) for an entry whose first operand is NODE and
   return 1 if it is found.  A simple equality test is used to determine if
   NODE matches.  */

int
in_expr_list_p (rtx listp, rtx node)
{
  rtx x;

  for (x = listp; x; x = XEXP (x, 1))
    if (node == XEXP (x, 0))
      return 1;

  return 0;
}

/* Search LISTP (an EXPR_LIST) for an entry whose first operand is NODE and
   remove that entry from the list if it is found.

   A simple equality test is used to determine if NODE matches.  */

void
remove_node_from_expr_list (rtx node, rtx *listp)
{
  rtx temp = *listp;
  rtx prev = NULL_RTX;

  while (temp)
    {
      if (node == XEXP (temp, 0))
        {
          /* Splice the node out of the list.  */
          if (prev)
            XEXP (prev, 1) = XEXP (temp, 1);
          else
            *listp = XEXP (temp, 1);

          return;
        }

      prev = temp;
      temp = XEXP (temp, 1);
    }
}
2c88418c 1837\f
2b067faf
RS
1838/* Nonzero if X contains any volatile instructions. These are instructions
1839 which may cause unpredictable machine state instructions, and thus no
1840 instructions should be moved or combined across them. This includes
1841 only volatile asms and UNSPEC_VOLATILE instructions. */
1842
1843int
0c20a65f 1844volatile_insn_p (rtx x)
2b067faf 1845{
b3694847 1846 RTX_CODE code;
2b067faf
RS
1847
1848 code = GET_CODE (x);
1849 switch (code)
1850 {
1851 case LABEL_REF:
1852 case SYMBOL_REF:
1853 case CONST_INT:
1854 case CONST:
1855 case CONST_DOUBLE:
69ef87e2 1856 case CONST_VECTOR:
2b067faf
RS
1857 case CC0:
1858 case PC:
1859 case REG:
1860 case SCRATCH:
1861 case CLOBBER:
2b067faf
RS
1862 case ADDR_VEC:
1863 case ADDR_DIFF_VEC:
1864 case CALL:
1865 case MEM:
1866 return 0;
1867
1868 case UNSPEC_VOLATILE:
1869 /* case TRAP_IF: This isn't clear yet. */
1870 return 1;
1871
4c46ea23 1872 case ASM_INPUT:
2b067faf
RS
1873 case ASM_OPERANDS:
1874 if (MEM_VOLATILE_P (x))
1875 return 1;
e9a25f70
JL
1876
1877 default:
1878 break;
2b067faf
RS
1879 }
1880
1881 /* Recursively scan the operands of this expression. */
1882
1883 {
b3694847
SS
1884 const char *fmt = GET_RTX_FORMAT (code);
1885 int i;
a6a2274a 1886
2b067faf
RS
1887 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
1888 {
1889 if (fmt[i] == 'e')
1890 {
31001f72 1891 if (volatile_insn_p (XEXP (x, i)))
2b067faf
RS
1892 return 1;
1893 }
d4757e6a 1894 else if (fmt[i] == 'E')
2b067faf 1895 {
b3694847 1896 int j;
2b067faf 1897 for (j = 0; j < XVECLEN (x, i); j++)
31001f72 1898 if (volatile_insn_p (XVECEXP (x, i, j)))
2b067faf
RS
1899 return 1;
1900 }
1901 }
1902 }
1903 return 0;
1904}
1905
2c88418c 1906/* Nonzero if X contains any volatile memory references,
2ac4fed0 1907 UNSPEC_VOLATILE operations, or volatile ASM_OPERANDS expressions. */
2c88418c
RS
1908
1909int
0c20a65f 1910volatile_refs_p (rtx x)
2c88418c 1911{
b3694847 1912 RTX_CODE code;
2c88418c
RS
1913
1914 code = GET_CODE (x);
1915 switch (code)
1916 {
1917 case LABEL_REF:
1918 case SYMBOL_REF:
1919 case CONST_INT:
1920 case CONST:
1921 case CONST_DOUBLE:
69ef87e2 1922 case CONST_VECTOR:
2c88418c
RS
1923 case CC0:
1924 case PC:
1925 case REG:
1926 case SCRATCH:
1927 case CLOBBER:
2c88418c
RS
1928 case ADDR_VEC:
1929 case ADDR_DIFF_VEC:
1930 return 0;
1931
2ac4fed0 1932 case UNSPEC_VOLATILE:
2c88418c
RS
1933 return 1;
1934
1935 case MEM:
4c46ea23 1936 case ASM_INPUT:
2c88418c
RS
1937 case ASM_OPERANDS:
1938 if (MEM_VOLATILE_P (x))
1939 return 1;
e9a25f70
JL
1940
1941 default:
1942 break;
2c88418c
RS
1943 }
1944
1945 /* Recursively scan the operands of this expression. */
1946
1947 {
b3694847
SS
1948 const char *fmt = GET_RTX_FORMAT (code);
1949 int i;
a6a2274a 1950
2c88418c
RS
1951 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
1952 {
1953 if (fmt[i] == 'e')
1954 {
1955 if (volatile_refs_p (XEXP (x, i)))
1956 return 1;
1957 }
d4757e6a 1958 else if (fmt[i] == 'E')
2c88418c 1959 {
b3694847 1960 int j;
2c88418c
RS
1961 for (j = 0; j < XVECLEN (x, i); j++)
1962 if (volatile_refs_p (XVECEXP (x, i, j)))
1963 return 1;
1964 }
1965 }
1966 }
1967 return 0;
1968}
1969
1970/* Similar to above, except that it also rejects register pre- and post-
1971 incrementing. */
1972
1973int
0c20a65f 1974side_effects_p (rtx x)
2c88418c 1975{
b3694847 1976 RTX_CODE code;
2c88418c
RS
1977
1978 code = GET_CODE (x);
1979 switch (code)
1980 {
1981 case LABEL_REF:
1982 case SYMBOL_REF:
1983 case CONST_INT:
1984 case CONST:
1985 case CONST_DOUBLE:
69ef87e2 1986 case CONST_VECTOR:
2c88418c
RS
1987 case CC0:
1988 case PC:
1989 case REG:
1990 case SCRATCH:
2c88418c
RS
1991 case ADDR_VEC:
1992 case ADDR_DIFF_VEC:
1993 return 0;
1994
1995 case CLOBBER:
1996 /* Reject CLOBBER with a non-VOID mode. These are made by combine.c
1997 when some combination can't be done. If we see one, don't think
1998 that we can simplify the expression. */
1999 return (GET_MODE (x) != VOIDmode);
2000
2001 case PRE_INC:
2002 case PRE_DEC:
2003 case POST_INC:
2004 case POST_DEC:
1fb9c5cd
MH
2005 case PRE_MODIFY:
2006 case POST_MODIFY:
2c88418c 2007 case CALL:
2ac4fed0 2008 case UNSPEC_VOLATILE:
2c88418c
RS
2009 /* case TRAP_IF: This isn't clear yet. */
2010 return 1;
2011
2012 case MEM:
4c46ea23 2013 case ASM_INPUT:
2c88418c
RS
2014 case ASM_OPERANDS:
2015 if (MEM_VOLATILE_P (x))
2016 return 1;
e9a25f70
JL
2017
2018 default:
2019 break;
2c88418c
RS
2020 }
2021
2022 /* Recursively scan the operands of this expression. */
2023
2024 {
b3694847
SS
2025 const char *fmt = GET_RTX_FORMAT (code);
2026 int i;
a6a2274a 2027
2c88418c
RS
2028 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
2029 {
2030 if (fmt[i] == 'e')
2031 {
2032 if (side_effects_p (XEXP (x, i)))
2033 return 1;
2034 }
d4757e6a 2035 else if (fmt[i] == 'E')
2c88418c 2036 {
b3694847 2037 int j;
2c88418c
RS
2038 for (j = 0; j < XVECLEN (x, i); j++)
2039 if (side_effects_p (XVECEXP (x, i, j)))
2040 return 1;
2041 }
2042 }
2043 }
2044 return 0;
2045}
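/* Illustrative note (not part of the original file): the three predicates
   above form a hierarchy.  For (post_inc (reg)) only side_effects_p is
   true; for a volatile MEM both volatile_refs_p and side_effects_p are
   true; volatile_insn_p is true only for volatile asms and
   UNSPEC_VOLATILE.  A pass that wants to evaluate an expression X twice
   might therefore guard with something like:  */
#if 0
  if (!side_effects_p (x))
    /* Safe to duplicate X as far as side effects are concerned;
       duplicate_expr is a hypothetical helper.  */
    y = duplicate_expr (x);
#endif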
2046\f
e755fcf5
ZD
2047enum may_trap_p_flags
2048{
2049 MTP_UNALIGNED_MEMS = 1,
2050 MTP_AFTER_MOVE = 2
2051};
2052/* Return nonzero if evaluating rtx X might cause a trap.
2053 (FLAGS & MTP_UNALIGNED_MEMS) controls whether nonzero is returned for
2054 unaligned memory accesses on strict alignment machines. If
2055 (FLAGS & MTP_AFTER_MOVE) is true, returns nonzero even if the expression
2056 cannot trap at its current location but might become trapping if moved
2057 elsewhere. */
2c88418c 2058
2358ff91 2059static int
e755fcf5 2060may_trap_p_1 (rtx x, unsigned flags)
2c88418c
RS
2061{
2062 int i;
2063 enum rtx_code code;
6f7d635c 2064 const char *fmt;
e755fcf5 2065 bool unaligned_mems = (flags & MTP_UNALIGNED_MEMS) != 0;
2c88418c
RS
2066
2067 if (x == 0)
2068 return 0;
2069 code = GET_CODE (x);
2070 switch (code)
2071 {
2072 /* Handle these cases quickly. */
2073 case CONST_INT:
2074 case CONST_DOUBLE:
69ef87e2 2075 case CONST_VECTOR:
2c88418c
RS
2076 case SYMBOL_REF:
2077 case LABEL_REF:
2078 case CONST:
2079 case PC:
2080 case CC0:
2081 case REG:
2082 case SCRATCH:
2083 return 0;
2084
22aa60a1 2085 case ASM_INPUT:
2ac4fed0 2086 case UNSPEC_VOLATILE:
2c88418c
RS
2087 case TRAP_IF:
2088 return 1;
2089
22aa60a1
RH
2090 case ASM_OPERANDS:
2091 return MEM_VOLATILE_P (x);
2092
2c88418c
RS
2093 /* Memory ref can trap unless it's a static var or a stack slot. */
2094 case MEM:
e755fcf5
ZD
2095 if (/* MEM_NOTRAP_P only relates to the actual position of the memory
2096 reference; moving it out of its condition might cause its address
2097 to become invalid. */
2098 !(flags & MTP_AFTER_MOVE)
2099 && MEM_NOTRAP_P (x)
2358ff91 2100 && (!STRICT_ALIGNMENT || !unaligned_mems))
4da2eb6b 2101 return 0;
2358ff91
EB
2102 return
2103 rtx_addr_can_trap_p_1 (XEXP (x, 0), GET_MODE (x), unaligned_mems);
2c88418c
RS
2104
2105 /* Division by a non-constant might trap. */
2106 case DIV:
2107 case MOD:
2108 case UDIV:
2109 case UMOD:
52bfebf0
RS
2110 if (HONOR_SNANS (GET_MODE (x)))
2111 return 1;
3d8bf70f 2112 if (SCALAR_FLOAT_MODE_P (GET_MODE (x)))
f9013075
DE
2113 return flag_trapping_math;
2114 if (!CONSTANT_P (XEXP (x, 1)) || (XEXP (x, 1) == const0_rtx))
2c88418c 2115 return 1;
e9a25f70
JL
2116 break;
2117
b278301b
RK
2118 case EXPR_LIST:
2119 /* An EXPR_LIST is used to represent a function call. This
2120 certainly may trap. */
2121 return 1;
e9a25f70 2122
734508ea
JW
2123 case GE:
2124 case GT:
2125 case LE:
2126 case LT:
19aec195 2127 case LTGT:
55143861 2128 case COMPARE:
734508ea 2129 /* Some floating point comparisons may trap. */
f5eb5fd0
JH
2130 if (!flag_trapping_math)
2131 break;
734508ea
JW
2132 /* ??? There is no machine independent way to check for tests that trap
2133 when COMPARE is used, though many targets do make this distinction.
2134 For instance, sparc uses CCFPE for compares which generate exceptions
2135 and CCFP for compares which do not generate exceptions. */
52bfebf0 2136 if (HONOR_NANS (GET_MODE (x)))
55143861
JJ
2137 return 1;
2138 /* But often the compare has some CC mode, so check operand
2139 modes as well. */
52bfebf0
RS
2140 if (HONOR_NANS (GET_MODE (XEXP (x, 0)))
2141 || HONOR_NANS (GET_MODE (XEXP (x, 1))))
2142 return 1;
2143 break;
2144
2145 case EQ:
2146 case NE:
2147 if (HONOR_SNANS (GET_MODE (x)))
2148 return 1;
2149 /* Often comparison is CC mode, so check operand modes. */
2150 if (HONOR_SNANS (GET_MODE (XEXP (x, 0)))
2151 || HONOR_SNANS (GET_MODE (XEXP (x, 1))))
55143861
JJ
2152 return 1;
2153 break;
2154
22fd5743
FH
2155 case FIX:
2156 /* Conversion of floating point might trap. */
2157 if (flag_trapping_math && HONOR_NANS (GET_MODE (XEXP (x, 0))))
2158 return 1;
2159 break;
2160
05cc23e8
RH
2161 case NEG:
2162 case ABS:
e3947b34 2163 case SUBREG:
05cc23e8
RH
2164 /* These operations don't trap even with floating point. */
2165 break;
2166
2c88418c
RS
2167 default:
2168 /* Any floating arithmetic may trap. */
3d8bf70f 2169 if (SCALAR_FLOAT_MODE_P (GET_MODE (x))
f5eb5fd0 2170 && flag_trapping_math)
2c88418c
RS
2171 return 1;
2172 }
2173
2174 fmt = GET_RTX_FORMAT (code);
2175 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
2176 {
2177 if (fmt[i] == 'e')
2178 {
e755fcf5 2179 if (may_trap_p_1 (XEXP (x, i), flags))
2c88418c
RS
2180 return 1;
2181 }
2182 else if (fmt[i] == 'E')
2183 {
b3694847 2184 int j;
2c88418c 2185 for (j = 0; j < XVECLEN (x, i); j++)
e755fcf5 2186 if (may_trap_p_1 (XVECEXP (x, i, j), flags))
2c88418c
RS
2187 return 1;
2188 }
2189 }
2190 return 0;
2191}
2358ff91
EB
2192
2193/* Return nonzero if evaluating rtx X might cause a trap. */
2194
2195int
2196may_trap_p (rtx x)
2197{
e755fcf5
ZD
2198 return may_trap_p_1 (x, 0);
2199}
2200
2201/* Return nonzero if evaluating rtx X might cause a trap, when the expression
2202 is moved from its current location by some optimization. */
2203
2204int
2205may_trap_after_code_motion_p (rtx x)
2206{
2207 return may_trap_p_1 (x, MTP_AFTER_MOVE);
2358ff91
EB
2208}
2209
c0220ea4 2210/* Same as above, but additionally return nonzero if evaluating rtx X might
2358ff91
EB
2211 cause a fault. We define a fault for the purpose of this function as an
2212 erroneous execution condition that cannot be encountered during the normal
2213 execution of a valid program; the typical example is an unaligned memory
2214 access on a strict alignment machine. The compiler guarantees that it
2215 doesn't generate code that will fault from a valid program, but this
2216 guarantee doesn't mean anything for individual instructions. Consider
2217 the following example:
2218
2219 struct S { int d; union { char *cp; int *ip; }; };
2220
2221 int foo(struct S *s)
2222 {
2223 if (s->d == 1)
2224 return *s->ip;
2225 else
2226 return *s->cp;
2227 }
2228
2229 on a strict alignment machine. In a valid program, foo will never be
2230 invoked on a structure for which d is equal to 1 and the underlying
2231 unique field of the union not aligned on a 4-byte boundary, but the
2232 expression *s->ip might cause a fault if considered individually.
2233
2234 At the RTL level, potentially problematic expressions will almost always
2235 satisfy may_trap_p; for example, the above dereference can be emitted as
2236 (mem:SI (reg:P)) and this expression is may_trap_p for a generic register.
2237 However, suppose that foo is inlined in a caller that causes s->cp to
2238 point to a local character variable and guarantees that s->d is not set
2239 to 1; foo may have been effectively translated into pseudo-RTL as:
2240
2241 if ((reg:SI) == 1)
2242 (set (reg:SI) (mem:SI (%fp - 7)))
2243 else
2244 (set (reg:QI) (mem:QI (%fp - 7)))
2245
2246 Now (mem:SI (%fp - 7)) is not considered may_trap_p, since it is a
2247 memory reference to a stack slot, but it will certainly cause a fault
2248 on a strict alignment machine. */
2249
2250int
2251may_trap_or_fault_p (rtx x)
2252{
e755fcf5 2253 return may_trap_p_1 (x, MTP_UNALIGNED_MEMS);
2358ff91 2254}
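/* Illustrative sketch (not part of the original file): how a hoisting
   pass might choose among the entry points above.  loop_invariant_expr_p
   and hoist_expr are hypothetical helpers.  */
#if 0
  if (loop_invariant_expr_p (x)
      && !may_trap_after_code_motion_p (x))
    /* X cannot trap even after being moved out of its guarding
       condition, so it is safe to hoist.  */
    hoist_expr (x);
#endif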
2c88418c
RS
2255\f
2256/* Return nonzero if X contains a comparison that is not either EQ or NE,
2257 i.e., an inequality. */
2258
2259int
0c20a65f 2260inequality_comparisons_p (rtx x)
2c88418c 2261{
b3694847
SS
2262 const char *fmt;
2263 int len, i;
2264 enum rtx_code code = GET_CODE (x);
2c88418c
RS
2265
2266 switch (code)
2267 {
2268 case REG:
2269 case SCRATCH:
2270 case PC:
2271 case CC0:
2272 case CONST_INT:
2273 case CONST_DOUBLE:
69ef87e2 2274 case CONST_VECTOR:
2c88418c
RS
2275 case CONST:
2276 case LABEL_REF:
2277 case SYMBOL_REF:
2278 return 0;
2279
2280 case LT:
2281 case LTU:
2282 case GT:
2283 case GTU:
2284 case LE:
2285 case LEU:
2286 case GE:
2287 case GEU:
2288 return 1;
a6a2274a 2289
e9a25f70
JL
2290 default:
2291 break;
2c88418c
RS
2292 }
2293
2294 len = GET_RTX_LENGTH (code);
2295 fmt = GET_RTX_FORMAT (code);
2296
2297 for (i = 0; i < len; i++)
2298 {
2299 if (fmt[i] == 'e')
2300 {
2301 if (inequality_comparisons_p (XEXP (x, i)))
2302 return 1;
2303 }
2304 else if (fmt[i] == 'E')
2305 {
b3694847 2306 int j;
2c88418c
RS
2307 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
2308 if (inequality_comparisons_p (XVECEXP (x, i, j)))
2309 return 1;
2310 }
2311 }
a6a2274a 2312
2c88418c
RS
2313 return 0;
2314}
2315\f
1ed0205e
VM
2316/* Replace any occurrence of FROM in X with TO. The function does
2317 not recurse into CONST_DOUBLE expressions when replacing.
2c88418c
RS
2318
2319 Note that copying is not done so X must not be shared unless all copies
2320 are to be modified. */
2321
2322rtx
0c20a65f 2323replace_rtx (rtx x, rtx from, rtx to)
2c88418c 2324{
b3694847
SS
2325 int i, j;
2326 const char *fmt;
2c88418c 2327
1ed0205e 2328 /* The following prevents a loop from occurring when we change a MEM
dc297297 2329 in a CONST_DOUBLE into the same CONST_DOUBLE. */
1ed0205e
VM
2330 if (x != 0 && GET_CODE (x) == CONST_DOUBLE)
2331 return x;
2332
2c88418c
RS
2333 if (x == from)
2334 return to;
2335
2336 /* Allow this function to make replacements in EXPR_LISTs. */
2337 if (x == 0)
2338 return 0;
2339
9dd791c8
AO
2340 if (GET_CODE (x) == SUBREG)
2341 {
2342 rtx new = replace_rtx (SUBREG_REG (x), from, to);
2343
2344 if (GET_CODE (new) == CONST_INT)
2345 {
2346 x = simplify_subreg (GET_MODE (x), new,
2347 GET_MODE (SUBREG_REG (x)),
2348 SUBREG_BYTE (x));
41374e13 2349 gcc_assert (x);
9dd791c8
AO
2350 }
2351 else
2352 SUBREG_REG (x) = new;
2353
2354 return x;
2355 }
2356 else if (GET_CODE (x) == ZERO_EXTEND)
2357 {
2358 rtx new = replace_rtx (XEXP (x, 0), from, to);
2359
2360 if (GET_CODE (new) == CONST_INT)
2361 {
2362 x = simplify_unary_operation (ZERO_EXTEND, GET_MODE (x),
2363 new, GET_MODE (XEXP (x, 0)));
41374e13 2364 gcc_assert (x);
9dd791c8
AO
2365 }
2366 else
2367 XEXP (x, 0) = new;
2368
2369 return x;
2370 }
2371
2c88418c
RS
2372 fmt = GET_RTX_FORMAT (GET_CODE (x));
2373 for (i = GET_RTX_LENGTH (GET_CODE (x)) - 1; i >= 0; i--)
2374 {
2375 if (fmt[i] == 'e')
2376 XEXP (x, i) = replace_rtx (XEXP (x, i), from, to);
2377 else if (fmt[i] == 'E')
2378 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
2379 XVECEXP (x, i, j) = replace_rtx (XVECEXP (x, i, j), from, to);
2380 }
2381
2382 return x;
a6a2274a 2383}
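/* Illustrative sketch (not part of the original file): substituting one
   register for another throughout a pattern.  Since replace_rtx modifies
   X in place, a caller whose RTL may be shared copies it first.
   OLD_REG and NEW_REG are hypothetical.  */
#if 0
  pat = replace_rtx (copy_rtx (pat), old_reg, new_reg);
#endif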
2c88418c 2384\f
39811184 2385/* Replace occurrences of the old label in *X with the new one.
4af16369 2386 DATA is a REPLACE_LABEL_DATA containing the old and new labels. */
39811184
JZ
2387
2388int
0c20a65f 2389replace_label (rtx *x, void *data)
39811184
JZ
2390{
2391 rtx l = *x;
4af16369
JZ
2392 rtx old_label = ((replace_label_data *) data)->r1;
2393 rtx new_label = ((replace_label_data *) data)->r2;
2394 bool update_label_nuses = ((replace_label_data *) data)->update_label_nuses;
39811184
JZ
2395
2396 if (l == NULL_RTX)
2397 return 0;
2398
173cd571
JZ
2399 if (GET_CODE (l) == SYMBOL_REF
2400 && CONSTANT_POOL_ADDRESS_P (l))
4af16369 2401 {
173cd571 2402 rtx c = get_pool_constant (l);
4af16369
JZ
2403 if (rtx_referenced_p (old_label, c))
2404 {
2405 rtx new_c, new_l;
2406 replace_label_data *d = (replace_label_data *) data;
0c20a65f 2407
4af16369
JZ
2408 /* Create a copy of constant C; replace the label inside
2409 but do not update LABEL_NUSES because uses in constant pool
2410 are not counted. */
2411 new_c = copy_rtx (c);
2412 d->update_label_nuses = false;
2413 for_each_rtx (&new_c, replace_label, data);
2414 d->update_label_nuses = update_label_nuses;
2415
2416 /* Add the new constant NEW_C to constant pool and replace
2417 the old reference to constant by new reference. */
173cd571 2418 new_l = XEXP (force_const_mem (get_pool_mode (l), new_c), 0);
4af16369
JZ
2419 *x = replace_rtx (l, l, new_l);
2420 }
2421 return 0;
2422 }
2423
39811184
JZ
2424 /* If this is a JUMP_INSN, then we also need to fix the JUMP_LABEL
2425 field. This is not handled by for_each_rtx because it doesn't
2426 handle unprinted ('0') fields. */
4b4bf941 2427 if (JUMP_P (l) && JUMP_LABEL (l) == old_label)
39811184 2428 JUMP_LABEL (l) = new_label;
39811184 2429
4af16369
JZ
2430 if ((GET_CODE (l) == LABEL_REF
2431 || GET_CODE (l) == INSN_LIST)
2432 && XEXP (l, 0) == old_label)
2433 {
2434 XEXP (l, 0) = new_label;
2435 if (update_label_nuses)
2436 {
2437 ++LABEL_NUSES (new_label);
2438 --LABEL_NUSES (old_label);
2439 }
2440 return 0;
2441 }
39811184
JZ
2442
2443 return 0;
2444}
2445
4af16369
JZ
2446/* When *BODY is equal to X or X is directly referenced by *BODY
2447 return nonzero, thus FOR_EACH_RTX stops traversing and returns nonzero
2448 too, otherwise FOR_EACH_RTX continues traversing *BODY. */
39811184
JZ
2449
2450static int
0c20a65f 2451rtx_referenced_p_1 (rtx *body, void *x)
39811184 2452{
4af16369
JZ
2453 rtx y = (rtx) x;
2454
2455 if (*body == NULL_RTX)
2456 return y == NULL_RTX;
2457
2458 /* Return true if a label_ref *BODY refers to label Y. */
4b4bf941 2459 if (GET_CODE (*body) == LABEL_REF && LABEL_P (y))
4af16369
JZ
2460 return XEXP (*body, 0) == y;
2461
2462 /* If *BODY is a reference to pool constant traverse the constant. */
2463 if (GET_CODE (*body) == SYMBOL_REF
2464 && CONSTANT_POOL_ADDRESS_P (*body))
2465 return rtx_referenced_p (y, get_pool_constant (*body));
2466
2467 /* By default, compare the RTL expressions. */
2468 return rtx_equal_p (*body, y);
39811184
JZ
2469}
2470
4af16369 2471/* Return true if X is referenced in BODY. */
39811184
JZ
2472
2473int
0c20a65f 2474rtx_referenced_p (rtx x, rtx body)
39811184 2475{
4af16369 2476 return for_each_rtx (&body, rtx_referenced_p_1, x);
39811184
JZ
2477}
2478
ee735eef
JZ
2479/* If INSN is a tablejump, return true and store the label (which precedes the
2480 jump table) in *LABELP and the jump table in *TABLEP. Both may be NULL. */
39811184
JZ
2481
2482bool
ee735eef 2483tablejump_p (rtx insn, rtx *labelp, rtx *tablep)
39811184 2484{
ee735eef
JZ
2485 rtx label, table;
2486
4b4bf941 2487 if (JUMP_P (insn)
ee735eef
JZ
2488 && (label = JUMP_LABEL (insn)) != NULL_RTX
2489 && (table = next_active_insn (label)) != NULL_RTX
4b4bf941 2490 && JUMP_P (table)
ee735eef
JZ
2491 && (GET_CODE (PATTERN (table)) == ADDR_VEC
2492 || GET_CODE (PATTERN (table)) == ADDR_DIFF_VEC))
39811184 2493 {
ee735eef
JZ
2494 if (labelp)
2495 *labelp = label;
2496 if (tablep)
2497 *tablep = table;
39811184
JZ
2498 return true;
2499 }
2500 return false;
2501}
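/* Illustrative sketch (not part of the original file): enumerating the
   case targets of a jump table located via tablejump_p.  The vector
   indexing mirrors label_is_jump_target_p further below;
   record_target is a hypothetical callback.  */
#if 0
  rtx label, table;
  if (tablejump_p (insn, &label, &table))
    {
      rtvec vec = XVEC (PATTERN (table),
			GET_CODE (PATTERN (table)) == ADDR_DIFF_VEC);
      int i;
      for (i = 0; i < GET_NUM_ELEM (vec); i++)
	/* Each element is a LABEL_REF to one case target.  */
	record_target (XEXP (RTVEC_ELT (vec, i), 0));
    }
#endif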
2502
fce7e199
RH
2503/* A subroutine of computed_jump_p, return 1 if X contains a REG or MEM or
2504 constant that is not in the constant pool and not in the condition
2505 of an IF_THEN_ELSE. */
2a1777af
JL
2506
2507static int
0c20a65f 2508computed_jump_p_1 (rtx x)
2a1777af
JL
2509{
2510 enum rtx_code code = GET_CODE (x);
2511 int i, j;
6f7d635c 2512 const char *fmt;
2a1777af
JL
2513
2514 switch (code)
2515 {
2a1777af
JL
2516 case LABEL_REF:
2517 case PC:
2518 return 0;
2519
fce7e199
RH
2520 case CONST:
2521 case CONST_INT:
2522 case CONST_DOUBLE:
69ef87e2 2523 case CONST_VECTOR:
fce7e199 2524 case SYMBOL_REF:
2a1777af
JL
2525 case REG:
2526 return 1;
2527
2528 case MEM:
2529 return ! (GET_CODE (XEXP (x, 0)) == SYMBOL_REF
2530 && CONSTANT_POOL_ADDRESS_P (XEXP (x, 0)));
2531
2532 case IF_THEN_ELSE:
fce7e199
RH
2533 return (computed_jump_p_1 (XEXP (x, 1))
2534 || computed_jump_p_1 (XEXP (x, 2)));
1d300e19
KG
2535
2536 default:
2537 break;
2a1777af
JL
2538 }
2539
2540 fmt = GET_RTX_FORMAT (code);
2541 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
2542 {
2543 if (fmt[i] == 'e'
fce7e199 2544 && computed_jump_p_1 (XEXP (x, i)))
2a1777af
JL
2545 return 1;
2546
d4757e6a 2547 else if (fmt[i] == 'E')
2a1777af 2548 for (j = 0; j < XVECLEN (x, i); j++)
fce7e199 2549 if (computed_jump_p_1 (XVECEXP (x, i, j)))
2a1777af
JL
2550 return 1;
2551 }
2552
2553 return 0;
2554}
2555
2556/* Return nonzero if INSN is an indirect jump (aka computed jump).
2557
2558 Tablejumps and casesi insns are not considered indirect jumps;
4eb00163 2559 we can recognize them by a (use (label_ref)). */
2a1777af
JL
2560
2561int
0c20a65f 2562computed_jump_p (rtx insn)
2a1777af
JL
2563{
2564 int i;
4b4bf941 2565 if (JUMP_P (insn))
2a1777af
JL
2566 {
2567 rtx pat = PATTERN (insn);
2a1777af 2568
f759eb8b
AO
2569 if (find_reg_note (insn, REG_LABEL, NULL_RTX))
2570 return 0;
2571 else if (GET_CODE (pat) == PARALLEL)
2a1777af
JL
2572 {
2573 int len = XVECLEN (pat, 0);
2574 int has_use_labelref = 0;
2575
2576 for (i = len - 1; i >= 0; i--)
2577 if (GET_CODE (XVECEXP (pat, 0, i)) == USE
2578 && (GET_CODE (XEXP (XVECEXP (pat, 0, i), 0))
2579 == LABEL_REF))
2580 has_use_labelref = 1;
2581
2582 if (! has_use_labelref)
2583 for (i = len - 1; i >= 0; i--)
2584 if (GET_CODE (XVECEXP (pat, 0, i)) == SET
2585 && SET_DEST (XVECEXP (pat, 0, i)) == pc_rtx
fce7e199 2586 && computed_jump_p_1 (SET_SRC (XVECEXP (pat, 0, i))))
2a1777af
JL
2587 return 1;
2588 }
2589 else if (GET_CODE (pat) == SET
2590 && SET_DEST (pat) == pc_rtx
fce7e199 2591 && computed_jump_p_1 (SET_SRC (pat)))
2a1777af
JL
2592 return 1;
2593 }
2594 return 0;
2595}
ccc2d6d0 2596
cf94b0fc
PB
2597/* Optimized loop of for_each_rtx, trying to avoid useless recursive
2598 calls. Processes the subexpressions of EXP and passes them to F. */
2599static int
2600for_each_rtx_1 (rtx exp, int n, rtx_function f, void *data)
2601{
2602 int result, i, j;
2603 const char *format = GET_RTX_FORMAT (GET_CODE (exp));
2604 rtx *x;
2605
2606 for (; format[n] != '\0'; n++)
2607 {
2608 switch (format[n])
2609 {
2610 case 'e':
2611 /* Call F on X. */
2612 x = &XEXP (exp, n);
2613 result = (*f) (x, data);
2614 if (result == -1)
2615 /* Do not traverse sub-expressions. */
2616 continue;
2617 else if (result != 0)
2618 /* Stop the traversal. */
2619 return result;
2620
2621 if (*x == NULL_RTX)
2622 /* There are no sub-expressions. */
2623 continue;
2624
2625 i = non_rtx_starting_operands[GET_CODE (*x)];
2626 if (i >= 0)
2627 {
2628 result = for_each_rtx_1 (*x, i, f, data);
2629 if (result != 0)
2630 return result;
2631 }
2632 break;
2633
2634 case 'V':
2635 case 'E':
2636 if (XVEC (exp, n) == 0)
2637 continue;
2638 for (j = 0; j < XVECLEN (exp, n); ++j)
2639 {
2640 /* Call F on X. */
2641 x = &XVECEXP (exp, n, j);
2642 result = (*f) (x, data);
2643 if (result == -1)
2644 /* Do not traverse sub-expressions. */
2645 continue;
2646 else if (result != 0)
2647 /* Stop the traversal. */
2648 return result;
2649
2650 if (*x == NULL_RTX)
2651 /* There are no sub-expressions. */
2652 continue;
2653
2654 i = non_rtx_starting_operands[GET_CODE (*x)];
2655 if (i >= 0)
2656 {
2657 result = for_each_rtx_1 (*x, i, f, data);
2658 if (result != 0)
2659 return result;
2660 }
2661 }
2662 break;
2663
2664 default:
2665 /* Nothing to do. */
2666 break;
2667 }
2668 }
2669
2670 return 0;
2671}
2672
ccc2d6d0
MM
2673/* Traverse X via depth-first search, calling F for each
2674 sub-expression (including X itself). F is also passed the DATA.
2675 If F returns -1, do not traverse sub-expressions, but continue
2676 traversing the rest of the tree. If F ever returns any other
40f03658 2677 nonzero value, stop the traversal, and return the value returned
ccc2d6d0
MM
2678 by F. Otherwise, return 0. This function does not traverse inside
2679 tree structures that contain RTX_EXPRs, or into sub-expressions
2680 whose format code is `0' since it is not known whether or not those
2681 codes are actually RTL.
2682
2683 This routine is very general, and could (should?) be used to
2684 implement many of the other routines in this file. */
2685
ae0b51ef 2686int
0c20a65f 2687for_each_rtx (rtx *x, rtx_function f, void *data)
ccc2d6d0
MM
2688{
2689 int result;
ccc2d6d0
MM
2690 int i;
2691
2692 /* Call F on X. */
b987f237 2693 result = (*f) (x, data);
ccc2d6d0
MM
2694 if (result == -1)
2695 /* Do not traverse sub-expressions. */
2696 return 0;
2697 else if (result != 0)
2698 /* Stop the traversal. */
2699 return result;
2700
2701 if (*x == NULL_RTX)
2702 /* There are no sub-expressions. */
2703 return 0;
2704
cf94b0fc
PB
2705 i = non_rtx_starting_operands[GET_CODE (*x)];
2706 if (i < 0)
2707 return 0;
ccc2d6d0 2708
cf94b0fc 2709 return for_each_rtx_1 (*x, i, f, data);
ccc2d6d0 2710}
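/* Illustrative sketch (not part of the original file): a minimal
   for_each_rtx callback that counts MEM subexpressions.  Returning 0
   continues the walk, -1 would skip the current rtx's subexpressions,
   and any other value would abort the walk with that value.  */
#if 0
static int
count_mems_1 (rtx *x, void *data)
{
  if (*x != NULL_RTX && MEM_P (*x))
    ++*(int *) data;
  return 0;
}

static int
count_mems (rtx x)
{
  int n = 0;
  for_each_rtx (&x, count_mems_1, &n);
  return n;
}
#endif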
3ec2b590 2711
cf94b0fc 2712
777b1b71
RH
2713/* Searches X for any reference to REGNO, returning the rtx of the
2714 reference found if any. Otherwise, returns NULL_RTX. */
2715
2716rtx
0c20a65f 2717regno_use_in (unsigned int regno, rtx x)
777b1b71 2718{
b3694847 2719 const char *fmt;
777b1b71
RH
2720 int i, j;
2721 rtx tem;
2722
f8cfc6aa 2723 if (REG_P (x) && REGNO (x) == regno)
777b1b71
RH
2724 return x;
2725
2726 fmt = GET_RTX_FORMAT (GET_CODE (x));
2727 for (i = GET_RTX_LENGTH (GET_CODE (x)) - 1; i >= 0; i--)
2728 {
2729 if (fmt[i] == 'e')
2730 {
2731 if ((tem = regno_use_in (regno, XEXP (x, i))))
2732 return tem;
2733 }
2734 else if (fmt[i] == 'E')
2735 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
2736 if ((tem = regno_use_in (regno , XVECEXP (x, i, j))))
2737 return tem;
2738 }
2739
2740 return NULL_RTX;
2741}
2dfa9a87 2742
e5c56fd9
JH
2743/* Return a value indicating whether OP, an operand of a commutative
2744 operation, is preferred as the first or second operand. The higher
2745 the value, the stronger the preference for being the first operand.
2746 We use negative values to indicate a preference for the first operand
2747 and positive values for the second operand. */
2748
9b3bd424 2749int
0c20a65f 2750commutative_operand_precedence (rtx op)
e5c56fd9 2751{
e3d6e740 2752 enum rtx_code code = GET_CODE (op);
e3d6e740 2753
e5c56fd9 2754 /* Constants always become the second operand. Prefer "nice" constants. */
e3d6e740 2755 if (code == CONST_INT)
9ce79a7a 2756 return -7;
e3d6e740 2757 if (code == CONST_DOUBLE)
9ce79a7a
RS
2758 return -6;
2759 op = avoid_constant_pool_reference (op);
79b82df3 2760 code = GET_CODE (op);
ec8e098d
PB
2761
2762 switch (GET_RTX_CLASS (code))
2763 {
2764 case RTX_CONST_OBJ:
2765 if (code == CONST_INT)
2766 return -5;
2767 if (code == CONST_DOUBLE)
2768 return -4;
2769 return -3;
2770
2771 case RTX_EXTRA:
2772 /* SUBREGs of objects should come second. */
2773 if (code == SUBREG && OBJECT_P (SUBREG_REG (op)))
2774 return -2;
2775
2776 if (!CONSTANT_P (op))
2777 return 0;
2778 else
2779 /* As for RTX_CONST_OBJ. */
2780 return -3;
2781
2782 case RTX_OBJ:
2783 /* Complex expressions should come first, so decrease the priority
2784 of objects. */
2785 return -1;
2786
2787 case RTX_COMM_ARITH:
2788 /* Prefer operands that are themselves commutative to be first.
2789 This helps to make things linear. In particular,
2790 (and (and (reg) (reg)) (not (reg))) is canonical. */
2791 return 4;
2792
2793 case RTX_BIN_ARITH:
2794 /* If only one operand is a binary expression, it will be the first
2795 operand. In particular, (plus (minus (reg) (reg)) (neg (reg)))
2796 is canonical, although it will usually be further simplified. */
2797 return 2;
e3d6e740 2798
ec8e098d
PB
2799 case RTX_UNARY:
2800 /* Then prefer NEG and NOT. */
2801 if (code == NEG || code == NOT)
2802 return 1;
e5c56fd9 2803
ec8e098d
PB
2804 default:
2805 return 0;
2806 }
e5c56fd9
JH
2807}
2808
f63d1bf7 2809/* Return 1 iff it is necessary to swap the operands of a commutative
e5c56fd9
JH
2810 operation in order to canonicalize the expression. */
2811
2812int
0c20a65f 2813swap_commutative_operands_p (rtx x, rtx y)
e5c56fd9 2814{
9b3bd424
RH
2815 return (commutative_operand_precedence (x)
2816 < commutative_operand_precedence (y));
e5c56fd9 2817}
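/* Illustrative sketch (not part of the original file): the usual
   canonicalization idiom built on the predicate above, as used when
   simplifying a commutative operation with operands OP0 and OP1.  */
#if 0
  if (swap_commutative_operands_p (op0, op1))
    {
      rtx tem = op0;
      op0 = op1;
      op1 = tem;
    }
#endif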
2dfa9a87
MH
2818
2819/* Return 1 if X is an autoincrement side effect and the register is
2820 not the stack pointer. */
2821int
0c20a65f 2822auto_inc_p (rtx x)
2dfa9a87
MH
2823{
2824 switch (GET_CODE (x))
2825 {
2826 case PRE_INC:
2827 case POST_INC:
2828 case PRE_DEC:
2829 case POST_DEC:
2830 case PRE_MODIFY:
2831 case POST_MODIFY:
2832 /* There are no REG_INC notes for SP. */
2833 if (XEXP (x, 0) != stack_pointer_rtx)
2834 return 1;
2835 default:
2836 break;
2837 }
2838 return 0;
2839}
3b10cf4b 2840
f9da5064 2841/* Return nonzero if IN contains a piece of rtl that has the address LOC. */
db7ba742 2842int
0c20a65f 2843loc_mentioned_in_p (rtx *loc, rtx in)
db7ba742 2844{
a52b023a
PB
2845 enum rtx_code code;
2846 const char *fmt;
db7ba742
R
2847 int i, j;
2848
a52b023a
PB
2849 if (!in)
2850 return 0;
2851
2852 code = GET_CODE (in);
2853 fmt = GET_RTX_FORMAT (code);
db7ba742
R
2854 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
2855 {
9ce88f5e 2856 if (loc == &in->u.fld[i].rt_rtx)
db7ba742
R
2857 return 1;
2858 if (fmt[i] == 'e')
2859 {
2860 if (loc_mentioned_in_p (loc, XEXP (in, i)))
2861 return 1;
2862 }
2863 else if (fmt[i] == 'E')
2864 for (j = XVECLEN (in, i) - 1; j >= 0; j--)
2865 if (loc_mentioned_in_p (loc, XVECEXP (in, i, j)))
2866 return 1;
2867 }
2868 return 0;
2869}
ddef6bc7 2870
bb51e270
RS
2871/* Helper function for subreg_lsb. Given a subreg's OUTER_MODE, INNER_MODE,
2872 and SUBREG_BYTE, return the bit offset where the subreg begins
2873 (counting from the least significant bit of the operand). */
33aceff2
JW
2874
2875unsigned int
bb51e270
RS
2876subreg_lsb_1 (enum machine_mode outer_mode,
2877 enum machine_mode inner_mode,
2878 unsigned int subreg_byte)
33aceff2 2879{
33aceff2
JW
2880 unsigned int bitpos;
2881 unsigned int byte;
2882 unsigned int word;
2883
2884 /* A paradoxical subreg begins at bit position 0. */
bb51e270 2885 if (GET_MODE_BITSIZE (outer_mode) > GET_MODE_BITSIZE (inner_mode))
33aceff2
JW
2886 return 0;
2887
2888 if (WORDS_BIG_ENDIAN != BYTES_BIG_ENDIAN)
2889 /* If the subreg crosses a word boundary ensure that
2890 it also begins and ends on a word boundary. */
41374e13
NS
2891 gcc_assert (!((subreg_byte % UNITS_PER_WORD
2892 + GET_MODE_SIZE (outer_mode)) > UNITS_PER_WORD
2893 && (subreg_byte % UNITS_PER_WORD
2894 || GET_MODE_SIZE (outer_mode) % UNITS_PER_WORD)));
33aceff2
JW
2895
2896 if (WORDS_BIG_ENDIAN)
2897 word = (GET_MODE_SIZE (inner_mode)
bb51e270 2898 - (subreg_byte + GET_MODE_SIZE (outer_mode))) / UNITS_PER_WORD;
33aceff2 2899 else
bb51e270 2900 word = subreg_byte / UNITS_PER_WORD;
33aceff2
JW
2901 bitpos = word * BITS_PER_WORD;
2902
2903 if (BYTES_BIG_ENDIAN)
2904 byte = (GET_MODE_SIZE (inner_mode)
bb51e270 2905 - (subreg_byte + GET_MODE_SIZE (outer_mode))) % UNITS_PER_WORD;
33aceff2 2906 else
bb51e270 2907 byte = subreg_byte % UNITS_PER_WORD;
33aceff2
JW
2908 bitpos += byte * BITS_PER_UNIT;
2909
2910 return bitpos;
2911}
2912
bb51e270
RS
2913/* Given a subreg X, return the bit offset where the subreg begins
2914 (counting from the least significant bit of the reg). */
2915
2916unsigned int
2917subreg_lsb (rtx x)
2918{
2919 return subreg_lsb_1 (GET_MODE (x), GET_MODE (SUBREG_REG (x)),
2920 SUBREG_BYTE (x));
2921}
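/* Worked example (not part of the original file): on a little-endian
   target (BYTES_BIG_ENDIAN == WORDS_BIG_ENDIAN == 0),
   subreg_lsb_1 (SImode, DImode, 4) == 32 whether words are 4 or 8 bytes:
   with 4-byte words, word == 1 and byte == 0; with 8-byte words,
   word == 0 and byte == 4; either way bitpos == 32, the high half of
   the DImode value.  A paradoxical subreg always yields bit 0.  */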
2922
ddef6bc7
JJ
2923/* This function returns the regno offset of a subreg expression.
2924 xregno - A regno of an inner hard subreg_reg (or what will become one).
2925 xmode - The mode of xregno.
2926 offset - The byte offset.
2927 ymode - The mode of a top level SUBREG (or what may become one).
5809eb5f 2928 RETURN - The regno offset which would be used. */
ddef6bc7 2929unsigned int
0c20a65f
AJ
2930subreg_regno_offset (unsigned int xregno, enum machine_mode xmode,
2931 unsigned int offset, enum machine_mode ymode)
ddef6bc7 2932{
8521c414 2933 int nregs_xmode, nregs_ymode;
ddef6bc7
JJ
2934 int mode_multiple, nregs_multiple;
2935 int y_offset;
2936
41374e13 2937 gcc_assert (xregno < FIRST_PSEUDO_REGISTER);
ddef6bc7 2938
dd79bb7e 2939 /* Adjust nregs_xmode to allow for 'holes'. */
8521c414
JM
2940 if (HARD_REGNO_NREGS_HAS_PADDING (xregno, xmode))
2941 nregs_xmode = HARD_REGNO_NREGS_WITH_PADDING (xregno, xmode);
dd79bb7e
GK
2942 else
2943 nregs_xmode = hard_regno_nregs[xregno][xmode];
2944
66fd46b6 2945 nregs_ymode = hard_regno_nregs[xregno][ymode];
eab2120d
R
2946
2947 /* If this is a big endian paradoxical subreg, which uses more actual
2948 hard registers than the original register, we must return a negative
2949 offset so that we find the proper highpart of the register. */
2950 if (offset == 0
2951 && nregs_ymode > nregs_xmode
2952 && (GET_MODE_SIZE (ymode) > UNITS_PER_WORD
2953 ? WORDS_BIG_ENDIAN : BYTES_BIG_ENDIAN))
2954 return nregs_xmode - nregs_ymode;
2955
ddef6bc7
JJ
2956 if (offset == 0 || nregs_xmode == nregs_ymode)
2957 return 0;
a6a2274a 2958
dd79bb7e 2959 /* Size of ymode must not be greater than the size of xmode. */
ddef6bc7 2960 mode_multiple = GET_MODE_SIZE (xmode) / GET_MODE_SIZE (ymode);
41374e13 2961 gcc_assert (mode_multiple != 0);
ddef6bc7
JJ
2962
2963 y_offset = offset / GET_MODE_SIZE (ymode);
2964 nregs_multiple = nregs_xmode / nregs_ymode;
5809eb5f 2965 return (y_offset / (mode_multiple / nregs_multiple)) * nregs_ymode;
ddef6bc7
JJ
2966}
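/* Worked example (not part of the original file): on a 32-bit
   little-endian target where DImode occupies two SImode hard registers,
   subreg_regno_offset (0, DImode, 4, SImode) == 1: nregs_xmode == 2,
   nregs_ymode == 1, mode_multiple == 2, nregs_multiple == 2 and
   y_offset == 1, so the subreg names the second of the two registers.  */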
2967
04c5580f
JH
2968/* This function returns true when the offset is representable via
2969 subreg_offset in the given regno.
2970 xregno - A regno of an inner hard subreg_reg (or what will become one).
2971 xmode - The mode of xregno.
2972 offset - The byte offset.
2973 ymode - The mode of a top level SUBREG (or what may become one).
dd79bb7e 2974 RETURN - Whether the offset is representable. */
04c5580f 2975bool
0c20a65f
AJ
2976subreg_offset_representable_p (unsigned int xregno, enum machine_mode xmode,
2977 unsigned int offset, enum machine_mode ymode)
04c5580f 2978{
8521c414 2979 int nregs_xmode, nregs_ymode;
04c5580f
JH
2980 int mode_multiple, nregs_multiple;
2981 int y_offset;
8521c414 2982 int regsize_xmode, regsize_ymode;
04c5580f 2983
41374e13 2984 gcc_assert (xregno < FIRST_PSEUDO_REGISTER);
04c5580f 2985
dd79bb7e
GK
2986 /* If there are holes in a non-scalar mode in registers, we expect
2987 that it is made up of its units concatenated together. */
8521c414 2988 if (HARD_REGNO_NREGS_HAS_PADDING (xregno, xmode))
dd79bb7e 2989 {
8521c414
JM
2990 enum machine_mode xmode_unit;
2991
2992 nregs_xmode = HARD_REGNO_NREGS_WITH_PADDING (xregno, xmode);
2993 if (GET_MODE_INNER (xmode) == VOIDmode)
2994 xmode_unit = xmode;
2995 else
2996 xmode_unit = GET_MODE_INNER (xmode);
2997 gcc_assert (HARD_REGNO_NREGS_HAS_PADDING (xregno, xmode_unit));
2998 gcc_assert (nregs_xmode
2999 == (GET_MODE_NUNITS (xmode)
3000 * HARD_REGNO_NREGS_WITH_PADDING (xregno, xmode_unit)));
3001 gcc_assert (hard_regno_nregs[xregno][xmode]
3002 == (hard_regno_nregs[xregno][xmode_unit]
3003 * GET_MODE_NUNITS (xmode)));
dd79bb7e
GK
3004
3005 /* You can only ask for a SUBREG of a value with holes in the middle
3006 if you don't cross the holes. (Such a SUBREG should be done by
3007 picking a different register class, or doing it in memory if
3008 necessary.) An example of a value with holes is XCmode on 32-bit
3009 x86 with -m128bit-long-double; it's represented in 6 32-bit registers,
3010 3 for each part, but in memory it's two 128-bit parts.
3011 Padding is assumed to be at the end (not necessarily the 'high part')
3012 of each unit. */
8521c414
JM
3013 if ((offset / GET_MODE_SIZE (xmode_unit) + 1
3014 < GET_MODE_NUNITS (xmode))
3015 && (offset / GET_MODE_SIZE (xmode_unit)
dd79bb7e 3016 != ((offset + GET_MODE_SIZE (ymode) - 1)
8521c414 3017 / GET_MODE_SIZE (xmode_unit))))
dd79bb7e 3018 return false;
dd79bb7e
GK
3019 }
3020 else
3021 nregs_xmode = hard_regno_nregs[xregno][xmode];
3022
66fd46b6 3023 nregs_ymode = hard_regno_nregs[xregno][ymode];
04c5580f 3024
dd79bb7e 3025 /* Paradoxical subregs are otherwise valid. */
04c5580f
JH
3026 if (offset == 0
3027 && nregs_ymode > nregs_xmode
3028 && (GET_MODE_SIZE (ymode) > UNITS_PER_WORD
3029 ? WORDS_BIG_ENDIAN : BYTES_BIG_ENDIAN))
3030 return true;
3031
8521c414
JM
3032 /* If registers store different numbers of bits in the different
3033 modes, we cannot generally form this subreg. */
3034 regsize_xmode = GET_MODE_SIZE (xmode) / nregs_xmode;
3035 regsize_ymode = GET_MODE_SIZE (ymode) / nregs_ymode;
3036 if (regsize_xmode > regsize_ymode && nregs_ymode > 1)
3037 return false;
3038 if (regsize_ymode > regsize_xmode && nregs_xmode > 1)
3039 return false;
3040
dd79bb7e 3041 /* Lowpart subregs are otherwise valid. */
04c5580f
JH
3042 if (offset == subreg_lowpart_offset (ymode, xmode))
3043 return true;
3044
dd79bb7e
GK
3045 /* This should always pass, otherwise we don't know how to verify
3046 the constraint. These conditions may be relaxed but
3047 subreg_regno_offset would need to be redesigned. */
41374e13 3048 gcc_assert ((GET_MODE_SIZE (xmode) % GET_MODE_SIZE (ymode)) == 0);
41374e13 3049 gcc_assert ((nregs_xmode % nregs_ymode) == 0);
04c5580f 3050
b20b352b 3051 /* The XMODE value can be seen as a vector of NREGS_XMODE
dcc24678 3052 values. The subreg must represent a lowpart of given field.
04c5580f 3053 Compute what field it is. */
0c20a65f
AJ
3054 offset -= subreg_lowpart_offset (ymode,
3055 mode_for_size (GET_MODE_BITSIZE (xmode)
3056 / nregs_xmode,
07015444 3057 MODE_INT, 0));
04c5580f 3058
dd79bb7e 3059 /* Size of ymode must not be greater than the size of xmode. */
04c5580f 3060 mode_multiple = GET_MODE_SIZE (xmode) / GET_MODE_SIZE (ymode);
41374e13 3061 gcc_assert (mode_multiple != 0);
04c5580f
JH
3062
3063 y_offset = offset / GET_MODE_SIZE (ymode);
3064 nregs_multiple = nregs_xmode / nregs_ymode;
41374e13
NS
3065
3066 gcc_assert ((offset % GET_MODE_SIZE (ymode)) == 0);
3067 gcc_assert ((mode_multiple % nregs_multiple) == 0);
3068
04c5580f
JH
3069 return (!(y_offset % (mode_multiple / nregs_multiple)));
3070}
3071
dc297297 3072/* Return the final regno that a subreg expression refers to. */
a6a2274a 3073unsigned int
0c20a65f 3074subreg_regno (rtx x)
ddef6bc7
JJ
3075{
3076 unsigned int ret;
3077 rtx subreg = SUBREG_REG (x);
3078 int regno = REGNO (subreg);
3079
a6a2274a
KH
3080 ret = regno + subreg_regno_offset (regno,
3081 GET_MODE (subreg),
ddef6bc7
JJ
3082 SUBREG_BYTE (x),
3083 GET_MODE (x));
3084 return ret;
3085
3086}
833366d6
JH
3087struct parms_set_data
3088{
3089 int nregs;
3090 HARD_REG_SET regs;
3091};
3092
3093/* Helper function for noticing stores to parameter registers. */
3094static void
0c20a65f 3095parms_set (rtx x, rtx pat ATTRIBUTE_UNUSED, void *data)
833366d6
JH
3096{
3097 struct parms_set_data *d = data;
3098 if (REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER
3099 && TEST_HARD_REG_BIT (d->regs, REGNO (x)))
3100 {
3101 CLEAR_HARD_REG_BIT (d->regs, REGNO (x));
3102 d->nregs--;
3103 }
3104}
3105
a6a2274a 3106/* Look backward for first parameter to be loaded.
b2df20b4
DJ
3107 Note that loads of all parameters will not necessarily be
3108 found if CSE has eliminated some of them (e.g., an argument
3109 to the outer function is passed down as a parameter).
833366d6
JH
3110 Do not skip BOUNDARY. */
3111rtx
0c20a65f 3112find_first_parameter_load (rtx call_insn, rtx boundary)
833366d6
JH
3113{
3114 struct parms_set_data parm;
b2df20b4 3115 rtx p, before, first_set;
833366d6
JH
3116
3117 /* Since different machines initialize their parameter registers
3118 in different orders, assume nothing. Collect the set of all
3119 parameter registers. */
3120 CLEAR_HARD_REG_SET (parm.regs);
3121 parm.nregs = 0;
3122 for (p = CALL_INSN_FUNCTION_USAGE (call_insn); p; p = XEXP (p, 1))
3123 if (GET_CODE (XEXP (p, 0)) == USE
f8cfc6aa 3124 && REG_P (XEXP (XEXP (p, 0), 0)))
833366d6 3125 {
41374e13 3126 gcc_assert (REGNO (XEXP (XEXP (p, 0), 0)) < FIRST_PSEUDO_REGISTER);
833366d6
JH
3127
3128 /* We only care about registers which can hold function
3129 arguments. */
3130 if (!FUNCTION_ARG_REGNO_P (REGNO (XEXP (XEXP (p, 0), 0))))
3131 continue;
3132
3133 SET_HARD_REG_BIT (parm.regs, REGNO (XEXP (XEXP (p, 0), 0)));
3134 parm.nregs++;
3135 }
3136 before = call_insn;
b2df20b4 3137 first_set = call_insn;
833366d6
JH
3138
3139 /* Search backward for the first set of a register in this set. */
3140 while (parm.nregs && before != boundary)
3141 {
3142 before = PREV_INSN (before);
3143
3144 /* It is possible that some loads got CSEed from one call to
3145 another. Stop in that case. */
4b4bf941 3146 if (CALL_P (before))
833366d6
JH
3147 break;
3148
dbc1a163 3149 /* Our caller must either ensure that we will find all sets
833366d6 3150 (in case the code has not been optimized yet), or guard
eaec9b3d 3151 against possible labels by setting BOUNDARY to the preceding
833366d6 3152 CODE_LABEL. */
4b4bf941 3153 if (LABEL_P (before))
dbc1a163 3154 {
41374e13 3155 gcc_assert (before == boundary);
dbc1a163
RH
3156 break;
3157 }
833366d6 3158
0d025d43 3159 if (INSN_P (before))
b2df20b4
DJ
3160 {
3161 int nregs_old = parm.nregs;
3162 note_stores (PATTERN (before), parms_set, &parm);
3163 /* If we found something that did not set a parameter reg,
3164 we're done. Do not keep going, as that might result
3165 in hoisting an insn before the setting of a pseudo
3166 that is used by the hoisted insn. */
3167 if (nregs_old != parm.nregs)
3168 first_set = before;
3169 else
3170 break;
3171 }
833366d6 3172 }
b2df20b4 3173 return first_set;
833366d6 3174}
3dec4024 3175
14b493d6 3176/* Return true if we should avoid inserting code between INSN and the preceding
3dec4024
JH
3177 call instruction. */
3178
3179bool
0c20a65f 3180keep_with_call_p (rtx insn)
3dec4024
JH
3181{
3182 rtx set;
3183
3184 if (INSN_P (insn) && (set = single_set (insn)) != NULL)
3185 {
f8cfc6aa 3186 if (REG_P (SET_DEST (set))
5df533b3 3187 && REGNO (SET_DEST (set)) < FIRST_PSEUDO_REGISTER
3dec4024
JH
3188 && fixed_regs[REGNO (SET_DEST (set))]
3189 && general_operand (SET_SRC (set), VOIDmode))
3190 return true;
f8cfc6aa 3191 if (REG_P (SET_SRC (set))
3dec4024 3192 && FUNCTION_VALUE_REGNO_P (REGNO (SET_SRC (set)))
f8cfc6aa 3193 && REG_P (SET_DEST (set))
3dec4024
JH
3194 && REGNO (SET_DEST (set)) >= FIRST_PSEUDO_REGISTER)
3195 return true;
bc204393
RH
3196 /* There may be a stack pop just after the call and before the store
3197 of the return register. Search for the actual store when deciding
3198 if we can break or not. */
3dec4024
JH
3199 if (SET_DEST (set) == stack_pointer_rtx)
3200 {
3201 rtx i2 = next_nonnote_insn (insn);
bc204393 3202 if (i2 && keep_with_call_p (i2))
3dec4024
JH
3203 return true;
3204 }
3205 }
3206 return false;
3207}
71d2c5bd 3208
432f982f
JH
3209/* Return true if LABEL is a target of JUMP_INSN. This applies only
3210 to non-complex jumps. That is, direct unconditional, conditional,
3211 and tablejumps, but not computed jumps or returns. It also does
3212 not apply to the fallthru case of a conditional jump. */
3213
3214bool
3215label_is_jump_target_p (rtx label, rtx jump_insn)
3216{
3217 rtx tmp = JUMP_LABEL (jump_insn);
3218
3219 if (label == tmp)
3220 return true;
3221
3222 if (tablejump_p (jump_insn, NULL, &tmp))
3223 {
3224 rtvec vec = XVEC (PATTERN (tmp),
3225 GET_CODE (PATTERN (tmp)) == ADDR_DIFF_VEC);
3226 int i, veclen = GET_NUM_ELEM (vec);
3227
3228 for (i = 0; i < veclen; ++i)
3229 if (XEXP (RTVEC_ELT (vec, i), 0) == label)
3230 return true;
3231 }
3232
3233 return false;
3234}
3235
f894b69b
PB
3236\f
3237/* Return an estimate of the cost of computing rtx X.
3238 One use is in cse, to decide which expression to keep in the hash table.
3239 Another is in rtl generation, to pick the cheapest way to multiply.
3240 Other uses like the latter are expected in the future. */
3241
3242int
3243rtx_cost (rtx x, enum rtx_code outer_code ATTRIBUTE_UNUSED)
3244{
3245 int i, j;
3246 enum rtx_code code;
3247 const char *fmt;
3248 int total;
3249
3250 if (x == 0)
3251 return 0;
3252
3253 /* Compute the default costs of certain things.
3254 Note that targetm.rtx_costs can override the defaults. */
3255
3256 code = GET_CODE (x);
3257 switch (code)
3258 {
3259 case MULT:
3260 total = COSTS_N_INSNS (5);
3261 break;
3262 case DIV:
3263 case UDIV:
3264 case MOD:
3265 case UMOD:
3266 total = COSTS_N_INSNS (7);
3267 break;
3268 case USE:
db3edc20 3269 /* Used in combine.c as a marker. */
f894b69b
PB
3270 total = 0;
3271 break;
3272 default:
3273 total = COSTS_N_INSNS (1);
3274 }
3275
3276 switch (code)
3277 {
3278 case REG:
3279 return 0;
3280
3281 case SUBREG:
edb81165 3282 total = 0;
f894b69b
PB
3283 /* If we can't tie these modes, make this expensive. The larger
3284 the mode, the more expensive it is. */
3285 if (! MODES_TIEABLE_P (GET_MODE (x), GET_MODE (SUBREG_REG (x))))
3286 return COSTS_N_INSNS (2
3287 + GET_MODE_SIZE (GET_MODE (x)) / UNITS_PER_WORD);
3288 break;
3289
3290 default:
5fd9b178 3291 if (targetm.rtx_costs (x, code, outer_code, &total))
f894b69b
PB
3292 return total;
3293 break;
3294 }
3295
3296 /* Sum the costs of the sub-rtx's, plus cost of this operation,
3297 which is already in total. */
3298
3299 fmt = GET_RTX_FORMAT (code);
3300 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
3301 if (fmt[i] == 'e')
3302 total += rtx_cost (XEXP (x, i), code);
3303 else if (fmt[i] == 'E')
3304 for (j = 0; j < XVECLEN (x, i); j++)
3305 total += rtx_cost (XVECEXP (x, i, j), code);
3306
3307 return total;
3308}
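/* Illustrative sketch (not part of the original file): comparing a
   multiply against an equivalent shift-add sequence, as expansion code
   might.  Absent a target override, the MULT defaults to
   COSTS_N_INSNS (5) while each PLUS or ASHIFT costs COSTS_N_INSNS (1).
   mult_expr_rtx, shift_add_rtx and target are hypothetical.  */
#if 0
  if (rtx_cost (shift_add_rtx, SET) < rtx_cost (mult_expr_rtx, SET))
    /* Prefer the cheaper shift-add form.  */
    emit_insn (gen_rtx_SET (VOIDmode, target, shift_add_rtx));
#endif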
3309\f
3310/* Return cost of address expression X.
3311 Expect that X is properly formed address reference. */
3312
3313int
3314address_cost (rtx x, enum machine_mode mode)
3315{
f894b69b
PB
3316 /* We may be asked for cost of various unusual addresses, such as operands
3317 of push instruction. It is not worthwhile to complicate writing
3318 of the target hook by such cases. */
3319
3320 if (!memory_address_p (mode, x))
3321 return 1000;
3322
5fd9b178 3323 return targetm.address_cost (x);
f894b69b
PB
3324}
3325
3326/* If the target doesn't override, compute the cost as with arithmetic. */
3327
3328int
3329default_address_cost (rtx x)
3330{
3331 return rtx_cost (x, MEM);
3332}
2f93eea8
PB
3333\f
3334
3335unsigned HOST_WIDE_INT
3336nonzero_bits (rtx x, enum machine_mode mode)
3337{
3338 return cached_nonzero_bits (x, mode, NULL_RTX, VOIDmode, 0);
3339}
3340
3341unsigned int
3342num_sign_bit_copies (rtx x, enum machine_mode mode)
3343{
3344 return cached_num_sign_bit_copies (x, mode, NULL_RTX, VOIDmode, 0);
3345}
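/* Worked examples (not part of the original file): for
   (and:SI (reg:SI r) (const_int 255)) the AND case below intersects the
   masks of both operands, so nonzero_bits reports at most 0xff; the same
   holds for (zero_extend:SI (mem:QI addr)) via the ZERO_EXTEND case.
   For a plain (reg:SI r) with no recorded information the result is
   simply GET_MODE_MASK (SImode).  */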
3346
3347/* The function cached_nonzero_bits is a wrapper around nonzero_bits1.
3348 It avoids exponential behavior in nonzero_bits1 when X has
3349 identical subexpressions on the first or the second level. */
3350
3351static unsigned HOST_WIDE_INT
3352cached_nonzero_bits (rtx x, enum machine_mode mode, rtx known_x,
3353 enum machine_mode known_mode,
3354 unsigned HOST_WIDE_INT known_ret)
3355{
3356 if (x == known_x && mode == known_mode)
3357 return known_ret;
3358
3359 /* Try to find identical subexpressions. If found call
3360 nonzero_bits1 on X with the subexpressions as KNOWN_X and the
3361 precomputed value for the subexpression as KNOWN_RET. */
3362
3363 if (ARITHMETIC_P (x))
3364 {
3365 rtx x0 = XEXP (x, 0);
3366 rtx x1 = XEXP (x, 1);
3367
3368 /* Check the first level. */
3369 if (x0 == x1)
3370 return nonzero_bits1 (x, mode, x0, mode,
3371 cached_nonzero_bits (x0, mode, known_x,
3372 known_mode, known_ret));
3373
3374 /* Check the second level. */
3375 if (ARITHMETIC_P (x0)
3376 && (x1 == XEXP (x0, 0) || x1 == XEXP (x0, 1)))
3377 return nonzero_bits1 (x, mode, x1, mode,
3378 cached_nonzero_bits (x1, mode, known_x,
3379 known_mode, known_ret));
3380
3381 if (ARITHMETIC_P (x1)
3382 && (x0 == XEXP (x1, 0) || x0 == XEXP (x1, 1)))
3383 return nonzero_bits1 (x, mode, x0, mode,
3384 cached_nonzero_bits (x0, mode, known_x,
3385 known_mode, known_ret));
3386 }
3387
3388 return nonzero_bits1 (x, mode, known_x, known_mode, known_ret);
3389}
3390
3391/* We let num_sign_bit_copies recur into nonzero_bits as that is useful.
3392 We don't let nonzero_bits recur into num_sign_bit_copies, because that
3393 is less useful. We can't allow both, because that results in exponential
3394 run time recursion. There is a nullstone testcase that triggered
3395 this. This macro avoids accidental uses of num_sign_bit_copies. */
3396#define cached_num_sign_bit_copies sorry_i_am_preventing_exponential_behavior
3397
3398/* Given an expression, X, compute which bits in X can be nonzero.
3399 We don't care about bits outside of those defined in MODE.
3400
3401 For most X this is simply GET_MODE_MASK (GET_MODE (MODE)), but if X is
3402 an arithmetic operation, we can do better. */
3403
3404static unsigned HOST_WIDE_INT
3405nonzero_bits1 (rtx x, enum machine_mode mode, rtx known_x,
3406 enum machine_mode known_mode,
3407 unsigned HOST_WIDE_INT known_ret)
3408{
3409 unsigned HOST_WIDE_INT nonzero = GET_MODE_MASK (mode);
3410 unsigned HOST_WIDE_INT inner_nz;
3411 enum rtx_code code;
3412 unsigned int mode_width = GET_MODE_BITSIZE (mode);
3413
3414 /* For floating-point values, assume all bits are needed. */
3415 if (FLOAT_MODE_P (GET_MODE (x)) || FLOAT_MODE_P (mode))
3416 return nonzero;
3417
3418 /* If X is wider than MODE, use its mode instead. */
3419 if (GET_MODE_BITSIZE (GET_MODE (x)) > mode_width)
3420 {
3421 mode = GET_MODE (x);
3422 nonzero = GET_MODE_MASK (mode);
3423 mode_width = GET_MODE_BITSIZE (mode);
3424 }
3425
3426 if (mode_width > HOST_BITS_PER_WIDE_INT)
3427 /* Our only callers in this case look for single bit values. So
3428 just return the mode mask. Those tests will then be false. */
3429 return nonzero;
3430
3431#ifndef WORD_REGISTER_OPERATIONS
3432 /* If MODE is wider than X, but both are a single word for both the host
3433 and target machines, we can compute this from which bits of the
3434 object might be nonzero in its own mode, taking into account the fact
3435 that on many CISC machines, accessing an object in a wider mode
3436 causes the high-order bits to become undefined. So they are
3437 not known to be zero. */
3438
3439 if (GET_MODE (x) != VOIDmode && GET_MODE (x) != mode
3440 && GET_MODE_BITSIZE (GET_MODE (x)) <= BITS_PER_WORD
3441 && GET_MODE_BITSIZE (GET_MODE (x)) <= HOST_BITS_PER_WIDE_INT
3442 && GET_MODE_BITSIZE (mode) > GET_MODE_BITSIZE (GET_MODE (x)))
3443 {
3444 nonzero &= cached_nonzero_bits (x, GET_MODE (x),
3445 known_x, known_mode, known_ret);
3446 nonzero |= GET_MODE_MASK (mode) & ~GET_MODE_MASK (GET_MODE (x));
3447 return nonzero;
3448 }
3449#endif
3450
3451 code = GET_CODE (x);
3452 switch (code)
3453 {
3454 case REG:
3455#if defined(POINTERS_EXTEND_UNSIGNED) && !defined(HAVE_ptr_extend)
3456 /* If pointers extend unsigned and this is a pointer in Pmode, say that
3457 all the bits above ptr_mode are known to be zero. */
3458 if (POINTERS_EXTEND_UNSIGNED && GET_MODE (x) == Pmode
3459 && REG_POINTER (x))
3460 nonzero &= GET_MODE_MASK (ptr_mode);
3461#endif
3462
3463 /* Include declared information about alignment of pointers. */
3464 /* ??? We don't properly preserve REG_POINTER changes across
3465 pointer-to-integer casts, so we can't trust it except for
3466 things that we know must be pointers. See execute/960116-1.c. */
3467 if ((x == stack_pointer_rtx
3468 || x == frame_pointer_rtx
3469 || x == arg_pointer_rtx)
3470 && REGNO_POINTER_ALIGN (REGNO (x)))
3471 {
3472 unsigned HOST_WIDE_INT alignment
3473 = REGNO_POINTER_ALIGN (REGNO (x)) / BITS_PER_UNIT;
3474
3475#ifdef PUSH_ROUNDING
3476 /* If PUSH_ROUNDING is defined, it is possible for the
3477 stack to be momentarily aligned only to that amount,
3478 so we pick the least alignment. */
3479 if (x == stack_pointer_rtx && PUSH_ARGS)
3480 alignment = MIN ((unsigned HOST_WIDE_INT) PUSH_ROUNDING (1),
3481 alignment);
3482#endif
3483
3484 nonzero &= ~(alignment - 1);
3485 }
3486
3487 {
3488 unsigned HOST_WIDE_INT nonzero_for_hook = nonzero;
3489 rtx new = rtl_hooks.reg_nonzero_bits (x, mode, known_x,
3490 known_mode, known_ret,
3491 &nonzero_for_hook);
3492
3493 if (new)
3494 nonzero_for_hook &= cached_nonzero_bits (new, mode, known_x,
3495 known_mode, known_ret);
3496
3497 return nonzero_for_hook;
3498 }
3499
3500 case CONST_INT:
3501#ifdef SHORT_IMMEDIATES_SIGN_EXTEND
3502 /* If X is negative in MODE, sign-extend the value. */
3503 if (INTVAL (x) > 0 && mode_width < BITS_PER_WORD
3504 && 0 != (INTVAL (x) & ((HOST_WIDE_INT) 1 << (mode_width - 1))))
3505 return (INTVAL (x) | ((HOST_WIDE_INT) (-1) << mode_width));
3506#endif
3507
3508 return INTVAL (x);
3509
3510 case MEM:
3511#ifdef LOAD_EXTEND_OP
3512 /* In many, if not most, RISC machines, reading a byte from memory
3513 zeros the rest of the register. Noticing that fact saves a lot
3514 of extra zero-extends. */
3515 if (LOAD_EXTEND_OP (GET_MODE (x)) == ZERO_EXTEND)
3516 nonzero &= GET_MODE_MASK (GET_MODE (x));
3517#endif
3518 break;
3519
3520 case EQ: case NE:
3521 case UNEQ: case LTGT:
3522 case GT: case GTU: case UNGT:
3523 case LT: case LTU: case UNLT:
3524 case GE: case GEU: case UNGE:
3525 case LE: case LEU: case UNLE:
3526 case UNORDERED: case ORDERED:
2f93eea8
PB
3527 /* If this produces an integer result, we know which bits are set.
3528 Code here used to clear bits outside the mode of X, but that is
3529 now done above. */
505ac507
RH
3530 /* Mind that MODE is the mode the caller wants to look at this
3531 operation in, and not the actual operation mode. We can wind
3532 up with (subreg:DI (gt:V4HI x y)), and we don't have anything
3533 that describes the results of a vector compare. */
3534 if (GET_MODE_CLASS (GET_MODE (x)) == MODE_INT
2f93eea8
PB
3535 && mode_width <= HOST_BITS_PER_WIDE_INT)
3536 nonzero = STORE_FLAG_VALUE;
3537 break;
3538
3539 case NEG:
3540#if 0
3541 /* Disabled to avoid exponential mutual recursion between nonzero_bits
3542 and num_sign_bit_copies. */
3543 if (num_sign_bit_copies (XEXP (x, 0), GET_MODE (x))
3544 == GET_MODE_BITSIZE (GET_MODE (x)))
3545 nonzero = 1;
3546#endif
3547
3548 if (GET_MODE_SIZE (GET_MODE (x)) < mode_width)
3549 nonzero |= (GET_MODE_MASK (mode) & ~GET_MODE_MASK (GET_MODE (x)));
3550 break;
3551
3552 case ABS:
3553#if 0
3554 /* Disabled to avoid exponential mutual recursion between nonzero_bits
3555 and num_sign_bit_copies. */
3556 if (num_sign_bit_copies (XEXP (x, 0), GET_MODE (x))
3557 == GET_MODE_BITSIZE (GET_MODE (x)))
3558 nonzero = 1;
3559#endif
3560 break;
3561
3562 case TRUNCATE:
3563 nonzero &= (cached_nonzero_bits (XEXP (x, 0), mode,
3564 known_x, known_mode, known_ret)
3565 & GET_MODE_MASK (mode));
3566 break;
3567
3568 case ZERO_EXTEND:
3569 nonzero &= cached_nonzero_bits (XEXP (x, 0), mode,
3570 known_x, known_mode, known_ret);
3571 if (GET_MODE (XEXP (x, 0)) != VOIDmode)
3572 nonzero &= GET_MODE_MASK (GET_MODE (XEXP (x, 0)));
3573 break;
3574
3575 case SIGN_EXTEND:
3576 /* If the sign bit is known clear, this is the same as ZERO_EXTEND.
3577 Otherwise, show all the bits in the outer mode but not the inner
3578 may be nonzero. */
3579 inner_nz = cached_nonzero_bits (XEXP (x, 0), mode,
3580 known_x, known_mode, known_ret);
3581 if (GET_MODE (XEXP (x, 0)) != VOIDmode)
3582 {
3583 inner_nz &= GET_MODE_MASK (GET_MODE (XEXP (x, 0)));
3584 if (inner_nz
3585 & (((HOST_WIDE_INT) 1
3586 << (GET_MODE_BITSIZE (GET_MODE (XEXP (x, 0))) - 1))))
3587 inner_nz |= (GET_MODE_MASK (mode)
3588 & ~GET_MODE_MASK (GET_MODE (XEXP (x, 0))));
3589 }
3590
3591 nonzero &= inner_nz;
3592 break;
3593
3594 case AND:
3595 nonzero &= cached_nonzero_bits (XEXP (x, 0), mode,
3596 known_x, known_mode, known_ret)
3597 & cached_nonzero_bits (XEXP (x, 1), mode,
3598 known_x, known_mode, known_ret);
3599 break;
3600
3601 case XOR: case IOR:
3602 case UMIN: case UMAX: case SMIN: case SMAX:
3603 {
3604 unsigned HOST_WIDE_INT nonzero0 =
3605 cached_nonzero_bits (XEXP (x, 0), mode,
3606 known_x, known_mode, known_ret);
3607
3608 /* Don't call nonzero_bits for the second time if it cannot change
3609 anything. */
3610 if ((nonzero & nonzero0) != nonzero)
3611 nonzero &= nonzero0
3612 | cached_nonzero_bits (XEXP (x, 1), mode,
3613 known_x, known_mode, known_ret);
3614 }
3615 break;
3616
3617 case PLUS: case MINUS:
3618 case MULT:
3619 case DIV: case UDIV:
3620 case MOD: case UMOD:
3621 /* We can apply the rules of arithmetic to compute the number of
3622 high- and low-order zero bits of these operations. We start by
3623 computing the width (position of the highest-order nonzero bit)
3624 and the number of low-order zero bits for each value. */
3625 {
3626 unsigned HOST_WIDE_INT nz0 =
3627 cached_nonzero_bits (XEXP (x, 0), mode,
3628 known_x, known_mode, known_ret);
3629 unsigned HOST_WIDE_INT nz1 =
3630 cached_nonzero_bits (XEXP (x, 1), mode,
3631 known_x, known_mode, known_ret);
3632 int sign_index = GET_MODE_BITSIZE (GET_MODE (x)) - 1;
3633 int width0 = floor_log2 (nz0) + 1;
3634 int width1 = floor_log2 (nz1) + 1;
3635 int low0 = floor_log2 (nz0 & -nz0);
3636 int low1 = floor_log2 (nz1 & -nz1);
3637 HOST_WIDE_INT op0_maybe_minusp
3638 = (nz0 & ((HOST_WIDE_INT) 1 << sign_index));
3639 HOST_WIDE_INT op1_maybe_minusp
3640 = (nz1 & ((HOST_WIDE_INT) 1 << sign_index));
3641 unsigned int result_width = mode_width;
3642 int result_low = 0;
3643
3644 switch (code)
3645 {
3646 case PLUS:
3647 result_width = MAX (width0, width1) + 1;
3648 result_low = MIN (low0, low1);
3649 break;
3650 case MINUS:
3651 result_low = MIN (low0, low1);
3652 break;
3653 case MULT:
3654 result_width = width0 + width1;
3655 result_low = low0 + low1;
3656 break;
3657 case DIV:
3658 if (width1 == 0)
3659 break;
3660 if (! op0_maybe_minusp && ! op1_maybe_minusp)
3661 result_width = width0;
3662 break;
3663 case UDIV:
3664 if (width1 == 0)
3665 break;
3666 result_width = width0;
3667 break;
3668 case MOD:
3669 if (width1 == 0)
3670 break;
3671 if (! op0_maybe_minusp && ! op1_maybe_minusp)
3672 result_width = MIN (width0, width1);
3673 result_low = MIN (low0, low1);
3674 break;
3675 case UMOD:
3676 if (width1 == 0)
3677 break;
3678 result_width = MIN (width0, width1);
3679 result_low = MIN (low0, low1);
3680 break;
3681 default:
41374e13 3682 gcc_unreachable ();
2f93eea8
PB
3683 }
3684
3685 if (result_width < mode_width)
3686 nonzero &= ((HOST_WIDE_INT) 1 << result_width) - 1;
3687
3688 if (result_low > 0)
3689 nonzero &= ~(((HOST_WIDE_INT) 1 << result_low) - 1);
3690
3691#ifdef POINTERS_EXTEND_UNSIGNED
3692 /* If pointers extend unsigned and this is an addition or subtraction
3693 to a pointer in Pmode, all the bits above ptr_mode are known to be
3694 zero. */
3695 if (POINTERS_EXTEND_UNSIGNED > 0 && GET_MODE (x) == Pmode
3696 && (code == PLUS || code == MINUS)
f8cfc6aa 3697 && REG_P (XEXP (x, 0)) && REG_POINTER (XEXP (x, 0)))
3698 nonzero &= GET_MODE_MASK (ptr_mode);
3699#endif
3700 }
3701 break;
3702
3703 case ZERO_EXTRACT:
3704 if (GET_CODE (XEXP (x, 1)) == CONST_INT
3705 && INTVAL (XEXP (x, 1)) < HOST_BITS_PER_WIDE_INT)
3706 nonzero &= ((HOST_WIDE_INT) 1 << INTVAL (XEXP (x, 1))) - 1;
3707 break;
3708
3709 case SUBREG:
3710 /* If this is a SUBREG formed for a promoted variable that has
3711 been zero-extended, we know that at least the high-order bits
3712 are zero, though others might be too. */
3713
3714 if (SUBREG_PROMOTED_VAR_P (x) && SUBREG_PROMOTED_UNSIGNED_P (x) > 0)
3715 nonzero = GET_MODE_MASK (GET_MODE (x))
3716 & cached_nonzero_bits (SUBREG_REG (x), GET_MODE (x),
3717 known_x, known_mode, known_ret);
3718
3719 /* If the inner mode is a single word for both the host and target
3720 machines, we can compute this from which bits of the inner
3721 object might be nonzero. */
3722 if (GET_MODE_BITSIZE (GET_MODE (SUBREG_REG (x))) <= BITS_PER_WORD
3723 && (GET_MODE_BITSIZE (GET_MODE (SUBREG_REG (x)))
3724 <= HOST_BITS_PER_WIDE_INT))
3725 {
3726 nonzero &= cached_nonzero_bits (SUBREG_REG (x), mode,
3727 known_x, known_mode, known_ret);
3728
3729#if defined (WORD_REGISTER_OPERATIONS) && defined (LOAD_EXTEND_OP)
3730 /* If this is a typical RISC machine, we only have to worry
3731 about the way loads are extended. */
3732 if ((LOAD_EXTEND_OP (GET_MODE (SUBREG_REG (x))) == SIGN_EXTEND
3733 ? (((nonzero
3734 & (((unsigned HOST_WIDE_INT) 1
3735 << (GET_MODE_BITSIZE (GET_MODE (SUBREG_REG (x))) - 1))))
3736 != 0))
3737 : LOAD_EXTEND_OP (GET_MODE (SUBREG_REG (x))) != ZERO_EXTEND)
3c0cb5de 3738 || !MEM_P (SUBREG_REG (x)))
3739#endif
3740 {
3741 /* On many CISC machines, accessing an object in a wider mode
3742 causes the high-order bits to become undefined. So they are
3743 not known to be zero. */
3744 if (GET_MODE_SIZE (GET_MODE (x))
3745 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))))
3746 nonzero |= (GET_MODE_MASK (GET_MODE (x))
3747 & ~GET_MODE_MASK (GET_MODE (SUBREG_REG (x))));
3748 }
3749 }
3750 break;
3751
3752 case ASHIFTRT:
3753 case LSHIFTRT:
3754 case ASHIFT:
3755 case ROTATE:
3756 /* The nonzero bits are in two classes: any bits within MODE
3757 that aren't in GET_MODE (x) are always significant. The rest of the
3758 nonzero bits are those that are significant in the operand of
3759 the shift when shifted the appropriate number of bits. This
3760 shows that high-order bits are cleared by the right shift and
3761 low-order bits by left shifts. */
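      /* For instance (illustrative values), for (lshiftrt:SI x (const_int 4))
	 with op_nonzero == 0x000000ff, INNER becomes 0x0000000f, while for
	 (ashift:SI x (const_int 4)) it becomes 0x00000ff0; an ASHIFTRT whose
	 operand may have its sign bit set additionally smears that bit over
	 the vacated high-order positions.  */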
3762 if (GET_CODE (XEXP (x, 1)) == CONST_INT
3763 && INTVAL (XEXP (x, 1)) >= 0
3764 && INTVAL (XEXP (x, 1)) < HOST_BITS_PER_WIDE_INT)
3765 {
3766 enum machine_mode inner_mode = GET_MODE (x);
3767 unsigned int width = GET_MODE_BITSIZE (inner_mode);
3768 int count = INTVAL (XEXP (x, 1));
3769 unsigned HOST_WIDE_INT mode_mask = GET_MODE_MASK (inner_mode);
3770 unsigned HOST_WIDE_INT op_nonzero =
3771 cached_nonzero_bits (XEXP (x, 0), mode,
3772 known_x, known_mode, known_ret);
3773 unsigned HOST_WIDE_INT inner = op_nonzero & mode_mask;
3774 unsigned HOST_WIDE_INT outer = 0;
3775
3776 if (mode_width > width)
3777 outer = (op_nonzero & nonzero & ~mode_mask);
3778
3779 if (code == LSHIFTRT)
3780 inner >>= count;
3781 else if (code == ASHIFTRT)
3782 {
3783 inner >>= count;
3784
3785 /* If the sign bit may have been nonzero before the shift, we
3786 need to mark all the places it could have been copied to
3787 by the shift as possibly nonzero. */
3788 if (inner & ((HOST_WIDE_INT) 1 << (width - 1 - count)))
3789 inner |= (((HOST_WIDE_INT) 1 << count) - 1) << (width - count);
3790 }
3791 else if (code == ASHIFT)
3792 inner <<= count;
3793 else
3794 inner = ((inner << (count % width)
3795 | (inner >> (width - (count % width)))) & mode_mask);
3796
3797 nonzero &= (outer | inner);
3798 }
3799 break;
3800
3801 case FFS:
3802 case POPCOUNT:
3803 /* This is at most the number of bits in the mode. */
3804 nonzero = ((HOST_WIDE_INT) 2 << (floor_log2 (mode_width))) - 1;
3805 break;
3806
3807 case CLZ:
3808 /* If CLZ has a known value at zero, then the nonzero bits are
3809 that value, plus the number of bits in the mode minus one. */
3810 if (CLZ_DEFINED_VALUE_AT_ZERO (mode, nonzero))
3811 nonzero |= ((HOST_WIDE_INT) 1 << (floor_log2 (mode_width))) - 1;
3812 else
3813 nonzero = -1;
3814 break;
3815
3816 case CTZ:
3817 /* If CTZ has a known value at zero, then the nonzero bits are
3818 that value, plus the number of bits in the mode minus one. */
3819 if (CTZ_DEFINED_VALUE_AT_ZERO (mode, nonzero))
3820 nonzero |= ((HOST_WIDE_INT) 1 << (floor_log2 (mode_width))) - 1;
3821 else
3822 nonzero = -1;
3823 break;
3824
3825 case PARITY:
3826 nonzero = 1;
3827 break;
3828
3829 case IF_THEN_ELSE:
3830 {
3831 unsigned HOST_WIDE_INT nonzero_true =
3832 cached_nonzero_bits (XEXP (x, 1), mode,
3833 known_x, known_mode, known_ret);
3834
3835	  /* Don't call nonzero_bits a second time if it cannot change
3836 anything. */
3837 if ((nonzero & nonzero_true) != nonzero)
3838 nonzero &= nonzero_true
3839 | cached_nonzero_bits (XEXP (x, 2), mode,
3840 known_x, known_mode, known_ret);
3841 }
3842 break;
3843
3844 default:
3845 break;
3846 }
3847
3848 return nonzero;
3849}
3850
3851/* See the macro definition above. */
3852#undef cached_num_sign_bit_copies
3853
3854\f
3855/* The function cached_num_sign_bit_copies is a wrapper around
3856 num_sign_bit_copies1. It avoids exponential behavior in
3857 num_sign_bit_copies1 when X has identical subexpressions on the
3858 first or the second level. */
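/* For example (hypothetical rtx), for (plus:SI (reg:SI 60) (reg:SI 60))
   the two operands are the same expression, so num_sign_bit_copies1 is
   handed the precomputed answer for (reg:SI 60) rather than recursing
   into it twice, which is what keeps heavily shared expressions from
   triggering exponential rescanning.  */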
3859
3860static unsigned int
3861cached_num_sign_bit_copies (rtx x, enum machine_mode mode, rtx known_x,
3862 enum machine_mode known_mode,
3863 unsigned int known_ret)
3864{
3865 if (x == known_x && mode == known_mode)
3866 return known_ret;
3867
3868 /* Try to find identical subexpressions. If found call
3869 num_sign_bit_copies1 on X with the subexpressions as KNOWN_X and
3870 the precomputed value for the subexpression as KNOWN_RET. */
3871
3872 if (ARITHMETIC_P (x))
3873 {
3874 rtx x0 = XEXP (x, 0);
3875 rtx x1 = XEXP (x, 1);
3876
3877 /* Check the first level. */
3878 if (x0 == x1)
3879 return
3880 num_sign_bit_copies1 (x, mode, x0, mode,
3881 cached_num_sign_bit_copies (x0, mode, known_x,
3882 known_mode,
3883 known_ret));
3884
3885 /* Check the second level. */
3886 if (ARITHMETIC_P (x0)
3887 && (x1 == XEXP (x0, 0) || x1 == XEXP (x0, 1)))
3888 return
3889 num_sign_bit_copies1 (x, mode, x1, mode,
3890 cached_num_sign_bit_copies (x1, mode, known_x,
3891 known_mode,
3892 known_ret));
3893
3894 if (ARITHMETIC_P (x1)
3895 && (x0 == XEXP (x1, 0) || x0 == XEXP (x1, 1)))
3896 return
3897 num_sign_bit_copies1 (x, mode, x0, mode,
3898 cached_num_sign_bit_copies (x0, mode, known_x,
3899 known_mode,
3900 known_ret));
3901 }
3902
3903 return num_sign_bit_copies1 (x, mode, known_x, known_mode, known_ret);
3904}
3905
3906/* Return the number of bits at the high-order end of X that are known to
3907 be equal to the sign bit. X will be used in mode MODE; if MODE is
3908 VOIDmode, X will be used in its own mode. The returned value will always
3909 be between 1 and the number of bits in MODE. */
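/* Two illustrative cases: in SImode, (const_int -1) yields 32, since
   every bit equals the sign bit, while (sign_extend:SI (reg:QI 60))
   yields at least 32 - 8 + 1 == 25 by the SIGN_EXTEND rule below.  */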
3910
3911static unsigned int
3912num_sign_bit_copies1 (rtx x, enum machine_mode mode, rtx known_x,
3913 enum machine_mode known_mode,
3914 unsigned int known_ret)
3915{
3916 enum rtx_code code = GET_CODE (x);
3917 unsigned int bitwidth = GET_MODE_BITSIZE (mode);
3918 int num0, num1, result;
3919 unsigned HOST_WIDE_INT nonzero;
3920
3921 /* If we weren't given a mode, use the mode of X. If the mode is still
3922 VOIDmode, we don't know anything. Likewise if one of the modes is
3923 floating-point. */
3924
3925 if (mode == VOIDmode)
3926 mode = GET_MODE (x);
3927
3928 if (mode == VOIDmode || FLOAT_MODE_P (mode) || FLOAT_MODE_P (GET_MODE (x)))
3929 return 1;
3930
3931 /* For a smaller object, just ignore the high bits. */
3932 if (bitwidth < GET_MODE_BITSIZE (GET_MODE (x)))
3933 {
3934 num0 = cached_num_sign_bit_copies (x, GET_MODE (x),
3935 known_x, known_mode, known_ret);
3936 return MAX (1,
3937 num0 - (int) (GET_MODE_BITSIZE (GET_MODE (x)) - bitwidth));
3938 }
3939
3940 if (GET_MODE (x) != VOIDmode && bitwidth > GET_MODE_BITSIZE (GET_MODE (x)))
3941 {
3942#ifndef WORD_REGISTER_OPERATIONS
3943 /* If this machine does not do all register operations on the entire
3944 register and MODE is wider than the mode of X, we can say nothing
3945 at all about the high-order bits. */
3946 return 1;
3947#else
3948 /* Likewise on machines that do, if the mode of the object is smaller
3949 than a word and loads of that size don't sign extend, we can say
3950 nothing about the high order bits. */
3951 if (GET_MODE_BITSIZE (GET_MODE (x)) < BITS_PER_WORD
3952#ifdef LOAD_EXTEND_OP
3953 && LOAD_EXTEND_OP (GET_MODE (x)) != SIGN_EXTEND
3954#endif
3955 )
3956 return 1;
3957#endif
3958 }
3959
3960 switch (code)
3961 {
3962 case REG:
3963
3964#if defined(POINTERS_EXTEND_UNSIGNED) && !defined(HAVE_ptr_extend)
3965 /* If pointers extend signed and this is a pointer in Pmode, say that
3966 all the bits above ptr_mode are known to be sign bit copies. */
3967 if (! POINTERS_EXTEND_UNSIGNED && GET_MODE (x) == Pmode && mode == Pmode
3968 && REG_POINTER (x))
3969 return GET_MODE_BITSIZE (Pmode) - GET_MODE_BITSIZE (ptr_mode) + 1;
3970#endif
3971
3972 {
3973 unsigned int copies_for_hook = 1, copies = 1;
3974 rtx new = rtl_hooks.reg_num_sign_bit_copies (x, mode, known_x,
3975 known_mode, known_ret,
3976 &copies_for_hook);
3977
3978 if (new)
3979 copies = cached_num_sign_bit_copies (new, mode, known_x,
3980 known_mode, known_ret);
3981
3982 if (copies > 1 || copies_for_hook > 1)
3983 return MAX (copies, copies_for_hook);
3984
3985 /* Else, use nonzero_bits to guess num_sign_bit_copies (see below). */
3986 }
3987 break;
3988
3989 case MEM:
3990#ifdef LOAD_EXTEND_OP
3991 /* Some RISC machines sign-extend all loads of smaller than a word. */
3992 if (LOAD_EXTEND_OP (GET_MODE (x)) == SIGN_EXTEND)
3993 return MAX (1, ((int) bitwidth
3994 - (int) GET_MODE_BITSIZE (GET_MODE (x)) + 1));
3995#endif
3996 break;
3997
3998 case CONST_INT:
3999 /* If the constant is negative, take its 1's complement and remask.
4000 Then see how many zero bits we have. */
4001 nonzero = INTVAL (x) & GET_MODE_MASK (mode);
4002 if (bitwidth <= HOST_BITS_PER_WIDE_INT
4003 && (nonzero & ((HOST_WIDE_INT) 1 << (bitwidth - 1))) != 0)
4004 nonzero = (~nonzero) & GET_MODE_MASK (mode);
4005
4006 return (nonzero == 0 ? bitwidth : bitwidth - floor_log2 (nonzero) - 1);
4007
4008 case SUBREG:
4009 /* If this is a SUBREG for a promoted object that is sign-extended
4010 and we are looking at it in a wider mode, we know that at least the
4011	 high-order bits are sign bit copies.  */
4012
4013 if (SUBREG_PROMOTED_VAR_P (x) && ! SUBREG_PROMOTED_UNSIGNED_P (x))
4014 {
4015 num0 = cached_num_sign_bit_copies (SUBREG_REG (x), mode,
4016 known_x, known_mode, known_ret);
4017 return MAX ((int) bitwidth
4018 - (int) GET_MODE_BITSIZE (GET_MODE (x)) + 1,
4019 num0);
4020 }
4021
4022 /* For a smaller object, just ignore the high bits. */
4023 if (bitwidth <= GET_MODE_BITSIZE (GET_MODE (SUBREG_REG (x))))
4024 {
4025 num0 = cached_num_sign_bit_copies (SUBREG_REG (x), VOIDmode,
4026 known_x, known_mode, known_ret);
4027 return MAX (1, (num0
4028 - (int) (GET_MODE_BITSIZE (GET_MODE (SUBREG_REG (x)))
4029 - bitwidth)));
4030 }
4031
4032#ifdef WORD_REGISTER_OPERATIONS
4033#ifdef LOAD_EXTEND_OP
4034 /* For paradoxical SUBREGs on machines where all register operations
4035 affect the entire register, just look inside. Note that we are
4036 passing MODE to the recursive call, so the number of sign bit copies
4037 will remain relative to that mode, not the inner mode. */
4038
4039 /* This works only if loads sign extend. Otherwise, if we get a
4040 reload for the inner part, it may be loaded from the stack, and
4041 then we lose all sign bit copies that existed before the store
4042 to the stack. */
4043
4044 if ((GET_MODE_SIZE (GET_MODE (x))
4045 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))))
4046 && LOAD_EXTEND_OP (GET_MODE (SUBREG_REG (x))) == SIGN_EXTEND
3c0cb5de 4047 && MEM_P (SUBREG_REG (x)))
4048 return cached_num_sign_bit_copies (SUBREG_REG (x), mode,
4049 known_x, known_mode, known_ret);
4050#endif
4051#endif
4052 break;
4053
4054 case SIGN_EXTRACT:
4055 if (GET_CODE (XEXP (x, 1)) == CONST_INT)
4056 return MAX (1, (int) bitwidth - INTVAL (XEXP (x, 1)));
4057 break;
4058
4059 case SIGN_EXTEND:
4060 return (bitwidth - GET_MODE_BITSIZE (GET_MODE (XEXP (x, 0)))
4061 + cached_num_sign_bit_copies (XEXP (x, 0), VOIDmode,
4062 known_x, known_mode, known_ret));
4063
4064 case TRUNCATE:
4065 /* For a smaller object, just ignore the high bits. */
4066 num0 = cached_num_sign_bit_copies (XEXP (x, 0), VOIDmode,
4067 known_x, known_mode, known_ret);
4068 return MAX (1, (num0 - (int) (GET_MODE_BITSIZE (GET_MODE (XEXP (x, 0)))
4069 - bitwidth)));
4070
4071 case NOT:
4072 return cached_num_sign_bit_copies (XEXP (x, 0), mode,
4073 known_x, known_mode, known_ret);
4074
4075 case ROTATE: case ROTATERT:
4076 /* If we are rotating left by a number of bits less than the number
4077 of sign bit copies, we can just subtract that amount from the
4078 number. */
4079 if (GET_CODE (XEXP (x, 1)) == CONST_INT
4080 && INTVAL (XEXP (x, 1)) >= 0
4081 && INTVAL (XEXP (x, 1)) < (int) bitwidth)
4082 {
4083 num0 = cached_num_sign_bit_copies (XEXP (x, 0), mode,
4084 known_x, known_mode, known_ret);
4085 return MAX (1, num0 - (code == ROTATE ? INTVAL (XEXP (x, 1))
4086 : (int) bitwidth - INTVAL (XEXP (x, 1))));
4087 }
4088 break;
4089
4090 case NEG:
4091 /* In general, this subtracts one sign bit copy. But if the value
4092 is known to be positive, the number of sign bit copies is the
4093 same as that of the input. Finally, if the input has just one bit
4094 that might be nonzero, all the bits are copies of the sign bit. */
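      /* Illustration: in SImode with num0 == 8, the negation is only
	 guaranteed 7 copies, since negating the most negative 25-bit
	 value needs one more bit; but if nonzero == 1 the operand is 0
	 or 1, so the negation is 0 or -1 and all 32 bits match.  */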
4095 num0 = cached_num_sign_bit_copies (XEXP (x, 0), mode,
4096 known_x, known_mode, known_ret);
4097 if (bitwidth > HOST_BITS_PER_WIDE_INT)
4098 return num0 > 1 ? num0 - 1 : 1;
4099
4100 nonzero = nonzero_bits (XEXP (x, 0), mode);
4101 if (nonzero == 1)
4102 return bitwidth;
4103
4104 if (num0 > 1
4105 && (((HOST_WIDE_INT) 1 << (bitwidth - 1)) & nonzero))
4106 num0--;
4107
4108 return num0;
4109
4110 case IOR: case AND: case XOR:
4111 case SMIN: case SMAX: case UMIN: case UMAX:
4112 /* Logical operations will preserve the number of sign-bit copies.
4113 MIN and MAX operations always return one of the operands. */
4114 num0 = cached_num_sign_bit_copies (XEXP (x, 0), mode,
4115 known_x, known_mode, known_ret);
4116 num1 = cached_num_sign_bit_copies (XEXP (x, 1), mode,
4117 known_x, known_mode, known_ret);
4118 return MIN (num0, num1);
4119
4120 case PLUS: case MINUS:
4121 /* For addition and subtraction, we can have a 1-bit carry. However,
4122 if we are subtracting 1 from a positive number, there will not
4123 be such a carry. Furthermore, if the positive number is known to
4124 be 0 or 1, we know the result is either -1 or 0. */
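      /* Illustration: with num0 == 10 and num1 == 6, the result below is
	 MIN (10, 6) - 1 == 5, the lost copy accounting for a possible
	 carry or borrow into the sign-bit copies.  */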
4125
4126 if (code == PLUS && XEXP (x, 1) == constm1_rtx
4127 && bitwidth <= HOST_BITS_PER_WIDE_INT)
4128 {
4129 nonzero = nonzero_bits (XEXP (x, 0), mode);
4130 if ((((HOST_WIDE_INT) 1 << (bitwidth - 1)) & nonzero) == 0)
4131 return (nonzero == 1 || nonzero == 0 ? bitwidth
4132 : bitwidth - floor_log2 (nonzero) - 1);
4133 }
4134
4135 num0 = cached_num_sign_bit_copies (XEXP (x, 0), mode,
4136 known_x, known_mode, known_ret);
4137 num1 = cached_num_sign_bit_copies (XEXP (x, 1), mode,
4138 known_x, known_mode, known_ret);
4139 result = MAX (1, MIN (num0, num1) - 1);
4140
4141#ifdef POINTERS_EXTEND_UNSIGNED
4142 /* If pointers extend signed and this is an addition or subtraction
4143 to a pointer in Pmode, all the bits above ptr_mode are known to be
4144 sign bit copies. */
4145 if (! POINTERS_EXTEND_UNSIGNED && GET_MODE (x) == Pmode
4146 && (code == PLUS || code == MINUS)
f8cfc6aa 4147 && REG_P (XEXP (x, 0)) && REG_POINTER (XEXP (x, 0)))
4148 result = MAX ((int) (GET_MODE_BITSIZE (Pmode)
4149 - GET_MODE_BITSIZE (ptr_mode) + 1),
4150 result);
4151#endif
4152 return result;
4153
4154 case MULT:
4155 /* The number of bits of the product is the sum of the number of
4156	 bits of both terms.  However, unless one of the terms is known
4157 to be positive, we must allow for an additional bit since negating
4158 a negative number can remove one sign bit copy. */
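      /* Illustration: in SImode with num0 == num1 == 20, the product is
	 guaranteed 32 - (12 + 12) == 8 copies, reduced to 7 unless at
	 least one operand is known to be nonnegative.  */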
4159
4160 num0 = cached_num_sign_bit_copies (XEXP (x, 0), mode,
4161 known_x, known_mode, known_ret);
4162 num1 = cached_num_sign_bit_copies (XEXP (x, 1), mode,
4163 known_x, known_mode, known_ret);
4164
4165 result = bitwidth - (bitwidth - num0) - (bitwidth - num1);
4166 if (result > 0
4167 && (bitwidth > HOST_BITS_PER_WIDE_INT
4168 || (((nonzero_bits (XEXP (x, 0), mode)
4169 & ((HOST_WIDE_INT) 1 << (bitwidth - 1))) != 0)
4170 && ((nonzero_bits (XEXP (x, 1), mode)
4171 & ((HOST_WIDE_INT) 1 << (bitwidth - 1))) != 0))))
4172 result--;
4173
4174 return MAX (1, result);
4175
4176 case UDIV:
4177 /* The result must be <= the first operand. If the first operand
4178 has the high bit set, we know nothing about the number of sign
4179 bit copies. */
4180 if (bitwidth > HOST_BITS_PER_WIDE_INT)
4181 return 1;
4182 else if ((nonzero_bits (XEXP (x, 0), mode)
4183 & ((HOST_WIDE_INT) 1 << (bitwidth - 1))) != 0)
4184 return 1;
4185 else
4186 return cached_num_sign_bit_copies (XEXP (x, 0), mode,
4187 known_x, known_mode, known_ret);
4188
4189 case UMOD:
4190 /* The result must be <= the second operand. */
4191 return cached_num_sign_bit_copies (XEXP (x, 1), mode,
4192 known_x, known_mode, known_ret);
4193
4194 case DIV:
4195 /* Similar to unsigned division, except that we have to worry about
4196 the case where the divisor is negative, in which case we have
4197	 to allow for an additional bit.  */
4198 result = cached_num_sign_bit_copies (XEXP (x, 0), mode,
4199 known_x, known_mode, known_ret);
4200 if (result > 1
4201 && (bitwidth > HOST_BITS_PER_WIDE_INT
4202 || (nonzero_bits (XEXP (x, 1), mode)
4203 & ((HOST_WIDE_INT) 1 << (bitwidth - 1))) != 0))
4204 result--;
4205
4206 return result;
4207
4208 case MOD:
4209 result = cached_num_sign_bit_copies (XEXP (x, 1), mode,
4210 known_x, known_mode, known_ret);
4211 if (result > 1
4212 && (bitwidth > HOST_BITS_PER_WIDE_INT
4213 || (nonzero_bits (XEXP (x, 1), mode)
4214 & ((HOST_WIDE_INT) 1 << (bitwidth - 1))) != 0))
4215 result--;
4216
4217 return result;
4218
4219 case ASHIFTRT:
4220      /* A right shift by a constant adds that many bits equal to the
4221	 sign bit.  */
4222 num0 = cached_num_sign_bit_copies (XEXP (x, 0), mode,
4223 known_x, known_mode, known_ret);
4224 if (GET_CODE (XEXP (x, 1)) == CONST_INT
4225 && INTVAL (XEXP (x, 1)) > 0)
4226 num0 = MIN ((int) bitwidth, num0 + INTVAL (XEXP (x, 1)));
4227
4228 return num0;
4229
4230 case ASHIFT:
4231 /* Left shifts destroy copies. */
4232 if (GET_CODE (XEXP (x, 1)) != CONST_INT
4233 || INTVAL (XEXP (x, 1)) < 0
4234 || INTVAL (XEXP (x, 1)) >= (int) bitwidth)
4235 return 1;
4236
4237 num0 = cached_num_sign_bit_copies (XEXP (x, 0), mode,
4238 known_x, known_mode, known_ret);
4239 return MAX (1, num0 - INTVAL (XEXP (x, 1)));
4240
4241 case IF_THEN_ELSE:
4242 num0 = cached_num_sign_bit_copies (XEXP (x, 1), mode,
4243 known_x, known_mode, known_ret);
4244 num1 = cached_num_sign_bit_copies (XEXP (x, 2), mode,
4245 known_x, known_mode, known_ret);
4246 return MIN (num0, num1);
4247
4248 case EQ: case NE: case GE: case GT: case LE: case LT:
4249 case UNEQ: case LTGT: case UNGE: case UNGT: case UNLE: case UNLT:
4250 case GEU: case GTU: case LEU: case LTU:
4251 case UNORDERED: case ORDERED:
4252      /* If STORE_FLAG_VALUE is negative, take its 1's complement and remask.
4253 Then see how many zero bits we have. */
4254 nonzero = STORE_FLAG_VALUE;
4255 if (bitwidth <= HOST_BITS_PER_WIDE_INT
4256 && (nonzero & ((HOST_WIDE_INT) 1 << (bitwidth - 1))) != 0)
4257 nonzero = (~nonzero) & GET_MODE_MASK (mode);
4258
4259 return (nonzero == 0 ? bitwidth : bitwidth - floor_log2 (nonzero) - 1);
4260
4261 default:
4262 break;
4263 }
4264
4265 /* If we haven't been able to figure it out by one of the above rules,
4266 see if some of the high-order bits are known to be zero. If so,
4267 count those bits and return one less than that amount. If we can't
4268     safely compute the mask for this mode, always return 1.  */
4269
4270 bitwidth = GET_MODE_BITSIZE (mode);
4271 if (bitwidth > HOST_BITS_PER_WIDE_INT)
4272 return 1;
4273
4274 nonzero = nonzero_bits (x, mode);
4275 return nonzero & ((HOST_WIDE_INT) 1 << (bitwidth - 1))
4276 ? 1 : bitwidth - floor_log2 (nonzero) - 1;
4277}
4278
4279/* Calculate the rtx_cost of a single instruction. A return value of
4280 zero indicates an instruction pattern without a known cost. */
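/* For instance (hypothetical pattern), for (set (reg:SI 0)
   (plus:SI (reg:SI 1) (const_int 1))) this returns the rtx_cost of the
   PLUS source, falling back to COSTS_N_INSNS (1) if the target reports
   a zero cost; a PARALLEL containing two SETs returns 0.  */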
4281
4282int
4283insn_rtx_cost (rtx pat)
4284{
4285 int i, cost;
4286 rtx set;
4287
4288 /* Extract the single set rtx from the instruction pattern.
4289 We can't use single_set since we only have the pattern. */
4290 if (GET_CODE (pat) == SET)
4291 set = pat;
4292 else if (GET_CODE (pat) == PARALLEL)
4293 {
4294 set = NULL_RTX;
4295 for (i = 0; i < XVECLEN (pat, 0); i++)
4296 {
4297 rtx x = XVECEXP (pat, 0, i);
4298 if (GET_CODE (x) == SET)
4299 {
4300 if (set)
4301 return 0;
4302 set = x;
4303 }
4304 }
4305 if (!set)
4306 return 0;
4307 }
4308 else
4309 return 0;
4310
4311 cost = rtx_cost (SET_SRC (set), SET);
4312 return cost > 0 ? cost : COSTS_N_INSNS (1);
4313}
4314
4315/* Given an insn INSN and condition COND, return the condition in a
4316 canonical form to simplify testing by callers. Specifically:
4317
4318 (1) The code will always be a comparison operation (EQ, NE, GT, etc.).
4319 (2) Both operands will be machine operands; (cc0) will have been replaced.
4320 (3) If an operand is a constant, it will be the second operand.
4321 (4) (LE x const) will be replaced with (LT x <const+1>) and similarly
4322 for GE, GEU, and LEU.
4323
4324 If the condition cannot be understood, or is an inequality floating-point
4325 comparison which needs to be reversed, 0 will be returned.
4326
4327   If REVERSE is nonzero, then reverse the condition prior to canonicalizing it.
4328
4329 If EARLIEST is nonzero, it is a pointer to a place where the earliest
4330 insn used in locating the condition was found. If a replacement test
4331 of the condition is desired, it should be placed in front of that
4332 insn and we will be sure that the inputs are still valid.
4333
4334 If WANT_REG is nonzero, we wish the condition to be relative to that
4335 register, if possible. Therefore, do not canonicalize the condition
4336 further. If ALLOW_CC_MODE is nonzero, allow the condition returned
4337 to be a compare to a CC mode register.
4338
4339 If VALID_AT_INSN_P, the condition must be valid at both *EARLIEST
4340 and at INSN. */
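/* Example of rules (3) and (4) with hypothetical operands:
   (lt (const_int 4) (reg:SI 0)) first becomes (gt (reg:SI 0) (const_int 4)),
   and (le (reg:SI 0) (const_int 4)) becomes (lt (reg:SI 0) (const_int 5)),
   just as (geu (reg:SI 0) (const_int 1)) becomes
   (gtu (reg:SI 0) (const_int 0)).  */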
4341
4342rtx
4343canonicalize_condition (rtx insn, rtx cond, int reverse, rtx *earliest,
4344 rtx want_reg, int allow_cc_mode, int valid_at_insn_p)
4345{
4346 enum rtx_code code;
4347 rtx prev = insn;
4348 rtx set;
4349 rtx tem;
4350 rtx op0, op1;
4351 int reverse_code = 0;
4352 enum machine_mode mode;
569f8d98 4353 basic_block bb = BLOCK_FOR_INSN (insn);
4354
4355 code = GET_CODE (cond);
4356 mode = GET_MODE (cond);
4357 op0 = XEXP (cond, 0);
4358 op1 = XEXP (cond, 1);
4359
4360 if (reverse)
4361 code = reversed_comparison_code (cond, insn);
4362 if (code == UNKNOWN)
4363 return 0;
4364
4365 if (earliest)
4366 *earliest = insn;
4367
4368 /* If we are comparing a register with zero, see if the register is set
4369 in the previous insn to a COMPARE or a comparison operation. Perform
4370 the same tests as a function of STORE_FLAG_VALUE as find_comparison_args
4371 in cse.c */
4372
4373 while ((GET_RTX_CLASS (code) == RTX_COMPARE
4374 || GET_RTX_CLASS (code) == RTX_COMM_COMPARE)
4375 && op1 == CONST0_RTX (GET_MODE (op0))
4376 && op0 != want_reg)
4377 {
4378 /* Set nonzero when we find something of interest. */
4379 rtx x = 0;
4380
4381#ifdef HAVE_cc0
4382 /* If comparison with cc0, import actual comparison from compare
4383 insn. */
4384 if (op0 == cc0_rtx)
4385 {
4386 if ((prev = prev_nonnote_insn (prev)) == 0
4387 || !NONJUMP_INSN_P (prev)
4388 || (set = single_set (prev)) == 0
4389 || SET_DEST (set) != cc0_rtx)
4390 return 0;
4391
4392 op0 = SET_SRC (set);
4393 op1 = CONST0_RTX (GET_MODE (op0));
4394 if (earliest)
4395 *earliest = prev;
4396 }
4397#endif
4398
4399 /* If this is a COMPARE, pick up the two things being compared. */
4400 if (GET_CODE (op0) == COMPARE)
4401 {
4402 op1 = XEXP (op0, 1);
4403 op0 = XEXP (op0, 0);
4404 continue;
4405 }
4406 else if (!REG_P (op0))
4407 break;
4408
4409 /* Go back to the previous insn. Stop if it is not an INSN. We also
4410 stop if it isn't a single set or if it has a REG_INC note because
4411 we don't want to bother dealing with it. */
4412
4413 if ((prev = prev_nonnote_insn (prev)) == 0
4414 || !NONJUMP_INSN_P (prev)
4415 || FIND_REG_INC_NOTE (prev, NULL_RTX)
4416 /* In cfglayout mode, there do not have to be labels at the
4417 beginning of a block, or jumps at the end, so the previous
4418	     conditions would not stop us when we reach the bb boundary.  */
4419 || BLOCK_FOR_INSN (prev) != bb)
4420 break;
4421
4422 set = set_of (op0, prev);
4423
4424 if (set
4425 && (GET_CODE (set) != SET
4426 || !rtx_equal_p (SET_DEST (set), op0)))
4427 break;
4428
4429 /* If this is setting OP0, get what it sets it to if it looks
4430 relevant. */
4431 if (set)
4432 {
4433 enum machine_mode inner_mode = GET_MODE (SET_DEST (set));
4434#ifdef FLOAT_STORE_FLAG_VALUE
4435 REAL_VALUE_TYPE fsfv;
4436#endif
4437
4438 /* ??? We may not combine comparisons done in a CCmode with
4439 comparisons not done in a CCmode. This is to aid targets
4440 like Alpha that have an IEEE compliant EQ instruction, and
4441 a non-IEEE compliant BEQ instruction. The use of CCmode is
4442 actually artificial, simply to prevent the combination, but
4443 should not affect other platforms.
4444
4445 However, we must allow VOIDmode comparisons to match either
4446 CCmode or non-CCmode comparison, because some ports have
4447 modeless comparisons inside branch patterns.
4448
4449 ??? This mode check should perhaps look more like the mode check
4450 in simplify_comparison in combine. */
4451
4452 if ((GET_CODE (SET_SRC (set)) == COMPARE
4453 || (((code == NE
4454 || (code == LT
4455 && GET_MODE_CLASS (inner_mode) == MODE_INT
4456 && (GET_MODE_BITSIZE (inner_mode)
4457 <= HOST_BITS_PER_WIDE_INT)
4458 && (STORE_FLAG_VALUE
4459 & ((HOST_WIDE_INT) 1
4460 << (GET_MODE_BITSIZE (inner_mode) - 1))))
4461#ifdef FLOAT_STORE_FLAG_VALUE
4462 || (code == LT
3d8bf70f 4463 && SCALAR_FLOAT_MODE_P (inner_mode)
4464 && (fsfv = FLOAT_STORE_FLAG_VALUE (inner_mode),
4465 REAL_VALUE_NEGATIVE (fsfv)))
4466#endif
4467 ))
4468 && COMPARISON_P (SET_SRC (set))))
4469 && (((GET_MODE_CLASS (mode) == MODE_CC)
4470 == (GET_MODE_CLASS (inner_mode) == MODE_CC))
4471 || mode == VOIDmode || inner_mode == VOIDmode))
4472 x = SET_SRC (set);
4473 else if (((code == EQ
4474 || (code == GE
4475 && (GET_MODE_BITSIZE (inner_mode)
4476 <= HOST_BITS_PER_WIDE_INT)
4477 && GET_MODE_CLASS (inner_mode) == MODE_INT
4478 && (STORE_FLAG_VALUE
4479 & ((HOST_WIDE_INT) 1
4480 << (GET_MODE_BITSIZE (inner_mode) - 1))))
4481#ifdef FLOAT_STORE_FLAG_VALUE
4482 || (code == GE
3d8bf70f 4483 && SCALAR_FLOAT_MODE_P (inner_mode)
4484 && (fsfv = FLOAT_STORE_FLAG_VALUE (inner_mode),
4485 REAL_VALUE_NEGATIVE (fsfv)))
4486#endif
4487 ))
4488 && COMPARISON_P (SET_SRC (set))
4489 && (((GET_MODE_CLASS (mode) == MODE_CC)
4490 == (GET_MODE_CLASS (inner_mode) == MODE_CC))
4491 || mode == VOIDmode || inner_mode == VOIDmode))
4492
4493 {
4494 reverse_code = 1;
4495 x = SET_SRC (set);
4496 }
4497 else
4498 break;
4499 }
4500
4501 else if (reg_set_p (op0, prev))
4502 /* If this sets OP0, but not directly, we have to give up. */
4503 break;
4504
4505 if (x)
4506 {
4507 /* If the caller is expecting the condition to be valid at INSN,
4508 make sure X doesn't change before INSN. */
4509 if (valid_at_insn_p)
4510 if (modified_in_p (x, prev) || modified_between_p (x, prev, insn))
4511 break;
4512 if (COMPARISON_P (x))
4513 code = GET_CODE (x);
4514 if (reverse_code)
4515 {
4516 code = reversed_comparison_code (x, prev);
4517 if (code == UNKNOWN)
4518 return 0;
4519 reverse_code = 0;
4520 }
4521
4522 op0 = XEXP (x, 0), op1 = XEXP (x, 1);
4523 if (earliest)
4524 *earliest = prev;
4525 }
4526 }
4527
4528 /* If constant is first, put it last. */
4529 if (CONSTANT_P (op0))
4530 code = swap_condition (code), tem = op0, op0 = op1, op1 = tem;
4531
4532 /* If OP0 is the result of a comparison, we weren't able to find what
4533 was really being compared, so fail. */
4534 if (!allow_cc_mode
4535 && GET_MODE_CLASS (GET_MODE (op0)) == MODE_CC)
4536 return 0;
4537
4538  /* Canonicalize any ordered integer comparison involving equality
4539     (LE, GE, LEU, GEU) into a strict one, when we can do the
4540     computation in the relevant mode without overflow.  */
4541
4542 if (GET_MODE_CLASS (GET_MODE (op0)) != MODE_CC
4543 && GET_CODE (op1) == CONST_INT
4544 && GET_MODE (op0) != VOIDmode
4545 && GET_MODE_BITSIZE (GET_MODE (op0)) <= HOST_BITS_PER_WIDE_INT)
4546 {
4547 HOST_WIDE_INT const_val = INTVAL (op1);
4548 unsigned HOST_WIDE_INT uconst_val = const_val;
4549 unsigned HOST_WIDE_INT max_val
4550 = (unsigned HOST_WIDE_INT) GET_MODE_MASK (GET_MODE (op0));
4551
4552 switch (code)
4553 {
4554 case LE:
4555 if ((unsigned HOST_WIDE_INT) const_val != max_val >> 1)
4556 code = LT, op1 = gen_int_mode (const_val + 1, GET_MODE (op0));
4557 break;
4558
4559 /* When cross-compiling, const_val might be sign-extended from
4560	   BITS_PER_WORD to HOST_BITS_PER_WIDE_INT.  */
4561 case GE:
4562 if ((HOST_WIDE_INT) (const_val & max_val)
4563 != (((HOST_WIDE_INT) 1
4564 << (GET_MODE_BITSIZE (GET_MODE (op0)) - 1))))
4565 code = GT, op1 = gen_int_mode (const_val - 1, GET_MODE (op0));
4566 break;
4567
4568 case LEU:
4569 if (uconst_val < max_val)
4570 code = LTU, op1 = gen_int_mode (uconst_val + 1, GET_MODE (op0));
4571 break;
4572
4573 case GEU:
4574 if (uconst_val != 0)
4575 code = GTU, op1 = gen_int_mode (uconst_val - 1, GET_MODE (op0));
4576 break;
4577
4578 default:
4579 break;
4580 }
4581 }
4582
4583 /* Never return CC0; return zero instead. */
4584 if (CC0_P (op0))
4585 return 0;
4586
4587 return gen_rtx_fmt_ee (code, VOIDmode, op0, op1);
4588}
4589
4590/* Given a jump insn JUMP, return the condition that will cause it to branch
4591 to its JUMP_LABEL. If the condition cannot be understood, or is an
4592 inequality floating-point comparison which needs to be reversed, 0 will
4593 be returned.
4594
4595 If EARLIEST is nonzero, it is a pointer to a place where the earliest
4596 insn used in locating the condition was found. If a replacement test
4597 of the condition is desired, it should be placed in front of that
4598 insn and we will be sure that the inputs are still valid. If EARLIEST
4599 is null, the returned condition will be valid at INSN.
4600
4601 If ALLOW_CC_MODE is nonzero, allow the condition returned to be a
4602 compare CC mode register.
4603
4604 VALID_AT_INSN_P is the same as for canonicalize_condition. */
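/* E.g., for a jump whose pc_set source is
   (if_then_else (eq (reg:CC 17) (const_int 0)) (label_ref 23) (pc)),
   this returns the canonicalized EQ condition; if the label_ref sat in
   the else arm instead, the condition would be reversed first.  */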
4605
4606rtx
4607get_condition (rtx jump, rtx *earliest, int allow_cc_mode, int valid_at_insn_p)
4608{
4609 rtx cond;
4610 int reverse;
4611 rtx set;
4612
4613 /* If this is not a standard conditional jump, we can't parse it. */
4614 if (!JUMP_P (jump)
4615 || ! any_condjump_p (jump))
4616 return 0;
4617 set = pc_set (jump);
4618
4619 cond = XEXP (SET_SRC (set), 0);
4620
4621 /* If this branches to JUMP_LABEL when the condition is false, reverse
4622 the condition. */
4623 reverse
4624 = GET_CODE (XEXP (SET_SRC (set), 2)) == LABEL_REF
4625 && XEXP (XEXP (SET_SRC (set), 2), 0) == JUMP_LABEL (jump);
4626
4627 return canonicalize_condition (jump, cond, reverse, earliest, NULL_RTX,
4628 allow_cc_mode, valid_at_insn_p);
4629}
4630
4631/* Initialize the table NUM_SIGN_BIT_COPIES_IN_REP based on
4632 TARGET_MODE_REP_EXTENDED.
4633
4634 Note that we assume that the property of
4635 TARGET_MODE_REP_EXTENDED(B, C) is sticky to the integral modes
4636 narrower than mode B. I.e., if A is a mode narrower than B then in
4637 order to be able to operate on it in mode B, mode A needs to
4638 satisfy the requirements set by the representation of mode B. */
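/* A hypothetical target whose TARGET_MODE_REP_EXTENDED is SIGN_EXTEND
   between all integral modes would get, e.g.,
   NUM_SIGN_BIT_COPIES_IN_REP[SImode][QImode] == (16 - 8) + (32 - 16)
   == 24: a QImode value living in SImode carries 24 sign-bit copies
   above its 8 value bits.  */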
4639
4640static void
4641init_num_sign_bit_copies_in_rep (void)
4642{
4643 enum machine_mode mode, in_mode;
4644
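  /* Note that the inner loop below terminates with MODE == IN_MODE,
     so the GET_MODE_WIDER_MODE (mode) in the outer loop header steps
     IN_MODE to the next wider integral mode.  */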
4645 for (in_mode = GET_CLASS_NARROWEST_MODE (MODE_INT); in_mode != VOIDmode;
4646 in_mode = GET_MODE_WIDER_MODE (mode))
4647 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != in_mode;
4648 mode = GET_MODE_WIDER_MODE (mode))
4649 {
4650 enum machine_mode i;
4651
4652 /* Currently, it is assumed that TARGET_MODE_REP_EXTENDED
4653 extends to the next widest mode. */
4654 gcc_assert (targetm.mode_rep_extended (mode, in_mode) == UNKNOWN
4655 || GET_MODE_WIDER_MODE (mode) == in_mode);
4656
4657 /* We are in in_mode. Count how many bits outside of mode
4658 have to be copies of the sign-bit. */
4659 for (i = mode; i != in_mode; i = GET_MODE_WIDER_MODE (i))
4660 {
4661 enum machine_mode wider = GET_MODE_WIDER_MODE (i);
4662
4663 if (targetm.mode_rep_extended (i, wider) == SIGN_EXTEND
4664 /* We can only check sign-bit copies starting from the
4665 top-bit. In order to be able to check the bits we
4666 have already seen we pretend that subsequent bits
4667 have to be sign-bit copies too. */
4668 || num_sign_bit_copies_in_rep [in_mode][mode])
4669 num_sign_bit_copies_in_rep [in_mode][mode]
4670 += GET_MODE_BITSIZE (wider) - GET_MODE_BITSIZE (i);
4671 }
4672 }
4673}
4674
4675/* Suppose that truncation from the machine mode of X to MODE is not a
4676 no-op. See if there is anything special about X so that we can
4677 assume it already contains a truncated value of MODE. */
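/* Continuing the hypothetical sign-extending target above, truncating
   an SImode X to QImode needs no explicit operation whenever
   num_sign_bit_copies (x, SImode) >= 24 + 1, which is exactly the
   test made below.  */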
4678
4679bool
4680truncated_to_mode (enum machine_mode mode, rtx x)
4681{
4682 /* This register has already been used in MODE without explicit
4683 truncation. */
4684 if (REG_P (x) && rtl_hooks.reg_truncated_to_mode (mode, x))
4685 return true;
4686
4687 /* See if we already satisfy the requirements of MODE. If yes we
4688 can just switch to MODE. */
4689 if (num_sign_bit_copies_in_rep[GET_MODE (x)][mode]
4690 && (num_sign_bit_copies (x, GET_MODE (x))
4691 >= num_sign_bit_copies_in_rep[GET_MODE (x)][mode] + 1))
4692 return true;
d3b72690 4693
4694 return false;
4695}
4696\f
4697/* Initialize non_rtx_starting_operands, which is used to speed up
4698 for_each_rtx. */
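/* E.g., PLUS has format "ee", so its entry becomes 0, while CONST_INT
   has format "w" with no rtx operands, so its entry becomes -1 and
   for_each_rtx can skip it entirely.  */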
4699void
4700init_rtlanal (void)
4701{
4702 int i;
4703 for (i = 0; i < NUM_RTX_CODE; i++)
4704 {
4705 const char *format = GET_RTX_FORMAT (i);
4706 const char *first = strpbrk (format, "eEV");
4707 non_rtx_starting_operands[i] = first ? first - format : -1;
4708 }
4709
4710 init_num_sign_bit_copies_in_rep ();
cf94b0fc 4711}
4712\f
4713/* Check whether this is a constant pool constant. */
4714bool
4715constant_pool_constant_p (rtx x)
4716{
4717 x = avoid_constant_pool_reference (x);
4718 return GET_CODE (x) == CONST_DOUBLE;
4719}
4720