/* Analyze RTL for GNU compiler.
   Copyright (C) 1987, 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998,
   1999, 2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007 Free Software
   Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 2, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING.  If not, write to the Free
Software Foundation, 51 Franklin Street, Fifth Floor, Boston, MA
02110-1301, USA.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "toplev.h"
#include "rtl.h"
#include "hard-reg-set.h"
#include "insn-config.h"
#include "recog.h"
#include "target.h"
#include "output.h"
#include "tm_p.h"
#include "flags.h"
#include "real.h"
#include "regs.h"
#include "function.h"
#include "df.h"
#include "tree.h"

/* Information about a subreg of a hard register.  */
struct subreg_info
{
  /* Offset of first hard register involved in the subreg.  */
  int offset;
  /* Number of hard registers involved in the subreg.  */
  int nregs;
  /* Whether this subreg can be represented as a hard reg with the new
     mode.  */
  bool representable_p;
};

/* Forward declarations */
static void set_of_1 (rtx, rtx, void *);
static bool covers_regno_p (rtx, unsigned int);
static bool covers_regno_no_parallel_p (rtx, unsigned int);
static int rtx_referenced_p_1 (rtx *, void *);
static int computed_jump_p_1 (rtx);
static void parms_set (rtx, rtx, void *);
static void subreg_get_info (unsigned int, enum machine_mode,
                             unsigned int, enum machine_mode,
                             struct subreg_info *);

static unsigned HOST_WIDE_INT cached_nonzero_bits (rtx, enum machine_mode,
                                                   rtx, enum machine_mode,
                                                   unsigned HOST_WIDE_INT);
static unsigned HOST_WIDE_INT nonzero_bits1 (rtx, enum machine_mode, rtx,
                                             enum machine_mode,
                                             unsigned HOST_WIDE_INT);
static unsigned int cached_num_sign_bit_copies (rtx, enum machine_mode, rtx,
                                                enum machine_mode,
                                                unsigned int);
static unsigned int num_sign_bit_copies1 (rtx, enum machine_mode, rtx,
                                          enum machine_mode, unsigned int);

/* Offset of the first 'e', 'E' or 'V' operand for each rtx code, or
   -1 if a code has no such operand.  */
static int non_rtx_starting_operands[NUM_RTX_CODE];

/* Bit flags that specify the machine subtype we are compiling for.
   Bits are tested using macros TARGET_... defined in the tm.h file
   and set by `-m...' switches.  Must be defined in rtlanal.c.  */

int target_flags;

/* Truncation narrows the mode from SOURCE mode to DESTINATION mode.
   If TARGET_MODE_REP_EXTENDED (DESTINATION, DESTINATION_REP) is
   SIGN_EXTEND then while narrowing we also have to enforce the
   representation and sign-extend the value to mode DESTINATION_REP.

   If the value is already sign-extended to DESTINATION_REP mode we
   can just switch to DESTINATION mode on it.  For each pair of
   integral modes SOURCE and DESTINATION, when truncating from SOURCE
   to DESTINATION, NUM_SIGN_BIT_COPIES_IN_REP[SOURCE][DESTINATION]
   contains the number of high-order bits in SOURCE that have to be
   copies of the sign-bit so that we can do this mode-switch to
   DESTINATION.  */

static unsigned int
num_sign_bit_copies_in_rep[MAX_MODE_INT + 1][MAX_MODE_INT + 1];
\f
/* Return 1 if the value of X is unstable
   (would be different at a different point in the program).
   The frame pointer, arg pointer, etc. are considered stable
   (within one function) and so is anything marked `unchanging'.  */

int
rtx_unstable_p (rtx x)
{
  RTX_CODE code = GET_CODE (x);
  int i;
  const char *fmt;

  switch (code)
    {
    case MEM:
      return !MEM_READONLY_P (x) || rtx_unstable_p (XEXP (x, 0));

    case CONST:
    case CONST_INT:
    case CONST_DOUBLE:
    case CONST_VECTOR:
    case SYMBOL_REF:
    case LABEL_REF:
      return 0;

    case REG:
      /* As in rtx_varies_p, we have to use the actual rtx, not reg number.  */
      if (x == frame_pointer_rtx || x == hard_frame_pointer_rtx
          /* The arg pointer varies if it is not a fixed register.  */
          || (x == arg_pointer_rtx && fixed_regs[ARG_POINTER_REGNUM]))
        return 0;
#ifndef PIC_OFFSET_TABLE_REG_CALL_CLOBBERED
      /* ??? When call-clobbered, the value is stable modulo the restore
         that must happen after a call.  This currently screws up local-alloc
         into believing that the restore is not needed.  */
      if (x == pic_offset_table_rtx)
        return 0;
#endif
      return 1;

    case ASM_OPERANDS:
      if (MEM_VOLATILE_P (x))
        return 1;

      /* Fall through.  */

    default:
      break;
    }

  fmt = GET_RTX_FORMAT (code);
  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    if (fmt[i] == 'e')
      {
        if (rtx_unstable_p (XEXP (x, i)))
          return 1;
      }
    else if (fmt[i] == 'E')
      {
        int j;
        for (j = 0; j < XVECLEN (x, i); j++)
          if (rtx_unstable_p (XVECEXP (x, i, j)))
            return 1;
      }

  return 0;
}
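
/* Illustrative example (not from the original sources): a pseudo such as
   (reg:SI 60) is unstable, so (plus:SI (reg:SI 60) (const_int 4)) is too,
   whereas frame_pointer_rtx, and a MEM_READONLY_P memory reference whose
   address is frame_pointer_rtx, are considered stable.  */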

/* Return 1 if X has a value that can vary even between two
   executions of the program.  0 means X can be compared reliably
   against certain constants or near-constants.
   FOR_ALIAS is nonzero if we are called from alias analysis; if it is
   zero, we are slightly more conservative.
   The frame pointer and the arg pointer are considered constant.  */

int
rtx_varies_p (rtx x, int for_alias)
{
  RTX_CODE code;
  int i;
  const char *fmt;

  if (!x)
    return 0;

  code = GET_CODE (x);
  switch (code)
    {
    case MEM:
      return !MEM_READONLY_P (x) || rtx_varies_p (XEXP (x, 0), for_alias);

    case CONST:
    case CONST_INT:
    case CONST_DOUBLE:
    case CONST_VECTOR:
    case SYMBOL_REF:
    case LABEL_REF:
      return 0;

    case REG:
      /* Note that we have to test for the actual rtx used for the frame
         and arg pointers and not just the register number in case we have
         eliminated the frame and/or arg pointer and are using it
         for pseudos.  */
      if (x == frame_pointer_rtx || x == hard_frame_pointer_rtx
          /* The arg pointer varies if it is not a fixed register.  */
          || (x == arg_pointer_rtx && fixed_regs[ARG_POINTER_REGNUM]))
        return 0;
      if (x == pic_offset_table_rtx
#ifdef PIC_OFFSET_TABLE_REG_CALL_CLOBBERED
          /* ??? When call-clobbered, the value is stable modulo the restore
             that must happen after a call.  This currently screws up
             local-alloc into believing that the restore is not needed, so we
             must return 0 only if we are called from alias analysis.  */
          && for_alias
#endif
          )
        return 0;
      return 1;

    case LO_SUM:
      /* The operand 0 of a LO_SUM is considered constant
         (in fact it is related specifically to operand 1)
         during alias analysis.  */
      return (! for_alias && rtx_varies_p (XEXP (x, 0), for_alias))
             || rtx_varies_p (XEXP (x, 1), for_alias);

    case ASM_OPERANDS:
      if (MEM_VOLATILE_P (x))
        return 1;

      /* Fall through.  */

    default:
      break;
    }

  fmt = GET_RTX_FORMAT (code);
  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    if (fmt[i] == 'e')
      {
        if (rtx_varies_p (XEXP (x, i), for_alias))
          return 1;
      }
    else if (fmt[i] == 'E')
      {
        int j;
        for (j = 0; j < XVECLEN (x, i); j++)
          if (rtx_varies_p (XVECEXP (x, i, j), for_alias))
            return 1;
      }

  return 0;
}

/* Return nonzero if the use of X as an address in a MEM can cause a trap.
   MODE is the mode of the MEM (not that of X) and UNALIGNED_MEMS controls
   whether nonzero is returned for unaligned memory accesses on strict
   alignment machines.  */

static int
rtx_addr_can_trap_p_1 (rtx x, enum machine_mode mode, bool unaligned_mems)
{
  enum rtx_code code = GET_CODE (x);

  switch (code)
    {
    case SYMBOL_REF:
      return SYMBOL_REF_WEAK (x);

    case LABEL_REF:
      return 0;

    case REG:
      /* As in rtx_varies_p, we have to use the actual rtx, not reg number.  */
      if (x == frame_pointer_rtx || x == hard_frame_pointer_rtx
          || x == stack_pointer_rtx
          /* The arg pointer varies if it is not a fixed register.  */
          || (x == arg_pointer_rtx && fixed_regs[ARG_POINTER_REGNUM]))
        return 0;
      /* All of the virtual frame registers are stack references.  */
      if (REGNO (x) >= FIRST_VIRTUAL_REGISTER
          && REGNO (x) <= LAST_VIRTUAL_REGISTER)
        return 0;
      return 1;

    case CONST:
      return rtx_addr_can_trap_p_1 (XEXP (x, 0), mode, unaligned_mems);

    case PLUS:
      /* An address is assumed not to trap if:
         - it is an address that can't trap plus a constant integer,
           with the proper remainder modulo the mode size if we are
           considering unaligned memory references.  */
      if (!rtx_addr_can_trap_p_1 (XEXP (x, 0), mode, unaligned_mems)
          && GET_CODE (XEXP (x, 1)) == CONST_INT)
        {
          HOST_WIDE_INT offset;

          if (!STRICT_ALIGNMENT
              || !unaligned_mems
              || GET_MODE_SIZE (mode) == 0)
            return 0;

          offset = INTVAL (XEXP (x, 1));

#ifdef SPARC_STACK_BOUNDARY_HACK
          /* ??? The SPARC port may claim a STACK_BOUNDARY higher than
             the real alignment of %sp.  However, when it does this, the
             alignment of %sp+STACK_POINTER_OFFSET is STACK_BOUNDARY.  */
          if (SPARC_STACK_BOUNDARY_HACK
              && (XEXP (x, 0) == stack_pointer_rtx
                  || XEXP (x, 0) == hard_frame_pointer_rtx))
            offset -= STACK_POINTER_OFFSET;
#endif

          return offset % GET_MODE_SIZE (mode) != 0;
        }

      /* - or it is the pic register plus a constant.  */
      if (XEXP (x, 0) == pic_offset_table_rtx && CONSTANT_P (XEXP (x, 1)))
        return 0;

      return 1;

    case LO_SUM:
    case PRE_MODIFY:
      return rtx_addr_can_trap_p_1 (XEXP (x, 1), mode, unaligned_mems);

    case PRE_DEC:
    case PRE_INC:
    case POST_DEC:
    case POST_INC:
    case POST_MODIFY:
      return rtx_addr_can_trap_p_1 (XEXP (x, 0), mode, unaligned_mems);

    default:
      break;
    }

  /* If it isn't one of the cases above, it can cause a trap.  */
  return 1;
}
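
/* Illustrative example (not from the original sources): on a strict-alignment
   target, with MODE == SImode (size 4) and UNALIGNED_MEMS true, an address
   such as (plus (reg sp) (const_int 2)) gives 2 % 4 != 0, so a possible trap
   is reported, while (plus (reg sp) (const_int 8)) is considered safe.  */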

/* Return nonzero if the use of X as an address in a MEM can cause a trap.  */

int
rtx_addr_can_trap_p (rtx x)
{
  return rtx_addr_can_trap_p_1 (x, VOIDmode, false);
}

/* Return true if X is an address that is known to not be zero.  */

bool
nonzero_address_p (rtx x)
{
  enum rtx_code code = GET_CODE (x);

  switch (code)
    {
    case SYMBOL_REF:
      return !SYMBOL_REF_WEAK (x);

    case LABEL_REF:
      return true;

    case REG:
      /* As in rtx_varies_p, we have to use the actual rtx, not reg number.  */
      if (x == frame_pointer_rtx || x == hard_frame_pointer_rtx
          || x == stack_pointer_rtx
          || (x == arg_pointer_rtx && fixed_regs[ARG_POINTER_REGNUM]))
        return true;
      /* All of the virtual frame registers are stack references.  */
      if (REGNO (x) >= FIRST_VIRTUAL_REGISTER
          && REGNO (x) <= LAST_VIRTUAL_REGISTER)
        return true;
      return false;

    case CONST:
      return nonzero_address_p (XEXP (x, 0));

    case PLUS:
      if (GET_CODE (XEXP (x, 1)) == CONST_INT)
        return nonzero_address_p (XEXP (x, 0));
      /* Handle PIC references.  */
      else if (XEXP (x, 0) == pic_offset_table_rtx
               && CONSTANT_P (XEXP (x, 1)))
        return true;
      return false;

    case PRE_MODIFY:
      /* Similar to the above; allow positive offsets.  Further, since
         auto-inc is only allowed in memories, the register must be a
         pointer.  */
      if (GET_CODE (XEXP (x, 1)) == CONST_INT
          && INTVAL (XEXP (x, 1)) > 0)
        return true;
      return nonzero_address_p (XEXP (x, 0));

    case PRE_INC:
      /* Similarly.  Further, the offset is always positive.  */
      return true;

    case PRE_DEC:
    case POST_DEC:
    case POST_INC:
    case POST_MODIFY:
      return nonzero_address_p (XEXP (x, 0));

    case LO_SUM:
      return nonzero_address_p (XEXP (x, 1));

    default:
      break;
    }

  /* If it isn't one of the cases above, it might be zero.  */
  return false;
}

/* Return 1 if X refers to a memory location whose address
   cannot be compared reliably with constant addresses,
   or if X refers to a BLKmode memory object.
   FOR_ALIAS is nonzero if we are called from alias analysis; if it is
   zero, we are slightly more conservative.  */

int
rtx_addr_varies_p (rtx x, int for_alias)
{
  enum rtx_code code;
  int i;
  const char *fmt;

  if (x == 0)
    return 0;

  code = GET_CODE (x);
  if (code == MEM)
    return GET_MODE (x) == BLKmode || rtx_varies_p (XEXP (x, 0), for_alias);

  fmt = GET_RTX_FORMAT (code);
  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    if (fmt[i] == 'e')
      {
        if (rtx_addr_varies_p (XEXP (x, i), for_alias))
          return 1;
      }
    else if (fmt[i] == 'E')
      {
        int j;
        for (j = 0; j < XVECLEN (x, i); j++)
          if (rtx_addr_varies_p (XVECEXP (x, i, j), for_alias))
            return 1;
      }
  return 0;
}
\f
/* Return the value of the integer term in X, if one is apparent;
   otherwise return 0.
   Only obvious integer terms are detected.
   This is used in cse.c with the `related_value' field.  */

HOST_WIDE_INT
get_integer_term (rtx x)
{
  if (GET_CODE (x) == CONST)
    x = XEXP (x, 0);

  if (GET_CODE (x) == MINUS
      && GET_CODE (XEXP (x, 1)) == CONST_INT)
    return - INTVAL (XEXP (x, 1));
  if (GET_CODE (x) == PLUS
      && GET_CODE (XEXP (x, 1)) == CONST_INT)
    return INTVAL (XEXP (x, 1));
  return 0;
}

/* If X is a constant, return the value sans apparent integer term;
   otherwise return 0.
   Only obvious integer terms are detected.  */

rtx
get_related_value (rtx x)
{
  if (GET_CODE (x) != CONST)
    return 0;
  x = XEXP (x, 0);
  if (GET_CODE (x) == PLUS
      && GET_CODE (XEXP (x, 1)) == CONST_INT)
    return XEXP (x, 0);
  else if (GET_CODE (x) == MINUS
           && GET_CODE (XEXP (x, 1)) == CONST_INT)
    return XEXP (x, 0);
  return 0;
}
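
/* Illustrative example (not from the original sources): for
   X == (const (plus (symbol_ref "foo") (const_int 12))),
   get_integer_term returns 12 and get_related_value returns
   (symbol_ref "foo"); for a bare (symbol_ref "foo") both return 0,
   since there is no CONST wrapper.  */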
\f
/* Return true if SYMBOL is a SYMBOL_REF and OFFSET + SYMBOL points
   to somewhere in the same object or object_block as SYMBOL.  */

bool
offset_within_block_p (rtx symbol, HOST_WIDE_INT offset)
{
  tree decl;

  if (GET_CODE (symbol) != SYMBOL_REF)
    return false;

  if (offset == 0)
    return true;

  if (offset > 0)
    {
      if (CONSTANT_POOL_ADDRESS_P (symbol)
          && offset < (int) GET_MODE_SIZE (get_pool_mode (symbol)))
        return true;

      decl = SYMBOL_REF_DECL (symbol);
      if (decl && offset < int_size_in_bytes (TREE_TYPE (decl)))
        return true;
    }

  if (SYMBOL_REF_HAS_BLOCK_INFO_P (symbol)
      && SYMBOL_REF_BLOCK (symbol)
      && SYMBOL_REF_BLOCK_OFFSET (symbol) >= 0
      && ((unsigned HOST_WIDE_INT) offset + SYMBOL_REF_BLOCK_OFFSET (symbol)
          < (unsigned HOST_WIDE_INT) SYMBOL_REF_BLOCK (symbol)->size))
    return true;

  return false;
}

/* Split X into a base and a constant offset, storing them in *BASE_OUT
   and *OFFSET_OUT respectively.  */

void
split_const (rtx x, rtx *base_out, rtx *offset_out)
{
  if (GET_CODE (x) == CONST)
    {
      x = XEXP (x, 0);
      if (GET_CODE (x) == PLUS && GET_CODE (XEXP (x, 1)) == CONST_INT)
        {
          *base_out = XEXP (x, 0);
          *offset_out = XEXP (x, 1);
          return;
        }
    }
  *base_out = x;
  *offset_out = const0_rtx;
}
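
/* A minimal usage sketch (hypothetical caller, not part of this file):

     rtx base, offset;
     split_const (addr, &base, &offset);
     if (GET_CODE (base) == SYMBOL_REF
         && offset_within_block_p (base, INTVAL (offset)))
       ... the access stays within the symbol's object ...

   For an X without a CONST wrapper, *BASE_OUT is X itself and *OFFSET_OUT
   is const0_rtx.  */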
\f
/* Return the number of places FIND appears within X.  If COUNT_DEST is
   zero, we do not count occurrences inside the destination of a SET.  */

int
count_occurrences (rtx x, rtx find, int count_dest)
{
  int i, j;
  enum rtx_code code;
  const char *format_ptr;
  int count;

  if (x == find)
    return 1;

  code = GET_CODE (x);

  switch (code)
    {
    case REG:
    case CONST_INT:
    case CONST_DOUBLE:
    case CONST_VECTOR:
    case SYMBOL_REF:
    case CODE_LABEL:
    case PC:
    case CC0:
      return 0;

    case EXPR_LIST:
      count = count_occurrences (XEXP (x, 0), find, count_dest);
      if (XEXP (x, 1))
        count += count_occurrences (XEXP (x, 1), find, count_dest);
      return count;

    case MEM:
      if (MEM_P (find) && rtx_equal_p (x, find))
        return 1;
      break;

    case SET:
      if (SET_DEST (x) == find && ! count_dest)
        return count_occurrences (SET_SRC (x), find, count_dest);
      break;

    default:
      break;
    }

  format_ptr = GET_RTX_FORMAT (code);
  count = 0;

  for (i = 0; i < GET_RTX_LENGTH (code); i++)
    {
      switch (*format_ptr++)
        {
        case 'e':
          count += count_occurrences (XEXP (x, i), find, count_dest);
          break;

        case 'E':
          for (j = 0; j < XVECLEN (x, i); j++)
            count += count_occurrences (XVECEXP (x, i, j), find, count_dest);
          break;
        }
    }
  return count;
}
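
/* Illustrative example (not from the original sources, and assuming the
   usual sharing of pseudo-register rtxes): for the pattern
   (set (reg:SI 60) (plus:SI (reg:SI 60) (reg:SI 60))) with FIND being the
   shared rtx for (reg:SI 60), count_occurrences returns 2 when
   COUNT_DEST == 0 (only the two uses in the source) and 3 otherwise.  */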

\f
/* Nonzero if register REG appears somewhere within IN.
   Also works if REG is not a register; in this case it checks
   for a subexpression of IN that is Lisp "equal" to REG.  */

int
reg_mentioned_p (rtx reg, rtx in)
{
  const char *fmt;
  int i;
  enum rtx_code code;

  if (in == 0)
    return 0;

  if (reg == in)
    return 1;

  if (GET_CODE (in) == LABEL_REF)
    return reg == XEXP (in, 0);

  code = GET_CODE (in);

  switch (code)
    {
      /* Compare registers by number.  */
    case REG:
      return REG_P (reg) && REGNO (in) == REGNO (reg);

      /* These codes have no constituent expressions
         and are unique.  */
    case SCRATCH:
    case CC0:
    case PC:
      return 0;

    case CONST_INT:
    case CONST_VECTOR:
    case CONST_DOUBLE:
      /* These are kept unique for a given value.  */
      return 0;

    default:
      break;
    }

  if (GET_CODE (reg) == code && rtx_equal_p (reg, in))
    return 1;

  fmt = GET_RTX_FORMAT (code);

  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    {
      if (fmt[i] == 'E')
        {
          int j;
          for (j = XVECLEN (in, i) - 1; j >= 0; j--)
            if (reg_mentioned_p (reg, XVECEXP (in, i, j)))
              return 1;
        }
      else if (fmt[i] == 'e'
               && reg_mentioned_p (reg, XEXP (in, i)))
        return 1;
    }
  return 0;
}
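
/* A minimal usage sketch (hypothetical, for illustration only):

     rtx r = gen_rtx_REG (SImode, 60);
     if (reg_mentioned_p (r, PATTERN (insn)))
       ... pseudo 60 appears somewhere in the insn pattern ...

   REGs are compared by register number, so any (reg 60) inside the
   pattern matches even if it is a distinct rtx object.  */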
\f
/* Return 1 if in between BEG and END, exclusive of BEG and END, there is
   no CODE_LABEL insn.  */

int
no_labels_between_p (rtx beg, rtx end)
{
  rtx p;
  if (beg == end)
    return 0;
  for (p = NEXT_INSN (beg); p != end; p = NEXT_INSN (p))
    if (LABEL_P (p))
      return 0;
  return 1;
}

/* Nonzero if register REG is used in an insn between
   FROM_INSN and TO_INSN (exclusive of those two).  */

int
reg_used_between_p (rtx reg, rtx from_insn, rtx to_insn)
{
  rtx insn;

  if (from_insn == to_insn)
    return 0;

  for (insn = NEXT_INSN (from_insn); insn != to_insn; insn = NEXT_INSN (insn))
    if (INSN_P (insn)
        && (reg_overlap_mentioned_p (reg, PATTERN (insn))
            || (CALL_P (insn) && find_reg_fusage (insn, USE, reg))))
      return 1;
  return 0;
}
\f
/* Nonzero if the old value of X, a register, is referenced in BODY.  If X
   is entirely replaced by a new value and the only use is as a SET_DEST,
   we do not consider it a reference.  */

int
reg_referenced_p (rtx x, rtx body)
{
  int i;

  switch (GET_CODE (body))
    {
    case SET:
      if (reg_overlap_mentioned_p (x, SET_SRC (body)))
        return 1;

      /* If the destination is anything other than CC0, PC, a REG or a SUBREG
         of a REG that occupies all of the REG, the insn references X if
         it is mentioned in the destination.  */
      if (GET_CODE (SET_DEST (body)) != CC0
          && GET_CODE (SET_DEST (body)) != PC
          && !REG_P (SET_DEST (body))
          && ! (GET_CODE (SET_DEST (body)) == SUBREG
                && REG_P (SUBREG_REG (SET_DEST (body)))
                && (((GET_MODE_SIZE (GET_MODE (SUBREG_REG (SET_DEST (body))))
                      + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD)
                    == ((GET_MODE_SIZE (GET_MODE (SET_DEST (body)))
                         + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD)))
          && reg_overlap_mentioned_p (x, SET_DEST (body)))
        return 1;
      return 0;

    case ASM_OPERANDS:
      for (i = ASM_OPERANDS_INPUT_LENGTH (body) - 1; i >= 0; i--)
        if (reg_overlap_mentioned_p (x, ASM_OPERANDS_INPUT (body, i)))
          return 1;
      return 0;

    case CALL:
    case USE:
    case IF_THEN_ELSE:
      return reg_overlap_mentioned_p (x, body);

    case TRAP_IF:
      return reg_overlap_mentioned_p (x, TRAP_CONDITION (body));

    case PREFETCH:
      return reg_overlap_mentioned_p (x, XEXP (body, 0));

    case UNSPEC:
    case UNSPEC_VOLATILE:
      for (i = XVECLEN (body, 0) - 1; i >= 0; i--)
        if (reg_overlap_mentioned_p (x, XVECEXP (body, 0, i)))
          return 1;
      return 0;

    case PARALLEL:
      for (i = XVECLEN (body, 0) - 1; i >= 0; i--)
        if (reg_referenced_p (x, XVECEXP (body, 0, i)))
          return 1;
      return 0;

    case CLOBBER:
      if (MEM_P (XEXP (body, 0)))
        if (reg_overlap_mentioned_p (x, XEXP (XEXP (body, 0), 0)))
          return 1;
      return 0;

    case COND_EXEC:
      if (reg_overlap_mentioned_p (x, COND_EXEC_TEST (body)))
        return 1;
      return reg_referenced_p (x, COND_EXEC_CODE (body));

    default:
      return 0;
    }
}
\f
/* Nonzero if register REG is set or clobbered in an insn between
   FROM_INSN and TO_INSN (exclusive of those two).  */

int
reg_set_between_p (rtx reg, rtx from_insn, rtx to_insn)
{
  rtx insn;

  if (from_insn == to_insn)
    return 0;

  for (insn = NEXT_INSN (from_insn); insn != to_insn; insn = NEXT_INSN (insn))
    if (INSN_P (insn) && reg_set_p (reg, insn))
      return 1;
  return 0;
}

/* Internals of reg_set_between_p.  */
int
reg_set_p (rtx reg, rtx insn)
{
  /* We can be passed an insn or part of one.  If we are passed an insn,
     check if a side-effect of the insn clobbers REG.  */
  if (INSN_P (insn)
      && (FIND_REG_INC_NOTE (insn, reg)
          || (CALL_P (insn)
              && ((REG_P (reg)
                   && REGNO (reg) < FIRST_PSEUDO_REGISTER
                   && overlaps_hard_reg_set_p (regs_invalidated_by_call,
                                               GET_MODE (reg), REGNO (reg)))
                  || MEM_P (reg)
                  || find_reg_fusage (insn, CLOBBER, reg)))))
    return 1;

  return set_of (reg, insn) != NULL_RTX;
}

/* Similar to reg_set_between_p, but check all registers in X.  Return 0
   only if none of them are modified between START and END.  Return 1 if
   X contains a MEM; this routine does use memory aliasing.  */

int
modified_between_p (rtx x, rtx start, rtx end)
{
  enum rtx_code code = GET_CODE (x);
  const char *fmt;
  int i, j;
  rtx insn;

  if (start == end)
    return 0;

  switch (code)
    {
    case CONST_INT:
    case CONST_DOUBLE:
    case CONST_VECTOR:
    case CONST:
    case SYMBOL_REF:
    case LABEL_REF:
      return 0;

    case PC:
    case CC0:
      return 1;

    case MEM:
      if (modified_between_p (XEXP (x, 0), start, end))
        return 1;
      if (MEM_READONLY_P (x))
        return 0;
      for (insn = NEXT_INSN (start); insn != end; insn = NEXT_INSN (insn))
        if (memory_modified_in_insn_p (x, insn))
          return 1;
      return 0;

    case REG:
      return reg_set_between_p (x, start, end);

    default:
      break;
    }

  fmt = GET_RTX_FORMAT (code);
  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    {
      if (fmt[i] == 'e' && modified_between_p (XEXP (x, i), start, end))
        return 1;

      else if (fmt[i] == 'E')
        for (j = XVECLEN (x, i) - 1; j >= 0; j--)
          if (modified_between_p (XVECEXP (x, i, j), start, end))
            return 1;
    }

  return 0;
}

/* Similar to reg_set_p, but check all registers in X.  Return 0 only if none
   of them are modified in INSN.  Return 1 if X contains a MEM; this routine
   does use memory aliasing.  */

int
modified_in_p (rtx x, rtx insn)
{
  enum rtx_code code = GET_CODE (x);
  const char *fmt;
  int i, j;

  switch (code)
    {
    case CONST_INT:
    case CONST_DOUBLE:
    case CONST_VECTOR:
    case CONST:
    case SYMBOL_REF:
    case LABEL_REF:
      return 0;

    case PC:
    case CC0:
      return 1;

    case MEM:
      if (modified_in_p (XEXP (x, 0), insn))
        return 1;
      if (MEM_READONLY_P (x))
        return 0;
      if (memory_modified_in_insn_p (x, insn))
        return 1;
      return 0;

    case REG:
      return reg_set_p (x, insn);

    default:
      break;
    }

  fmt = GET_RTX_FORMAT (code);
  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    {
      if (fmt[i] == 'e' && modified_in_p (XEXP (x, i), insn))
        return 1;

      else if (fmt[i] == 'E')
        for (j = XVECLEN (x, i) - 1; j >= 0; j--)
          if (modified_in_p (XVECEXP (x, i, j), insn))
            return 1;
    }

  return 0;
}
\f
/* Helper function for set_of.  */
struct set_of_data
  {
    rtx found;
    rtx pat;
  };

static void
set_of_1 (rtx x, rtx pat, void *data1)
{
  struct set_of_data *data = (struct set_of_data *) (data1);
  if (rtx_equal_p (x, data->pat)
      || (!MEM_P (x) && reg_overlap_mentioned_p (data->pat, x)))
    data->found = pat;
}

/* Given an INSN, return a SET or CLOBBER expression that does modify PAT
   (either directly or via STRICT_LOW_PART and similar modifiers).  */
rtx
set_of (rtx pat, rtx insn)
{
  struct set_of_data data;
  data.found = NULL_RTX;
  data.pat = pat;
  note_stores (INSN_P (insn) ? PATTERN (insn) : insn, set_of_1, &data);
  return data.found;
}
\f
/* Given an INSN, return a SET expression if this insn has only a single SET.
   It may also have CLOBBERs, USEs, or SETs whose outputs
   will not be used, which we ignore.  */

rtx
single_set_2 (rtx insn, rtx pat)
{
  rtx set = NULL;
  int set_verified = 1;
  int i;

  if (GET_CODE (pat) == PARALLEL)
    {
      for (i = 0; i < XVECLEN (pat, 0); i++)
        {
          rtx sub = XVECEXP (pat, 0, i);
          switch (GET_CODE (sub))
            {
            case USE:
            case CLOBBER:
              break;

            case SET:
              /* We can consider insns having multiple sets, where all
                 but one are dead, as single set insns.  In the common
                 case only a single set is present in the pattern, so we
                 want to avoid checking for REG_UNUSED notes unless
                 necessary.

                 When we reach a set for the first time, we just expect
                 it to be the single set we are looking for; only when
                 more sets are found in the insn do we check them.  */
              if (!set_verified)
                {
                  if (find_reg_note (insn, REG_UNUSED, SET_DEST (set))
                      && !side_effects_p (set))
                    set = NULL;
                  else
                    set_verified = 1;
                }
              if (!set)
                set = sub, set_verified = 0;
              else if (!find_reg_note (insn, REG_UNUSED, SET_DEST (sub))
                       || side_effects_p (sub))
                return NULL_RTX;
              break;

            default:
              return NULL_RTX;
            }
        }
    }
  return set;
}
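
/* Illustrative example (not from the original sources): for an insn whose
   pattern is
     (parallel [(set (reg:SI 60) (reg:SI 61))
                (clobber (reg:CC 17))])
   single_set_2 returns the inner SET; the CLOBBER is skipped.  Register
   numbers here are made up for the example.  */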

/* Given an INSN, return nonzero if it has more than one SET, else return
   zero.  */

int
multiple_sets (rtx insn)
{
  int found;
  int i;

  /* INSN must be an insn.  */
  if (! INSN_P (insn))
    return 0;

  /* Only a PARALLEL can have multiple SETs.  */
  if (GET_CODE (PATTERN (insn)) == PARALLEL)
    {
      for (i = 0, found = 0; i < XVECLEN (PATTERN (insn), 0); i++)
        if (GET_CODE (XVECEXP (PATTERN (insn), 0, i)) == SET)
          {
            /* If we have already found a SET, then return now.  */
            if (found)
              return 1;
            else
              found = 1;
          }
    }

  /* Either zero or one SET.  */
  return 0;
}
\f
/* Return nonzero if the destination of SET equals the source
   and there are no side effects.  */

int
set_noop_p (rtx set)
{
  rtx src = SET_SRC (set);
  rtx dst = SET_DEST (set);

  if (dst == pc_rtx && src == pc_rtx)
    return 1;

  if (MEM_P (dst) && MEM_P (src))
    return rtx_equal_p (dst, src) && !side_effects_p (dst);

  if (GET_CODE (dst) == ZERO_EXTRACT)
    return rtx_equal_p (XEXP (dst, 0), src)
           && ! BYTES_BIG_ENDIAN && XEXP (dst, 2) == const0_rtx
           && !side_effects_p (src);

  if (GET_CODE (dst) == STRICT_LOW_PART)
    dst = XEXP (dst, 0);

  if (GET_CODE (src) == SUBREG && GET_CODE (dst) == SUBREG)
    {
      if (SUBREG_BYTE (src) != SUBREG_BYTE (dst))
        return 0;
      src = SUBREG_REG (src);
      dst = SUBREG_REG (dst);
    }

  return (REG_P (src) && REG_P (dst)
          && REGNO (src) == REGNO (dst));
}
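
/* Illustrative example (not from the original sources): the pattern
   (set (reg:SI 3) (reg:SI 3)) is a no-op move, and so is
   (set (subreg:SI (reg:DI 3) 0) (subreg:SI (reg:DI 3) 0)); the latter
   passes the SUBREG check because both sides have the same SUBREG_BYTE
   and the same underlying register.  */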
\f
/* Return nonzero if an insn consists only of SETs, each of which only sets a
   value to itself.  */

int
noop_move_p (rtx insn)
{
  rtx pat = PATTERN (insn);

  if (INSN_CODE (insn) == NOOP_MOVE_INSN_CODE)
    return 1;

  /* Insns carrying these notes are useful later on.  */
  if (find_reg_note (insn, REG_EQUAL, NULL_RTX))
    return 0;

  /* For now treat an insn with a REG_RETVAL note as a
     special insn which should not be considered a no-op.  */
  if (find_reg_note (insn, REG_RETVAL, NULL_RTX))
    return 0;

  if (GET_CODE (pat) == SET && set_noop_p (pat))
    return 1;

  if (GET_CODE (pat) == PARALLEL)
    {
      int i;
      /* If nothing but SETs of registers to themselves,
         this insn can also be deleted.  */
      for (i = 0; i < XVECLEN (pat, 0); i++)
        {
          rtx tem = XVECEXP (pat, 0, i);

          if (GET_CODE (tem) == USE
              || GET_CODE (tem) == CLOBBER)
            continue;

          if (GET_CODE (tem) != SET || ! set_noop_p (tem))
            return 0;
        }

      return 1;
    }
  return 0;
}
\f

/* Return the last thing that X was assigned from before *PINSN.  If VALID_TO
   is not NULL_RTX then verify that the object is not modified up to VALID_TO.
   If the object was modified, if we hit a partial assignment to X, or hit a
   CODE_LABEL first, return X.  If we found an assignment, update *PINSN to
   point to it.  ALLOW_HWREG is set to 1 if hardware registers are allowed to
   be the src.  */

rtx
find_last_value (rtx x, rtx *pinsn, rtx valid_to, int allow_hwreg)
{
  rtx p;

  for (p = PREV_INSN (*pinsn); p && !LABEL_P (p);
       p = PREV_INSN (p))
    if (INSN_P (p))
      {
        rtx set = single_set (p);
        rtx note = find_reg_note (p, REG_EQUAL, NULL_RTX);

        if (set && rtx_equal_p (x, SET_DEST (set)))
          {
            rtx src = SET_SRC (set);

            if (note && GET_CODE (XEXP (note, 0)) != EXPR_LIST)
              src = XEXP (note, 0);

            if ((valid_to == NULL_RTX
                 || ! modified_between_p (src, PREV_INSN (p), valid_to))
                /* Reject hard registers because we don't usually want
                   to use them; we'd rather use a pseudo.  */
                && (! (REG_P (src)
                       && REGNO (src) < FIRST_PSEUDO_REGISTER) || allow_hwreg))
              {
                *pinsn = p;
                return src;
              }
          }

        /* If set in non-simple way, we don't have a value.  */
        if (reg_set_p (x, p))
          break;
      }

  return x;
}
\f
/* Return nonzero if register in range [REGNO, ENDREGNO)
   appears either explicitly or implicitly in X
   other than being stored into.

   References contained within the substructure at LOC do not count.
   LOC may be zero, meaning don't ignore anything.  */

int
refers_to_regno_p (unsigned int regno, unsigned int endregno, rtx x,
                   rtx *loc)
{
  int i;
  unsigned int x_regno;
  RTX_CODE code;
  const char *fmt;

 repeat:
  /* The contents of a REG_NONNEG note is always zero, so we must come here
     upon repeat in case the last REG_NOTE is a REG_NONNEG note.  */
  if (x == 0)
    return 0;

  code = GET_CODE (x);

  switch (code)
    {
    case REG:
      x_regno = REGNO (x);

      /* If we are modifying the stack, frame, or argument pointer, it will
         clobber a virtual register.  In fact, we could be more precise,
         but it isn't worth it.  */
      if ((x_regno == STACK_POINTER_REGNUM
#if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
           || x_regno == ARG_POINTER_REGNUM
#endif
           || x_regno == FRAME_POINTER_REGNUM)
          && regno >= FIRST_VIRTUAL_REGISTER && regno <= LAST_VIRTUAL_REGISTER)
        return 1;

      return endregno > x_regno && regno < END_REGNO (x);

    case SUBREG:
      /* If this is a SUBREG of a hard reg, we can see exactly which
         registers are being modified.  Otherwise, handle normally.  */
      if (REG_P (SUBREG_REG (x))
          && REGNO (SUBREG_REG (x)) < FIRST_PSEUDO_REGISTER)
        {
          unsigned int inner_regno = subreg_regno (x);
          unsigned int inner_endregno
            = inner_regno + (inner_regno < FIRST_PSEUDO_REGISTER
                             ? subreg_nregs (x) : 1);

          return endregno > inner_regno && regno < inner_endregno;
        }
      break;

    case CLOBBER:
    case SET:
      if (&SET_DEST (x) != loc
          /* Note setting a SUBREG counts as referring to the REG it is in for
             a pseudo but not for hard registers since we can
             treat each word individually.  */
          && ((GET_CODE (SET_DEST (x)) == SUBREG
               && loc != &SUBREG_REG (SET_DEST (x))
               && REG_P (SUBREG_REG (SET_DEST (x)))
               && REGNO (SUBREG_REG (SET_DEST (x))) >= FIRST_PSEUDO_REGISTER
               && refers_to_regno_p (regno, endregno,
                                     SUBREG_REG (SET_DEST (x)), loc))
              || (!REG_P (SET_DEST (x))
                  && refers_to_regno_p (regno, endregno, SET_DEST (x), loc))))
        return 1;

      if (code == CLOBBER || loc == &SET_SRC (x))
        return 0;
      x = SET_SRC (x);
      goto repeat;

    default:
      break;
    }

  /* X does not match, so try its subexpressions.  */

  fmt = GET_RTX_FORMAT (code);
  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    {
      if (fmt[i] == 'e' && loc != &XEXP (x, i))
        {
          if (i == 0)
            {
              x = XEXP (x, 0);
              goto repeat;
            }
          else
            if (refers_to_regno_p (regno, endregno, XEXP (x, i), loc))
              return 1;
        }
      else if (fmt[i] == 'E')
        {
          int j;
          for (j = XVECLEN (x, i) - 1; j >= 0; j--)
            if (loc != &XVECEXP (x, i, j)
                && refers_to_regno_p (regno, endregno, XVECEXP (x, i, j), loc))
              return 1;
        }
    }
  return 0;
}

/* Nonzero if modifying X will affect IN.  If X is a register or a SUBREG,
   we check if any register number in X conflicts with the relevant register
   numbers.  If X is a constant, return 0.  If X is a MEM, return 1 iff IN
   contains a MEM (we don't bother checking for memory addresses that can't
   conflict because we expect this to be a rare case).  */

int
reg_overlap_mentioned_p (rtx x, rtx in)
{
  unsigned int regno, endregno;

  /* If either argument is a constant, then modifying X can not
     affect IN.  Here we look at IN; we can profitably combine
     CONSTANT_P (x) with the switch statement below.  */
  if (CONSTANT_P (in))
    return 0;

 recurse:
  switch (GET_CODE (x))
    {
    case STRICT_LOW_PART:
    case ZERO_EXTRACT:
    case SIGN_EXTRACT:
      /* Overly conservative.  */
      x = XEXP (x, 0);
      goto recurse;

    case SUBREG:
      regno = REGNO (SUBREG_REG (x));
      if (regno < FIRST_PSEUDO_REGISTER)
        regno = subreg_regno (x);
      endregno = regno + (regno < FIRST_PSEUDO_REGISTER
                          ? subreg_nregs (x) : 1);
      goto do_reg;

    case REG:
      regno = REGNO (x);
      endregno = END_REGNO (x);
    do_reg:
      return refers_to_regno_p (regno, endregno, in, (rtx*) 0);

    case MEM:
      {
        const char *fmt;
        int i;

        if (MEM_P (in))
          return 1;

        fmt = GET_RTX_FORMAT (GET_CODE (in));
        for (i = GET_RTX_LENGTH (GET_CODE (in)) - 1; i >= 0; i--)
          if (fmt[i] == 'e')
            {
              if (reg_overlap_mentioned_p (x, XEXP (in, i)))
                return 1;
            }
          else if (fmt[i] == 'E')
            {
              int j;
              for (j = XVECLEN (in, i) - 1; j >= 0; --j)
                if (reg_overlap_mentioned_p (x, XVECEXP (in, i, j)))
                  return 1;
            }

        return 0;
      }

    case SCRATCH:
    case PC:
    case CC0:
      return reg_mentioned_p (x, in);

    case PARALLEL:
      {
        int i;

        /* If any register in here refers to it we return true.  */
        for (i = XVECLEN (x, 0) - 1; i >= 0; i--)
          if (XEXP (XVECEXP (x, 0, i), 0) != 0
              && reg_overlap_mentioned_p (XEXP (XVECEXP (x, 0, i), 0), in))
            return 1;
        return 0;
      }

    default:
      gcc_assert (CONSTANT_P (x));
      return 0;
    }
}
\f
/* Call FUN on each register or MEM that is stored into or clobbered by X.
   (X would be the pattern of an insn).  DATA is an arbitrary pointer,
   ignored by note_stores, but passed to FUN.

   FUN receives three arguments:
   1. the REG, MEM, CC0 or PC being stored in or clobbered,
   2. the SET or CLOBBER rtx that does the store,
   3. the pointer DATA provided to note_stores.

   If the item being stored in or clobbered is a SUBREG of a hard register,
   the SUBREG will be passed.  */

void
note_stores (rtx x, void (*fun) (rtx, rtx, void *), void *data)
{
  int i;

  if (GET_CODE (x) == COND_EXEC)
    x = COND_EXEC_CODE (x);

  if (GET_CODE (x) == SET || GET_CODE (x) == CLOBBER)
    {
      rtx dest = SET_DEST (x);

      while ((GET_CODE (dest) == SUBREG
              && (!REG_P (SUBREG_REG (dest))
                  || REGNO (SUBREG_REG (dest)) >= FIRST_PSEUDO_REGISTER))
             || GET_CODE (dest) == ZERO_EXTRACT
             || GET_CODE (dest) == STRICT_LOW_PART)
        dest = XEXP (dest, 0);

      /* If we have a PARALLEL, SET_DEST is a list of EXPR_LIST expressions,
         each of whose first operand is a register.  */
      if (GET_CODE (dest) == PARALLEL)
        {
          for (i = XVECLEN (dest, 0) - 1; i >= 0; i--)
            if (XEXP (XVECEXP (dest, 0, i), 0) != 0)
              (*fun) (XEXP (XVECEXP (dest, 0, i), 0), x, data);
        }
      else
        (*fun) (dest, x, data);
    }

  else if (GET_CODE (x) == PARALLEL)
    for (i = XVECLEN (x, 0) - 1; i >= 0; i--)
      note_stores (XVECEXP (x, 0, i), fun, data);
}
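
/* A minimal usage sketch (hypothetical, not part of this file): count the
   hard registers stored into by an insn pattern.

     static void
     count_hard_reg_stores_1 (rtx x, rtx pat ATTRIBUTE_UNUSED, void *data)
     {
       int *count = (int *) data;
       if (REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER)
         (*count)++;
     }

     ...
     int n = 0;
     note_stores (PATTERN (insn), count_hard_reg_stores_1, &n);

   The callback name and counting policy are illustrative only.  */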
\f
/* Like note_stores, but call FUN for each expression that is being
   referenced in PBODY, a pointer to the PATTERN of an insn.  We only call
   FUN for each expression, not any interior subexpressions.  FUN receives a
   pointer to the expression and the DATA passed to this function.

   Note that this is not quite the same test as that done in reg_referenced_p
   since that considers something as being referenced if it is being
   partially set, while we do not.  */

void
note_uses (rtx *pbody, void (*fun) (rtx *, void *), void *data)
{
  rtx body = *pbody;
  int i;

  switch (GET_CODE (body))
    {
    case COND_EXEC:
      (*fun) (&COND_EXEC_TEST (body), data);
      note_uses (&COND_EXEC_CODE (body), fun, data);
      return;

    case PARALLEL:
      for (i = XVECLEN (body, 0) - 1; i >= 0; i--)
        note_uses (&XVECEXP (body, 0, i), fun, data);
      return;

    case SEQUENCE:
      for (i = XVECLEN (body, 0) - 1; i >= 0; i--)
        note_uses (&PATTERN (XVECEXP (body, 0, i)), fun, data);
      return;

    case USE:
      (*fun) (&XEXP (body, 0), data);
      return;

    case ASM_OPERANDS:
      for (i = ASM_OPERANDS_INPUT_LENGTH (body) - 1; i >= 0; i--)
        (*fun) (&ASM_OPERANDS_INPUT (body, i), data);
      return;

    case TRAP_IF:
      (*fun) (&TRAP_CONDITION (body), data);
      return;

    case PREFETCH:
      (*fun) (&XEXP (body, 0), data);
      return;

    case UNSPEC:
    case UNSPEC_VOLATILE:
      for (i = XVECLEN (body, 0) - 1; i >= 0; i--)
        (*fun) (&XVECEXP (body, 0, i), data);
      return;

    case CLOBBER:
      if (MEM_P (XEXP (body, 0)))
        (*fun) (&XEXP (XEXP (body, 0), 0), data);
      return;

    case SET:
      {
        rtx dest = SET_DEST (body);

        /* For sets we replace everything in source plus registers in memory
           expression in store and operands of a ZERO_EXTRACT.  */
        (*fun) (&SET_SRC (body), data);

        if (GET_CODE (dest) == ZERO_EXTRACT)
          {
            (*fun) (&XEXP (dest, 1), data);
            (*fun) (&XEXP (dest, 2), data);
          }

        while (GET_CODE (dest) == SUBREG || GET_CODE (dest) == STRICT_LOW_PART)
          dest = XEXP (dest, 0);

        if (MEM_P (dest))
          (*fun) (&XEXP (dest, 0), data);
      }
      return;

    default:
      /* All the other possibilities never store.  */
      (*fun) (pbody, data);
      return;
    }
}
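
/* A minimal usage sketch (hypothetical, not part of this file): because FUN
   receives a pointer into the pattern, note_uses permits in-place editing of
   a whole use expression.

     static void
     replace_use_1 (rtx *loc, void *data)
     {
       rtx *pair = (rtx *) data;   /* pair[0] = old, pair[1] = new.  */
       if (rtx_equal_p (*loc, pair[0]))
         *loc = pair[1];
     }

   The helper name and replacement policy are illustrative only; note_uses
   visits top-level use expressions, not their interior subexpressions.  */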
\f
/* Return nonzero if X's old contents don't survive after INSN.
   This will be true if X is (cc0) or if X is a register and
   X dies in INSN or because INSN entirely sets X.

   "Entirely set" means set directly and not through a SUBREG, or
   ZERO_EXTRACT, so no trace of the old contents remains.
   Likewise, REG_INC does not count.

   REG may be a hard or pseudo reg.  Renumbering is not taken into account,
   but for this use that makes no difference, since regs don't overlap
   during their lifetimes.  Therefore, this function may be used
   at any time after deaths have been computed.

   If REG is a hard reg that occupies multiple machine registers, this
   function will only return 1 if each of those registers will be replaced
   by INSN.  */

int
dead_or_set_p (rtx insn, rtx x)
{
  unsigned int regno, end_regno;
  unsigned int i;

  /* Can't use cc0_rtx below since this file is used by genattrtab.c.  */
  if (GET_CODE (x) == CC0)
    return 1;

  gcc_assert (REG_P (x));

  regno = REGNO (x);
  end_regno = END_REGNO (x);
  for (i = regno; i < end_regno; i++)
    if (! dead_or_set_regno_p (insn, i))
      return 0;

  return 1;
}

/* Return TRUE iff DEST is a register or subreg of a register and
   doesn't change the number of words of the inner register, and any
   part of the register is TEST_REGNO.  */

static bool
covers_regno_no_parallel_p (rtx dest, unsigned int test_regno)
{
  unsigned int regno, endregno;

  if (GET_CODE (dest) == SUBREG
      && (((GET_MODE_SIZE (GET_MODE (dest))
            + UNITS_PER_WORD - 1) / UNITS_PER_WORD)
          == ((GET_MODE_SIZE (GET_MODE (SUBREG_REG (dest)))
               + UNITS_PER_WORD - 1) / UNITS_PER_WORD)))
    dest = SUBREG_REG (dest);

  if (!REG_P (dest))
    return false;

  regno = REGNO (dest);
  endregno = END_REGNO (dest);
  return (test_regno >= regno && test_regno < endregno);
}

/* Like covers_regno_no_parallel_p, but also handles PARALLELs where
   any member matches the covers_regno_no_parallel_p criteria.  */

static bool
covers_regno_p (rtx dest, unsigned int test_regno)
{
  if (GET_CODE (dest) == PARALLEL)
    {
      /* Some targets place small structures in registers for return
         values of functions, and those registers are wrapped in
         PARALLELs that we may see as the destination of a SET.  */
      int i;

      for (i = XVECLEN (dest, 0) - 1; i >= 0; i--)
        {
          rtx inner = XEXP (XVECEXP (dest, 0, i), 0);
          if (inner != NULL_RTX
              && covers_regno_no_parallel_p (inner, test_regno))
            return true;
        }

      return false;
    }
  else
    return covers_regno_no_parallel_p (dest, test_regno);
}

/* Utility function for dead_or_set_p to check an individual register.  */

int
dead_or_set_regno_p (rtx insn, unsigned int test_regno)
{
  rtx pattern;

  /* See if there is a death note for something that includes TEST_REGNO.  */
  if (find_regno_note (insn, REG_DEAD, test_regno))
    return 1;

  if (CALL_P (insn)
      && find_regno_fusage (insn, CLOBBER, test_regno))
    return 1;

  pattern = PATTERN (insn);

  if (GET_CODE (pattern) == COND_EXEC)
    pattern = COND_EXEC_CODE (pattern);

  if (GET_CODE (pattern) == SET)
    return covers_regno_p (SET_DEST (pattern), test_regno);
  else if (GET_CODE (pattern) == PARALLEL)
    {
      int i;

      for (i = XVECLEN (pattern, 0) - 1; i >= 0; i--)
        {
          rtx body = XVECEXP (pattern, 0, i);

          if (GET_CODE (body) == COND_EXEC)
            body = COND_EXEC_CODE (body);

          if ((GET_CODE (body) == SET || GET_CODE (body) == CLOBBER)
              && covers_regno_p (SET_DEST (body), test_regno))
            return 1;
        }
    }

  return 0;
}

/* Return the reg-note of kind KIND in insn INSN, if there is one.
   If DATUM is nonzero, look for one whose datum is DATUM.  */

rtx
find_reg_note (rtx insn, enum reg_note kind, rtx datum)
{
  rtx link;

  gcc_assert (insn);

  /* Ignore anything that is not an INSN, JUMP_INSN or CALL_INSN.  */
  if (! INSN_P (insn))
    return 0;
  if (datum == 0)
    {
      for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
        if (REG_NOTE_KIND (link) == kind)
          return link;
      return 0;
    }

  for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
    if (REG_NOTE_KIND (link) == kind && datum == XEXP (link, 0))
      return link;
  return 0;
}
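
/* A minimal usage sketch (hypothetical, not part of this file):

     rtx note = find_reg_note (insn, REG_EQUAL, NULL_RTX);
     if (note)
       ... XEXP (note, 0) is the value the note says the insn computes ...

   Passing NULL_RTX as DATUM matches any note of the given kind.  */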

/* Return the reg-note of kind KIND in insn INSN which applies to register
   number REGNO, if any.  Return 0 if there is no such reg-note.  Note that
   the REGNO of this NOTE need not be REGNO if REGNO is a hard register;
   it might be the case that the note overlaps REGNO.  */

rtx
find_regno_note (rtx insn, enum reg_note kind, unsigned int regno)
{
  rtx link;

  /* Ignore anything that is not an INSN, JUMP_INSN or CALL_INSN.  */
  if (! INSN_P (insn))
    return 0;

  for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
    if (REG_NOTE_KIND (link) == kind
        /* Verify that it is a register, so that scratch and MEM won't cause a
           problem here.  */
        && REG_P (XEXP (link, 0))
        && REGNO (XEXP (link, 0)) <= regno
        && END_REGNO (XEXP (link, 0)) > regno)
      return link;
  return 0;
}

/* Return a REG_EQUIV or REG_EQUAL note if insn has only a single set and
   has such a note.  */

rtx
find_reg_equal_equiv_note (rtx insn)
{
  rtx link;

  if (!INSN_P (insn))
    return 0;

  for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
    if (REG_NOTE_KIND (link) == REG_EQUAL
        || REG_NOTE_KIND (link) == REG_EQUIV)
      {
        /* FIXME: We should never have REG_EQUAL/REG_EQUIV notes on
           insns that have multiple sets.  Checking single_set to
           make sure of this is not the proper check, as explained
           in the comment in set_unique_reg_note.

           This should be changed into an assert.  */
        if (GET_CODE (PATTERN (insn)) == PARALLEL && multiple_sets (insn))
          return 0;
        return link;
      }
  return NULL;
}

/* Check whether INSN is a single_set whose source is known to be
   equivalent to a constant.  Return that constant if so, otherwise
   return null.  */

rtx
find_constant_src (rtx insn)
{
  rtx note, set, x;

  set = single_set (insn);
  if (set)
    {
      x = avoid_constant_pool_reference (SET_SRC (set));
      if (CONSTANT_P (x))
        return x;
    }

  note = find_reg_equal_equiv_note (insn);
  if (note && CONSTANT_P (XEXP (note, 0)))
    return XEXP (note, 0);

  return NULL_RTX;
}

/* Return true if DATUM, or any overlap of DATUM, of kind CODE is found
   in the CALL_INSN_FUNCTION_USAGE information of INSN.  */

int
find_reg_fusage (rtx insn, enum rtx_code code, rtx datum)
{
  /* If it's not a CALL_INSN, it can't possibly have a
     CALL_INSN_FUNCTION_USAGE field, so don't bother checking.  */
  if (!CALL_P (insn))
    return 0;

  gcc_assert (datum);

  if (!REG_P (datum))
    {
      rtx link;

      for (link = CALL_INSN_FUNCTION_USAGE (insn);
           link;
           link = XEXP (link, 1))
        if (GET_CODE (XEXP (link, 0)) == code
            && rtx_equal_p (datum, XEXP (XEXP (link, 0), 0)))
          return 1;
    }
  else
    {
      unsigned int regno = REGNO (datum);

      /* CALL_INSN_FUNCTION_USAGE information cannot contain references
         to pseudo registers, so don't bother checking.  */

      if (regno < FIRST_PSEUDO_REGISTER)
        {
          unsigned int end_regno = END_HARD_REGNO (datum);
          unsigned int i;

          for (i = regno; i < end_regno; i++)
            if (find_regno_fusage (insn, code, i))
              return 1;
        }
    }

  return 0;
}

/* Return true if REGNO, or any overlap of REGNO, of kind CODE is found
   in the CALL_INSN_FUNCTION_USAGE information of INSN.  */

int
find_regno_fusage (rtx insn, enum rtx_code code, unsigned int regno)
{
  rtx link;

  /* CALL_INSN_FUNCTION_USAGE information cannot contain references
     to pseudo registers, so don't bother checking.  */

  if (regno >= FIRST_PSEUDO_REGISTER
      || !CALL_P (insn) )
    return 0;

  for (link = CALL_INSN_FUNCTION_USAGE (insn); link; link = XEXP (link, 1))
    {
      rtx op, reg;

      if (GET_CODE (op = XEXP (link, 0)) == code
          && REG_P (reg = XEXP (op, 0))
          && REGNO (reg) <= regno
          && END_HARD_REGNO (reg) > regno)
        return 1;
    }

  return 0;
}

/* Return true if INSN is a call to a pure function.  */

int
pure_call_p (rtx insn)
{
  rtx link;

  if (!CALL_P (insn) || ! CONST_OR_PURE_CALL_P (insn))
    return 0;

  /* Look for the note that differentiates const and pure functions.  */
  for (link = CALL_INSN_FUNCTION_USAGE (insn); link; link = XEXP (link, 1))
    {
      rtx u, m;

      if (GET_CODE (u = XEXP (link, 0)) == USE
          && MEM_P (m = XEXP (u, 0)) && GET_MODE (m) == BLKmode
          && GET_CODE (XEXP (m, 0)) == SCRATCH)
        return 1;
    }

  return 0;
}
\f
/* Remove register note NOTE from the REG_NOTES of INSN.  */

void
remove_note (rtx insn, rtx note)
{
  rtx link;

  if (note == NULL_RTX)
    return;

  if (REG_NOTES (insn) == note)
    REG_NOTES (insn) = XEXP (note, 1);
  else
    for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
      if (XEXP (link, 1) == note)
        {
          XEXP (link, 1) = XEXP (note, 1);
          break;
        }

  switch (REG_NOTE_KIND (note))
    {
    case REG_EQUAL:
    case REG_EQUIV:
      df_notes_rescan (insn);
      break;
    default:
      break;
    }
}

/* Remove REG_EQUAL and/or REG_EQUIV notes if INSN has such notes.  */

void
remove_reg_equal_equiv_notes (rtx insn)
{
  rtx *loc;

  loc = &REG_NOTES (insn);
  while (*loc)
    {
      enum reg_note kind = REG_NOTE_KIND (*loc);
      if (kind == REG_EQUAL || kind == REG_EQUIV)
        *loc = XEXP (*loc, 1);
      else
        loc = &XEXP (*loc, 1);
    }
}
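
/* The loop above uses the classic pointer-to-pointer splice: LOC always
   points at the link that may need rewriting, so no separate "previous
   node" bookkeeping is needed.  A standalone sketch of the same idiom
   on a singly linked list (hypothetical node type, illustrative only):

     struct node { int key; struct node *next; };

     static void
     remove_key (struct node **loc, int key)
     {
       while (*loc)
         {
           if ((*loc)->key == key)
             *loc = (*loc)->next;        // splice the node out
           else
             loc = &(*loc)->next;        // advance to the next link
         }
     }
*/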

/* Search LISTP (an EXPR_LIST) for an entry whose first operand is NODE and
   return 1 if it is found.  A simple equality test is used to determine if
   NODE matches.  */

int
in_expr_list_p (rtx listp, rtx node)
{
  rtx x;

  for (x = listp; x; x = XEXP (x, 1))
    if (node == XEXP (x, 0))
      return 1;

  return 0;
}

/* Search LISTP (an EXPR_LIST) for an entry whose first operand is NODE and
   remove that entry from the list if it is found.

   A simple equality test is used to determine if NODE matches.  */

void
remove_node_from_expr_list (rtx node, rtx *listp)
{
  rtx temp = *listp;
  rtx prev = NULL_RTX;

  while (temp)
    {
      if (node == XEXP (temp, 0))
        {
          /* Splice the node out of the list.  */
          if (prev)
            XEXP (prev, 1) = XEXP (temp, 1);
          else
            *listp = XEXP (temp, 1);

          return;
        }

      prev = temp;
      temp = XEXP (temp, 1);
    }
}
\f
/* Nonzero if X contains any volatile instructions.  These are instructions
   which may cause unpredictable machine state, and thus no other
   instructions should be moved or combined across them.  This includes
   only volatile asms and UNSPEC_VOLATILE instructions.  */

int
volatile_insn_p (rtx x)
{
  RTX_CODE code;

  code = GET_CODE (x);
  switch (code)
    {
    case LABEL_REF:
    case SYMBOL_REF:
    case CONST_INT:
    case CONST:
    case CONST_DOUBLE:
    case CONST_VECTOR:
    case CC0:
    case PC:
    case REG:
    case SCRATCH:
    case CLOBBER:
    case ADDR_VEC:
    case ADDR_DIFF_VEC:
    case CALL:
    case MEM:
      return 0;

    case UNSPEC_VOLATILE:
      /* case TRAP_IF: This isn't clear yet.  */
      return 1;

    case ASM_INPUT:
    case ASM_OPERANDS:
      if (MEM_VOLATILE_P (x))
        return 1;

    default:
      break;
    }

  /* Recursively scan the operands of this expression.  */

  {
    const char *fmt = GET_RTX_FORMAT (code);
    int i;

    for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
      {
        if (fmt[i] == 'e')
          {
            if (volatile_insn_p (XEXP (x, i)))
              return 1;
          }
        else if (fmt[i] == 'E')
          {
            int j;
            for (j = 0; j < XVECLEN (x, i); j++)
              if (volatile_insn_p (XVECEXP (x, i, j)))
                return 1;
          }
      }
  }
  return 0;
}

/* Nonzero if X contains any volatile memory references,
   UNSPEC_VOLATILE operations, or volatile ASM_OPERANDS expressions.  */

int
volatile_refs_p (rtx x)
{
  RTX_CODE code;

  code = GET_CODE (x);
  switch (code)
    {
    case LABEL_REF:
    case SYMBOL_REF:
    case CONST_INT:
    case CONST:
    case CONST_DOUBLE:
    case CONST_VECTOR:
    case CC0:
    case PC:
    case REG:
    case SCRATCH:
    case CLOBBER:
    case ADDR_VEC:
    case ADDR_DIFF_VEC:
      return 0;

    case UNSPEC_VOLATILE:
      return 1;

    case MEM:
    case ASM_INPUT:
    case ASM_OPERANDS:
      if (MEM_VOLATILE_P (x))
        return 1;

    default:
      break;
    }

  /* Recursively scan the operands of this expression.  */

  {
    const char *fmt = GET_RTX_FORMAT (code);
    int i;

    for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
      {
        if (fmt[i] == 'e')
          {
            if (volatile_refs_p (XEXP (x, i)))
              return 1;
          }
        else if (fmt[i] == 'E')
          {
            int j;
            for (j = 0; j < XVECLEN (x, i); j++)
              if (volatile_refs_p (XVECEXP (x, i, j)))
                return 1;
          }
      }
  }
  return 0;
}

/* Similar to above, except that it also rejects register pre- and post-
   incrementing.  */

int
side_effects_p (rtx x)
{
  RTX_CODE code;

  code = GET_CODE (x);
  switch (code)
    {
    case LABEL_REF:
    case SYMBOL_REF:
    case CONST_INT:
    case CONST:
    case CONST_DOUBLE:
    case CONST_VECTOR:
    case CC0:
    case PC:
    case REG:
    case SCRATCH:
    case ADDR_VEC:
    case ADDR_DIFF_VEC:
      return 0;

    case CLOBBER:
      /* Reject CLOBBER with a non-VOID mode.  These are made by combine.c
         when some combination can't be done.  If we see one, don't think
         that we can simplify the expression.  */
      return (GET_MODE (x) != VOIDmode);

    case PRE_INC:
    case PRE_DEC:
    case POST_INC:
    case POST_DEC:
    case PRE_MODIFY:
    case POST_MODIFY:
    case CALL:
    case UNSPEC_VOLATILE:
      /* case TRAP_IF: This isn't clear yet.  */
      return 1;

    case MEM:
    case ASM_INPUT:
    case ASM_OPERANDS:
      if (MEM_VOLATILE_P (x))
        return 1;

    default:
      break;
    }

  /* Recursively scan the operands of this expression.  */

  {
    const char *fmt = GET_RTX_FORMAT (code);
    int i;

    for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
      {
        if (fmt[i] == 'e')
          {
            if (side_effects_p (XEXP (x, i)))
              return 1;
          }
        else if (fmt[i] == 'E')
          {
            int j;
            for (j = 0; j < XVECLEN (x, i); j++)
              if (side_effects_p (XVECEXP (x, i, j)))
                return 1;
          }
      }
  }
  return 0;
}
\f
enum may_trap_p_flags
{
  MTP_UNALIGNED_MEMS = 1,
  MTP_AFTER_MOVE = 2
};

/* Return nonzero if evaluating rtx X might cause a trap.
   (FLAGS & MTP_UNALIGNED_MEMS) controls whether nonzero is returned for
   unaligned memory accesses on strict alignment machines.  If
   (FLAGS & MTP_AFTER_MOVE) is true, return nonzero even when the
   expression cannot trap at its current location, but might become
   trapping if moved elsewhere.  */

static int
may_trap_p_1 (rtx x, unsigned flags)
{
  int i;
  enum rtx_code code;
  const char *fmt;
  bool unaligned_mems = (flags & MTP_UNALIGNED_MEMS) != 0;

  if (x == 0)
    return 0;
  code = GET_CODE (x);
  switch (code)
    {
      /* Handle these cases quickly.  */
    case CONST_INT:
    case CONST_DOUBLE:
    case CONST_VECTOR:
    case SYMBOL_REF:
    case LABEL_REF:
    case CONST:
    case PC:
    case CC0:
    case REG:
    case SCRATCH:
      return 0;

    case ASM_INPUT:
    case UNSPEC_VOLATILE:
    case TRAP_IF:
      return 1;

    case ASM_OPERANDS:
      return MEM_VOLATILE_P (x);

      /* Memory ref can trap unless it's a static var or a stack slot.  */
    case MEM:
      if (/* MEM_NOTRAP_P only relates to the actual position of the memory
             reference; moving it out of a condition might cause its address
             to become invalid.  */
          !(flags & MTP_AFTER_MOVE)
          && MEM_NOTRAP_P (x)
          && (!STRICT_ALIGNMENT || !unaligned_mems))
        return 0;
      return
        rtx_addr_can_trap_p_1 (XEXP (x, 0), GET_MODE (x), unaligned_mems);

      /* Division by a non-constant might trap.  */
    case DIV:
    case MOD:
    case UDIV:
    case UMOD:
      if (HONOR_SNANS (GET_MODE (x)))
        return 1;
      if (SCALAR_FLOAT_MODE_P (GET_MODE (x)))
        return flag_trapping_math;
      if (!CONSTANT_P (XEXP (x, 1)) || (XEXP (x, 1) == const0_rtx))
        return 1;
      break;

    case EXPR_LIST:
      /* An EXPR_LIST is used to represent a function call.  This
         certainly may trap.  */
      return 1;

    case GE:
    case GT:
    case LE:
    case LT:
    case LTGT:
    case COMPARE:
      /* Some floating point comparisons may trap.  */
      if (!flag_trapping_math)
        break;
      /* ??? There is no machine independent way to check for tests that trap
         when COMPARE is used, though many targets do make this distinction.
         For instance, sparc uses CCFPE for compares which generate exceptions
         and CCFP for compares which do not generate exceptions.  */
      if (HONOR_NANS (GET_MODE (x)))
        return 1;
      /* But often the compare has some CC mode, so check operand
         modes as well.  */
      if (HONOR_NANS (GET_MODE (XEXP (x, 0)))
          || HONOR_NANS (GET_MODE (XEXP (x, 1))))
        return 1;
      break;

    case EQ:
    case NE:
      if (HONOR_SNANS (GET_MODE (x)))
        return 1;
      /* Often comparison is CC mode, so check operand modes.  */
      if (HONOR_SNANS (GET_MODE (XEXP (x, 0)))
          || HONOR_SNANS (GET_MODE (XEXP (x, 1))))
        return 1;
      break;

    case FIX:
      /* Conversion of floating point might trap.  */
      if (flag_trapping_math && HONOR_NANS (GET_MODE (XEXP (x, 0))))
        return 1;
      break;

    case NEG:
    case ABS:
    case SUBREG:
      /* These operations don't trap even with floating point.  */
      break;

    default:
      /* Any floating arithmetic may trap.  */
      if (SCALAR_FLOAT_MODE_P (GET_MODE (x))
          && flag_trapping_math)
        return 1;
    }

  fmt = GET_RTX_FORMAT (code);
  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    {
      if (fmt[i] == 'e')
        {
          if (may_trap_p_1 (XEXP (x, i), flags))
            return 1;
        }
      else if (fmt[i] == 'E')
        {
          int j;
          for (j = 0; j < XVECLEN (x, i); j++)
            if (may_trap_p_1 (XVECEXP (x, i, j), flags))
              return 1;
        }
    }
  return 0;
}

/* Return nonzero if evaluating rtx X might cause a trap.  */

int
may_trap_p (rtx x)
{
  return may_trap_p_1 (x, 0);
}

/* Return nonzero if evaluating rtx X might cause a trap, when the expression
   is moved from its current location by some optimization.  */

int
may_trap_after_code_motion_p (rtx x)
{
  return may_trap_p_1 (x, MTP_AFTER_MOVE);
}

/* Same as above, but additionally return nonzero if evaluating rtx X might
   cause a fault.  We define a fault for the purpose of this function as an
   erroneous execution condition that cannot be encountered during the normal
   execution of a valid program; the typical example is an unaligned memory
   access on a strict alignment machine.  The compiler guarantees that it
   doesn't generate code that will fault from a valid program, but this
   guarantee doesn't mean anything for individual instructions.  Consider
   the following example:

      struct S { int d; union { char *cp; int *ip; }; };

      int foo(struct S *s)
      {
        if (s->d == 1)
          return *s->ip;
        else
          return *s->cp;
      }

   on a strict alignment machine.  In a valid program, foo will never be
   invoked on a structure for which d is equal to 1 and the underlying
   unique field of the union not aligned on a 4-byte boundary, but the
   expression *s->ip might cause a fault if considered individually.

   At the RTL level, potentially problematic expressions will almost always
   satisfy may_trap_p; for example, the above dereference can be emitted as
   (mem:SI (reg:P)) and this expression is may_trap_p for a generic register.
   However, suppose that foo is inlined in a caller that causes s->cp to
   point to a local character variable and guarantees that s->d is not set
   to 1; foo may have been effectively translated into pseudo-RTL as:

      if ((reg:SI) == 1)
        (set (reg:SI) (mem:SI (%fp - 7)))
      else
        (set (reg:QI) (mem:QI (%fp - 7)))

   Now (mem:SI (%fp - 7)) is considered as not may_trap_p since it is a
   memory reference to a stack slot, but it will certainly cause a fault
   on a strict alignment machine.  */

int
may_trap_or_fault_p (rtx x)
{
  return may_trap_p_1 (x, MTP_UNALIGNED_MEMS);
}
\f
/* Return nonzero if X contains a comparison that is not either EQ or NE,
   i.e., an inequality.  */

int
inequality_comparisons_p (rtx x)
{
  const char *fmt;
  int len, i;
  enum rtx_code code = GET_CODE (x);

  switch (code)
    {
    case REG:
    case SCRATCH:
    case PC:
    case CC0:
    case CONST_INT:
    case CONST_DOUBLE:
    case CONST_VECTOR:
    case CONST:
    case LABEL_REF:
    case SYMBOL_REF:
      return 0;

    case LT:
    case LTU:
    case GT:
    case GTU:
    case LE:
    case LEU:
    case GE:
    case GEU:
      return 1;

    default:
      break;
    }

  len = GET_RTX_LENGTH (code);
  fmt = GET_RTX_FORMAT (code);

  for (i = 0; i < len; i++)
    {
      if (fmt[i] == 'e')
        {
          if (inequality_comparisons_p (XEXP (x, i)))
            return 1;
        }
      else if (fmt[i] == 'E')
        {
          int j;
          for (j = XVECLEN (x, i) - 1; j >= 0; j--)
            if (inequality_comparisons_p (XVECEXP (x, i, j)))
              return 1;
        }
    }

  return 0;
}
\f
/* Replace any occurrence of FROM in X with TO.  The function does
   not enter into CONST_DOUBLE for the replace.

   Note that copying is not done so X must not be shared unless all copies
   are to be modified.  */

rtx
replace_rtx (rtx x, rtx from, rtx to)
{
  int i, j;
  const char *fmt;

  /* The following prevents loops from occurring when we change a MEM
     inside a CONST_DOUBLE onto the same CONST_DOUBLE.  */
  if (x != 0 && GET_CODE (x) == CONST_DOUBLE)
    return x;

  if (x == from)
    return to;

  /* Allow this function to make replacements in EXPR_LISTs.  */
  if (x == 0)
    return 0;

  if (GET_CODE (x) == SUBREG)
    {
      rtx new = replace_rtx (SUBREG_REG (x), from, to);

      if (GET_CODE (new) == CONST_INT)
        {
          x = simplify_subreg (GET_MODE (x), new,
                               GET_MODE (SUBREG_REG (x)),
                               SUBREG_BYTE (x));
          gcc_assert (x);
        }
      else
        SUBREG_REG (x) = new;

      return x;
    }
  else if (GET_CODE (x) == ZERO_EXTEND)
    {
      rtx new = replace_rtx (XEXP (x, 0), from, to);

      if (GET_CODE (new) == CONST_INT)
        {
          x = simplify_unary_operation (ZERO_EXTEND, GET_MODE (x),
                                        new, GET_MODE (XEXP (x, 0)));
          gcc_assert (x);
        }
      else
        XEXP (x, 0) = new;

      return x;
    }

  fmt = GET_RTX_FORMAT (GET_CODE (x));
  for (i = GET_RTX_LENGTH (GET_CODE (x)) - 1; i >= 0; i--)
    {
      if (fmt[i] == 'e')
        XEXP (x, i) = replace_rtx (XEXP (x, i), from, to);
      else if (fmt[i] == 'E')
        for (j = XVECLEN (x, i) - 1; j >= 0; j--)
          XVECEXP (x, i, j) = replace_rtx (XVECEXP (x, i, j), from, to);
    }

  return x;
}
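
/* A minimal usage sketch (illustrative only): rewriting every use of
   one rtx with another inside a copied pattern.  Copying first matters
   because replace_rtx modifies X in place; OLD_REG and NEW_REG are
   hypothetical REG rtxes supplied by the caller.

     rtx pat = copy_rtx (PATTERN (insn));
     pat = replace_rtx (pat, old_reg, new_reg);
*/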
\f
/* Replace occurrences of the old label in *X with the new one.
   DATA is a REPLACE_LABEL_DATA containing the old and new labels.  */

int
replace_label (rtx *x, void *data)
{
  rtx l = *x;
  rtx old_label = ((replace_label_data *) data)->r1;
  rtx new_label = ((replace_label_data *) data)->r2;
  bool update_label_nuses = ((replace_label_data *) data)->update_label_nuses;

  if (l == NULL_RTX)
    return 0;

  if (GET_CODE (l) == SYMBOL_REF
      && CONSTANT_POOL_ADDRESS_P (l))
    {
      rtx c = get_pool_constant (l);
      if (rtx_referenced_p (old_label, c))
        {
          rtx new_c, new_l;
          replace_label_data *d = (replace_label_data *) data;

          /* Create a copy of constant C; replace the label inside
             but do not update LABEL_NUSES because uses in constant pool
             are not counted.  */
          new_c = copy_rtx (c);
          d->update_label_nuses = false;
          for_each_rtx (&new_c, replace_label, data);
          d->update_label_nuses = update_label_nuses;

          /* Add the new constant NEW_C to constant pool and replace
             the old reference to constant by new reference.  */
          new_l = XEXP (force_const_mem (get_pool_mode (l), new_c), 0);
          *x = replace_rtx (l, l, new_l);
        }
      return 0;
    }

  /* If this is a JUMP_INSN, then we also need to fix the JUMP_LABEL
     field.  This is not handled by for_each_rtx because it doesn't
     handle unprinted ('0') fields.  */
  if (JUMP_P (l) && JUMP_LABEL (l) == old_label)
    JUMP_LABEL (l) = new_label;

  if ((GET_CODE (l) == LABEL_REF
       || GET_CODE (l) == INSN_LIST)
      && XEXP (l, 0) == old_label)
    {
      XEXP (l, 0) = new_label;
      if (update_label_nuses)
        {
          ++LABEL_NUSES (new_label);
          --LABEL_NUSES (old_label);
        }
      return 0;
    }

  return 0;
}

/* When *BODY is equal to X or X is directly referenced by *BODY
   return nonzero, thus FOR_EACH_RTX stops traversing and returns nonzero
   too, otherwise FOR_EACH_RTX continues traversing *BODY.  */

static int
rtx_referenced_p_1 (rtx *body, void *x)
{
  rtx y = (rtx) x;

  if (*body == NULL_RTX)
    return y == NULL_RTX;

  /* Return true if a label_ref *BODY refers to label Y.  */
  if (GET_CODE (*body) == LABEL_REF && LABEL_P (y))
    return XEXP (*body, 0) == y;

  /* If *BODY is a reference to a pool constant, traverse the constant.  */
  if (GET_CODE (*body) == SYMBOL_REF
      && CONSTANT_POOL_ADDRESS_P (*body))
    return rtx_referenced_p (y, get_pool_constant (*body));

  /* By default, compare the RTL expressions.  */
  return rtx_equal_p (*body, y);
}

/* Return true if X is referenced in BODY.  */

int
rtx_referenced_p (rtx x, rtx body)
{
  return for_each_rtx (&body, rtx_referenced_p_1, x);
}

/* If INSN is a tablejump, return true and store the label (the insn
   preceding the jump table) in *LABELP and the jump table in *TABLEP.
   LABELP and TABLEP may be NULL.  */

bool
tablejump_p (rtx insn, rtx *labelp, rtx *tablep)
{
  rtx label, table;

  if (JUMP_P (insn)
      && (label = JUMP_LABEL (insn)) != NULL_RTX
      && (table = next_active_insn (label)) != NULL_RTX
      && JUMP_P (table)
      && (GET_CODE (PATTERN (table)) == ADDR_VEC
          || GET_CODE (PATTERN (table)) == ADDR_DIFF_VEC))
    {
      if (labelp)
        *labelp = label;
      if (tablep)
        *tablep = table;
      return true;
    }
  return false;
}
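
/* A minimal usage sketch (illustrative only): walking the entries of a
   jump table once INSN is known to be a tablejump.

     rtx table;
     if (tablejump_p (insn, NULL, &table))
       {
         rtvec vec = XVEC (PATTERN (table),
                           GET_CODE (PATTERN (table)) == ADDR_DIFF_VEC);
         int i;
         for (i = 0; i < GET_NUM_ELEM (vec); i++)
           ;   // XEXP (RTVEC_ELT (vec, i), 0) is a target CODE_LABEL

   The vector lives in operand 0 of an ADDR_VEC but in operand 1 of an
   ADDR_DIFF_VEC, hence the GET_CODE test used as the index; the same
   trick appears in label_is_jump_target_p further down in this file.  */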

/* A subroutine of computed_jump_p, return 1 if X contains a REG or MEM or
   constant that is not in the constant pool and not in the condition
   of an IF_THEN_ELSE.  */

static int
computed_jump_p_1 (rtx x)
{
  enum rtx_code code = GET_CODE (x);
  int i, j;
  const char *fmt;

  switch (code)
    {
    case LABEL_REF:
    case PC:
      return 0;

    case CONST:
    case CONST_INT:
    case CONST_DOUBLE:
    case CONST_VECTOR:
    case SYMBOL_REF:
    case REG:
      return 1;

    case MEM:
      return ! (GET_CODE (XEXP (x, 0)) == SYMBOL_REF
                && CONSTANT_POOL_ADDRESS_P (XEXP (x, 0)));

    case IF_THEN_ELSE:
      return (computed_jump_p_1 (XEXP (x, 1))
              || computed_jump_p_1 (XEXP (x, 2)));

    default:
      break;
    }

  fmt = GET_RTX_FORMAT (code);
  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    {
      if (fmt[i] == 'e'
          && computed_jump_p_1 (XEXP (x, i)))
        return 1;

      else if (fmt[i] == 'E')
        for (j = 0; j < XVECLEN (x, i); j++)
          if (computed_jump_p_1 (XVECEXP (x, i, j)))
            return 1;
    }

  return 0;
}

/* Return nonzero if INSN is an indirect jump (aka computed jump).

   Tablejumps and casesi insns are not considered indirect jumps;
   we can recognize them by a (use (label_ref)).  */

int
computed_jump_p (rtx insn)
{
  int i;
  if (JUMP_P (insn))
    {
      rtx pat = PATTERN (insn);

      if (find_reg_note (insn, REG_LABEL, NULL_RTX))
        return 0;
      else if (GET_CODE (pat) == PARALLEL)
        {
          int len = XVECLEN (pat, 0);
          int has_use_labelref = 0;

          for (i = len - 1; i >= 0; i--)
            if (GET_CODE (XVECEXP (pat, 0, i)) == USE
                && (GET_CODE (XEXP (XVECEXP (pat, 0, i), 0))
                    == LABEL_REF))
              has_use_labelref = 1;

          if (! has_use_labelref)
            for (i = len - 1; i >= 0; i--)
              if (GET_CODE (XVECEXP (pat, 0, i)) == SET
                  && SET_DEST (XVECEXP (pat, 0, i)) == pc_rtx
                  && computed_jump_p_1 (SET_SRC (XVECEXP (pat, 0, i))))
                return 1;
        }
      else if (GET_CODE (pat) == SET
               && SET_DEST (pat) == pc_rtx
               && computed_jump_p_1 (SET_SRC (pat)))
        return 1;
    }
  return 0;
}

/* Optimized loop of for_each_rtx, trying to avoid useless recursive
   calls.  Processes the subexpressions of EXP and passes them to F.  */
static int
for_each_rtx_1 (rtx exp, int n, rtx_function f, void *data)
{
  int result, i, j;
  const char *format = GET_RTX_FORMAT (GET_CODE (exp));
  rtx *x;

  for (; format[n] != '\0'; n++)
    {
      switch (format[n])
        {
        case 'e':
          /* Call F on X.  */
          x = &XEXP (exp, n);
          result = (*f) (x, data);
          if (result == -1)
            /* Do not traverse sub-expressions.  */
            continue;
          else if (result != 0)
            /* Stop the traversal.  */
            return result;

          if (*x == NULL_RTX)
            /* There are no sub-expressions.  */
            continue;

          i = non_rtx_starting_operands[GET_CODE (*x)];
          if (i >= 0)
            {
              result = for_each_rtx_1 (*x, i, f, data);
              if (result != 0)
                return result;
            }
          break;

        case 'V':
        case 'E':
          if (XVEC (exp, n) == 0)
            continue;
          for (j = 0; j < XVECLEN (exp, n); ++j)
            {
              /* Call F on X.  */
              x = &XVECEXP (exp, n, j);
              result = (*f) (x, data);
              if (result == -1)
                /* Do not traverse sub-expressions.  */
                continue;
              else if (result != 0)
                /* Stop the traversal.  */
                return result;

              if (*x == NULL_RTX)
                /* There are no sub-expressions.  */
                continue;

              i = non_rtx_starting_operands[GET_CODE (*x)];
              if (i >= 0)
                {
                  result = for_each_rtx_1 (*x, i, f, data);
                  if (result != 0)
                    return result;
                }
            }
          break;

        default:
          /* Nothing to do.  */
          break;
        }
    }

  return 0;
}

/* Traverse X via depth-first search, calling F for each
   sub-expression (including X itself).  F is also passed the DATA.
   If F returns -1, do not traverse sub-expressions, but continue
   traversing the rest of the tree.  If F ever returns any other
   nonzero value, stop the traversal, and return the value returned
   by F.  Otherwise, return 0.  This function does not traverse inside
   tree structure that contains RTX_EXPRs, or into sub-expressions
   whose format code is `0' since it is not known whether or not those
   codes are actually RTL.

   This routine is very general, and could (should?) be used to
   implement many of the other routines in this file.  */

int
for_each_rtx (rtx *x, rtx_function f, void *data)
{
  int result;
  int i;

  /* Call F on X.  */
  result = (*f) (x, data);
  if (result == -1)
    /* Do not traverse sub-expressions.  */
    return 0;
  else if (result != 0)
    /* Stop the traversal.  */
    return result;

  if (*x == NULL_RTX)
    /* There are no sub-expressions.  */
    return 0;

  i = non_rtx_starting_operands[GET_CODE (*x)];
  if (i < 0)
    return 0;

  return for_each_rtx_1 (*x, i, f, data);
}
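
/* A minimal usage sketch (illustrative only): counting the MEMs in an
   expression with for_each_rtx.  Returning 0 from the callback lets
   the traversal continue into sub-expressions.

     static int
     count_mems_1 (rtx *x, void *data)
     {
       if (*x && MEM_P (*x))
         ++*(int *) data;
       return 0;
     }

     static int
     count_mems (rtx x)
     {
       int n = 0;
       for_each_rtx (&x, count_mems_1, &n);
       return n;
     }
*/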

/* Searches X for any reference to REGNO, returning the rtx of the
   reference found if any.  Otherwise, returns NULL_RTX.  */

rtx
regno_use_in (unsigned int regno, rtx x)
{
  const char *fmt;
  int i, j;
  rtx tem;

  if (REG_P (x) && REGNO (x) == regno)
    return x;

  fmt = GET_RTX_FORMAT (GET_CODE (x));
  for (i = GET_RTX_LENGTH (GET_CODE (x)) - 1; i >= 0; i--)
    {
      if (fmt[i] == 'e')
        {
          if ((tem = regno_use_in (regno, XEXP (x, i))))
            return tem;
        }
      else if (fmt[i] == 'E')
        for (j = XVECLEN (x, i) - 1; j >= 0; j--)
          if ((tem = regno_use_in (regno, XVECEXP (x, i, j))))
            return tem;
    }

  return NULL_RTX;
}

/* Return a value indicating whether OP, an operand of a commutative
   operation, is preferred as the first or second operand.  The higher
   the value, the stronger the preference for being the first operand.
   We use negative values to indicate a preference for the first operand
   and positive values for the second operand.  */

int
commutative_operand_precedence (rtx op)
{
  enum rtx_code code = GET_CODE (op);

  /* Constants always become the second operand.  Prefer "nice" constants.  */
  if (code == CONST_INT)
    return -7;
  if (code == CONST_DOUBLE)
    return -6;
  op = avoid_constant_pool_reference (op);
  code = GET_CODE (op);

  switch (GET_RTX_CLASS (code))
    {
    case RTX_CONST_OBJ:
      if (code == CONST_INT)
        return -5;
      if (code == CONST_DOUBLE)
        return -4;
      return -3;

    case RTX_EXTRA:
      /* SUBREGs of objects should come second.  */
      if (code == SUBREG && OBJECT_P (SUBREG_REG (op)))
        return -2;

      return 0;

    case RTX_OBJ:
      /* Complex expressions should be the first, so decrease priority
         of objects.  */
      return -1;

    case RTX_COMM_ARITH:
      /* Prefer operands that are themselves commutative to be first.
         This helps to make things linear.  In particular,
         (and (and (reg) (reg)) (not (reg))) is canonical.  */
      return 4;

    case RTX_BIN_ARITH:
      /* If only one operand is a binary expression, it will be the first
         operand.  In particular, (plus (minus (reg) (reg)) (neg (reg)))
         is canonical, although it will usually be further simplified.  */
      return 2;

    case RTX_UNARY:
      /* Then prefer NEG and NOT.  */
      if (code == NEG || code == NOT)
        return 1;

    default:
      return 0;
    }
}

/* Return 1 iff it is necessary to swap the operands of a commutative
   operation in order to canonicalize the expression.  */

int
swap_commutative_operands_p (rtx x, rtx y)
{
  return (commutative_operand_precedence (x)
          < commutative_operand_precedence (y));
}
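
/* A minimal usage sketch (illustrative only): the canonicalization
   idiom used by callers when building a commutative operation from
   operands OP0 and OP1.

     if (swap_commutative_operands_p (op0, op1))
       {
         rtx tem = op0;
         op0 = op1;
         op1 = tem;
       }

   After the swap, constants and other low-precedence operands end up
   second, so equivalent expressions get a single canonical shape.  */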

/* Return 1 if X is an autoincrement side effect and the register is
   not the stack pointer.  */
int
auto_inc_p (rtx x)
{
  switch (GET_CODE (x))
    {
    case PRE_INC:
    case POST_INC:
    case PRE_DEC:
    case POST_DEC:
    case PRE_MODIFY:
    case POST_MODIFY:
      /* There are no REG_INC notes for SP.  */
      if (XEXP (x, 0) != stack_pointer_rtx)
        return 1;
    default:
      break;
    }
  return 0;
}

/* Return nonzero if IN contains a piece of rtl that has the address LOC.  */
int
loc_mentioned_in_p (rtx *loc, rtx in)
{
  enum rtx_code code;
  const char *fmt;
  int i, j;

  if (!in)
    return 0;

  code = GET_CODE (in);
  fmt = GET_RTX_FORMAT (code);
  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    {
      if (loc == &in->u.fld[i].rt_rtx)
        return 1;
      if (fmt[i] == 'e')
        {
          if (loc_mentioned_in_p (loc, XEXP (in, i)))
            return 1;
        }
      else if (fmt[i] == 'E')
        for (j = XVECLEN (in, i) - 1; j >= 0; j--)
          if (loc_mentioned_in_p (loc, XVECEXP (in, i, j)))
            return 1;
    }
  return 0;
}

/* Helper function for subreg_lsb.  Given a subreg's OUTER_MODE, INNER_MODE,
   and SUBREG_BYTE, return the bit offset where the subreg begins
   (counting from the least significant bit of the operand).  */

unsigned int
subreg_lsb_1 (enum machine_mode outer_mode,
              enum machine_mode inner_mode,
              unsigned int subreg_byte)
{
  unsigned int bitpos;
  unsigned int byte;
  unsigned int word;

  /* A paradoxical subreg begins at bit position 0.  */
  if (GET_MODE_BITSIZE (outer_mode) > GET_MODE_BITSIZE (inner_mode))
    return 0;

  if (WORDS_BIG_ENDIAN != BYTES_BIG_ENDIAN)
    /* If the subreg crosses a word boundary ensure that
       it also begins and ends on a word boundary.  */
    gcc_assert (!((subreg_byte % UNITS_PER_WORD
                   + GET_MODE_SIZE (outer_mode)) > UNITS_PER_WORD
                  && (subreg_byte % UNITS_PER_WORD
                      || GET_MODE_SIZE (outer_mode) % UNITS_PER_WORD)));

  if (WORDS_BIG_ENDIAN)
    word = (GET_MODE_SIZE (inner_mode)
            - (subreg_byte + GET_MODE_SIZE (outer_mode))) / UNITS_PER_WORD;
  else
    word = subreg_byte / UNITS_PER_WORD;
  bitpos = word * BITS_PER_WORD;

  if (BYTES_BIG_ENDIAN)
    byte = (GET_MODE_SIZE (inner_mode)
            - (subreg_byte + GET_MODE_SIZE (outer_mode))) % UNITS_PER_WORD;
  else
    byte = subreg_byte % UNITS_PER_WORD;
  bitpos += byte * BITS_PER_UNIT;

  return bitpos;
}

/* Given a subreg X, return the bit offset where the subreg begins
   (counting from the least significant bit of the reg).  */

unsigned int
subreg_lsb (rtx x)
{
  return subreg_lsb_1 (GET_MODE (x), GET_MODE (SUBREG_REG (x)),
                       SUBREG_BYTE (x));
}

3042
f1f4e530 3043/* Fill in information about a subreg of a hard register.
ddef6bc7
JJ
3044 xregno - A regno of an inner hard subreg_reg (or what will become one).
3045 xmode - The mode of xregno.
3046 offset - The byte offset.
3047 ymode - The mode of a top level SUBREG (or what may become one).
f1f4e530
JM
3048 info - Pointer to structure to fill in. */
3049static void
3050subreg_get_info (unsigned int xregno, enum machine_mode xmode,
3051 unsigned int offset, enum machine_mode ymode,
3052 struct subreg_info *info)
04c5580f 3053{
8521c414 3054 int nregs_xmode, nregs_ymode;
04c5580f 3055 int mode_multiple, nregs_multiple;
f1f4e530 3056 int offset_adj, y_offset, y_offset_adj;
8521c414 3057 int regsize_xmode, regsize_ymode;
f1f4e530 3058 bool rknown;
04c5580f 3059
41374e13 3060 gcc_assert (xregno < FIRST_PSEUDO_REGISTER);
04c5580f 3061
f1f4e530
JM
3062 rknown = false;
3063
dd79bb7e
GK
3064 /* If there are holes in a non-scalar mode in registers, we expect
3065 that it is made up of its units concatenated together. */
8521c414 3066 if (HARD_REGNO_NREGS_HAS_PADDING (xregno, xmode))
dd79bb7e 3067 {
8521c414
JM
3068 enum machine_mode xmode_unit;
3069
3070 nregs_xmode = HARD_REGNO_NREGS_WITH_PADDING (xregno, xmode);
3071 if (GET_MODE_INNER (xmode) == VOIDmode)
3072 xmode_unit = xmode;
3073 else
3074 xmode_unit = GET_MODE_INNER (xmode);
3075 gcc_assert (HARD_REGNO_NREGS_HAS_PADDING (xregno, xmode_unit));
3076 gcc_assert (nregs_xmode
3077 == (GET_MODE_NUNITS (xmode)
3078 * HARD_REGNO_NREGS_WITH_PADDING (xregno, xmode_unit)));
3079 gcc_assert (hard_regno_nregs[xregno][xmode]
3080 == (hard_regno_nregs[xregno][xmode_unit]
3081 * GET_MODE_NUNITS (xmode)));
dd79bb7e
GK
3082
3083 /* You can only ask for a SUBREG of a value with holes in the middle
3084 if you don't cross the holes. (Such a SUBREG should be done by
3085 picking a different register class, or doing it in memory if
3086 necessary.) An example of a value with holes is XCmode on 32-bit
3087 x86 with -m128bit-long-double; it's represented in 6 32-bit registers,
3088 3 for each part, but in memory it's two 128-bit parts.
3089 Padding is assumed to be at the end (not necessarily the 'high part')
3090 of each unit. */
8521c414
JM
3091 if ((offset / GET_MODE_SIZE (xmode_unit) + 1
3092 < GET_MODE_NUNITS (xmode))
3093 && (offset / GET_MODE_SIZE (xmode_unit)
dd79bb7e 3094 != ((offset + GET_MODE_SIZE (ymode) - 1)
8521c414 3095 / GET_MODE_SIZE (xmode_unit))))
f1f4e530
JM
3096 {
3097 info->representable_p = false;
3098 rknown = true;
3099 }
dd79bb7e
GK
3100 }
3101 else
3102 nregs_xmode = hard_regno_nregs[xregno][xmode];
3103
66fd46b6 3104 nregs_ymode = hard_regno_nregs[xregno][ymode];
04c5580f 3105
dd79bb7e 3106 /* Paradoxical subregs are otherwise valid. */
f1f4e530
JM
3107 if (!rknown
3108 && offset == 0
3109 && GET_MODE_SIZE (ymode) > GET_MODE_SIZE (xmode))
3110 {
3111 info->representable_p = true;
3112 /* If this is a big endian paradoxical subreg, which uses more
3113 actual hard registers than the original register, we must
3114 return a negative offset so that we find the proper highpart
3115 of the register. */
3116 if (GET_MODE_SIZE (ymode) > UNITS_PER_WORD
3117 ? WORDS_BIG_ENDIAN : BYTES_BIG_ENDIAN)
3118 info->offset = nregs_xmode - nregs_ymode;
3119 else
3120 info->offset = 0;
3121 info->nregs = nregs_ymode;
3122 return;
3123 }
04c5580f 3124
8521c414
JM
3125 /* If registers store different numbers of bits in the different
3126 modes, we cannot generally form this subreg. */
f1f4e530 3127 if (!HARD_REGNO_NREGS_HAS_PADDING (xregno, xmode)
5f7fc2b8
JM
3128 && !HARD_REGNO_NREGS_HAS_PADDING (xregno, ymode)
3129 && (GET_MODE_SIZE (xmode) % nregs_xmode) == 0
3130 && (GET_MODE_SIZE (ymode) % nregs_ymode) == 0)
f1f4e530
JM
3131 {
3132 regsize_xmode = GET_MODE_SIZE (xmode) / nregs_xmode;
f1f4e530 3133 regsize_ymode = GET_MODE_SIZE (ymode) / nregs_ymode;
f1f4e530
JM
3134 if (!rknown && regsize_xmode > regsize_ymode && nregs_ymode > 1)
3135 {
3136 info->representable_p = false;
3137 info->nregs
3138 = (GET_MODE_SIZE (ymode) + regsize_xmode - 1) / regsize_xmode;
3139 info->offset = offset / regsize_xmode;
3140 return;
3141 }
3142 if (!rknown && regsize_ymode > regsize_xmode && nregs_xmode > 1)
3143 {
3144 info->representable_p = false;
3145 info->nregs
3146 = (GET_MODE_SIZE (ymode) + regsize_xmode - 1) / regsize_xmode;
3147 info->offset = offset / regsize_xmode;
3148 return;
3149 }
3150 }
8521c414 3151
dd79bb7e 3152 /* Lowpart subregs are otherwise valid. */
f1f4e530
JM
3153 if (!rknown && offset == subreg_lowpart_offset (ymode, xmode))
3154 {
3155 info->representable_p = true;
3156 rknown = true;
a446b4e8
JM
3157
3158 if (offset == 0 || nregs_xmode == nregs_ymode)
3159 {
3160 info->offset = 0;
3161 info->nregs = nregs_ymode;
3162 return;
3163 }
f1f4e530 3164 }
04c5580f 3165
dd79bb7e
GK
3166 /* This should always pass, otherwise we don't know how to verify
3167 the constraint. These conditions may be relaxed but
3168 subreg_regno_offset would need to be redesigned. */
41374e13 3169 gcc_assert ((GET_MODE_SIZE (xmode) % GET_MODE_SIZE (ymode)) == 0);
41374e13 3170 gcc_assert ((nregs_xmode % nregs_ymode) == 0);
04c5580f 3171
b20b352b 3172 /* The XMODE value can be seen as a vector of NREGS_XMODE
dcc24678 3173 values. The subreg must represent a lowpart of given field.
04c5580f 3174 Compute what field it is. */
f1f4e530
JM
3175 offset_adj = offset;
3176 offset_adj -= subreg_lowpart_offset (ymode,
3177 mode_for_size (GET_MODE_BITSIZE (xmode)
3178 / nregs_xmode,
3179 MODE_INT, 0));
04c5580f 3180
dd79bb7e 3181 /* Size of ymode must not be greater than the size of xmode. */
04c5580f 3182 mode_multiple = GET_MODE_SIZE (xmode) / GET_MODE_SIZE (ymode);
41374e13 3183 gcc_assert (mode_multiple != 0);
04c5580f
JH
3184
3185 y_offset = offset / GET_MODE_SIZE (ymode);
f1f4e530
JM
3186 y_offset_adj = offset_adj / GET_MODE_SIZE (ymode);
3187 nregs_multiple = nregs_xmode / nregs_ymode;
41374e13 3188
f1f4e530 3189 gcc_assert ((offset_adj % GET_MODE_SIZE (ymode)) == 0);
41374e13
NS
3190 gcc_assert ((mode_multiple % nregs_multiple) == 0);
3191
f1f4e530
JM
3192 if (!rknown)
3193 {
3194 info->representable_p = (!(y_offset_adj % (mode_multiple / nregs_multiple)));
3195 rknown = true;
3196 }
3197 info->offset = (y_offset / (mode_multiple / nregs_multiple)) * nregs_ymode;
3198 info->nregs = nregs_ymode;
3199}

/* This function returns the regno offset of a subreg expression.
   xregno - A regno of an inner hard subreg_reg (or what will become one).
   xmode - The mode of xregno.
   offset - The byte offset.
   ymode - The mode of a top level SUBREG (or what may become one).
   RETURN - The regno offset which would be used.  */
unsigned int
subreg_regno_offset (unsigned int xregno, enum machine_mode xmode,
                     unsigned int offset, enum machine_mode ymode)
{
  struct subreg_info info;
  subreg_get_info (xregno, xmode, offset, ymode, &info);
  return info.offset;
}

/* This function returns true when the offset is representable via
   subreg_offset in the given regno.
   xregno - A regno of an inner hard subreg_reg (or what will become one).
   xmode - The mode of xregno.
   offset - The byte offset.
   ymode - The mode of a top level SUBREG (or what may become one).
   RETURN - Whether the offset is representable.  */
bool
subreg_offset_representable_p (unsigned int xregno, enum machine_mode xmode,
                               unsigned int offset, enum machine_mode ymode)
{
  struct subreg_info info;
  subreg_get_info (xregno, xmode, offset, ymode, &info);
  return info.representable_p;
}

/* Return the final regno that a subreg expression refers to.  */
unsigned int
subreg_regno (rtx x)
{
  unsigned int ret;
  rtx subreg = SUBREG_REG (x);
  int regno = REGNO (subreg);

  ret = regno + subreg_regno_offset (regno,
                                     GET_MODE (subreg),
                                     SUBREG_BYTE (x),
                                     GET_MODE (x));
  return ret;
}

/* Return the number of registers that a subreg expression refers
   to.  */
unsigned int
subreg_nregs (rtx x)
{
  struct subreg_info info;
  rtx subreg = SUBREG_REG (x);
  int regno = REGNO (subreg);

  subreg_get_info (regno, GET_MODE (subreg), SUBREG_BYTE (x), GET_MODE (x),
                   &info);
  return info.nregs;
}
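
/* A minimal usage sketch (illustrative only): computing the range of
   hard registers covered by a SUBREG of a hard register, as a caller
   tracking hard register usage might do.

     if (GET_CODE (x) == SUBREG
         && REGNO (SUBREG_REG (x)) < FIRST_PSEUDO_REGISTER)
       {
         unsigned int first = subreg_regno (x);
         unsigned int nregs = subreg_nregs (x);
         // hard registers first .. first + nregs - 1 are affected
       }
*/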

struct parms_set_data
{
  int nregs;
  HARD_REG_SET regs;
};

/* Helper function for noticing stores to parameter registers.  */
static void
parms_set (rtx x, rtx pat ATTRIBUTE_UNUSED, void *data)
{
  struct parms_set_data *d = data;
  if (REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER
      && TEST_HARD_REG_BIT (d->regs, REGNO (x)))
    {
      CLEAR_HARD_REG_BIT (d->regs, REGNO (x));
      d->nregs--;
    }
}

/* Look backward for the first parameter to be loaded.
   Note that loads of all parameters will not necessarily be
   found if CSE has eliminated some of them (e.g., an argument
   to the outer function is passed down as a parameter).
   Do not skip BOUNDARY.  */
rtx
find_first_parameter_load (rtx call_insn, rtx boundary)
{
  struct parms_set_data parm;
  rtx p, before, first_set;

  /* Since different machines initialize their parameter registers
     in different orders, assume nothing.  Collect the set of all
     parameter registers.  */
  CLEAR_HARD_REG_SET (parm.regs);
  parm.nregs = 0;
  for (p = CALL_INSN_FUNCTION_USAGE (call_insn); p; p = XEXP (p, 1))
    if (GET_CODE (XEXP (p, 0)) == USE
        && REG_P (XEXP (XEXP (p, 0), 0)))
      {
        gcc_assert (REGNO (XEXP (XEXP (p, 0), 0)) < FIRST_PSEUDO_REGISTER);

        /* We only care about registers which can hold function
           arguments.  */
        if (!FUNCTION_ARG_REGNO_P (REGNO (XEXP (XEXP (p, 0), 0))))
          continue;

        SET_HARD_REG_BIT (parm.regs, REGNO (XEXP (XEXP (p, 0), 0)));
        parm.nregs++;
      }
  before = call_insn;
  first_set = call_insn;

  /* Search backward for the first set of a register in this set.  */
  while (parm.nregs && before != boundary)
    {
      before = PREV_INSN (before);

      /* It is possible that some loads got CSEed from one call to
         another.  Stop in that case.  */
      if (CALL_P (before))
        break;

      /* Our caller must either ensure that we will find all sets
         (in case code has not been optimized yet), or take care of
         possible labels by setting BOUNDARY to the preceding
         CODE_LABEL.  */
      if (LABEL_P (before))
        {
          gcc_assert (before == boundary);
          break;
        }

      if (INSN_P (before))
        {
          int nregs_old = parm.nregs;
          note_stores (PATTERN (before), parms_set, &parm);
          /* If we found something that did not set a parameter reg,
             we're done.  Do not keep going, as that might result
             in hoisting an insn before the setting of a pseudo
             that is used by the hoisted insn.  */
          if (nregs_old != parm.nregs)
            first_set = before;
          else
            break;
        }
    }
  return first_set;
}

/* Return true if we should avoid inserting code between INSN and the
   preceding call instruction.  */

bool
keep_with_call_p (rtx insn)
{
  rtx set;

  if (INSN_P (insn) && (set = single_set (insn)) != NULL)
    {
      if (REG_P (SET_DEST (set))
          && REGNO (SET_DEST (set)) < FIRST_PSEUDO_REGISTER
          && fixed_regs[REGNO (SET_DEST (set))]
          && general_operand (SET_SRC (set), VOIDmode))
        return true;
      if (REG_P (SET_SRC (set))
          && FUNCTION_VALUE_REGNO_P (REGNO (SET_SRC (set)))
          && REG_P (SET_DEST (set))
          && REGNO (SET_DEST (set)) >= FIRST_PSEUDO_REGISTER)
        return true;
      /* There may be a stack pop just after the call and before the store
         of the return register.  Search for the actual store when deciding
         if we can break or not.  */
      if (SET_DEST (set) == stack_pointer_rtx)
        {
          rtx i2 = next_nonnote_insn (insn);
          if (i2 && keep_with_call_p (i2))
            return true;
        }
    }
  return false;
}

/* Return true if LABEL is a target of JUMP_INSN.  This applies only
   to non-complex jumps.  That is, direct unconditional, conditional,
   and tablejumps, but not computed jumps or returns.  It also does
   not apply to the fallthru case of a conditional jump.  */

bool
label_is_jump_target_p (rtx label, rtx jump_insn)
{
  rtx tmp = JUMP_LABEL (jump_insn);

  if (label == tmp)
    return true;

  if (tablejump_p (jump_insn, NULL, &tmp))
    {
      rtvec vec = XVEC (PATTERN (tmp),
                        GET_CODE (PATTERN (tmp)) == ADDR_DIFF_VEC);
      int i, veclen = GET_NUM_ELEM (vec);

      for (i = 0; i < veclen; ++i)
        if (XEXP (RTVEC_ELT (vec, i), 0) == label)
          return true;
    }

  return false;
}

\f
/* Return an estimate of the cost of computing rtx X.
   One use is in cse, to decide which expression to keep in the hash table.
   Another is in rtl generation, to pick the cheapest way to multiply.
   Other uses like the latter are expected in the future.  */

int
rtx_cost (rtx x, enum rtx_code outer_code ATTRIBUTE_UNUSED)
{
  int i, j;
  enum rtx_code code;
  const char *fmt;
  int total;

  if (x == 0)
    return 0;

  /* Compute the default costs of certain things.
     Note that targetm.rtx_costs can override the defaults.  */

  code = GET_CODE (x);
  switch (code)
    {
    case MULT:
      total = COSTS_N_INSNS (5);
      break;
    case DIV:
    case UDIV:
    case MOD:
    case UMOD:
      total = COSTS_N_INSNS (7);
      break;
    case USE:
      /* Used in combine.c as a marker.  */
      total = 0;
      break;
    default:
      total = COSTS_N_INSNS (1);
    }

  switch (code)
    {
    case REG:
      return 0;

    case SUBREG:
      total = 0;
      /* If we can't tie these modes, make this expensive.  The larger
         the mode, the more expensive it is.  */
      if (! MODES_TIEABLE_P (GET_MODE (x), GET_MODE (SUBREG_REG (x))))
        return COSTS_N_INSNS (2
                              + GET_MODE_SIZE (GET_MODE (x)) / UNITS_PER_WORD);
      break;

    default:
      if (targetm.rtx_costs (x, code, outer_code, &total))
        return total;
      break;
    }

  /* Sum the costs of the sub-rtx's, plus cost of this operation,
     which is already in total.  */

  fmt = GET_RTX_FORMAT (code);
  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    if (fmt[i] == 'e')
      total += rtx_cost (XEXP (x, i), code);
    else if (fmt[i] == 'E')
      for (j = 0; j < XVECLEN (x, i); j++)
        total += rtx_cost (XVECEXP (x, i, j), code);

  return total;
}
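
/* A minimal usage sketch (illustrative only): the kind of comparison
   expansion code makes when choosing between two equivalent forms of
   the same value, say a multiply versus a shift-add sequence; A and B
   are hypothetical rtxes.

     if (rtx_cost (a, SET) <= rtx_cost (b, SET))
       ...   // prefer A

   SET is the usual OUTER_CODE when costing a whole statement; passing
   the outer code lets targets price an operand differently depending
   on its context.  */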
\f
/* Return the cost of address expression X.
   Expect that X is a properly formed address reference.  */

int
address_cost (rtx x, enum machine_mode mode)
{
  /* We may be asked for the cost of various unusual addresses, such as
     operands of push instructions.  It is not worthwhile to complicate
     writing of the target hook by such cases.  */

  if (!memory_address_p (mode, x))
    return 1000;

  return targetm.address_cost (x);
}

/* If the target doesn't override, compute the cost as with arithmetic.  */

int
default_address_cost (rtx x)
{
  return rtx_cost (x, MEM);
}
\f

unsigned HOST_WIDE_INT
nonzero_bits (rtx x, enum machine_mode mode)
{
  return cached_nonzero_bits (x, mode, NULL_RTX, VOIDmode, 0);
}

unsigned int
num_sign_bit_copies (rtx x, enum machine_mode mode)
{
  return cached_num_sign_bit_copies (x, mode, NULL_RTX, VOIDmode, 0);
}

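/* A minimal usage sketch (illustrative only): alignment reasoning over
   nonzero_bits.  If the two low bits of X can never be set, X is known
   to be a multiple of 4.

     if ((nonzero_bits (x, GET_MODE (x)) & 3) == 0)
       ...   // x is a multiple of 4

   Similarly, num_sign_bit_copies (x, mode) > 1 proves that doubling X
   cannot overflow MODE, since X fits in one bit less than MODE.  */
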
/* The function cached_nonzero_bits is a wrapper around nonzero_bits1.
   It avoids exponential behavior in nonzero_bits1 when X has
   identical subexpressions on the first or the second level.  */

static unsigned HOST_WIDE_INT
cached_nonzero_bits (rtx x, enum machine_mode mode, rtx known_x,
                     enum machine_mode known_mode,
                     unsigned HOST_WIDE_INT known_ret)
{
  if (x == known_x && mode == known_mode)
    return known_ret;

  /* Try to find identical subexpressions.  If found call
     nonzero_bits1 on X with the subexpressions as KNOWN_X and the
     precomputed value for the subexpression as KNOWN_RET.  */

  if (ARITHMETIC_P (x))
    {
      rtx x0 = XEXP (x, 0);
      rtx x1 = XEXP (x, 1);

      /* Check the first level.  */
      if (x0 == x1)
        return nonzero_bits1 (x, mode, x0, mode,
                              cached_nonzero_bits (x0, mode, known_x,
                                                   known_mode, known_ret));

      /* Check the second level.  */
      if (ARITHMETIC_P (x0)
          && (x1 == XEXP (x0, 0) || x1 == XEXP (x0, 1)))
        return nonzero_bits1 (x, mode, x1, mode,
                              cached_nonzero_bits (x1, mode, known_x,
                                                   known_mode, known_ret));

      if (ARITHMETIC_P (x1)
          && (x0 == XEXP (x1, 0) || x0 == XEXP (x1, 1)))
        return nonzero_bits1 (x, mode, x0, mode,
                              cached_nonzero_bits (x0, mode, known_x,
                                                   known_mode, known_ret));
    }

  return nonzero_bits1 (x, mode, known_x, known_mode, known_ret);
}

/* We let num_sign_bit_copies recur into nonzero_bits as that is useful.
   We don't let nonzero_bits recur into num_sign_bit_copies, because that
   is less useful.  We can't allow both, because that results in exponential
   run time recursion.  There is a nullstone testcase that triggered
   this.  This macro avoids accidental uses of num_sign_bit_copies.  */
#define cached_num_sign_bit_copies sorry_i_am_preventing_exponential_behavior
3572
3573/* Given an expression, X, compute which bits in X can be nonzero.
3574 We don't care about bits outside of those defined in MODE.
3575
3576 For most X this is simply GET_MODE_MASK (GET_MODE (MODE)), but if X is
3577 an arithmetic operation, we can do better. */
3578
3579static unsigned HOST_WIDE_INT
3580nonzero_bits1 (rtx x, enum machine_mode mode, rtx known_x,
3581 enum machine_mode known_mode,
3582 unsigned HOST_WIDE_INT known_ret)
3583{
3584 unsigned HOST_WIDE_INT nonzero = GET_MODE_MASK (mode);
3585 unsigned HOST_WIDE_INT inner_nz;
3586 enum rtx_code code;
3587 unsigned int mode_width = GET_MODE_BITSIZE (mode);
3588
3589 /* For floating-point values, assume all bits are needed. */
3590 if (FLOAT_MODE_P (GET_MODE (x)) || FLOAT_MODE_P (mode))
3591 return nonzero;
3592
3593 /* If X is wider than MODE, use its mode instead. */
3594 if (GET_MODE_BITSIZE (GET_MODE (x)) > mode_width)
3595 {
3596 mode = GET_MODE (x);
3597 nonzero = GET_MODE_MASK (mode);
3598 mode_width = GET_MODE_BITSIZE (mode);
3599 }
3600
3601 if (mode_width > HOST_BITS_PER_WIDE_INT)
3602 /* Our only callers in this case look for single bit values. So
3603 just return the mode mask. Those tests will then be false. */
3604 return nonzero;
3605
3606#ifndef WORD_REGISTER_OPERATIONS
3607 /* If MODE is wider than X, but both are a single word for both the host
3608 and target machines, we can compute this from which bits of the
3609 object might be nonzero in its own mode, taking into account the fact
3610 that on many CISC machines, accessing an object in a wider mode
3611 causes the high-order bits to become undefined. So they are
3612 not known to be zero. */
3613
3614 if (GET_MODE (x) != VOIDmode && GET_MODE (x) != mode
3615 && GET_MODE_BITSIZE (GET_MODE (x)) <= BITS_PER_WORD
3616 && GET_MODE_BITSIZE (GET_MODE (x)) <= HOST_BITS_PER_WIDE_INT
3617 && GET_MODE_BITSIZE (mode) > GET_MODE_BITSIZE (GET_MODE (x)))
3618 {
3619 nonzero &= cached_nonzero_bits (x, GET_MODE (x),
3620 known_x, known_mode, known_ret);
3621 nonzero |= GET_MODE_MASK (mode) & ~GET_MODE_MASK (GET_MODE (x));
3622 return nonzero;
3623 }
3624#endif
3625
3626 code = GET_CODE (x);
3627 switch (code)
3628 {
3629 case REG:
3630#if defined(POINTERS_EXTEND_UNSIGNED) && !defined(HAVE_ptr_extend)
3631 /* If pointers extend unsigned and this is a pointer in Pmode, say that
3632 all the bits above ptr_mode are known to be zero. */
3633 if (POINTERS_EXTEND_UNSIGNED && GET_MODE (x) == Pmode
3634 && REG_POINTER (x))
3635 nonzero &= GET_MODE_MASK (ptr_mode);
3636#endif
3637
3638 /* Include declared information about alignment of pointers. */
3639 /* ??? We don't properly preserve REG_POINTER changes across
3640 pointer-to-integer casts, so we can't trust it except for
3641 things that we know must be pointers. See execute/960116-1.c. */
3642 if ((x == stack_pointer_rtx
3643 || x == frame_pointer_rtx
3644 || x == arg_pointer_rtx)
3645 && REGNO_POINTER_ALIGN (REGNO (x)))
3646 {
3647 unsigned HOST_WIDE_INT alignment
3648 = REGNO_POINTER_ALIGN (REGNO (x)) / BITS_PER_UNIT;
3649
3650#ifdef PUSH_ROUNDING
3651 /* If PUSH_ROUNDING is defined, it is possible for the
3652 stack to be momentarily aligned only to that amount,
3653 so we pick the least alignment. */
3654 if (x == stack_pointer_rtx && PUSH_ARGS)
3655 alignment = MIN ((unsigned HOST_WIDE_INT) PUSH_ROUNDING (1),
3656 alignment);
3657#endif
3658
3659 nonzero &= ~(alignment - 1);
3660 }
3661
3662 {
3663 unsigned HOST_WIDE_INT nonzero_for_hook = nonzero;
3664 rtx new = rtl_hooks.reg_nonzero_bits (x, mode, known_x,
3665 known_mode, known_ret,
3666 &nonzero_for_hook);
3667
3668 if (new)
3669 nonzero_for_hook &= cached_nonzero_bits (new, mode, known_x,
3670 known_mode, known_ret);
3671
3672 return nonzero_for_hook;
3673 }
3674
3675 case CONST_INT:
3676#ifdef SHORT_IMMEDIATES_SIGN_EXTEND
3677 /* If X is negative in MODE, sign-extend the value. */
3678 if (INTVAL (x) > 0 && mode_width < BITS_PER_WORD
3679 && 0 != (INTVAL (x) & ((HOST_WIDE_INT) 1 << (mode_width - 1))))
3680 return (INTVAL (x) | ((HOST_WIDE_INT) (-1) << mode_width));
3681#endif
3682
3683 return INTVAL (x);
3684
3685 case MEM:
3686#ifdef LOAD_EXTEND_OP
3687 /* In many, if not most, RISC machines, reading a byte from memory
3688 zeros the rest of the register. Noticing that fact saves a lot
3689 of extra zero-extends. */
3690 if (LOAD_EXTEND_OP (GET_MODE (x)) == ZERO_EXTEND)
3691 nonzero &= GET_MODE_MASK (GET_MODE (x));
3692#endif
3693 break;
3694
3695 case EQ: case NE:
3696 case UNEQ: case LTGT:
3697 case GT: case GTU: case UNGT:
3698 case LT: case LTU: case UNLT:
3699 case GE: case GEU: case UNGE:
3700 case LE: case LEU: case UNLE:
3701 case UNORDERED: case ORDERED:
2f93eea8
PB
3702 /* If this produces an integer result, we know which bits are set.
3703 Code here used to clear bits outside the mode of X, but that is
3704 now done above. */
505ac507
RH
3705 /* Mind that MODE is the mode the caller wants to look at this
3706 operation in, and not the actual operation mode. We can wind
3707 up with (subreg:DI (gt:V4HI x y)), and we don't have anything
3708 that describes the results of a vector compare. */
3709 if (GET_MODE_CLASS (GET_MODE (x)) == MODE_INT
2f93eea8
PB
3710 && mode_width <= HOST_BITS_PER_WIDE_INT)
3711 nonzero = STORE_FLAG_VALUE;
3712 break;
3713
3714 case NEG:
3715#if 0
3716 /* Disabled to avoid exponential mutual recursion between nonzero_bits
3717 and num_sign_bit_copies. */
3718 if (num_sign_bit_copies (XEXP (x, 0), GET_MODE (x))
3719 == GET_MODE_BITSIZE (GET_MODE (x)))
3720 nonzero = 1;
3721#endif
3722
3723 if (GET_MODE_SIZE (GET_MODE (x)) < mode_width)
3724 nonzero |= (GET_MODE_MASK (mode) & ~GET_MODE_MASK (GET_MODE (x)));
3725 break;
3726
3727 case ABS:
3728#if 0
3729 /* Disabled to avoid exponential mutual recursion between nonzero_bits
3730 and num_sign_bit_copies. */
3731 if (num_sign_bit_copies (XEXP (x, 0), GET_MODE (x))
3732 == GET_MODE_BITSIZE (GET_MODE (x)))
3733 nonzero = 1;
3734#endif
3735 break;
3736
3737 case TRUNCATE:
3738 nonzero &= (cached_nonzero_bits (XEXP (x, 0), mode,
3739 known_x, known_mode, known_ret)
3740 & GET_MODE_MASK (mode));
3741 break;
3742
3743 case ZERO_EXTEND:
3744 nonzero &= cached_nonzero_bits (XEXP (x, 0), mode,
3745 known_x, known_mode, known_ret);
3746 if (GET_MODE (XEXP (x, 0)) != VOIDmode)
3747 nonzero &= GET_MODE_MASK (GET_MODE (XEXP (x, 0)));
3748 break;
3749
3750 case SIGN_EXTEND:
3751 /* If the sign bit is known clear, this is the same as ZERO_EXTEND.
3752 Otherwise, show all the bits in the outer mode but not the inner
3753 may be nonzero. */
3754 inner_nz = cached_nonzero_bits (XEXP (x, 0), mode,
3755 known_x, known_mode, known_ret);
3756 if (GET_MODE (XEXP (x, 0)) != VOIDmode)
3757 {
3758 inner_nz &= GET_MODE_MASK (GET_MODE (XEXP (x, 0)));
3759 if (inner_nz
3760 & (((HOST_WIDE_INT) 1
3761 << (GET_MODE_BITSIZE (GET_MODE (XEXP (x, 0))) - 1))))
3762 inner_nz |= (GET_MODE_MASK (mode)
3763 & ~GET_MODE_MASK (GET_MODE (XEXP (x, 0))));
3764 }
3765
3766 nonzero &= inner_nz;
3767 break;
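/* Worked example (hypothetical values): for
   (sign_extend:SI (reg:QI r)) with inner_nz == 0x90, bit 7 of the
   QImode operand may be set, so inner_nz becomes
   0x90 | 0xffffff00 == 0xffffff90: every bit above the inner mode may
   be a copy of the sign bit.  With inner_nz == 0x70, the sign bit is
   known clear and the extension contributes no high bits, exactly
   like ZERO_EXTEND.  */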
3768
3769 case AND:
3770 nonzero &= cached_nonzero_bits (XEXP (x, 0), mode,
3771 known_x, known_mode, known_ret)
3772 & cached_nonzero_bits (XEXP (x, 1), mode,
3773 known_x, known_mode, known_ret);
3774 break;
3775
3776 case XOR: case IOR:
3777 case UMIN: case UMAX: case SMIN: case SMAX:
3778 {
3779 unsigned HOST_WIDE_INT nonzero0 =
3780 cached_nonzero_bits (XEXP (x, 0), mode,
3781 known_x, known_mode, known_ret);
3782
3783 /* Don't call nonzero_bits for the second time if it cannot change
3784 anything. */
3785 if ((nonzero & nonzero0) != nonzero)
3786 nonzero &= nonzero0
3787 | cached_nonzero_bits (XEXP (x, 1), mode,
3788 known_x, known_mode, known_ret);
3789 }
3790 break;
3791
3792 case PLUS: case MINUS:
3793 case MULT:
3794 case DIV: case UDIV:
3795 case MOD: case UMOD:
3796 /* We can apply the rules of arithmetic to compute the number of
3797 high- and low-order zero bits of these operations. We start by
3798 computing the width (position of the highest-order nonzero bit)
3799 and the number of low-order zero bits for each value. */
3800 {
3801 unsigned HOST_WIDE_INT nz0 =
3802 cached_nonzero_bits (XEXP (x, 0), mode,
3803 known_x, known_mode, known_ret);
3804 unsigned HOST_WIDE_INT nz1 =
3805 cached_nonzero_bits (XEXP (x, 1), mode,
3806 known_x, known_mode, known_ret);
3807 int sign_index = GET_MODE_BITSIZE (GET_MODE (x)) - 1;
3808 int width0 = floor_log2 (nz0) + 1;
3809 int width1 = floor_log2 (nz1) + 1;
3810 int low0 = floor_log2 (nz0 & -nz0);
3811 int low1 = floor_log2 (nz1 & -nz1);
3812 HOST_WIDE_INT op0_maybe_minusp
3813 = (nz0 & ((HOST_WIDE_INT) 1 << sign_index));
3814 HOST_WIDE_INT op1_maybe_minusp
3815 = (nz1 & ((HOST_WIDE_INT) 1 << sign_index));
3816 unsigned int result_width = mode_width;
3817 int result_low = 0;
3818
3819 switch (code)
3820 {
3821 case PLUS:
3822 result_width = MAX (width0, width1) + 1;
3823 result_low = MIN (low0, low1);
3824 break;
3825 case MINUS:
3826 result_low = MIN (low0, low1);
3827 break;
3828 case MULT:
3829 result_width = width0 + width1;
3830 result_low = low0 + low1;
3831 break;
3832 case DIV:
3833 if (width1 == 0)
3834 break;
3835 if (! op0_maybe_minusp && ! op1_maybe_minusp)
3836 result_width = width0;
3837 break;
3838 case UDIV:
3839 if (width1 == 0)
3840 break;
3841 result_width = width0;
3842 break;
3843 case MOD:
3844 if (width1 == 0)
3845 break;
3846 if (! op0_maybe_minusp && ! op1_maybe_minusp)
3847 result_width = MIN (width0, width1);
3848 result_low = MIN (low0, low1);
3849 break;
3850 case UMOD:
3851 if (width1 == 0)
3852 break;
3853 result_width = MIN (width0, width1);
3854 result_low = MIN (low0, low1);
3855 break;
3856 default:
 3857 gcc_unreachable ();
3858 }
3859
3860 if (result_width < mode_width)
3861 nonzero &= ((HOST_WIDE_INT) 1 << result_width) - 1;
3862
3863 if (result_low > 0)
3864 nonzero &= ~(((HOST_WIDE_INT) 1 << result_low) - 1);
3865
3866#ifdef POINTERS_EXTEND_UNSIGNED
3867 /* If pointers extend unsigned and this is an addition or subtraction
3868 to a pointer in Pmode, all the bits above ptr_mode are known to be
3869 zero. */
3870 if (POINTERS_EXTEND_UNSIGNED > 0 && GET_MODE (x) == Pmode
3871 && (code == PLUS || code == MINUS)
 3872 && REG_P (XEXP (x, 0)) && REG_POINTER (XEXP (x, 0)))
3873 nonzero &= GET_MODE_MASK (ptr_mode);
3874#endif
3875 }
3876 break;
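/* Worked example of the width/low-bit arithmetic above (hypothetical
   operands): suppose nz0 == 0x06 and nz1 == 0x0c, so that

     width0 == 3, low0 == 1, width1 == 4, low1 == 2.

   For PLUS, result_width == MAX (3, 4) + 1 == 5 (one extra bit for
   the carry) and result_low == MIN (1, 2) == 1, masking nonzero down
   to 0x1e.  For MULT, result_width == 3 + 4 == 7 and
   result_low == 1 + 2 == 3, masking nonzero down to 0x78; indeed
   0x06 * 0x0c == 0x48, which fits.  */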
3877
3878 case ZERO_EXTRACT:
3879 if (GET_CODE (XEXP (x, 1)) == CONST_INT
3880 && INTVAL (XEXP (x, 1)) < HOST_BITS_PER_WIDE_INT)
3881 nonzero &= ((HOST_WIDE_INT) 1 << INTVAL (XEXP (x, 1))) - 1;
3882 break;
3883
3884 case SUBREG:
3885 /* If this is a SUBREG formed for a promoted variable that has
3886 been zero-extended, we know that at least the high-order bits
3887 are zero, though others might be too. */
3888
3889 if (SUBREG_PROMOTED_VAR_P (x) && SUBREG_PROMOTED_UNSIGNED_P (x) > 0)
3890 nonzero = GET_MODE_MASK (GET_MODE (x))
3891 & cached_nonzero_bits (SUBREG_REG (x), GET_MODE (x),
3892 known_x, known_mode, known_ret);
3893
3894 /* If the inner mode is a single word for both the host and target
3895 machines, we can compute this from which bits of the inner
3896 object might be nonzero. */
3897 if (GET_MODE_BITSIZE (GET_MODE (SUBREG_REG (x))) <= BITS_PER_WORD
3898 && (GET_MODE_BITSIZE (GET_MODE (SUBREG_REG (x)))
3899 <= HOST_BITS_PER_WIDE_INT))
3900 {
3901 nonzero &= cached_nonzero_bits (SUBREG_REG (x), mode,
3902 known_x, known_mode, known_ret);
3903
3904#if defined (WORD_REGISTER_OPERATIONS) && defined (LOAD_EXTEND_OP)
3905 /* If this is a typical RISC machine, we only have to worry
3906 about the way loads are extended. */
3907 if ((LOAD_EXTEND_OP (GET_MODE (SUBREG_REG (x))) == SIGN_EXTEND
3908 ? (((nonzero
3909 & (((unsigned HOST_WIDE_INT) 1
3910 << (GET_MODE_BITSIZE (GET_MODE (SUBREG_REG (x))) - 1))))
3911 != 0))
3912 : LOAD_EXTEND_OP (GET_MODE (SUBREG_REG (x))) != ZERO_EXTEND)
 3913 || !MEM_P (SUBREG_REG (x)))
3914#endif
3915 {
3916 /* On many CISC machines, accessing an object in a wider mode
3917 causes the high-order bits to become undefined. So they are
3918 not known to be zero. */
3919 if (GET_MODE_SIZE (GET_MODE (x))
3920 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))))
3921 nonzero |= (GET_MODE_MASK (GET_MODE (x))
3922 & ~GET_MODE_MASK (GET_MODE (SUBREG_REG (x))));
3923 }
3924 }
3925 break;
3926
3927 case ASHIFTRT:
3928 case LSHIFTRT:
3929 case ASHIFT:
3930 case ROTATE:
3931 /* The nonzero bits are in two classes: any bits within MODE
3932 that aren't in GET_MODE (x) are always significant. The rest of the
3933 nonzero bits are those that are significant in the operand of
3934 the shift when shifted the appropriate number of bits. This
3935 shows that high-order bits are cleared by the right shift and
3936 low-order bits by left shifts. */
3937 if (GET_CODE (XEXP (x, 1)) == CONST_INT
3938 && INTVAL (XEXP (x, 1)) >= 0
3939 && INTVAL (XEXP (x, 1)) < HOST_BITS_PER_WIDE_INT)
3940 {
3941 enum machine_mode inner_mode = GET_MODE (x);
3942 unsigned int width = GET_MODE_BITSIZE (inner_mode);
3943 int count = INTVAL (XEXP (x, 1));
3944 unsigned HOST_WIDE_INT mode_mask = GET_MODE_MASK (inner_mode);
3945 unsigned HOST_WIDE_INT op_nonzero =
3946 cached_nonzero_bits (XEXP (x, 0), mode,
3947 known_x, known_mode, known_ret);
3948 unsigned HOST_WIDE_INT inner = op_nonzero & mode_mask;
3949 unsigned HOST_WIDE_INT outer = 0;
3950
3951 if (mode_width > width)
3952 outer = (op_nonzero & nonzero & ~mode_mask);
3953
3954 if (code == LSHIFTRT)
3955 inner >>= count;
3956 else if (code == ASHIFTRT)
3957 {
3958 inner >>= count;
3959
3960 /* If the sign bit may have been nonzero before the shift, we
3961 need to mark all the places it could have been copied to
3962 by the shift as possibly nonzero. */
3963 if (inner & ((HOST_WIDE_INT) 1 << (width - 1 - count)))
3964 inner |= (((HOST_WIDE_INT) 1 << count) - 1) << (width - count);
3965 }
3966 else if (code == ASHIFT)
3967 inner <<= count;
3968 else
3969 inner = ((inner << (count % width)
3970 | (inner >> (width - (count % width)))) & mode_mask);
3971
3972 nonzero &= (outer | inner);
3973 }
3974 break;
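/* Worked example (hypothetical values): for
   (ashiftrt:QI x (const_int 2)) with op_nonzero == 0xff, width == 8
   and count == 2, inner starts as 0xff >> 2 == 0x3f.  Bit
   8 - 1 - 2 == 5 is set, so the sign bit may have been shifted down,
   and inner is widened by ((1 << 2) - 1) << 6 == 0xc0 back to 0xff.
   A LSHIFTRT of the same operand would stop at 0x3f, with the two
   high-order bits known zero.  */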
3975
3976 case FFS:
3977 case POPCOUNT:
3978 /* This is at most the number of bits in the mode. */
3979 nonzero = ((HOST_WIDE_INT) 2 << (floor_log2 (mode_width))) - 1;
3980 break;
3981
3982 case CLZ:
3983 /* If CLZ has a known value at zero, then the nonzero bits are
3984 that value, plus the number of bits in the mode minus one. */
3985 if (CLZ_DEFINED_VALUE_AT_ZERO (mode, nonzero))
3986 nonzero |= ((HOST_WIDE_INT) 1 << (floor_log2 (mode_width))) - 1;
3987 else
3988 nonzero = -1;
3989 break;
3990
3991 case CTZ:
3992 /* If CTZ has a known value at zero, then the nonzero bits are
3993 that value, plus the number of bits in the mode minus one. */
3994 if (CTZ_DEFINED_VALUE_AT_ZERO (mode, nonzero))
3995 nonzero |= ((HOST_WIDE_INT) 1 << (floor_log2 (mode_width))) - 1;
3996 else
3997 nonzero = -1;
3998 break;
3999
4000 case PARITY:
4001 nonzero = 1;
4002 break;
4003
4004 case IF_THEN_ELSE:
4005 {
4006 unsigned HOST_WIDE_INT nonzero_true =
4007 cached_nonzero_bits (XEXP (x, 1), mode,
4008 known_x, known_mode, known_ret);
4009
4010 /* Don't call nonzero_bits for the second time if it cannot change
4011 anything. */
4012 if ((nonzero & nonzero_true) != nonzero)
4013 nonzero &= nonzero_true
4014 | cached_nonzero_bits (XEXP (x, 2), mode,
4015 known_x, known_mode, known_ret);
4016 }
4017 break;
4018
4019 default:
4020 break;
4021 }
4022
4023 return nonzero;
4024}
4025
4026/* See the macro definition above. */
4027#undef cached_num_sign_bit_copies
4028
4029\f
4030/* The function cached_num_sign_bit_copies is a wrapper around
4031 num_sign_bit_copies1. It avoids exponential behavior in
4032 num_sign_bit_copies1 when X has identical subexpressions on the
4033 first or the second level. */
4034
4035static unsigned int
4036cached_num_sign_bit_copies (rtx x, enum machine_mode mode, rtx known_x,
4037 enum machine_mode known_mode,
4038 unsigned int known_ret)
4039{
4040 if (x == known_x && mode == known_mode)
4041 return known_ret;
4042
4043 /* Try to find identical subexpressions. If found call
4044 num_sign_bit_copies1 on X with the subexpressions as KNOWN_X and
4045 the precomputed value for the subexpression as KNOWN_RET. */
4046
4047 if (ARITHMETIC_P (x))
4048 {
4049 rtx x0 = XEXP (x, 0);
4050 rtx x1 = XEXP (x, 1);
4051
4052 /* Check the first level. */
4053 if (x0 == x1)
4054 return
4055 num_sign_bit_copies1 (x, mode, x0, mode,
4056 cached_num_sign_bit_copies (x0, mode, known_x,
4057 known_mode,
4058 known_ret));
4059
4060 /* Check the second level. */
4061 if (ARITHMETIC_P (x0)
4062 && (x1 == XEXP (x0, 0) || x1 == XEXP (x0, 1)))
4063 return
4064 num_sign_bit_copies1 (x, mode, x1, mode,
4065 cached_num_sign_bit_copies (x1, mode, known_x,
4066 known_mode,
4067 known_ret));
4068
4069 if (ARITHMETIC_P (x1)
4070 && (x0 == XEXP (x1, 0) || x0 == XEXP (x1, 1)))
4071 return
4072 num_sign_bit_copies1 (x, mode, x0, mode,
4073 cached_num_sign_bit_copies (x0, mode, known_x,
4074 known_mode,
4075 known_ret));
4076 }
4077
4078 return num_sign_bit_copies1 (x, mode, known_x, known_mode, known_ret);
4079}
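/* For exposition: the caching matters for shapes like
   (plus:SI (reg:SI r) (reg:SI r)), where x0 == x1.  The copies for r
   are computed once and threaded through as KNOWN_X/KNOWN_RET, so
   the second occurrence is answered without another walk; without
   this, nested shared subexpressions would make the recursion
   exponential in the expression depth.  */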
4080
4081/* Return the number of bits at the high-order end of X that are known to
4082 be equal to the sign bit. X will be used in mode MODE; if MODE is
4083 VOIDmode, X will be used in its own mode. The returned value will always
4084 be between 1 and the number of bits in MODE. */
4085
4086static unsigned int
4087num_sign_bit_copies1 (rtx x, enum machine_mode mode, rtx known_x,
4088 enum machine_mode known_mode,
4089 unsigned int known_ret)
4090{
4091 enum rtx_code code = GET_CODE (x);
4092 unsigned int bitwidth = GET_MODE_BITSIZE (mode);
4093 int num0, num1, result;
4094 unsigned HOST_WIDE_INT nonzero;
4095
4096 /* If we weren't given a mode, use the mode of X. If the mode is still
4097 VOIDmode, we don't know anything. Likewise if one of the modes is
4098 floating-point. */
4099
4100 if (mode == VOIDmode)
4101 mode = GET_MODE (x);
4102
4103 if (mode == VOIDmode || FLOAT_MODE_P (mode) || FLOAT_MODE_P (GET_MODE (x)))
4104 return 1;
4105
4106 /* For a smaller object, just ignore the high bits. */
4107 if (bitwidth < GET_MODE_BITSIZE (GET_MODE (x)))
4108 {
4109 num0 = cached_num_sign_bit_copies (x, GET_MODE (x),
4110 known_x, known_mode, known_ret);
4111 return MAX (1,
4112 num0 - (int) (GET_MODE_BITSIZE (GET_MODE (x)) - bitwidth));
4113 }
4114
4115 if (GET_MODE (x) != VOIDmode && bitwidth > GET_MODE_BITSIZE (GET_MODE (x)))
4116 {
4117#ifndef WORD_REGISTER_OPERATIONS
4118 /* If this machine does not do all register operations on the entire
4119 register and MODE is wider than the mode of X, we can say nothing
4120 at all about the high-order bits. */
4121 return 1;
4122#else
4123 /* Likewise on machines that do, if the mode of the object is smaller
4124 than a word and loads of that size don't sign extend, we can say
4125 nothing about the high order bits. */
4126 if (GET_MODE_BITSIZE (GET_MODE (x)) < BITS_PER_WORD
4127#ifdef LOAD_EXTEND_OP
4128 && LOAD_EXTEND_OP (GET_MODE (x)) != SIGN_EXTEND
4129#endif
4130 )
4131 return 1;
4132#endif
4133 }
4134
4135 switch (code)
4136 {
4137 case REG:
4138
4139#if defined(POINTERS_EXTEND_UNSIGNED) && !defined(HAVE_ptr_extend)
4140 /* If pointers extend signed and this is a pointer in Pmode, say that
4141 all the bits above ptr_mode are known to be sign bit copies. */
4142 if (! POINTERS_EXTEND_UNSIGNED && GET_MODE (x) == Pmode && mode == Pmode
4143 && REG_POINTER (x))
4144 return GET_MODE_BITSIZE (Pmode) - GET_MODE_BITSIZE (ptr_mode) + 1;
4145#endif
4146
4147 {
4148 unsigned int copies_for_hook = 1, copies = 1;
4149 rtx new = rtl_hooks.reg_num_sign_bit_copies (x, mode, known_x,
4150 known_mode, known_ret,
4151 &copies_for_hook);
4152
4153 if (new)
4154 copies = cached_num_sign_bit_copies (new, mode, known_x,
4155 known_mode, known_ret);
4156
4157 if (copies > 1 || copies_for_hook > 1)
4158 return MAX (copies, copies_for_hook);
4159
4160 /* Else, use nonzero_bits to guess num_sign_bit_copies (see below). */
4161 }
4162 break;
4163
4164 case MEM:
4165#ifdef LOAD_EXTEND_OP
 4166 /* Some RISC machines sign-extend all loads smaller than a word. */
4167 if (LOAD_EXTEND_OP (GET_MODE (x)) == SIGN_EXTEND)
4168 return MAX (1, ((int) bitwidth
4169 - (int) GET_MODE_BITSIZE (GET_MODE (x)) + 1));
4170#endif
4171 break;
4172
4173 case CONST_INT:
4174 /* If the constant is negative, take its 1's complement and remask.
4175 Then see how many zero bits we have. */
4176 nonzero = INTVAL (x) & GET_MODE_MASK (mode);
4177 if (bitwidth <= HOST_BITS_PER_WIDE_INT
4178 && (nonzero & ((HOST_WIDE_INT) 1 << (bitwidth - 1))) != 0)
4179 nonzero = (~nonzero) & GET_MODE_MASK (mode);
4180
4181 return (nonzero == 0 ? bitwidth : bitwidth - floor_log2 (nonzero) - 1);
4182
4183 case SUBREG:
4184 /* If this is a SUBREG for a promoted object that is sign-extended
4185 and we are looking at it in a wider mode, we know that at least the
4186 high-order bits are known to be sign bit copies. */
4187
4188 if (SUBREG_PROMOTED_VAR_P (x) && ! SUBREG_PROMOTED_UNSIGNED_P (x))
4189 {
4190 num0 = cached_num_sign_bit_copies (SUBREG_REG (x), mode,
4191 known_x, known_mode, known_ret);
4192 return MAX ((int) bitwidth
4193 - (int) GET_MODE_BITSIZE (GET_MODE (x)) + 1,
4194 num0);
4195 }
4196
4197 /* For a smaller object, just ignore the high bits. */
4198 if (bitwidth <= GET_MODE_BITSIZE (GET_MODE (SUBREG_REG (x))))
4199 {
4200 num0 = cached_num_sign_bit_copies (SUBREG_REG (x), VOIDmode,
4201 known_x, known_mode, known_ret);
4202 return MAX (1, (num0
4203 - (int) (GET_MODE_BITSIZE (GET_MODE (SUBREG_REG (x)))
4204 - bitwidth)));
4205 }
4206
4207#ifdef WORD_REGISTER_OPERATIONS
4208#ifdef LOAD_EXTEND_OP
4209 /* For paradoxical SUBREGs on machines where all register operations
4210 affect the entire register, just look inside. Note that we are
4211 passing MODE to the recursive call, so the number of sign bit copies
4212 will remain relative to that mode, not the inner mode. */
4213
4214 /* This works only if loads sign extend. Otherwise, if we get a
4215 reload for the inner part, it may be loaded from the stack, and
4216 then we lose all sign bit copies that existed before the store
4217 to the stack. */
4218
4219 if ((GET_MODE_SIZE (GET_MODE (x))
4220 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))))
4221 && LOAD_EXTEND_OP (GET_MODE (SUBREG_REG (x))) == SIGN_EXTEND
 4222 && MEM_P (SUBREG_REG (x)))
4223 return cached_num_sign_bit_copies (SUBREG_REG (x), mode,
4224 known_x, known_mode, known_ret);
4225#endif
4226#endif
4227 break;
4228
4229 case SIGN_EXTRACT:
4230 if (GET_CODE (XEXP (x, 1)) == CONST_INT)
4231 return MAX (1, (int) bitwidth - INTVAL (XEXP (x, 1)));
4232 break;
4233
4234 case SIGN_EXTEND:
4235 return (bitwidth - GET_MODE_BITSIZE (GET_MODE (XEXP (x, 0)))
4236 + cached_num_sign_bit_copies (XEXP (x, 0), VOIDmode,
4237 known_x, known_mode, known_ret));
4238
4239 case TRUNCATE:
4240 /* For a smaller object, just ignore the high bits. */
4241 num0 = cached_num_sign_bit_copies (XEXP (x, 0), VOIDmode,
4242 known_x, known_mode, known_ret);
4243 return MAX (1, (num0 - (int) (GET_MODE_BITSIZE (GET_MODE (XEXP (x, 0)))
4244 - bitwidth)));
4245
4246 case NOT:
4247 return cached_num_sign_bit_copies (XEXP (x, 0), mode,
4248 known_x, known_mode, known_ret);
4249
4250 case ROTATE: case ROTATERT:
4251 /* If we are rotating left by a number of bits less than the number
4252 of sign bit copies, we can just subtract that amount from the
4253 number. */
4254 if (GET_CODE (XEXP (x, 1)) == CONST_INT
4255 && INTVAL (XEXP (x, 1)) >= 0
4256 && INTVAL (XEXP (x, 1)) < (int) bitwidth)
4257 {
4258 num0 = cached_num_sign_bit_copies (XEXP (x, 0), mode,
4259 known_x, known_mode, known_ret);
4260 return MAX (1, num0 - (code == ROTATE ? INTVAL (XEXP (x, 1))
4261 : (int) bitwidth - INTVAL (XEXP (x, 1))));
4262 }
4263 break;
4264
4265 case NEG:
4266 /* In general, this subtracts one sign bit copy. But if the value
4267 is known to be positive, the number of sign bit copies is the
4268 same as that of the input. Finally, if the input has just one bit
4269 that might be nonzero, all the bits are copies of the sign bit. */
4270 num0 = cached_num_sign_bit_copies (XEXP (x, 0), mode,
4271 known_x, known_mode, known_ret);
4272 if (bitwidth > HOST_BITS_PER_WIDE_INT)
4273 return num0 > 1 ? num0 - 1 : 1;
4274
4275 nonzero = nonzero_bits (XEXP (x, 0), mode);
4276 if (nonzero == 1)
4277 return bitwidth;
4278
4279 if (num0 > 1
4280 && (((HOST_WIDE_INT) 1 << (bitwidth - 1)) & nonzero))
4281 num0--;
4282
4283 return num0;
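/* Worked examples (hypothetical, QImode, bitwidth 8): if
   nonzero == 1, the operand is 0 or 1, the negation is 0 or -1, and
   all 8 bits are sign-bit copies.  If instead num0 == 4 and the sign
   bit may be set, one copy can be lost: negating -16 (11110000, four
   copies) gives 16 (00010000, three copies), so num0 - 1 == 3 is
   returned.  */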
4284
4285 case IOR: case AND: case XOR:
4286 case SMIN: case SMAX: case UMIN: case UMAX:
4287 /* Logical operations will preserve the number of sign-bit copies.
4288 MIN and MAX operations always return one of the operands. */
4289 num0 = cached_num_sign_bit_copies (XEXP (x, 0), mode,
4290 known_x, known_mode, known_ret);
4291 num1 = cached_num_sign_bit_copies (XEXP (x, 1), mode,
4292 known_x, known_mode, known_ret);
4293 return MIN (num0, num1);
4294
4295 case PLUS: case MINUS:
4296 /* For addition and subtraction, we can have a 1-bit carry. However,
4297 if we are subtracting 1 from a positive number, there will not
4298 be such a carry. Furthermore, if the positive number is known to
4299 be 0 or 1, we know the result is either -1 or 0. */
4300
4301 if (code == PLUS && XEXP (x, 1) == constm1_rtx
4302 && bitwidth <= HOST_BITS_PER_WIDE_INT)
4303 {
4304 nonzero = nonzero_bits (XEXP (x, 0), mode);
4305 if ((((HOST_WIDE_INT) 1 << (bitwidth - 1)) & nonzero) == 0)
4306 return (nonzero == 1 || nonzero == 0 ? bitwidth
4307 : bitwidth - floor_log2 (nonzero) - 1);
4308 }
4309
4310 num0 = cached_num_sign_bit_copies (XEXP (x, 0), mode,
4311 known_x, known_mode, known_ret);
4312 num1 = cached_num_sign_bit_copies (XEXP (x, 1), mode,
4313 known_x, known_mode, known_ret);
4314 result = MAX (1, MIN (num0, num1) - 1);
4315
4316#ifdef POINTERS_EXTEND_UNSIGNED
4317 /* If pointers extend signed and this is an addition or subtraction
4318 to a pointer in Pmode, all the bits above ptr_mode are known to be
4319 sign bit copies. */
4320 if (! POINTERS_EXTEND_UNSIGNED && GET_MODE (x) == Pmode
4321 && (code == PLUS || code == MINUS)
 4322 && REG_P (XEXP (x, 0)) && REG_POINTER (XEXP (x, 0)))
4323 result = MAX ((int) (GET_MODE_BITSIZE (Pmode)
4324 - GET_MODE_BITSIZE (ptr_mode) + 1),
4325 result);
4326#endif
4327 return result;
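/* Worked example (hypothetical): in QImode with num0 == 5 and
   num1 == 7, the result is MIN (5, 7) - 1 == 4, since a carry out of
   the low-order bits can consume one more copy.  The special case
   above covers (plus x -1) with x known to be 0 or 1: the sum is
   then -1 or 0, so all bitwidth bits are sign-bit copies.  */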
4328
4329 case MULT:
4330 /* The number of bits of the product is the sum of the number of
 4331 bits of both terms. However, unless one of the terms is known
4332 to be positive, we must allow for an additional bit since negating
4333 a negative number can remove one sign bit copy. */
4334
4335 num0 = cached_num_sign_bit_copies (XEXP (x, 0), mode,
4336 known_x, known_mode, known_ret);
4337 num1 = cached_num_sign_bit_copies (XEXP (x, 1), mode,
4338 known_x, known_mode, known_ret);
4339
4340 result = bitwidth - (bitwidth - num0) - (bitwidth - num1);
4341 if (result > 0
4342 && (bitwidth > HOST_BITS_PER_WIDE_INT
4343 || (((nonzero_bits (XEXP (x, 0), mode)
4344 & ((HOST_WIDE_INT) 1 << (bitwidth - 1))) != 0)
4345 && ((nonzero_bits (XEXP (x, 1), mode)
4346 & ((HOST_WIDE_INT) 1 << (bitwidth - 1))) != 0))))
4347 result--;
4348
4349 return MAX (1, result);
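/* Worked example (hypothetical): in QImode (bitwidth 8) with
   num0 == 6 and num1 == 5, the product needs at most
   (8 - 6) + (8 - 5) == 5 value bits, so
   result == 8 - (8 - 6) - (8 - 5) == 3.  If both sign bits may be
   set, negating a negative operand can cost one more copy, leaving 2.
   The MAX (1, result) guards the degenerate case where the estimate
   drops to zero or below.  */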
4350
4351 case UDIV:
4352 /* The result must be <= the first operand. If the first operand
4353 has the high bit set, we know nothing about the number of sign
4354 bit copies. */
4355 if (bitwidth > HOST_BITS_PER_WIDE_INT)
4356 return 1;
4357 else if ((nonzero_bits (XEXP (x, 0), mode)
4358 & ((HOST_WIDE_INT) 1 << (bitwidth - 1))) != 0)
4359 return 1;
4360 else
4361 return cached_num_sign_bit_copies (XEXP (x, 0), mode,
4362 known_x, known_mode, known_ret);
4363
4364 case UMOD:
4365 /* The result must be <= the second operand. */
4366 return cached_num_sign_bit_copies (XEXP (x, 1), mode,
4367 known_x, known_mode, known_ret);
4368
4369 case DIV:
4370 /* Similar to unsigned division, except that we have to worry about
4371 the case where the divisor is negative, in which case we have
4372 to add 1. */
4373 result = cached_num_sign_bit_copies (XEXP (x, 0), mode,
4374 known_x, known_mode, known_ret);
4375 if (result > 1
4376 && (bitwidth > HOST_BITS_PER_WIDE_INT
4377 || (nonzero_bits (XEXP (x, 1), mode)
4378 & ((HOST_WIDE_INT) 1 << (bitwidth - 1))) != 0))
4379 result--;
4380
4381 return result;
4382
4383 case MOD:
4384 result = cached_num_sign_bit_copies (XEXP (x, 1), mode,
4385 known_x, known_mode, known_ret);
4386 if (result > 1
4387 && (bitwidth > HOST_BITS_PER_WIDE_INT
4388 || (nonzero_bits (XEXP (x, 1), mode)
4389 & ((HOST_WIDE_INT) 1 << (bitwidth - 1))) != 0))
4390 result--;
4391
4392 return result;
4393
4394 case ASHIFTRT:
4395 /* Shifts by a constant add to the number of bits equal to the
4396 sign bit. */
4397 num0 = cached_num_sign_bit_copies (XEXP (x, 0), mode,
4398 known_x, known_mode, known_ret);
4399 if (GET_CODE (XEXP (x, 1)) == CONST_INT
4400 && INTVAL (XEXP (x, 1)) > 0)
4401 num0 = MIN ((int) bitwidth, num0 + INTVAL (XEXP (x, 1)));
4402
4403 return num0;
4404
4405 case ASHIFT:
4406 /* Left shifts destroy copies. */
4407 if (GET_CODE (XEXP (x, 1)) != CONST_INT
4408 || INTVAL (XEXP (x, 1)) < 0
4409 || INTVAL (XEXP (x, 1)) >= (int) bitwidth)
4410 return 1;
4411
4412 num0 = cached_num_sign_bit_copies (XEXP (x, 0), mode,
4413 known_x, known_mode, known_ret);
4414 return MAX (1, num0 - INTVAL (XEXP (x, 1)));
4415
4416 case IF_THEN_ELSE:
4417 num0 = cached_num_sign_bit_copies (XEXP (x, 1), mode,
4418 known_x, known_mode, known_ret);
4419 num1 = cached_num_sign_bit_copies (XEXP (x, 2), mode,
4420 known_x, known_mode, known_ret);
4421 return MIN (num0, num1);
4422
4423 case EQ: case NE: case GE: case GT: case LE: case LT:
4424 case UNEQ: case LTGT: case UNGE: case UNGT: case UNLE: case UNLT:
4425 case GEU: case GTU: case LEU: case LTU:
4426 case UNORDERED: case ORDERED:
 4427 /* If STORE_FLAG_VALUE is negative, take its 1's complement and remask.
4428 Then see how many zero bits we have. */
4429 nonzero = STORE_FLAG_VALUE;
4430 if (bitwidth <= HOST_BITS_PER_WIDE_INT
4431 && (nonzero & ((HOST_WIDE_INT) 1 << (bitwidth - 1))) != 0)
4432 nonzero = (~nonzero) & GET_MODE_MASK (mode);
4433
4434 return (nonzero == 0 ? bitwidth : bitwidth - floor_log2 (nonzero) - 1);
4435
4436 default:
4437 break;
4438 }
4439
4440 /* If we haven't been able to figure it out by one of the above rules,
4441 see if some of the high-order bits are known to be zero. If so,
4442 count those bits and return one less than that amount. If we can't
4443 safely compute the mask for this mode, always return BITWIDTH. */
4444
4445 bitwidth = GET_MODE_BITSIZE (mode);
4446 if (bitwidth > HOST_BITS_PER_WIDE_INT)
4447 return 1;
4448
4449 nonzero = nonzero_bits (x, mode);
4450 return nonzero & ((HOST_WIDE_INT) 1 << (bitwidth - 1))
4451 ? 1 : bitwidth - floor_log2 (nonzero) - 1;
4452}
4453
4454/* Calculate the rtx_cost of a single instruction. A return value of
4455 zero indicates an instruction pattern without a known cost. */
4456
4457int
4458insn_rtx_cost (rtx pat)
4459{
4460 int i, cost;
4461 rtx set;
4462
4463 /* Extract the single set rtx from the instruction pattern.
4464 We can't use single_set since we only have the pattern. */
4465 if (GET_CODE (pat) == SET)
4466 set = pat;
4467 else if (GET_CODE (pat) == PARALLEL)
4468 {
4469 set = NULL_RTX;
4470 for (i = 0; i < XVECLEN (pat, 0); i++)
4471 {
4472 rtx x = XVECEXP (pat, 0, i);
4473 if (GET_CODE (x) == SET)
4474 {
4475 if (set)
4476 return 0;
4477 set = x;
4478 }
4479 }
4480 if (!set)
4481 return 0;
4482 }
4483 else
4484 return 0;
4485
4486 cost = rtx_cost (SET_SRC (set), SET);
4487 return cost > 0 ? cost : COSTS_N_INSNS (1);
4488}
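/* Sketch of a typical use (hypothetical caller, for exposition):

     int cost = insn_rtx_cost (PATTERN (insn));
     if (cost == 0 || cost > COSTS_N_INSNS (max_cost_insns))
       ...reject the transformation...

   where max_cost_insns is a made-up budget.  A found SET always
   yields a positive cost (falling back to COSTS_N_INSNS (1) when
   rtx_cost reports none), so zero reliably means "no single SET
   here", never "free".  */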
4489
4490/* Given an insn INSN and condition COND, return the condition in a
4491 canonical form to simplify testing by callers. Specifically:
4492
4493 (1) The code will always be a comparison operation (EQ, NE, GT, etc.).
4494 (2) Both operands will be machine operands; (cc0) will have been replaced.
4495 (3) If an operand is a constant, it will be the second operand.
4496 (4) (LE x const) will be replaced with (LT x <const+1>) and similarly
4497 for GE, GEU, and LEU.
4498
4499 If the condition cannot be understood, or is an inequality floating-point
4500 comparison which needs to be reversed, 0 will be returned.
4501
 4502 If REVERSE is nonzero, then reverse the condition prior to canonicalizing it.
4503
4504 If EARLIEST is nonzero, it is a pointer to a place where the earliest
4505 insn used in locating the condition was found. If a replacement test
4506 of the condition is desired, it should be placed in front of that
4507 insn and we will be sure that the inputs are still valid.
4508
4509 If WANT_REG is nonzero, we wish the condition to be relative to that
4510 register, if possible. Therefore, do not canonicalize the condition
4511 further. If ALLOW_CC_MODE is nonzero, allow the condition returned
4512 to be a compare to a CC mode register.
4513
4514 If VALID_AT_INSN_P, the condition must be valid at both *EARLIEST
4515 and at INSN. */
4516
4517rtx
4518canonicalize_condition (rtx insn, rtx cond, int reverse, rtx *earliest,
4519 rtx want_reg, int allow_cc_mode, int valid_at_insn_p)
4520{
4521 enum rtx_code code;
4522 rtx prev = insn;
4523 rtx set;
4524 rtx tem;
4525 rtx op0, op1;
4526 int reverse_code = 0;
4527 enum machine_mode mode;
 4528 basic_block bb = BLOCK_FOR_INSN (insn);
4529
4530 code = GET_CODE (cond);
4531 mode = GET_MODE (cond);
4532 op0 = XEXP (cond, 0);
4533 op1 = XEXP (cond, 1);
4534
4535 if (reverse)
4536 code = reversed_comparison_code (cond, insn);
4537 if (code == UNKNOWN)
4538 return 0;
4539
4540 if (earliest)
4541 *earliest = insn;
4542
4543 /* If we are comparing a register with zero, see if the register is set
4544 in the previous insn to a COMPARE or a comparison operation. Perform
4545 the same tests as a function of STORE_FLAG_VALUE as find_comparison_args
4546 in cse.c */
4547
4548 while ((GET_RTX_CLASS (code) == RTX_COMPARE
4549 || GET_RTX_CLASS (code) == RTX_COMM_COMPARE)
4550 && op1 == CONST0_RTX (GET_MODE (op0))
4551 && op0 != want_reg)
4552 {
4553 /* Set nonzero when we find something of interest. */
4554 rtx x = 0;
4555
4556#ifdef HAVE_cc0
4557 /* If comparison with cc0, import actual comparison from compare
4558 insn. */
4559 if (op0 == cc0_rtx)
4560 {
4561 if ((prev = prev_nonnote_insn (prev)) == 0
4562 || !NONJUMP_INSN_P (prev)
4563 || (set = single_set (prev)) == 0
4564 || SET_DEST (set) != cc0_rtx)
4565 return 0;
4566
4567 op0 = SET_SRC (set);
4568 op1 = CONST0_RTX (GET_MODE (op0));
4569 if (earliest)
4570 *earliest = prev;
4571 }
4572#endif
4573
4574 /* If this is a COMPARE, pick up the two things being compared. */
4575 if (GET_CODE (op0) == COMPARE)
4576 {
4577 op1 = XEXP (op0, 1);
4578 op0 = XEXP (op0, 0);
4579 continue;
4580 }
4581 else if (!REG_P (op0))
4582 break;
4583
4584 /* Go back to the previous insn. Stop if it is not an INSN. We also
4585 stop if it isn't a single set or if it has a REG_INC note because
4586 we don't want to bother dealing with it. */
4587
4588 if ((prev = prev_nonnote_insn (prev)) == 0
4589 || !NONJUMP_INSN_P (prev)
4590 || FIND_REG_INC_NOTE (prev, NULL_RTX)
4591 /* In cfglayout mode, there do not have to be labels at the
4592 beginning of a block, or jumps at the end, so the previous
 4593 conditions would not stop us when we reach the bb boundary. */
4594 || BLOCK_FOR_INSN (prev) != bb)
4595 break;
4596
4597 set = set_of (op0, prev);
4598
4599 if (set
4600 && (GET_CODE (set) != SET
4601 || !rtx_equal_p (SET_DEST (set), op0)))
4602 break;
4603
4604 /* If this is setting OP0, get what it sets it to if it looks
4605 relevant. */
4606 if (set)
4607 {
4608 enum machine_mode inner_mode = GET_MODE (SET_DEST (set));
4609#ifdef FLOAT_STORE_FLAG_VALUE
4610 REAL_VALUE_TYPE fsfv;
4611#endif
4612
4613 /* ??? We may not combine comparisons done in a CCmode with
4614 comparisons not done in a CCmode. This is to aid targets
4615 like Alpha that have an IEEE compliant EQ instruction, and
4616 a non-IEEE compliant BEQ instruction. The use of CCmode is
4617 actually artificial, simply to prevent the combination, but
4618 should not affect other platforms.
4619
4620 However, we must allow VOIDmode comparisons to match either
4621 CCmode or non-CCmode comparison, because some ports have
4622 modeless comparisons inside branch patterns.
4623
4624 ??? This mode check should perhaps look more like the mode check
4625 in simplify_comparison in combine. */
4626
4627 if ((GET_CODE (SET_SRC (set)) == COMPARE
4628 || (((code == NE
4629 || (code == LT
4630 && GET_MODE_CLASS (inner_mode) == MODE_INT
4631 && (GET_MODE_BITSIZE (inner_mode)
4632 <= HOST_BITS_PER_WIDE_INT)
4633 && (STORE_FLAG_VALUE
4634 & ((HOST_WIDE_INT) 1
4635 << (GET_MODE_BITSIZE (inner_mode) - 1))))
4636#ifdef FLOAT_STORE_FLAG_VALUE
4637 || (code == LT
 4638 && SCALAR_FLOAT_MODE_P (inner_mode)
4639 && (fsfv = FLOAT_STORE_FLAG_VALUE (inner_mode),
4640 REAL_VALUE_NEGATIVE (fsfv)))
4641#endif
4642 ))
4643 && COMPARISON_P (SET_SRC (set))))
4644 && (((GET_MODE_CLASS (mode) == MODE_CC)
4645 == (GET_MODE_CLASS (inner_mode) == MODE_CC))
4646 || mode == VOIDmode || inner_mode == VOIDmode))
4647 x = SET_SRC (set);
4648 else if (((code == EQ
4649 || (code == GE
4650 && (GET_MODE_BITSIZE (inner_mode)
4651 <= HOST_BITS_PER_WIDE_INT)
4652 && GET_MODE_CLASS (inner_mode) == MODE_INT
4653 && (STORE_FLAG_VALUE
4654 & ((HOST_WIDE_INT) 1
4655 << (GET_MODE_BITSIZE (inner_mode) - 1))))
4656#ifdef FLOAT_STORE_FLAG_VALUE
4657 || (code == GE
 4658 && SCALAR_FLOAT_MODE_P (inner_mode)
4659 && (fsfv = FLOAT_STORE_FLAG_VALUE (inner_mode),
4660 REAL_VALUE_NEGATIVE (fsfv)))
4661#endif
4662 ))
4663 && COMPARISON_P (SET_SRC (set))
4664 && (((GET_MODE_CLASS (mode) == MODE_CC)
4665 == (GET_MODE_CLASS (inner_mode) == MODE_CC))
4666 || mode == VOIDmode || inner_mode == VOIDmode))
4667
4668 {
4669 reverse_code = 1;
4670 x = SET_SRC (set);
4671 }
4672 else
4673 break;
4674 }
4675
4676 else if (reg_set_p (op0, prev))
4677 /* If this sets OP0, but not directly, we have to give up. */
4678 break;
4679
4680 if (x)
4681 {
4682 /* If the caller is expecting the condition to be valid at INSN,
4683 make sure X doesn't change before INSN. */
4684 if (valid_at_insn_p)
4685 if (modified_in_p (x, prev) || modified_between_p (x, prev, insn))
4686 break;
4687 if (COMPARISON_P (x))
4688 code = GET_CODE (x);
4689 if (reverse_code)
4690 {
4691 code = reversed_comparison_code (x, prev);
4692 if (code == UNKNOWN)
4693 return 0;
4694 reverse_code = 0;
4695 }
4696
4697 op0 = XEXP (x, 0), op1 = XEXP (x, 1);
4698 if (earliest)
4699 *earliest = prev;
4700 }
4701 }
4702
4703 /* If constant is first, put it last. */
4704 if (CONSTANT_P (op0))
4705 code = swap_condition (code), tem = op0, op0 = op1, op1 = tem;
4706
4707 /* If OP0 is the result of a comparison, we weren't able to find what
4708 was really being compared, so fail. */
4709 if (!allow_cc_mode
4710 && GET_MODE_CLASS (GET_MODE (op0)) == MODE_CC)
4711 return 0;
4712
4713 /* Canonicalize any ordered comparison with integers involving equality
4714 if we can do computations in the relevant mode and we do not
4715 overflow. */
4716
4717 if (GET_MODE_CLASS (GET_MODE (op0)) != MODE_CC
4718 && GET_CODE (op1) == CONST_INT
4719 && GET_MODE (op0) != VOIDmode
4720 && GET_MODE_BITSIZE (GET_MODE (op0)) <= HOST_BITS_PER_WIDE_INT)
4721 {
4722 HOST_WIDE_INT const_val = INTVAL (op1);
4723 unsigned HOST_WIDE_INT uconst_val = const_val;
4724 unsigned HOST_WIDE_INT max_val
4725 = (unsigned HOST_WIDE_INT) GET_MODE_MASK (GET_MODE (op0));
4726
4727 switch (code)
4728 {
4729 case LE:
4730 if ((unsigned HOST_WIDE_INT) const_val != max_val >> 1)
4731 code = LT, op1 = gen_int_mode (const_val + 1, GET_MODE (op0));
4732 break;
4733
4734 /* When cross-compiling, const_val might be sign-extended from
 4735 BITS_PER_WORD to HOST_BITS_PER_WIDE_INT. */
4736 case GE:
4737 if ((HOST_WIDE_INT) (const_val & max_val)
4738 != (((HOST_WIDE_INT) 1
4739 << (GET_MODE_BITSIZE (GET_MODE (op0)) - 1))))
4740 code = GT, op1 = gen_int_mode (const_val - 1, GET_MODE (op0));
4741 break;
4742
4743 case LEU:
4744 if (uconst_val < max_val)
4745 code = LTU, op1 = gen_int_mode (uconst_val + 1, GET_MODE (op0));
4746 break;
4747
4748 case GEU:
4749 if (uconst_val != 0)
4750 code = GTU, op1 = gen_int_mode (uconst_val - 1, GET_MODE (op0));
4751 break;
4752
4753 default:
4754 break;
4755 }
4756 }
4757
4758 /* Never return CC0; return zero instead. */
4759 if (CC0_P (op0))
4760 return 0;
4761
4762 return gen_rtx_fmt_ee (code, VOIDmode, op0, op1);
4763}
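/* Worked examples of rule (4) above (hypothetical operands):

     (le (reg:SI r) (const_int 4))   becomes  (lt (reg:SI r) (const_int 5))
     (geu (reg:SI r) (const_int 1))  becomes  (gtu (reg:SI r) (const_int 0))

   The guards in the switch skip rewrites that would overflow: e.g.
   (le x 0x7fffffff) in SImode is left alone, since adding 1 to the
   constant would wrap.  */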
4764
4765/* Given a jump insn JUMP, return the condition that will cause it to branch
4766 to its JUMP_LABEL. If the condition cannot be understood, or is an
4767 inequality floating-point comparison which needs to be reversed, 0 will
4768 be returned.
4769
4770 If EARLIEST is nonzero, it is a pointer to a place where the earliest
4771 insn used in locating the condition was found. If a replacement test
4772 of the condition is desired, it should be placed in front of that
4773 insn and we will be sure that the inputs are still valid. If EARLIEST
4774 is null, the returned condition will be valid at INSN.
4775
4776 If ALLOW_CC_MODE is nonzero, allow the condition returned to be a
4777 compare CC mode register.
4778
4779 VALID_AT_INSN_P is the same as for canonicalize_condition. */
4780
4781rtx
4782get_condition (rtx jump, rtx *earliest, int allow_cc_mode, int valid_at_insn_p)
4783{
4784 rtx cond;
4785 int reverse;
4786 rtx set;
4787
4788 /* If this is not a standard conditional jump, we can't parse it. */
4789 if (!JUMP_P (jump)
4790 || ! any_condjump_p (jump))
4791 return 0;
4792 set = pc_set (jump);
4793
4794 cond = XEXP (SET_SRC (set), 0);
4795
4796 /* If this branches to JUMP_LABEL when the condition is false, reverse
4797 the condition. */
4798 reverse
4799 = GET_CODE (XEXP (SET_SRC (set), 2)) == LABEL_REF
4800 && XEXP (XEXP (SET_SRC (set), 2), 0) == JUMP_LABEL (jump);
4801
4802 return canonicalize_condition (jump, cond, reverse, earliest, NULL_RTX,
4803 allow_cc_mode, valid_at_insn_p);
4804}
4805
4806/* Initialize the table NUM_SIGN_BIT_COPIES_IN_REP based on
4807 TARGET_MODE_REP_EXTENDED.
4808
4809 Note that we assume that the property of
4810 TARGET_MODE_REP_EXTENDED(B, C) is sticky to the integral modes
4811 narrower than mode B. I.e., if A is a mode narrower than B then in
4812 order to be able to operate on it in mode B, mode A needs to
4813 satisfy the requirements set by the representation of mode B. */
4814
4815static void
4816init_num_sign_bit_copies_in_rep (void)
4817{
4818 enum machine_mode mode, in_mode;
4819
4820 for (in_mode = GET_CLASS_NARROWEST_MODE (MODE_INT); in_mode != VOIDmode;
4821 in_mode = GET_MODE_WIDER_MODE (mode))
4822 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != in_mode;
4823 mode = GET_MODE_WIDER_MODE (mode))
4824 {
4825 enum machine_mode i;
4826
4827 /* Currently, it is assumed that TARGET_MODE_REP_EXTENDED
4828 extends to the next widest mode. */
4829 gcc_assert (targetm.mode_rep_extended (mode, in_mode) == UNKNOWN
4830 || GET_MODE_WIDER_MODE (mode) == in_mode);
4831
4832 /* We are in in_mode. Count how many bits outside of mode
4833 have to be copies of the sign-bit. */
4834 for (i = mode; i != in_mode; i = GET_MODE_WIDER_MODE (i))
4835 {
4836 enum machine_mode wider = GET_MODE_WIDER_MODE (i);
4837
4838 if (targetm.mode_rep_extended (i, wider) == SIGN_EXTEND
4839 /* We can only check sign-bit copies starting from the
4840 top-bit. In order to be able to check the bits we
4841 have already seen we pretend that subsequent bits
4842 have to be sign-bit copies too. */
4843 || num_sign_bit_copies_in_rep [in_mode][mode])
4844 num_sign_bit_copies_in_rep [in_mode][mode]
4845 += GET_MODE_BITSIZE (wider) - GET_MODE_BITSIZE (i);
4846 }
4847 }
4848}
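/* Illustrative entry (hypothetical target): on a 64-bit machine whose
   mode_rep_extended (SImode, DImode) hook returns SIGN_EXTEND, i.e.
   SImode values always live in registers sign-extended to DImode, the
   loop above records

     num_sign_bit_copies_in_rep[DImode][SImode] == 64 - 32 == 32

   meaning the 32 bits above SImode are guaranteed sign-bit copies.  */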
4849
4850/* Suppose that truncation from the machine mode of X to MODE is not a
4851 no-op. See if there is anything special about X so that we can
4852 assume it already contains a truncated value of MODE. */
4853
4854bool
4855truncated_to_mode (enum machine_mode mode, rtx x)
4856{
4857 /* This register has already been used in MODE without explicit
4858 truncation. */
4859 if (REG_P (x) && rtl_hooks.reg_truncated_to_mode (mode, x))
4860 return true;
4861
4862 /* See if we already satisfy the requirements of MODE. If yes we
4863 can just switch to MODE. */
4864 if (num_sign_bit_copies_in_rep[GET_MODE (x)][mode]
4865 && (num_sign_bit_copies (x, GET_MODE (x))
4866 >= num_sign_bit_copies_in_rep[GET_MODE (x)][mode] + 1))
4867 return true;
 4868
4869 return false;
4870}
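/* Worked example (hypothetical, continuing the table entry sketched
   above): if x is a DImode value with num_sign_bit_copies (x, DImode)
   == 33 and num_sign_bit_copies_in_rep[DImode][SImode] == 32, then
   33 >= 32 + 1, so x already looks like a sign-extended SImode value
   and truncating it to SImode would be a no-op.  */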
4871\f
4872/* Initialize non_rtx_starting_operands, which is used to speed up
4873 for_each_rtx. */
4874void
4875init_rtlanal (void)
4876{
4877 int i;
4878 for (i = 0; i < NUM_RTX_CODE; i++)
4879 {
4880 const char *format = GET_RTX_FORMAT (i);
4881 const char *first = strpbrk (format, "eEV");
4882 non_rtx_starting_operands[i] = first ? first - format : -1;
4883 }
4884
4885 init_num_sign_bit_copies_in_rep ();
 4886 }
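/* For exposition: GET_RTX_FORMAT (SET) is "ee", so
   non_rtx_starting_operands[SET] == 0, while GET_RTX_FORMAT
   (CONST_INT) is "w", which has no rtx operands and yields -1.
   for_each_rtx uses the table to jump directly to the first operand
   that can contain a subexpression.  */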
4887\f
4888/* Check whether this is a constant pool constant. */
4889bool
4890constant_pool_constant_p (rtx x)
4891{
4892 x = avoid_constant_pool_reference (x);
4893 return GET_CODE (x) == CONST_DOUBLE;
4894}
4895