/* Analyze RTL for GNU compiler.
   Copyright (C) 1987, 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998,
   1999, 2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007 Free Software
   Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 2, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING.  If not, write to the Free
Software Foundation, 51 Franklin Street, Fifth Floor, Boston, MA
02110-1301, USA.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "toplev.h"
#include "rtl.h"
#include "hard-reg-set.h"
#include "insn-config.h"
#include "recog.h"
#include "target.h"
#include "output.h"
#include "tm_p.h"
#include "flags.h"
#include "real.h"
#include "regs.h"
#include "function.h"
#include "df.h"
#include "tree.h"

/* Information about a subreg of a hard register.  */
struct subreg_info
{
  /* Offset of first hard register involved in the subreg.  */
  int offset;
  /* Number of hard registers involved in the subreg.  */
  int nregs;
  /* Whether this subreg can be represented as a hard reg with the new
     mode.  */
  bool representable_p;
};

/* Forward declarations */
static void set_of_1 (rtx, rtx, void *);
static bool covers_regno_p (rtx, unsigned int);
static bool covers_regno_no_parallel_p (rtx, unsigned int);
static int rtx_referenced_p_1 (rtx *, void *);
static int computed_jump_p_1 (rtx);
static void parms_set (rtx, rtx, void *);
static void subreg_get_info (unsigned int, enum machine_mode,
                             unsigned int, enum machine_mode,
                             struct subreg_info *);

static unsigned HOST_WIDE_INT cached_nonzero_bits (rtx, enum machine_mode,
                                                   rtx, enum machine_mode,
                                                   unsigned HOST_WIDE_INT);
static unsigned HOST_WIDE_INT nonzero_bits1 (rtx, enum machine_mode, rtx,
                                             enum machine_mode,
                                             unsigned HOST_WIDE_INT);
static unsigned int cached_num_sign_bit_copies (rtx, enum machine_mode, rtx,
                                                enum machine_mode,
                                                unsigned int);
static unsigned int num_sign_bit_copies1 (rtx, enum machine_mode, rtx,
                                          enum machine_mode, unsigned int);

/* Offset of the first 'e', 'E' or 'V' operand for each rtx code, or
   -1 if a code has no such operand.  */
static int non_rtx_starting_operands[NUM_RTX_CODE];

/* Bit flags that specify the machine subtype we are compiling for.
   Bits are tested using macros TARGET_... defined in the tm.h file
   and set by `-m...' switches.  Must be defined in rtlanal.c.  */

int target_flags;

/* Truncation narrows the mode from SOURCE mode to DESTINATION mode.
   If TARGET_MODE_REP_EXTENDED (DESTINATION, DESTINATION_REP) is
   SIGN_EXTEND then while narrowing we also have to enforce the
   representation and sign-extend the value to mode DESTINATION_REP.

   If the value is already sign-extended to DESTINATION_REP mode we
   can just switch to DESTINATION mode on it.  For each pair of
   integral modes SOURCE and DESTINATION, when truncating from SOURCE
   to DESTINATION, NUM_SIGN_BIT_COPIES_IN_REP[SOURCE][DESTINATION]
   contains the number of high-order bits in SOURCE that have to be
   copies of the sign-bit so that we can do this mode-switch to
   DESTINATION.  */

static unsigned int
num_sign_bit_copies_in_rep[MAX_MODE_INT + 1][MAX_MODE_INT + 1];
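
/* Worked example (added for illustration; not from the original
   sources): on a hypothetical target where
   TARGET_MODE_REP_EXTENDED (QImode, SImode) is SIGN_EXTEND, truncating
   a 32-bit SImode value to 8-bit QImode while keeping the value
   sign-extended in the SImode representation requires the high
   32 - 8 + 1 = 25 bits of the source to be copies of the sign bit, so
   num_sign_bit_copies_in_rep[SImode][QImode] would be 25.  */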

/* Return 1 if the value of X is unstable
   (would be different at a different point in the program).
   The frame pointer, arg pointer, etc. are considered stable
   (within one function) and so is anything marked `unchanging'.  */

int
rtx_unstable_p (rtx x)
{
  RTX_CODE code = GET_CODE (x);
  int i;
  const char *fmt;

  switch (code)
    {
    case MEM:
      return !MEM_READONLY_P (x) || rtx_unstable_p (XEXP (x, 0));

    case CONST:
    case CONST_INT:
    case CONST_DOUBLE:
    case CONST_VECTOR:
    case SYMBOL_REF:
    case LABEL_REF:
      return 0;

    case REG:
      /* As in rtx_varies_p, we have to use the actual rtx, not reg number.  */
      if (x == frame_pointer_rtx || x == hard_frame_pointer_rtx
          /* The arg pointer varies if it is not a fixed register.  */
          || (x == arg_pointer_rtx && fixed_regs[ARG_POINTER_REGNUM]))
        return 0;
#ifndef PIC_OFFSET_TABLE_REG_CALL_CLOBBERED
      /* ??? When call-clobbered, the value is stable modulo the restore
         that must happen after a call.  This currently screws up local-alloc
         into believing that the restore is not needed.  */
      if (x == pic_offset_table_rtx)
        return 0;
#endif
      return 1;

    case ASM_OPERANDS:
      if (MEM_VOLATILE_P (x))
        return 1;

      /* Fall through.  */

    default:
      break;
    }

  fmt = GET_RTX_FORMAT (code);
  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    if (fmt[i] == 'e')
      {
        if (rtx_unstable_p (XEXP (x, i)))
          return 1;
      }
    else if (fmt[i] == 'E')
      {
        int j;
        for (j = 0; j < XVECLEN (x, i); j++)
          if (rtx_unstable_p (XVECEXP (x, i, j)))
            return 1;
      }

  return 0;
}

/* Return 1 if X has a value that can vary even between two
   executions of the program.  0 means X can be compared reliably
   against certain constants or near-constants.
   FOR_ALIAS is nonzero if we are called from alias analysis; if it is
   zero, we are slightly more conservative.
   The frame pointer and the arg pointer are considered constant.  */

int
rtx_varies_p (rtx x, int for_alias)
{
  RTX_CODE code;
  int i;
  const char *fmt;

  if (!x)
    return 0;

  code = GET_CODE (x);
  switch (code)
    {
    case MEM:
      return !MEM_READONLY_P (x) || rtx_varies_p (XEXP (x, 0), for_alias);

    case CONST:
    case CONST_INT:
    case CONST_DOUBLE:
    case CONST_VECTOR:
    case SYMBOL_REF:
    case LABEL_REF:
      return 0;

    case REG:
      /* Note that we have to test for the actual rtx used for the frame
         and arg pointers and not just the register number in case we have
         eliminated the frame and/or arg pointer and are using it
         for pseudos.  */
      if (x == frame_pointer_rtx || x == hard_frame_pointer_rtx
          /* The arg pointer varies if it is not a fixed register.  */
          || (x == arg_pointer_rtx && fixed_regs[ARG_POINTER_REGNUM]))
        return 0;
      if (x == pic_offset_table_rtx
#ifdef PIC_OFFSET_TABLE_REG_CALL_CLOBBERED
          /* ??? When call-clobbered, the value is stable modulo the restore
             that must happen after a call.  This currently screws up
             local-alloc into believing that the restore is not needed, so we
             must return 0 only if we are called from alias analysis.  */
          && for_alias
#endif
          )
        return 0;
      return 1;

    case LO_SUM:
      /* The operand 0 of a LO_SUM is considered constant
         (in fact it is related specifically to operand 1)
         during alias analysis.  */
      return (! for_alias && rtx_varies_p (XEXP (x, 0), for_alias))
             || rtx_varies_p (XEXP (x, 1), for_alias);

    case ASM_OPERANDS:
      if (MEM_VOLATILE_P (x))
        return 1;

      /* Fall through.  */

    default:
      break;
    }

  fmt = GET_RTX_FORMAT (code);
  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    if (fmt[i] == 'e')
      {
        if (rtx_varies_p (XEXP (x, i), for_alias))
          return 1;
      }
    else if (fmt[i] == 'E')
      {
        int j;
        for (j = 0; j < XVECLEN (x, i); j++)
          if (rtx_varies_p (XVECEXP (x, i, j), for_alias))
            return 1;
      }

  return 0;
}

/* Return nonzero if the use of X as an address in a MEM can cause a trap.
   MODE is the mode of the MEM (not that of X) and UNALIGNED_MEMS controls
   whether nonzero is returned for unaligned memory accesses on strict
   alignment machines.  */

static int
rtx_addr_can_trap_p_1 (rtx x, enum machine_mode mode, bool unaligned_mems)
{
  enum rtx_code code = GET_CODE (x);

  switch (code)
    {
    case SYMBOL_REF:
      return SYMBOL_REF_WEAK (x);

    case LABEL_REF:
      return 0;

    case REG:
      /* As in rtx_varies_p, we have to use the actual rtx, not reg number.  */
      if (x == frame_pointer_rtx || x == hard_frame_pointer_rtx
          || x == stack_pointer_rtx
          /* The arg pointer varies if it is not a fixed register.  */
          || (x == arg_pointer_rtx && fixed_regs[ARG_POINTER_REGNUM]))
        return 0;
      /* All of the virtual frame registers are stack references.  */
      if (REGNO (x) >= FIRST_VIRTUAL_REGISTER
          && REGNO (x) <= LAST_VIRTUAL_REGISTER)
        return 0;
      return 1;

    case CONST:
      return rtx_addr_can_trap_p_1 (XEXP (x, 0), mode, unaligned_mems);

    case PLUS:
      /* An address is assumed not to trap if:
         - it is an address that can't trap plus a constant integer,
           with the proper remainder modulo the mode size if we are
           considering unaligned memory references.  */
      if (!rtx_addr_can_trap_p_1 (XEXP (x, 0), mode, unaligned_mems)
          && GET_CODE (XEXP (x, 1)) == CONST_INT)
        {
          HOST_WIDE_INT offset;

          if (!STRICT_ALIGNMENT
              || !unaligned_mems
              || GET_MODE_SIZE (mode) == 0)
            return 0;

          offset = INTVAL (XEXP (x, 1));

#ifdef SPARC_STACK_BOUNDARY_HACK
          /* ??? The SPARC port may claim a STACK_BOUNDARY higher than
             the real alignment of %sp.  However, when it does this, the
             alignment of %sp+STACK_POINTER_OFFSET is STACK_BOUNDARY.  */
          if (SPARC_STACK_BOUNDARY_HACK
              && (XEXP (x, 0) == stack_pointer_rtx
                  || XEXP (x, 0) == hard_frame_pointer_rtx))
            offset -= STACK_POINTER_OFFSET;
#endif

          return offset % GET_MODE_SIZE (mode) != 0;
        }

      /* - or it is the pic register plus a constant.  */
      if (XEXP (x, 0) == pic_offset_table_rtx && CONSTANT_P (XEXP (x, 1)))
        return 0;

      return 1;

    case LO_SUM:
    case PRE_MODIFY:
      return rtx_addr_can_trap_p_1 (XEXP (x, 1), mode, unaligned_mems);

    case PRE_DEC:
    case PRE_INC:
    case POST_DEC:
    case POST_INC:
    case POST_MODIFY:
      return rtx_addr_can_trap_p_1 (XEXP (x, 0), mode, unaligned_mems);

    default:
      break;
    }

  /* If it isn't one of the cases above, it can cause a trap.  */
  return 1;
}

/* Return nonzero if the use of X as an address in a MEM can cause a trap.  */

int
rtx_addr_can_trap_p (rtx x)
{
  return rtx_addr_can_trap_p_1 (x, VOIDmode, false);
}
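
/* Added examples for illustration (not from the original comments):
   a SYMBOL_REF marked SYMBOL_REF_WEAK may resolve to address zero and
   so can trap, while (plus (reg sp) (const_int 8)) cannot.  With
   UNALIGNED_MEMS set on a STRICT_ALIGNMENT target, an SImode MEM at
   (plus (reg sp) (const_int 2)) is reported as trapping because the
   offset 2 is not a multiple of GET_MODE_SIZE (SImode).  */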

/* Return true if X is an address that is known to not be zero.  */

bool
nonzero_address_p (rtx x)
{
  enum rtx_code code = GET_CODE (x);

  switch (code)
    {
    case SYMBOL_REF:
      return !SYMBOL_REF_WEAK (x);

    case LABEL_REF:
      return true;

    case REG:
      /* As in rtx_varies_p, we have to use the actual rtx, not reg number.  */
      if (x == frame_pointer_rtx || x == hard_frame_pointer_rtx
          || x == stack_pointer_rtx
          || (x == arg_pointer_rtx && fixed_regs[ARG_POINTER_REGNUM]))
        return true;
      /* All of the virtual frame registers are stack references.  */
      if (REGNO (x) >= FIRST_VIRTUAL_REGISTER
          && REGNO (x) <= LAST_VIRTUAL_REGISTER)
        return true;
      return false;

    case CONST:
      return nonzero_address_p (XEXP (x, 0));

    case PLUS:
      if (GET_CODE (XEXP (x, 1)) == CONST_INT)
        return nonzero_address_p (XEXP (x, 0));
      /* Handle PIC references.  */
      else if (XEXP (x, 0) == pic_offset_table_rtx
               && CONSTANT_P (XEXP (x, 1)))
        return true;
      return false;

    case PRE_MODIFY:
      /* Similar to the above; allow positive offsets.  Further, since
         auto-inc is only allowed in memories, the register must be a
         pointer.  */
      if (GET_CODE (XEXP (x, 1)) == CONST_INT
          && INTVAL (XEXP (x, 1)) > 0)
        return true;
      return nonzero_address_p (XEXP (x, 0));

    case PRE_INC:
      /* Similarly.  Further, the offset is always positive.  */
      return true;

    case PRE_DEC:
    case POST_DEC:
    case POST_INC:
    case POST_MODIFY:
      return nonzero_address_p (XEXP (x, 0));

    case LO_SUM:
      return nonzero_address_p (XEXP (x, 1));

    default:
      break;
    }

  /* If it isn't one of the cases above, it might be zero.  */
  return false;
}

/* Return 1 if X refers to a memory location whose address
   cannot be compared reliably with constant addresses,
   or if X refers to a BLKmode memory object.
   FOR_ALIAS is nonzero if we are called from alias analysis; if it is
   zero, we are slightly more conservative.  */

int
rtx_addr_varies_p (rtx x, int for_alias)
{
  enum rtx_code code;
  int i;
  const char *fmt;

  if (x == 0)
    return 0;

  code = GET_CODE (x);
  if (code == MEM)
    return GET_MODE (x) == BLKmode || rtx_varies_p (XEXP (x, 0), for_alias);

  fmt = GET_RTX_FORMAT (code);
  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    if (fmt[i] == 'e')
      {
        if (rtx_addr_varies_p (XEXP (x, i), for_alias))
          return 1;
      }
    else if (fmt[i] == 'E')
      {
        int j;
        for (j = 0; j < XVECLEN (x, i); j++)
          if (rtx_addr_varies_p (XVECEXP (x, i, j), for_alias))
            return 1;
      }
  return 0;
}

/* Return the value of the integer term in X, if one is apparent;
   otherwise return 0.
   Only obvious integer terms are detected.
   This is used in cse.c with the `related_value' field.  */

HOST_WIDE_INT
get_integer_term (rtx x)
{
  if (GET_CODE (x) == CONST)
    x = XEXP (x, 0);

  if (GET_CODE (x) == MINUS
      && GET_CODE (XEXP (x, 1)) == CONST_INT)
    return - INTVAL (XEXP (x, 1));
  if (GET_CODE (x) == PLUS
      && GET_CODE (XEXP (x, 1)) == CONST_INT)
    return INTVAL (XEXP (x, 1));
  return 0;
}

/* If X is a constant, return the value sans apparent integer term;
   otherwise return 0.
   Only obvious integer terms are detected.  */

rtx
get_related_value (rtx x)
{
  if (GET_CODE (x) != CONST)
    return 0;
  x = XEXP (x, 0);
  if (GET_CODE (x) == PLUS
      && GET_CODE (XEXP (x, 1)) == CONST_INT)
    return XEXP (x, 0);
  else if (GET_CODE (x) == MINUS
           && GET_CODE (XEXP (x, 1)) == CONST_INT)
    return XEXP (x, 0);
  return 0;
}
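
/* Added example (not from the original comments): given
   X = (const (plus (symbol_ref "foo") (const_int 8))),
   get_integer_term returns 8 and get_related_value returns
   (symbol_ref "foo"); for (const (minus (symbol_ref "foo")
   (const_int 8))) the term is -8 while the related value is the
   same SYMBOL_REF.  */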

/* Return true if SYMBOL is a SYMBOL_REF and OFFSET + SYMBOL points
   to somewhere in the same object or object_block as SYMBOL.  */

bool
offset_within_block_p (rtx symbol, HOST_WIDE_INT offset)
{
  tree decl;

  if (GET_CODE (symbol) != SYMBOL_REF)
    return false;

  if (offset == 0)
    return true;

  if (offset > 0)
    {
      if (CONSTANT_POOL_ADDRESS_P (symbol)
          && offset < (int) GET_MODE_SIZE (get_pool_mode (symbol)))
        return true;

      decl = SYMBOL_REF_DECL (symbol);
      if (decl && offset < int_size_in_bytes (TREE_TYPE (decl)))
        return true;
    }

  if (SYMBOL_REF_HAS_BLOCK_INFO_P (symbol)
      && SYMBOL_REF_BLOCK (symbol)
      && SYMBOL_REF_BLOCK_OFFSET (symbol) >= 0
      && ((unsigned HOST_WIDE_INT) offset + SYMBOL_REF_BLOCK_OFFSET (symbol)
          < (unsigned HOST_WIDE_INT) SYMBOL_REF_BLOCK (symbol)->size))
    return true;

  return false;
}

/* Split X into a base and a constant offset, storing them in *BASE_OUT
   and *OFFSET_OUT respectively.  */

void
split_const (rtx x, rtx *base_out, rtx *offset_out)
{
  if (GET_CODE (x) == CONST)
    {
      x = XEXP (x, 0);
      if (GET_CODE (x) == PLUS && GET_CODE (XEXP (x, 1)) == CONST_INT)
        {
          *base_out = XEXP (x, 0);
          *offset_out = XEXP (x, 1);
          return;
        }
    }
  *base_out = x;
  *offset_out = const0_rtx;
}
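
/* Added usage sketch (the variable names are invented):

     rtx base, offset;
     split_const (addr, &base, &offset);

   For addr = (const (plus (symbol_ref "bar") (const_int 12))) this
   leaves base = (symbol_ref "bar") and offset = (const_int 12); for
   any other ADDR, base is ADDR itself and offset is const0_rtx, so
   callers need not special-case plain addresses.  */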

/* Return the number of places FIND appears within X.  If COUNT_DEST is
   zero, we do not count occurrences inside the destination of a SET.  */

int
count_occurrences (rtx x, rtx find, int count_dest)
{
  int i, j;
  enum rtx_code code;
  const char *format_ptr;
  int count;

  if (x == find)
    return 1;

  code = GET_CODE (x);

  switch (code)
    {
    case REG:
    case CONST_INT:
    case CONST_DOUBLE:
    case CONST_VECTOR:
    case SYMBOL_REF:
    case CODE_LABEL:
    case PC:
    case CC0:
      return 0;

    case EXPR_LIST:
      count = count_occurrences (XEXP (x, 0), find, count_dest);
      if (XEXP (x, 1))
        count += count_occurrences (XEXP (x, 1), find, count_dest);
      return count;

    case MEM:
      if (MEM_P (find) && rtx_equal_p (x, find))
        return 1;
      break;

    case SET:
      if (SET_DEST (x) == find && ! count_dest)
        return count_occurrences (SET_SRC (x), find, count_dest);
      break;

    default:
      break;
    }

  format_ptr = GET_RTX_FORMAT (code);
  count = 0;

  for (i = 0; i < GET_RTX_LENGTH (code); i++)
    {
      switch (*format_ptr++)
        {
        case 'e':
          count += count_occurrences (XEXP (x, i), find, count_dest);
          break;

        case 'E':
          for (j = 0; j < XVECLEN (x, i); j++)
            count += count_occurrences (XVECEXP (x, i, j), find, count_dest);
          break;
        }
    }
  return count;
}

/* Nonzero if register REG appears somewhere within IN.
   Also works if REG is not a register; in this case it checks
   for a subexpression of IN that is Lisp "equal" to REG.  */

int
reg_mentioned_p (rtx reg, rtx in)
{
  const char *fmt;
  int i;
  enum rtx_code code;

  if (in == 0)
    return 0;

  if (reg == in)
    return 1;

  if (GET_CODE (in) == LABEL_REF)
    return reg == XEXP (in, 0);

  code = GET_CODE (in);

  switch (code)
    {
      /* Compare registers by number.  */
    case REG:
      return REG_P (reg) && REGNO (in) == REGNO (reg);

      /* These codes have no constituent expressions
         and are unique.  */
    case SCRATCH:
    case CC0:
    case PC:
      return 0;

    case CONST_INT:
    case CONST_VECTOR:
    case CONST_DOUBLE:
      /* These are kept unique for a given value.  */
      return 0;

    default:
      break;
    }

  if (GET_CODE (reg) == code && rtx_equal_p (reg, in))
    return 1;

  fmt = GET_RTX_FORMAT (code);

  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    {
      if (fmt[i] == 'E')
        {
          int j;
          for (j = XVECLEN (in, i) - 1; j >= 0; j--)
            if (reg_mentioned_p (reg, XVECEXP (in, i, j)))
              return 1;
        }
      else if (fmt[i] == 'e'
               && reg_mentioned_p (reg, XEXP (in, i)))
        return 1;
    }
  return 0;
}

/* Return 1 if in between BEG and END, exclusive of BEG and END, there is
   no CODE_LABEL insn.  */

int
no_labels_between_p (rtx beg, rtx end)
{
  rtx p;
  if (beg == end)
    return 0;
  for (p = NEXT_INSN (beg); p != end; p = NEXT_INSN (p))
    if (LABEL_P (p))
      return 0;
  return 1;
}

/* Nonzero if register REG is used in an insn between
   FROM_INSN and TO_INSN (exclusive of those two).  */

int
reg_used_between_p (rtx reg, rtx from_insn, rtx to_insn)
{
  rtx insn;

  if (from_insn == to_insn)
    return 0;

  for (insn = NEXT_INSN (from_insn); insn != to_insn; insn = NEXT_INSN (insn))
    if (INSN_P (insn)
        && (reg_overlap_mentioned_p (reg, PATTERN (insn))
            || (CALL_P (insn) && find_reg_fusage (insn, USE, reg))))
      return 1;
  return 0;
}

/* Nonzero if the old value of X, a register, is referenced in BODY.  If X
   is entirely replaced by a new value and the only use is as a SET_DEST,
   we do not consider it a reference.  */

int
reg_referenced_p (rtx x, rtx body)
{
  int i;

  switch (GET_CODE (body))
    {
    case SET:
      if (reg_overlap_mentioned_p (x, SET_SRC (body)))
        return 1;

      /* If the destination is anything other than CC0, PC, a REG or a SUBREG
         of a REG that occupies all of the REG, the insn references X if
         it is mentioned in the destination.  */
      if (GET_CODE (SET_DEST (body)) != CC0
          && GET_CODE (SET_DEST (body)) != PC
          && !REG_P (SET_DEST (body))
          && ! (GET_CODE (SET_DEST (body)) == SUBREG
                && REG_P (SUBREG_REG (SET_DEST (body)))
                && (((GET_MODE_SIZE (GET_MODE (SUBREG_REG (SET_DEST (body))))
                      + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD)
                    == ((GET_MODE_SIZE (GET_MODE (SET_DEST (body)))
                         + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD)))
          && reg_overlap_mentioned_p (x, SET_DEST (body)))
        return 1;
      return 0;

    case ASM_OPERANDS:
      for (i = ASM_OPERANDS_INPUT_LENGTH (body) - 1; i >= 0; i--)
        if (reg_overlap_mentioned_p (x, ASM_OPERANDS_INPUT (body, i)))
          return 1;
      return 0;

    case CALL:
    case USE:
    case IF_THEN_ELSE:
      return reg_overlap_mentioned_p (x, body);

    case TRAP_IF:
      return reg_overlap_mentioned_p (x, TRAP_CONDITION (body));

    case PREFETCH:
      return reg_overlap_mentioned_p (x, XEXP (body, 0));

    case UNSPEC:
    case UNSPEC_VOLATILE:
      for (i = XVECLEN (body, 0) - 1; i >= 0; i--)
        if (reg_overlap_mentioned_p (x, XVECEXP (body, 0, i)))
          return 1;
      return 0;

    case PARALLEL:
      for (i = XVECLEN (body, 0) - 1; i >= 0; i--)
        if (reg_referenced_p (x, XVECEXP (body, 0, i)))
          return 1;
      return 0;

    case CLOBBER:
      if (MEM_P (XEXP (body, 0)))
        if (reg_overlap_mentioned_p (x, XEXP (XEXP (body, 0), 0)))
          return 1;
      return 0;

    case COND_EXEC:
      if (reg_overlap_mentioned_p (x, COND_EXEC_TEST (body)))
        return 1;
      return reg_referenced_p (x, COND_EXEC_CODE (body));

    default:
      return 0;
    }
}

/* Nonzero if register REG is set or clobbered in an insn between
   FROM_INSN and TO_INSN (exclusive of those two).  */

int
reg_set_between_p (rtx reg, rtx from_insn, rtx to_insn)
{
  rtx insn;

  if (from_insn == to_insn)
    return 0;

  for (insn = NEXT_INSN (from_insn); insn != to_insn; insn = NEXT_INSN (insn))
    if (INSN_P (insn) && reg_set_p (reg, insn))
      return 1;
  return 0;
}

/* Internals of reg_set_between_p.  */
int
reg_set_p (rtx reg, rtx insn)
{
  /* We can be passed an insn or part of one.  If we are passed an insn,
     check if a side-effect of the insn clobbers REG.  */
  if (INSN_P (insn)
      && (FIND_REG_INC_NOTE (insn, reg)
          || (CALL_P (insn)
              && ((REG_P (reg)
                   && REGNO (reg) < FIRST_PSEUDO_REGISTER
                   && overlaps_hard_reg_set_p (regs_invalidated_by_call,
                                               GET_MODE (reg), REGNO (reg)))
                  || MEM_P (reg)
                  || find_reg_fusage (insn, CLOBBER, reg)))))
    return 1;

  return set_of (reg, insn) != NULL_RTX;
}

/* Similar to reg_set_between_p, but check all registers in X.  Return 0
   only if none of them are modified between START and END.  Return 1 if
   X contains a MEM; this routine does use memory aliasing.  */

int
modified_between_p (rtx x, rtx start, rtx end)
{
  enum rtx_code code = GET_CODE (x);
  const char *fmt;
  int i, j;
  rtx insn;

  if (start == end)
    return 0;

  switch (code)
    {
    case CONST_INT:
    case CONST_DOUBLE:
    case CONST_VECTOR:
    case CONST:
    case SYMBOL_REF:
    case LABEL_REF:
      return 0;

    case PC:
    case CC0:
      return 1;

    case MEM:
      if (modified_between_p (XEXP (x, 0), start, end))
        return 1;
      if (MEM_READONLY_P (x))
        return 0;
      for (insn = NEXT_INSN (start); insn != end; insn = NEXT_INSN (insn))
        if (memory_modified_in_insn_p (x, insn))
          return 1;
      return 0;
      break;

    case REG:
      return reg_set_between_p (x, start, end);

    default:
      break;
    }

  fmt = GET_RTX_FORMAT (code);
  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    {
      if (fmt[i] == 'e' && modified_between_p (XEXP (x, i), start, end))
        return 1;

      else if (fmt[i] == 'E')
        for (j = XVECLEN (x, i) - 1; j >= 0; j--)
          if (modified_between_p (XVECEXP (x, i, j), start, end))
            return 1;
    }

  return 0;
}

/* Similar to reg_set_p, but check all registers in X.  Return 0 only if none
   of them are modified in INSN.  Return 1 if X contains a MEM; this routine
   does use memory aliasing.  */

int
modified_in_p (rtx x, rtx insn)
{
  enum rtx_code code = GET_CODE (x);
  const char *fmt;
  int i, j;

  switch (code)
    {
    case CONST_INT:
    case CONST_DOUBLE:
    case CONST_VECTOR:
    case CONST:
    case SYMBOL_REF:
    case LABEL_REF:
      return 0;

    case PC:
    case CC0:
      return 1;

    case MEM:
      if (modified_in_p (XEXP (x, 0), insn))
        return 1;
      if (MEM_READONLY_P (x))
        return 0;
      if (memory_modified_in_insn_p (x, insn))
        return 1;
      return 0;
      break;

    case REG:
      return reg_set_p (x, insn);

    default:
      break;
    }

  fmt = GET_RTX_FORMAT (code);
  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    {
      if (fmt[i] == 'e' && modified_in_p (XEXP (x, i), insn))
        return 1;

      else if (fmt[i] == 'E')
        for (j = XVECLEN (x, i) - 1; j >= 0; j--)
          if (modified_in_p (XVECEXP (x, i, j), insn))
            return 1;
    }

  return 0;
}

/* Helper function for set_of.  */
struct set_of_data
  {
    rtx found;
    rtx pat;
  };

static void
set_of_1 (rtx x, rtx pat, void *data1)
{
  struct set_of_data *data = (struct set_of_data *) (data1);
  if (rtx_equal_p (x, data->pat)
      || (!MEM_P (x) && reg_overlap_mentioned_p (data->pat, x)))
    data->found = pat;
}

/* Given an INSN, return a SET or CLOBBER expression that does modify PAT
   (either directly or via STRICT_LOW_PART and similar modifiers).  */
rtx
set_of (rtx pat, rtx insn)
{
  struct set_of_data data;
  data.found = NULL_RTX;
  data.pat = pat;
  note_stores (INSN_P (insn) ? PATTERN (insn) : insn, set_of_1, &data);
  return data.found;
}

/* Given an INSN, return a SET expression if this insn has only a single SET.
   It may also have CLOBBERs, USEs, or SETs whose output
   will not be used, which we ignore.  */

rtx
single_set_2 (rtx insn, rtx pat)
{
  rtx set = NULL;
  int set_verified = 1;
  int i;

  if (GET_CODE (pat) == PARALLEL)
    {
      for (i = 0; i < XVECLEN (pat, 0); i++)
        {
          rtx sub = XVECEXP (pat, 0, i);
          switch (GET_CODE (sub))
            {
            case USE:
            case CLOBBER:
              break;

            case SET:
              /* We can consider insns having multiple sets, where all
                 but one are dead, as single set insns.  In the common case
                 only a single set is present in the pattern, so we want
                 to avoid checking for REG_UNUSED notes unless necessary.

                 When we reach a set the first time, we just expect this is
                 the single set we are looking for and only when more
                 sets are found in the insn, we check them.  */
              if (!set_verified)
                {
                  if (find_reg_note (insn, REG_UNUSED, SET_DEST (set))
                      && !side_effects_p (set))
                    set = NULL;
                  else
                    set_verified = 1;
                }
              if (!set)
                set = sub, set_verified = 0;
              else if (!find_reg_note (insn, REG_UNUSED, SET_DEST (sub))
                       || side_effects_p (sub))
                return NULL_RTX;
              break;

            default:
              return NULL_RTX;
            }
        }
    }
  return set;
}
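
/* Added note and usage sketch (my reading of the surrounding API, not
   part of the original comments): callers normally reach this through
   the single_set wrapper from rtl.h, which handles the plain SET case
   and dispatches PARALLEL patterns here, e.g.

     rtx set = single_set (insn);
     if (set && REG_P (SET_DEST (set)))
       ... analyze SET_SRC (set) ...
*/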

/* Given an INSN, return nonzero if it has more than one SET, else return
   zero.  */

int
multiple_sets (rtx insn)
{
  int found;
  int i;

  /* INSN must be an insn.  */
  if (! INSN_P (insn))
    return 0;

  /* Only a PARALLEL can have multiple SETs.  */
  if (GET_CODE (PATTERN (insn)) == PARALLEL)
    {
      for (i = 0, found = 0; i < XVECLEN (PATTERN (insn), 0); i++)
        if (GET_CODE (XVECEXP (PATTERN (insn), 0, i)) == SET)
          {
            /* If we have already found a SET, then return now.  */
            if (found)
              return 1;
            else
              found = 1;
          }
    }

  /* Either zero or one SET.  */
  return 0;
}

/* Return nonzero if the destination of SET equals the source
   and there are no side effects.  */

int
set_noop_p (rtx set)
{
  rtx src = SET_SRC (set);
  rtx dst = SET_DEST (set);

  if (dst == pc_rtx && src == pc_rtx)
    return 1;

  if (MEM_P (dst) && MEM_P (src))
    return rtx_equal_p (dst, src) && !side_effects_p (dst);

  if (GET_CODE (dst) == ZERO_EXTRACT)
    return rtx_equal_p (XEXP (dst, 0), src)
           && ! BYTES_BIG_ENDIAN && XEXP (dst, 2) == const0_rtx
           && !side_effects_p (src);

  if (GET_CODE (dst) == STRICT_LOW_PART)
    dst = XEXP (dst, 0);

  if (GET_CODE (src) == SUBREG && GET_CODE (dst) == SUBREG)
    {
      if (SUBREG_BYTE (src) != SUBREG_BYTE (dst))
        return 0;
      src = SUBREG_REG (src);
      dst = SUBREG_REG (dst);
    }

  return (REG_P (src) && REG_P (dst)
          && REGNO (src) == REGNO (dst));
}
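
/* Added examples (not from the original comments): (set (reg 42) (reg 42))
   is a no-op, as is a MEM-to-MEM copy whose operands are rtx_equal_p and
   free of side effects; (set (reg:SI 0) (subreg:SI (reg:DI 1) 0)) is not,
   since source and destination are different registers.  */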

/* Return nonzero if an insn consists only of SETs, each of which only sets a
   value to itself.  */

int
noop_move_p (rtx insn)
{
  rtx pat = PATTERN (insn);

  if (INSN_CODE (insn) == NOOP_MOVE_INSN_CODE)
    return 1;

  /* Insns carrying these notes are useful later on.  */
  if (find_reg_note (insn, REG_EQUAL, NULL_RTX))
    return 0;

  /* For now treat an insn with a REG_RETVAL note as a
     special insn which should not be considered a no-op.  */
  if (find_reg_note (insn, REG_RETVAL, NULL_RTX))
    return 0;

  if (GET_CODE (pat) == SET && set_noop_p (pat))
    return 1;

  if (GET_CODE (pat) == PARALLEL)
    {
      int i;
      /* If nothing but SETs of registers to themselves,
         this insn can also be deleted.  */
      for (i = 0; i < XVECLEN (pat, 0); i++)
        {
          rtx tem = XVECEXP (pat, 0, i);

          if (GET_CODE (tem) == USE
              || GET_CODE (tem) == CLOBBER)
            continue;

          if (GET_CODE (tem) != SET || ! set_noop_p (tem))
            return 0;
        }

      return 1;
    }
  return 0;
}


/* Return the last thing that X was assigned from before *PINSN.  If VALID_TO
   is not NULL_RTX then verify that the object is not modified up to VALID_TO.
   If the object was modified, if we hit a partial assignment to X, or hit a
   CODE_LABEL first, return X.  If we found an assignment, update *PINSN to
   point to it.  ALLOW_HWREG is set to 1 if hardware registers are allowed to
   be the src.  */

rtx
find_last_value (rtx x, rtx *pinsn, rtx valid_to, int allow_hwreg)
{
  rtx p;

  for (p = PREV_INSN (*pinsn); p && !LABEL_P (p);
       p = PREV_INSN (p))
    if (INSN_P (p))
      {
        rtx set = single_set (p);
        rtx note = find_reg_note (p, REG_EQUAL, NULL_RTX);

        if (set && rtx_equal_p (x, SET_DEST (set)))
          {
            rtx src = SET_SRC (set);

            if (note && GET_CODE (XEXP (note, 0)) != EXPR_LIST)
              src = XEXP (note, 0);

            if ((valid_to == NULL_RTX
                 || ! modified_between_p (src, PREV_INSN (p), valid_to))
                /* Reject hard registers because we don't usually want
                   to use them; we'd rather use a pseudo.  */
                && (! (REG_P (src)
                       && REGNO (src) < FIRST_PSEUDO_REGISTER) || allow_hwreg))
              {
                *pinsn = p;
                return src;
              }
          }

        /* If set in non-simple way, we don't have a value.  */
        if (reg_set_p (x, p))
          break;
      }

  return x;
}

/* Return nonzero if register in range [REGNO, ENDREGNO)
   appears either explicitly or implicitly in X
   other than being stored into.

   References contained within the substructure at LOC do not count.
   LOC may be zero, meaning don't ignore anything.  */

int
refers_to_regno_p (unsigned int regno, unsigned int endregno, rtx x,
                   rtx *loc)
{
  int i;
  unsigned int x_regno;
  RTX_CODE code;
  const char *fmt;

 repeat:
  /* The contents of a REG_NONNEG note is always zero, so we must come here
     upon repeat in case the last REG_NOTE is a REG_NONNEG note.  */
  if (x == 0)
    return 0;

  code = GET_CODE (x);

  switch (code)
    {
    case REG:
      x_regno = REGNO (x);

      /* If we are modifying the stack, frame, or argument pointer, it will
         clobber a virtual register.  In fact, we could be more precise,
         but it isn't worth it.  */
      if ((x_regno == STACK_POINTER_REGNUM
#if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
           || x_regno == ARG_POINTER_REGNUM
#endif
           || x_regno == FRAME_POINTER_REGNUM)
          && regno >= FIRST_VIRTUAL_REGISTER && regno <= LAST_VIRTUAL_REGISTER)
        return 1;

      return endregno > x_regno && regno < END_REGNO (x);

    case SUBREG:
      /* If this is a SUBREG of a hard reg, we can see exactly which
         registers are being modified.  Otherwise, handle normally.  */
      if (REG_P (SUBREG_REG (x))
          && REGNO (SUBREG_REG (x)) < FIRST_PSEUDO_REGISTER)
        {
          unsigned int inner_regno = subreg_regno (x);
          unsigned int inner_endregno
            = inner_regno + (inner_regno < FIRST_PSEUDO_REGISTER
                             ? subreg_nregs (x) : 1);

          return endregno > inner_regno && regno < inner_endregno;
        }
      break;

    case CLOBBER:
    case SET:
      if (&SET_DEST (x) != loc
          /* Note setting a SUBREG counts as referring to the REG it is in for
             a pseudo but not for hard registers since we can
             treat each word individually.  */
          && ((GET_CODE (SET_DEST (x)) == SUBREG
               && loc != &SUBREG_REG (SET_DEST (x))
               && REG_P (SUBREG_REG (SET_DEST (x)))
               && REGNO (SUBREG_REG (SET_DEST (x))) >= FIRST_PSEUDO_REGISTER
               && refers_to_regno_p (regno, endregno,
                                     SUBREG_REG (SET_DEST (x)), loc))
              || (!REG_P (SET_DEST (x))
                  && refers_to_regno_p (regno, endregno, SET_DEST (x), loc))))
        return 1;

      if (code == CLOBBER || loc == &SET_SRC (x))
        return 0;
      x = SET_SRC (x);
      goto repeat;

    default:
      break;
    }

  /* X does not match, so try its subexpressions.  */

  fmt = GET_RTX_FORMAT (code);
  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    {
      if (fmt[i] == 'e' && loc != &XEXP (x, i))
        {
          if (i == 0)
            {
              x = XEXP (x, 0);
              goto repeat;
            }
          else
            if (refers_to_regno_p (regno, endregno, XEXP (x, i), loc))
              return 1;
        }
      else if (fmt[i] == 'E')
        {
          int j;
          for (j = XVECLEN (x, i) - 1; j >= 0; j--)
            if (loc != &XVECEXP (x, i, j)
                && refers_to_regno_p (regno, endregno, XVECEXP (x, i, j), loc))
              return 1;
        }
    }
  return 0;
}

/* Nonzero if modifying X will affect IN.  If X is a register or a SUBREG,
   we check if any register number in X conflicts with the relevant register
   numbers.  If X is a constant, return 0.  If X is a MEM, return 1 iff IN
   contains a MEM (we don't bother checking for memory addresses that can't
   conflict because we expect this to be a rare case).  */

int
reg_overlap_mentioned_p (rtx x, rtx in)
{
  unsigned int regno, endregno;

  /* If either argument is a constant, then modifying X can not
     affect IN.  Here we look at IN, we can profitably combine
     CONSTANT_P (x) with the switch statement below.  */
  if (CONSTANT_P (in))
    return 0;

 recurse:
  switch (GET_CODE (x))
    {
    case STRICT_LOW_PART:
    case ZERO_EXTRACT:
    case SIGN_EXTRACT:
      /* Overly conservative.  */
      x = XEXP (x, 0);
      goto recurse;

    case SUBREG:
      regno = REGNO (SUBREG_REG (x));
      if (regno < FIRST_PSEUDO_REGISTER)
        regno = subreg_regno (x);
      endregno = regno + (regno < FIRST_PSEUDO_REGISTER
                          ? subreg_nregs (x) : 1);
      goto do_reg;

    case REG:
      regno = REGNO (x);
      endregno = END_REGNO (x);
    do_reg:
      return refers_to_regno_p (regno, endregno, in, (rtx*) 0);

    case MEM:
      {
        const char *fmt;
        int i;

        if (MEM_P (in))
          return 1;

        fmt = GET_RTX_FORMAT (GET_CODE (in));
        for (i = GET_RTX_LENGTH (GET_CODE (in)) - 1; i >= 0; i--)
          if (fmt[i] == 'e')
            {
              if (reg_overlap_mentioned_p (x, XEXP (in, i)))
                return 1;
            }
          else if (fmt[i] == 'E')
            {
              int j;
              for (j = XVECLEN (in, i) - 1; j >= 0; --j)
                if (reg_overlap_mentioned_p (x, XVECEXP (in, i, j)))
                  return 1;
            }

        return 0;
      }

    case SCRATCH:
    case PC:
    case CC0:
      return reg_mentioned_p (x, in);

    case PARALLEL:
      {
        int i;

        /* If any register in here refers to it we return true.  */
        for (i = XVECLEN (x, 0) - 1; i >= 0; i--)
          if (XEXP (XVECEXP (x, 0, i), 0) != 0
              && reg_overlap_mentioned_p (XEXP (XVECEXP (x, 0, i), 0), in))
            return 1;
        return 0;
      }

    default:
      gcc_assert (CONSTANT_P (x));
      return 0;
    }
}

/* Call FUN on each register or MEM that is stored into or clobbered by X.
   (X would be the pattern of an insn).  DATA is an arbitrary pointer,
   ignored by note_stores, but passed to FUN.

   FUN receives three arguments:
   1. the REG, MEM, CC0 or PC being stored in or clobbered,
   2. the SET or CLOBBER rtx that does the store,
   3. the pointer DATA provided to note_stores.

   If the item being stored in or clobbered is a SUBREG of a hard register,
   the SUBREG will be passed.  */

void
note_stores (rtx x, void (*fun) (rtx, rtx, void *), void *data)
{
  int i;

  if (GET_CODE (x) == COND_EXEC)
    x = COND_EXEC_CODE (x);

  if (GET_CODE (x) == SET || GET_CODE (x) == CLOBBER)
    {
      rtx dest = SET_DEST (x);

      while ((GET_CODE (dest) == SUBREG
              && (!REG_P (SUBREG_REG (dest))
                  || REGNO (SUBREG_REG (dest)) >= FIRST_PSEUDO_REGISTER))
             || GET_CODE (dest) == ZERO_EXTRACT
             || GET_CODE (dest) == STRICT_LOW_PART)
        dest = XEXP (dest, 0);

      /* If we have a PARALLEL, SET_DEST is a list of EXPR_LIST expressions,
         each of whose first operand is a register.  */
      if (GET_CODE (dest) == PARALLEL)
        {
          for (i = XVECLEN (dest, 0) - 1; i >= 0; i--)
            if (XEXP (XVECEXP (dest, 0, i), 0) != 0)
              (*fun) (XEXP (XVECEXP (dest, 0, i), 0), x, data);
        }
      else
        (*fun) (dest, x, data);
    }

  else if (GET_CODE (x) == PARALLEL)
    for (i = XVECLEN (x, 0) - 1; i >= 0; i--)
      note_stores (XVECEXP (x, 0, i), fun, data);
}
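
/* Added usage sketch (the callback and variable names are invented):

     static void
     record_hard_reg_stores (rtx dest, rtx setter ATTRIBUTE_UNUSED,
                             void *data)
     {
       if (REG_P (dest) && REGNO (dest) < FIRST_PSEUDO_REGISTER)
         SET_HARD_REG_BIT (*(HARD_REG_SET *) data, REGNO (dest));
     }

     HARD_REG_SET stored_regs;
     CLEAR_HARD_REG_SET (stored_regs);
     note_stores (PATTERN (insn), record_hard_reg_stores, &stored_regs);

   This records every hard register written by a SET or CLOBBER in
   INSN's pattern; a full version would also use END_HARD_REGNO to mark
   all words of multi-register hard regs.  */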
1445\f
e2373f95
RK
1446/* Like notes_stores, but call FUN for each expression that is being
1447 referenced in PBODY, a pointer to the PATTERN of an insn. We only call
1448 FUN for each expression, not any interior subexpressions. FUN receives a
1449 pointer to the expression and the DATA passed to this function.
1450
1451 Note that this is not quite the same test as that done in reg_referenced_p
1452 since that considers something as being referenced if it is being
1453 partially set, while we do not. */
1454
1455void
0c20a65f 1456note_uses (rtx *pbody, void (*fun) (rtx *, void *), void *data)
e2373f95
RK
1457{
1458 rtx body = *pbody;
1459 int i;
1460
1461 switch (GET_CODE (body))
1462 {
1463 case COND_EXEC:
1464 (*fun) (&COND_EXEC_TEST (body), data);
1465 note_uses (&COND_EXEC_CODE (body), fun, data);
1466 return;
1467
1468 case PARALLEL:
1469 for (i = XVECLEN (body, 0) - 1; i >= 0; i--)
1470 note_uses (&XVECEXP (body, 0, i), fun, data);
1471 return;
1472
bbbc206e
BS
1473 case SEQUENCE:
1474 for (i = XVECLEN (body, 0) - 1; i >= 0; i--)
1475 note_uses (&PATTERN (XVECEXP (body, 0, i)), fun, data);
1476 return;
1477
e2373f95
RK
1478 case USE:
1479 (*fun) (&XEXP (body, 0), data);
1480 return;
1481
1482 case ASM_OPERANDS:
1483 for (i = ASM_OPERANDS_INPUT_LENGTH (body) - 1; i >= 0; i--)
1484 (*fun) (&ASM_OPERANDS_INPUT (body, i), data);
1485 return;
1486
1487 case TRAP_IF:
1488 (*fun) (&TRAP_CONDITION (body), data);
1489 return;
1490
21b8482a
JJ
1491 case PREFETCH:
1492 (*fun) (&XEXP (body, 0), data);
1493 return;
1494
e2373f95
RK
1495 case UNSPEC:
1496 case UNSPEC_VOLATILE:
1497 for (i = XVECLEN (body, 0) - 1; i >= 0; i--)
1498 (*fun) (&XVECEXP (body, 0, i), data);
1499 return;
1500
1501 case CLOBBER:
3c0cb5de 1502 if (MEM_P (XEXP (body, 0)))
e2373f95
RK
1503 (*fun) (&XEXP (XEXP (body, 0), 0), data);
1504 return;
1505
1506 case SET:
1507 {
1508 rtx dest = SET_DEST (body);
1509
1510 /* For sets we replace everything in source plus registers in memory
1511 expression in store and operands of a ZERO_EXTRACT. */
1512 (*fun) (&SET_SRC (body), data);
1513
1514 if (GET_CODE (dest) == ZERO_EXTRACT)
1515 {
1516 (*fun) (&XEXP (dest, 1), data);
1517 (*fun) (&XEXP (dest, 2), data);
1518 }
1519
1520 while (GET_CODE (dest) == SUBREG || GET_CODE (dest) == STRICT_LOW_PART)
1521 dest = XEXP (dest, 0);
1522
3c0cb5de 1523 if (MEM_P (dest))
e2373f95
RK
1524 (*fun) (&XEXP (dest, 0), data);
1525 }
1526 return;
1527
1528 default:
1529 /* All the other possibilities never store. */
1530 (*fun) (pbody, data);
1531 return;
1532 }
1533}
1534\f
2c88418c
RS
1535/* Return nonzero if X's old contents don't survive after INSN.
1536 This will be true if X is (cc0) or if X is a register and
1537 X dies in INSN or because INSN entirely sets X.
1538
46d096a3
SB
1539 "Entirely set" means set directly and not through a SUBREG, or
1540 ZERO_EXTRACT, so no trace of the old contents remains.
2c88418c
RS
1541 Likewise, REG_INC does not count.
1542
1543 REG may be a hard or pseudo reg. Renumbering is not taken into account,
1544 but for this use that makes no difference, since regs don't overlap
1545 during their lifetimes. Therefore, this function may be used
6fb5fa3c 1546 at any time after deaths have been computed.
2c88418c
RS
1547
1548 If REG is a hard reg that occupies multiple machine registers, this
1549 function will only return 1 if each of those registers will be replaced
1550 by INSN. */
1551
1552int
0c20a65f 1553dead_or_set_p (rtx insn, rtx x)
2c88418c 1554{
09e18274 1555 unsigned int regno, end_regno;
770ae6cc 1556 unsigned int i;
2c88418c
RS
1557
1558 /* Can't use cc0_rtx below since this file is used by genattrtab.c. */
1559 if (GET_CODE (x) == CC0)
1560 return 1;
1561
41374e13 1562 gcc_assert (REG_P (x));
2c88418c
RS
1563
1564 regno = REGNO (x);
09e18274
RS
1565 end_regno = END_REGNO (x);
1566 for (i = regno; i < end_regno; i++)
2c88418c
RS
1567 if (! dead_or_set_regno_p (insn, i))
1568 return 0;
1569
1570 return 1;
1571}
1572
194acded
HPN
1573/* Return TRUE iff DEST is a register or subreg of a register and
1574 doesn't change the number of words of the inner register, and any
1575 part of the register is TEST_REGNO. */
1576
1577static bool
1578covers_regno_no_parallel_p (rtx dest, unsigned int test_regno)
1579{
1580 unsigned int regno, endregno;
1581
1582 if (GET_CODE (dest) == SUBREG
1583 && (((GET_MODE_SIZE (GET_MODE (dest))
1584 + UNITS_PER_WORD - 1) / UNITS_PER_WORD)
1585 == ((GET_MODE_SIZE (GET_MODE (SUBREG_REG (dest)))
1586 + UNITS_PER_WORD - 1) / UNITS_PER_WORD)))
1587 dest = SUBREG_REG (dest);
1588
1589 if (!REG_P (dest))
1590 return false;
1591
1592 regno = REGNO (dest);
09e18274 1593 endregno = END_REGNO (dest);
194acded
HPN
1594 return (test_regno >= regno && test_regno < endregno);
1595}
1596
1597/* Like covers_regno_no_parallel_p, but also handles PARALLELs where
1598 any member matches the covers_regno_no_parallel_p criteria. */
1599
1600static bool
1601covers_regno_p (rtx dest, unsigned int test_regno)
1602{
1603 if (GET_CODE (dest) == PARALLEL)
1604 {
1605 /* Some targets place small structures in registers for return
1606 values of functions, and those registers are wrapped in
1607 PARALLELs that we may see as the destination of a SET. */
1608 int i;
1609
1610 for (i = XVECLEN (dest, 0) - 1; i >= 0; i--)
1611 {
1612 rtx inner = XEXP (XVECEXP (dest, 0, i), 0);
1613 if (inner != NULL_RTX
1614 && covers_regno_no_parallel_p (inner, test_regno))
1615 return true;
1616 }
1617
1618 return false;
1619 }
1620 else
1621 return covers_regno_no_parallel_p (dest, test_regno);
1622}
1623
6fb5fa3c 1624/* Utility function for dead_or_set_p to check an individual register. */
2c88418c
RS
1625
1626int
0c20a65f 1627dead_or_set_regno_p (rtx insn, unsigned int test_regno)
2c88418c 1628{
8c8de5fc 1629 rtx pattern;
2c88418c 1630
0a2287bf
RH
1631 /* See if there is a death note for something that includes TEST_REGNO. */
1632 if (find_regno_note (insn, REG_DEAD, test_regno))
1633 return 1;
2c88418c 1634
4b4bf941 1635 if (CALL_P (insn)
8f3e7a26
RK
1636 && find_regno_fusage (insn, CLOBBER, test_regno))
1637 return 1;
1638
0c99ec5c
RH
1639 pattern = PATTERN (insn);
1640
1641 if (GET_CODE (pattern) == COND_EXEC)
1642 pattern = COND_EXEC_CODE (pattern);
1643
1644 if (GET_CODE (pattern) == SET)
194acded 1645 return covers_regno_p (SET_DEST (pattern), test_regno);
0c99ec5c 1646 else if (GET_CODE (pattern) == PARALLEL)
2c88418c 1647 {
b3694847 1648 int i;
2c88418c 1649
0c99ec5c 1650 for (i = XVECLEN (pattern, 0) - 1; i >= 0; i--)
2c88418c 1651 {
0c99ec5c
RH
1652 rtx body = XVECEXP (pattern, 0, i);
1653
1654 if (GET_CODE (body) == COND_EXEC)
1655 body = COND_EXEC_CODE (body);
2c88418c 1656
194acded
HPN
1657 if ((GET_CODE (body) == SET || GET_CODE (body) == CLOBBER)
1658 && covers_regno_p (SET_DEST (body), test_regno))
1659 return 1;
2c88418c
RS
1660 }
1661 }
1662
1663 return 0;
1664}
1665
1666/* Return the reg-note of kind KIND in insn INSN, if there is one.
1667 If DATUM is nonzero, look for one whose datum is DATUM. */
1668
1669rtx
0c20a65f 1670find_reg_note (rtx insn, enum reg_note kind, rtx datum)
2c88418c 1671{
b3694847 1672 rtx link;
2c88418c 1673
af082de3
BE
1674 gcc_assert (insn);
1675
ae78d276 1676 /* Ignore anything that is not an INSN, JUMP_INSN or CALL_INSN. */
2c3c49de 1677 if (! INSN_P (insn))
ae78d276 1678 return 0;
cd798543
AP
1679 if (datum == 0)
1680 {
1681 for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
1682 if (REG_NOTE_KIND (link) == kind)
1683 return link;
1684 return 0;
1685 }
ae78d276 1686
2c88418c 1687 for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
cd798543 1688 if (REG_NOTE_KIND (link) == kind && datum == XEXP (link, 0))
2c88418c
RS
1689 return link;
1690 return 0;
1691}
1692
1693/* Return the reg-note of kind KIND in insn INSN which applies to register
99309f3b
RK
1694 number REGNO, if any. Return 0 if there is no such reg-note. Note that
1695 the REGNO of this NOTE need not be REGNO if REGNO is a hard register;
1696 it might be the case that the note overlaps REGNO. */
2c88418c
RS
1697
1698rtx
0c20a65f 1699find_regno_note (rtx insn, enum reg_note kind, unsigned int regno)
2c88418c 1700{
b3694847 1701 rtx link;
2c88418c 1702
ae78d276 1703 /* Ignore anything that is not an INSN, JUMP_INSN or CALL_INSN. */
2c3c49de 1704 if (! INSN_P (insn))
ae78d276
MM
1705 return 0;
1706
2c88418c
RS
1707 for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
1708 if (REG_NOTE_KIND (link) == kind
1709 /* Verify that it is a register, so that scratch and MEM won't cause a
1710 problem here. */
f8cfc6aa 1711 && REG_P (XEXP (link, 0))
99309f3b 1712 && REGNO (XEXP (link, 0)) <= regno
09e18274 1713 && END_REGNO (XEXP (link, 0)) > regno)
2c88418c
RS
1714 return link;
1715 return 0;
1716}
8f3e7a26 1717
d9c695ff
RK
1718/* Return a REG_EQUIV or REG_EQUAL note if insn has only a single set and
1719 has such a note. */
1720
1721rtx
0c20a65f 1722find_reg_equal_equiv_note (rtx insn)
d9c695ff 1723{
cd648cec 1724 rtx link;
d9c695ff 1725
cd648cec 1726 if (!INSN_P (insn))
d9c695ff 1727 return 0;
ea8f106d 1728
cd648cec
JH
1729 for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
1730 if (REG_NOTE_KIND (link) == REG_EQUAL
1731 || REG_NOTE_KIND (link) == REG_EQUIV)
1732 {
ea8f106d
SB
1733 /* FIXME: We should never have REG_EQUAL/REG_EQUIV notes on
1734 insns that have multiple sets. Checking single_set to
1735 make sure of this is not the proper check, as explained
1736 in the comment in set_unique_reg_note.
1737
1738 This should be changed into an assert. */
1739 if (GET_CODE (PATTERN (insn)) == PARALLEL && multiple_sets (insn))
cd648cec
JH
1740 return 0;
1741 return link;
1742 }
1743 return NULL;
d9c695ff
RK
1744}
1745
2a450639
RS
1746/* Check whether INSN is a single_set whose source is known to be
1747 equivalent to a constant. Return that constant if so, otherwise
1748 return null. */
1749
1750rtx
1751find_constant_src (rtx insn)
1752{
1753 rtx note, set, x;
1754
1755 set = single_set (insn);
1756 if (set)
1757 {
1758 x = avoid_constant_pool_reference (SET_SRC (set));
1759 if (CONSTANT_P (x))
1760 return x;
1761 }
1762
1763 note = find_reg_equal_equiv_note (insn);
1764 if (note && CONSTANT_P (XEXP (note, 0)))
1765 return XEXP (note, 0);
1766
1767 return NULL_RTX;
1768}
1769
8f3e7a26
RK
1770/* Return true if DATUM, or any overlap of DATUM, of kind CODE is found
1771 in the CALL_INSN_FUNCTION_USAGE information of INSN. */
1772
1773int
0c20a65f 1774find_reg_fusage (rtx insn, enum rtx_code code, rtx datum)
8f3e7a26
RK
1775{
1776 /* If it's not a CALL_INSN, it can't possibly have a
1777 CALL_INSN_FUNCTION_USAGE field, so don't bother checking. */
4b4bf941 1778 if (!CALL_P (insn))
8f3e7a26
RK
1779 return 0;
1780
41374e13 1781 gcc_assert (datum);
8f3e7a26 1782
f8cfc6aa 1783 if (!REG_P (datum))
8f3e7a26 1784 {
b3694847 1785 rtx link;
8f3e7a26
RK
1786
1787 for (link = CALL_INSN_FUNCTION_USAGE (insn);
a6a2274a 1788 link;
8f3e7a26 1789 link = XEXP (link, 1))
a6a2274a 1790 if (GET_CODE (XEXP (link, 0)) == code
cc863bea 1791 && rtx_equal_p (datum, XEXP (XEXP (link, 0), 0)))
a6a2274a 1792 return 1;
8f3e7a26
RK
1793 }
1794 else
1795 {
770ae6cc 1796 unsigned int regno = REGNO (datum);
8f3e7a26
RK
1797
1798 /* CALL_INSN_FUNCTION_USAGE information cannot contain references
1799 to pseudo registers, so don't bother checking. */
1800
1801 if (regno < FIRST_PSEUDO_REGISTER)
a6a2274a 1802 {
09e18274 1803 unsigned int end_regno = END_HARD_REGNO (datum);
770ae6cc 1804 unsigned int i;
8f3e7a26
RK
1805
1806 for (i = regno; i < end_regno; i++)
1807 if (find_regno_fusage (insn, code, i))
1808 return 1;
a6a2274a 1809 }
8f3e7a26
RK
1810 }
1811
1812 return 0;
1813}
1814
1815/* Return true if REGNO, or any overlap of REGNO, of kind CODE is found
1816 in the CALL_INSN_FUNCTION_USAGE information of INSN. */
1817
1818int
0c20a65f 1819find_regno_fusage (rtx insn, enum rtx_code code, unsigned int regno)
8f3e7a26 1820{
b3694847 1821 rtx link;
8f3e7a26
RK
1822
1823 /* CALL_INSN_FUNCTION_USAGE information cannot contain references
1824 to pseudo registers, so don't bother checking. */
1825
1826 if (regno >= FIRST_PSEUDO_REGISTER
4b4bf941 1827 || !CALL_P (insn) )
8f3e7a26
RK
1828 return 0;
1829
1830 for (link = CALL_INSN_FUNCTION_USAGE (insn); link; link = XEXP (link, 1))
83ab3839 1831 {
770ae6cc 1832 rtx op, reg;
83ab3839
RH
1833
1834 if (GET_CODE (op = XEXP (link, 0)) == code
f8cfc6aa 1835 && REG_P (reg = XEXP (op, 0))
09e18274
RS
1836 && REGNO (reg) <= regno
1837 && END_HARD_REGNO (reg) > regno)
83ab3839
RH
1838 return 1;
1839 }
8f3e7a26
RK
1840
1841 return 0;
1842}
a6a063b8
AM
1843
1844/* Return true if INSN is a call to a pure function. */
1845
1846int
0c20a65f 1847pure_call_p (rtx insn)
a6a063b8
AM
1848{
1849 rtx link;
1850
4b4bf941 1851 if (!CALL_P (insn) || ! CONST_OR_PURE_CALL_P (insn))
a6a063b8
AM
1852 return 0;
1853
1854 /* Look for the note that differentiates const and pure functions. */
1855 for (link = CALL_INSN_FUNCTION_USAGE (insn); link; link = XEXP (link, 1))
1856 {
1857 rtx u, m;
1858
1859 if (GET_CODE (u = XEXP (link, 0)) == USE
3c0cb5de 1860 && MEM_P (m = XEXP (u, 0)) && GET_MODE (m) == BLKmode
a6a063b8
AM
1861 && GET_CODE (XEXP (m, 0)) == SCRATCH)
1862 return 1;
1863 }
1864
1865 return 0;
1866}
\f
/* Remove register note NOTE from the REG_NOTES of INSN.  */

void
remove_note (rtx insn, rtx note)
{
  rtx link;

  if (note == NULL_RTX)
    return;

  if (REG_NOTES (insn) == note)
    REG_NOTES (insn) = XEXP (note, 1);
  else
    for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
      if (XEXP (link, 1) == note)
        {
          XEXP (link, 1) = XEXP (note, 1);
          break;
        }

  switch (REG_NOTE_KIND (note))
    {
    case REG_EQUAL:
    case REG_EQUIV:
      df_notes_rescan (insn);
      break;
    default:
      break;
    }
}

/* Remove REG_EQUAL and/or REG_EQUIV notes if INSN has such notes.  */

void
remove_reg_equal_equiv_notes (rtx insn)
{
  rtx *loc;

  loc = &REG_NOTES (insn);
  while (*loc)
    {
      enum reg_note kind = REG_NOTE_KIND (*loc);
      if (kind == REG_EQUAL || kind == REG_EQUIV)
        *loc = XEXP (*loc, 1);
      else
        loc = &XEXP (*loc, 1);
    }
}
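
/* The loop above uses the pointer-to-pointer idiom for in-place list
   surgery: LOC always points at the link field that holds the current
   note.  A sketch of the same idiom removing another note kind
   (illustrative only, not a function in this file):

     rtx *loc = &REG_NOTES (insn);
     while (*loc)
       {
         if (REG_NOTE_KIND (*loc) == REG_DEAD)
           *loc = XEXP (*loc, 1);       /* Unlink this note.  */
         else
           loc = &XEXP (*loc, 1);       /* Step past it.  */
       }
   */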

/* Search LISTP (an EXPR_LIST) for an entry whose first operand is NODE and
   return 1 if it is found.  A simple equality test is used to determine if
   NODE matches.  */

int
in_expr_list_p (rtx listp, rtx node)
{
  rtx x;

  for (x = listp; x; x = XEXP (x, 1))
    if (node == XEXP (x, 0))
      return 1;

  return 0;
}

/* Search LISTP (an EXPR_LIST) for an entry whose first operand is NODE and
   remove that entry from the list if it is found.

   A simple equality test is used to determine if NODE matches.  */

void
remove_node_from_expr_list (rtx node, rtx *listp)
{
  rtx temp = *listp;
  rtx prev = NULL_RTX;

  while (temp)
    {
      if (node == XEXP (temp, 0))
        {
          /* Splice the node out of the list.  */
          if (prev)
            XEXP (prev, 1) = XEXP (temp, 1);
          else
            *listp = XEXP (temp, 1);

          return;
        }

      prev = temp;
      temp = XEXP (temp, 1);
    }
}
\f
/* Nonzero if X contains any volatile instructions.  These are instructions
   which may cause unpredictable machine state, and thus no other
   instructions should be moved or combined across them.  This includes
   only volatile asms and UNSPEC_VOLATILE instructions.  */

int
volatile_insn_p (rtx x)
{
  RTX_CODE code;

  code = GET_CODE (x);
  switch (code)
    {
    case LABEL_REF:
    case SYMBOL_REF:
    case CONST_INT:
    case CONST:
    case CONST_DOUBLE:
    case CONST_VECTOR:
    case CC0:
    case PC:
    case REG:
    case SCRATCH:
    case CLOBBER:
    case ADDR_VEC:
    case ADDR_DIFF_VEC:
    case CALL:
    case MEM:
      return 0;

    case UNSPEC_VOLATILE:
      /* case TRAP_IF: This isn't clear yet.  */
      return 1;

    case ASM_INPUT:
    case ASM_OPERANDS:
      if (MEM_VOLATILE_P (x))
        return 1;

    default:
      break;
    }

  /* Recursively scan the operands of this expression.  */

  {
    const char *fmt = GET_RTX_FORMAT (code);
    int i;

    for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
      {
        if (fmt[i] == 'e')
          {
            if (volatile_insn_p (XEXP (x, i)))
              return 1;
          }
        else if (fmt[i] == 'E')
          {
            int j;
            for (j = 0; j < XVECLEN (x, i); j++)
              if (volatile_insn_p (XVECEXP (x, i, j)))
                return 1;
          }
      }
  }
  return 0;
}

/* Nonzero if X contains any volatile memory references,
   UNSPEC_VOLATILE operations, or volatile ASM_OPERANDS expressions.  */

int
volatile_refs_p (rtx x)
{
  RTX_CODE code;

  code = GET_CODE (x);
  switch (code)
    {
    case LABEL_REF:
    case SYMBOL_REF:
    case CONST_INT:
    case CONST:
    case CONST_DOUBLE:
    case CONST_VECTOR:
    case CC0:
    case PC:
    case REG:
    case SCRATCH:
    case CLOBBER:
    case ADDR_VEC:
    case ADDR_DIFF_VEC:
      return 0;

    case UNSPEC_VOLATILE:
      return 1;

    case MEM:
    case ASM_INPUT:
    case ASM_OPERANDS:
      if (MEM_VOLATILE_P (x))
        return 1;

    default:
      break;
    }

  /* Recursively scan the operands of this expression.  */

  {
    const char *fmt = GET_RTX_FORMAT (code);
    int i;

    for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
      {
        if (fmt[i] == 'e')
          {
            if (volatile_refs_p (XEXP (x, i)))
              return 1;
          }
        else if (fmt[i] == 'E')
          {
            int j;
            for (j = 0; j < XVECLEN (x, i); j++)
              if (volatile_refs_p (XVECEXP (x, i, j)))
                return 1;
          }
      }
  }
  return 0;
}

/* Similar to above, except that it also rejects register pre- and
   post-incrementing.  */

int
side_effects_p (rtx x)
{
  RTX_CODE code;

  code = GET_CODE (x);
  switch (code)
    {
    case LABEL_REF:
    case SYMBOL_REF:
    case CONST_INT:
    case CONST:
    case CONST_DOUBLE:
    case CONST_VECTOR:
    case CC0:
    case PC:
    case REG:
    case SCRATCH:
    case ADDR_VEC:
    case ADDR_DIFF_VEC:
      return 0;

    case CLOBBER:
      /* Reject CLOBBER with a non-VOID mode.  These are made by combine.c
         when some combination can't be done.  If we see one, don't think
         that we can simplify the expression.  */
      return (GET_MODE (x) != VOIDmode);

    case PRE_INC:
    case PRE_DEC:
    case POST_INC:
    case POST_DEC:
    case PRE_MODIFY:
    case POST_MODIFY:
    case CALL:
    case UNSPEC_VOLATILE:
      /* case TRAP_IF: This isn't clear yet.  */
      return 1;

    case MEM:
    case ASM_INPUT:
    case ASM_OPERANDS:
      if (MEM_VOLATILE_P (x))
        return 1;

    default:
      break;
    }

  /* Recursively scan the operands of this expression.  */

  {
    const char *fmt = GET_RTX_FORMAT (code);
    int i;

    for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
      {
        if (fmt[i] == 'e')
          {
            if (side_effects_p (XEXP (x, i)))
              return 1;
          }
        else if (fmt[i] == 'E')
          {
            int j;
            for (j = 0; j < XVECLEN (x, i); j++)
              if (side_effects_p (XVECEXP (x, i, j)))
                return 1;
          }
      }
  }
  return 0;
}
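
/* The three predicates above form a hierarchy.  volatile_insn_p accepts
   the least (only volatile asms and UNSPEC_VOLATILE), volatile_refs_p
   additionally accepts volatile memory references, and side_effects_p
   additionally accepts autoincrements, calls and mode-carrying CLOBBERs.
   For instance (an illustrative sketch):

     (mem/v:SI (reg:SI 100))     volatile_insn_p 0, volatile_refs_p 1,
                                 side_effects_p 1
     (post_inc:SI (reg:SI 100))  volatile_insn_p 0, volatile_refs_p 0,
                                 side_effects_p 1  */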
\f
enum may_trap_p_flags
{
  MTP_UNALIGNED_MEMS = 1,
  MTP_AFTER_MOVE = 2
};

/* Return nonzero if evaluating rtx X might cause a trap.
   (FLAGS & MTP_UNALIGNED_MEMS) controls whether nonzero is returned for
   unaligned memory accesses on strict alignment machines.  If
   (FLAGS & MTP_AFTER_MOVE) is true, return nonzero even if the expression
   cannot trap at its current location but might become trapping if moved
   elsewhere.  */

static int
may_trap_p_1 (rtx x, unsigned flags)
{
  int i;
  enum rtx_code code;
  const char *fmt;
  bool unaligned_mems = (flags & MTP_UNALIGNED_MEMS) != 0;

  if (x == 0)
    return 0;
  code = GET_CODE (x);
  switch (code)
    {
      /* Handle these cases quickly.  */
    case CONST_INT:
    case CONST_DOUBLE:
    case CONST_VECTOR:
    case SYMBOL_REF:
    case LABEL_REF:
    case CONST:
    case PC:
    case CC0:
    case REG:
    case SCRATCH:
      return 0;

    case ASM_INPUT:
    case UNSPEC_VOLATILE:
    case TRAP_IF:
      return 1;

    case ASM_OPERANDS:
      return MEM_VOLATILE_P (x);

      /* Memory ref can trap unless it's a static var or a stack slot.  */
    case MEM:
      if (/* MEM_NOTRAP_P only relates to the actual position of the memory
             reference; moving it out of a condition might cause its address
             to become invalid.  */
          !(flags & MTP_AFTER_MOVE)
          && MEM_NOTRAP_P (x)
          && (!STRICT_ALIGNMENT || !unaligned_mems))
        return 0;
      return
        rtx_addr_can_trap_p_1 (XEXP (x, 0), GET_MODE (x), unaligned_mems);

      /* Division by a non-constant might trap.  */
    case DIV:
    case MOD:
    case UDIV:
    case UMOD:
      if (HONOR_SNANS (GET_MODE (x)))
        return 1;
      if (SCALAR_FLOAT_MODE_P (GET_MODE (x)))
        return flag_trapping_math;
      if (!CONSTANT_P (XEXP (x, 1)) || (XEXP (x, 1) == const0_rtx))
        return 1;
      break;

    case EXPR_LIST:
      /* An EXPR_LIST is used to represent a function call.  This
         certainly may trap.  */
      return 1;

    case GE:
    case GT:
    case LE:
    case LT:
    case LTGT:
    case COMPARE:
      /* Some floating point comparisons may trap.  */
      if (!flag_trapping_math)
        break;
      /* ??? There is no machine independent way to check for tests that trap
         when COMPARE is used, though many targets do make this distinction.
         For instance, sparc uses CCFPE for compares which generate exceptions
         and CCFP for compares which do not generate exceptions.  */
      if (HONOR_NANS (GET_MODE (x)))
        return 1;
      /* But often the compare has some CC mode, so check operand
         modes as well.  */
      if (HONOR_NANS (GET_MODE (XEXP (x, 0)))
          || HONOR_NANS (GET_MODE (XEXP (x, 1))))
        return 1;
      break;

    case EQ:
    case NE:
      if (HONOR_SNANS (GET_MODE (x)))
        return 1;
      /* Often comparison is CC mode, so check operand modes.  */
      if (HONOR_SNANS (GET_MODE (XEXP (x, 0)))
          || HONOR_SNANS (GET_MODE (XEXP (x, 1))))
        return 1;
      break;

    case FIX:
      /* Conversion of floating point might trap.  */
      if (flag_trapping_math && HONOR_NANS (GET_MODE (XEXP (x, 0))))
        return 1;
      break;

    case NEG:
    case ABS:
    case SUBREG:
      /* These operations don't trap even with floating point.  */
      break;

    default:
      /* Any floating arithmetic may trap.  */
      if (SCALAR_FLOAT_MODE_P (GET_MODE (x))
          && flag_trapping_math)
        return 1;
    }

  fmt = GET_RTX_FORMAT (code);
  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    {
      if (fmt[i] == 'e')
        {
          if (may_trap_p_1 (XEXP (x, i), flags))
            return 1;
        }
      else if (fmt[i] == 'E')
        {
          int j;
          for (j = 0; j < XVECLEN (x, i); j++)
            if (may_trap_p_1 (XVECEXP (x, i, j), flags))
              return 1;
        }
    }
  return 0;
}

/* Return nonzero if evaluating rtx X might cause a trap.  */

int
may_trap_p (rtx x)
{
  return may_trap_p_1 (x, 0);
}

/* Return nonzero if evaluating rtx X might cause a trap, when the expression
   is moved from its current location by some optimization.  */

int
may_trap_after_code_motion_p (rtx x)
{
  return may_trap_p_1 (x, MTP_AFTER_MOVE);
}

/* Same as above, but additionally return nonzero if evaluating rtx X might
   cause a fault.  We define a fault for the purpose of this function as an
   erroneous execution condition that cannot be encountered during the normal
   execution of a valid program; the typical example is an unaligned memory
   access on a strict alignment machine.  The compiler guarantees that it
   doesn't generate code that will fault from a valid program, but this
   guarantee doesn't mean anything for individual instructions.  Consider
   the following example:

      struct S { int d; union { char *cp; int *ip; }; };

      int foo(struct S *s)
      {
        if (s->d == 1)
          return *s->ip;
        else
          return *s->cp;
      }

   on a strict alignment machine.  In a valid program, foo will never be
   invoked on a structure for which d is equal to 1 and the underlying
   unique field of the union not aligned on a 4-byte boundary, but the
   expression *s->ip might cause a fault if considered individually.

   At the RTL level, potentially problematic expressions will almost always
   satisfy may_trap_p; for example, the above dereference can be emitted as
   (mem:SI (reg:P)) and this expression is may_trap_p for a generic register.
   However, suppose that foo is inlined in a caller that causes s->cp to
   point to a local character variable and guarantees that s->d is not set
   to 1; foo may have been effectively translated into pseudo-RTL as:

      if ((reg:SI) == 1)
        (set (reg:SI) (mem:SI (%fp - 7)))
      else
        (set (reg:QI) (mem:QI (%fp - 7)))

   Now (mem:SI (%fp - 7)) is considered as not may_trap_p since it is a
   memory reference to a stack slot, but it will certainly cause a fault
   on a strict alignment machine.  */

int
may_trap_or_fault_p (rtx x)
{
  return may_trap_p_1 (x, MTP_UNALIGNED_MEMS);
}
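
/* Usage sketch (illustrative).  A pass that evaluates an expression
   speculatively at a new location, e.g. when hoisting it out of a
   conditional, wants the strongest predicate:

     if (!may_trap_or_fault_p (x))
       ... safe to evaluate X unconditionally, even on strict-alignment
           targets ...

   while code that keeps X at its original position can use the weaker
   may_trap_p, and code that merely moves X can use
   may_trap_after_code_motion_p.  */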
\f
/* Return nonzero if X contains a comparison that is not either EQ or NE,
   i.e., an inequality.  */

int
inequality_comparisons_p (rtx x)
{
  const char *fmt;
  int len, i;
  enum rtx_code code = GET_CODE (x);

  switch (code)
    {
    case REG:
    case SCRATCH:
    case PC:
    case CC0:
    case CONST_INT:
    case CONST_DOUBLE:
    case CONST_VECTOR:
    case CONST:
    case LABEL_REF:
    case SYMBOL_REF:
      return 0;

    case LT:
    case LTU:
    case GT:
    case GTU:
    case LE:
    case LEU:
    case GE:
    case GEU:
      return 1;

    default:
      break;
    }

  len = GET_RTX_LENGTH (code);
  fmt = GET_RTX_FORMAT (code);

  for (i = 0; i < len; i++)
    {
      if (fmt[i] == 'e')
        {
          if (inequality_comparisons_p (XEXP (x, i)))
            return 1;
        }
      else if (fmt[i] == 'E')
        {
          int j;
          for (j = XVECLEN (x, i) - 1; j >= 0; j--)
            if (inequality_comparisons_p (XVECEXP (x, i, j)))
              return 1;
        }
    }

  return 0;
}
\f
/* Replace any occurrence of FROM in X with TO.  The function does not
   enter CONST_DOUBLEs to do the replacement.

   Note that copying is not done, so X must not be shared unless all
   copies are to be modified.  */

rtx
replace_rtx (rtx x, rtx from, rtx to)
{
  int i, j;
  const char *fmt;

  /* The following prevents looping when we would change a MEM inside a
     CONST_DOUBLE into the same CONST_DOUBLE.  */
  if (x != 0 && GET_CODE (x) == CONST_DOUBLE)
    return x;

  if (x == from)
    return to;

  /* Allow this function to make replacements in EXPR_LISTs.  */
  if (x == 0)
    return 0;

  if (GET_CODE (x) == SUBREG)
    {
      rtx new = replace_rtx (SUBREG_REG (x), from, to);

      if (GET_CODE (new) == CONST_INT)
        {
          x = simplify_subreg (GET_MODE (x), new,
                               GET_MODE (SUBREG_REG (x)),
                               SUBREG_BYTE (x));
          gcc_assert (x);
        }
      else
        SUBREG_REG (x) = new;

      return x;
    }
  else if (GET_CODE (x) == ZERO_EXTEND)
    {
      rtx new = replace_rtx (XEXP (x, 0), from, to);

      if (GET_CODE (new) == CONST_INT)
        {
          x = simplify_unary_operation (ZERO_EXTEND, GET_MODE (x),
                                        new, GET_MODE (XEXP (x, 0)));
          gcc_assert (x);
        }
      else
        XEXP (x, 0) = new;

      return x;
    }

  fmt = GET_RTX_FORMAT (GET_CODE (x));
  for (i = GET_RTX_LENGTH (GET_CODE (x)) - 1; i >= 0; i--)
    {
      if (fmt[i] == 'e')
        XEXP (x, i) = replace_rtx (XEXP (x, i), from, to);
      else if (fmt[i] == 'E')
        for (j = XVECLEN (x, i) - 1; j >= 0; j--)
          XVECEXP (x, i, j) = replace_rtx (XVECEXP (x, i, j), from, to);
    }

  return x;
}
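
/* Usage sketch (illustrative; the operands are placeholders).
   Substituting a constant for a pseudo in an unshared copy of an
   expression:

     rtx copy = copy_rtx (x);
     copy = replace_rtx (copy, pseudo, GEN_INT (42));

   The copy_rtx call matters: replace_rtx rewrites X in place wherever
   it cannot fold the replacement, so shared rtl must be copied first.  */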
\f
/* Replace occurrences of the old label in *X with the new one.
   DATA is a REPLACE_LABEL_DATA containing the old and new labels.  */

int
replace_label (rtx *x, void *data)
{
  rtx l = *x;
  rtx old_label = ((replace_label_data *) data)->r1;
  rtx new_label = ((replace_label_data *) data)->r2;
  bool update_label_nuses = ((replace_label_data *) data)->update_label_nuses;

  if (l == NULL_RTX)
    return 0;

  if (GET_CODE (l) == SYMBOL_REF
      && CONSTANT_POOL_ADDRESS_P (l))
    {
      rtx c = get_pool_constant (l);
      if (rtx_referenced_p (old_label, c))
        {
          rtx new_c, new_l;
          replace_label_data *d = (replace_label_data *) data;

          /* Create a copy of constant C; replace the label inside
             but do not update LABEL_NUSES because uses in constant pool
             are not counted.  */
          new_c = copy_rtx (c);
          d->update_label_nuses = false;
          for_each_rtx (&new_c, replace_label, data);
          d->update_label_nuses = update_label_nuses;

          /* Add the new constant NEW_C to constant pool and replace
             the old reference to constant by new reference.  */
          new_l = XEXP (force_const_mem (get_pool_mode (l), new_c), 0);
          *x = replace_rtx (l, l, new_l);
        }
      return 0;
    }

  /* If this is a JUMP_INSN, then we also need to fix the JUMP_LABEL
     field.  This is not handled by for_each_rtx because it doesn't
     handle unprinted ('0') fields.  */
  if (JUMP_P (l) && JUMP_LABEL (l) == old_label)
    JUMP_LABEL (l) = new_label;

  if ((GET_CODE (l) == LABEL_REF
       || GET_CODE (l) == INSN_LIST)
      && XEXP (l, 0) == old_label)
    {
      XEXP (l, 0) = new_label;
      if (update_label_nuses)
        {
          ++LABEL_NUSES (new_label);
          --LABEL_NUSES (old_label);
        }
      return 0;
    }

  return 0;
}

/* When *BODY is equal to X or X is directly referenced by *BODY,
   return nonzero; FOR_EACH_RTX then stops traversing and returns
   nonzero too.  Otherwise FOR_EACH_RTX continues traversing *BODY.  */

static int
rtx_referenced_p_1 (rtx *body, void *x)
{
  rtx y = (rtx) x;

  if (*body == NULL_RTX)
    return y == NULL_RTX;

  /* Return true if a label_ref *BODY refers to label Y.  */
  if (GET_CODE (*body) == LABEL_REF && LABEL_P (y))
    return XEXP (*body, 0) == y;

  /* If *BODY is a reference to a pool constant, traverse the constant.  */
  if (GET_CODE (*body) == SYMBOL_REF
      && CONSTANT_POOL_ADDRESS_P (*body))
    return rtx_referenced_p (y, get_pool_constant (*body));

  /* By default, compare the RTL expressions.  */
  return rtx_equal_p (*body, y);
}

/* Return true if X is referenced in BODY.  */

int
rtx_referenced_p (rtx x, rtx body)
{
  return for_each_rtx (&body, rtx_referenced_p_1, x);
}

/* If INSN is a tablejump, return true and store the label (which precedes
   the jump table) to *LABELP and the jump table to *TABLEP.  LABELP and
   TABLEP may be NULL.  */

bool
tablejump_p (rtx insn, rtx *labelp, rtx *tablep)
{
  rtx label, table;

  if (JUMP_P (insn)
      && (label = JUMP_LABEL (insn)) != NULL_RTX
      && (table = next_active_insn (label)) != NULL_RTX
      && JUMP_P (table)
      && (GET_CODE (PATTERN (table)) == ADDR_VEC
          || GET_CODE (PATTERN (table)) == ADDR_DIFF_VEC))
    {
      if (labelp)
        *labelp = label;
      if (tablep)
        *tablep = table;
      return true;
    }
  return false;
}
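
/* Usage sketch (illustrative).  Walking the entries of a jump table:

     rtx table;
     if (tablejump_p (insn, NULL, &table))
       {
         rtx pat = PATTERN (table);
         rtvec vec = XVEC (pat, GET_CODE (pat) == ADDR_DIFF_VEC);
         int i;

         for (i = 0; i < GET_NUM_ELEM (vec); i++)
           ... XEXP (RTVEC_ELT (vec, i), 0) is a target label ...
       }

   The label vector is operand 0 of an ADDR_VEC but operand 1 of an
   ADDR_DIFF_VEC, hence the GET_CODE test used as an index; the same
   idiom appears in label_is_jump_target_p below.  */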

/* A subroutine of computed_jump_p, return 1 if X contains a REG or MEM or
   constant that is not in the constant pool and not in the condition
   of an IF_THEN_ELSE.  */

static int
computed_jump_p_1 (rtx x)
{
  enum rtx_code code = GET_CODE (x);
  int i, j;
  const char *fmt;

  switch (code)
    {
    case LABEL_REF:
    case PC:
      return 0;

    case CONST:
    case CONST_INT:
    case CONST_DOUBLE:
    case CONST_VECTOR:
    case SYMBOL_REF:
    case REG:
      return 1;

    case MEM:
      return ! (GET_CODE (XEXP (x, 0)) == SYMBOL_REF
                && CONSTANT_POOL_ADDRESS_P (XEXP (x, 0)));

    case IF_THEN_ELSE:
      return (computed_jump_p_1 (XEXP (x, 1))
              || computed_jump_p_1 (XEXP (x, 2)));

    default:
      break;
    }

  fmt = GET_RTX_FORMAT (code);
  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    {
      if (fmt[i] == 'e'
          && computed_jump_p_1 (XEXP (x, i)))
        return 1;

      else if (fmt[i] == 'E')
        for (j = 0; j < XVECLEN (x, i); j++)
          if (computed_jump_p_1 (XVECEXP (x, i, j)))
            return 1;
    }

  return 0;
}

/* Return nonzero if INSN is an indirect jump (aka computed jump).

   Tablejumps and casesi insns are not considered indirect jumps;
   we can recognize them by a (use (label_ref)).  */

int
computed_jump_p (rtx insn)
{
  int i;
  if (JUMP_P (insn))
    {
      rtx pat = PATTERN (insn);

      if (find_reg_note (insn, REG_LABEL, NULL_RTX))
        return 0;
      else if (GET_CODE (pat) == PARALLEL)
        {
          int len = XVECLEN (pat, 0);
          int has_use_labelref = 0;

          for (i = len - 1; i >= 0; i--)
            if (GET_CODE (XVECEXP (pat, 0, i)) == USE
                && (GET_CODE (XEXP (XVECEXP (pat, 0, i), 0))
                    == LABEL_REF))
              has_use_labelref = 1;

          if (! has_use_labelref)
            for (i = len - 1; i >= 0; i--)
              if (GET_CODE (XVECEXP (pat, 0, i)) == SET
                  && SET_DEST (XVECEXP (pat, 0, i)) == pc_rtx
                  && computed_jump_p_1 (SET_SRC (XVECEXP (pat, 0, i))))
                return 1;
        }
      else if (GET_CODE (pat) == SET
               && SET_DEST (pat) == pc_rtx
               && computed_jump_p_1 (SET_SRC (pat)))
        return 1;
    }
  return 0;
}

/* Optimized inner loop of for_each_rtx that avoids useless recursive
   calls.  Processes the sub-expressions of EXP, starting at operand N,
   and passes them to F.  */
static int
for_each_rtx_1 (rtx exp, int n, rtx_function f, void *data)
{
  int result, i, j;
  const char *format = GET_RTX_FORMAT (GET_CODE (exp));
  rtx *x;

  for (; format[n] != '\0'; n++)
    {
      switch (format[n])
        {
        case 'e':
          /* Call F on X.  */
          x = &XEXP (exp, n);
          result = (*f) (x, data);
          if (result == -1)
            /* Do not traverse sub-expressions.  */
            continue;
          else if (result != 0)
            /* Stop the traversal.  */
            return result;

          if (*x == NULL_RTX)
            /* There are no sub-expressions.  */
            continue;

          i = non_rtx_starting_operands[GET_CODE (*x)];
          if (i >= 0)
            {
              result = for_each_rtx_1 (*x, i, f, data);
              if (result != 0)
                return result;
            }
          break;

        case 'V':
        case 'E':
          if (XVEC (exp, n) == 0)
            continue;
          for (j = 0; j < XVECLEN (exp, n); ++j)
            {
              /* Call F on X.  */
              x = &XVECEXP (exp, n, j);
              result = (*f) (x, data);
              if (result == -1)
                /* Do not traverse sub-expressions.  */
                continue;
              else if (result != 0)
                /* Stop the traversal.  */
                return result;

              if (*x == NULL_RTX)
                /* There are no sub-expressions.  */
                continue;

              i = non_rtx_starting_operands[GET_CODE (*x)];
              if (i >= 0)
                {
                  result = for_each_rtx_1 (*x, i, f, data);
                  if (result != 0)
                    return result;
                }
            }
          break;

        default:
          /* Nothing to do.  */
          break;
        }
    }

  return 0;
}

/* Traverse X via depth-first search, calling F for each
   sub-expression (including X itself).  F is also passed the DATA.
   If F returns -1, do not traverse sub-expressions, but continue
   traversing the rest of the tree.  If F ever returns any other
   nonzero value, stop the traversal, and return the value returned
   by F.  Otherwise, return 0.  This function does not traverse inside
   tree structures that contain RTX_EXPRs, or into sub-expressions
   whose format code is `0' since it is not known whether or not those
   codes are actually RTL.

   This routine is very general, and could (should?) be used to
   implement many of the other routines in this file.  */

int
for_each_rtx (rtx *x, rtx_function f, void *data)
{
  int result;
  int i;

  /* Call F on X.  */
  result = (*f) (x, data);
  if (result == -1)
    /* Do not traverse sub-expressions.  */
    return 0;
  else if (result != 0)
    /* Stop the traversal.  */
    return result;

  if (*x == NULL_RTX)
    /* There are no sub-expressions.  */
    return 0;

  i = non_rtx_starting_operands[GET_CODE (*x)];
  if (i < 0)
    return 0;

  return for_each_rtx_1 (*x, i, f, data);
}
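
/* Usage sketch (illustrative; the example_ names are hypothetical).
   Counting the MEMs contained in an expression with for_each_rtx:

     static int
     example_count_mems_1 (rtx *x, void *data)
     {
       if (*x != NULL_RTX && MEM_P (*x))
         ++*(int *) data;
       return 0;        /* Keep walking.  */
     }

     static int
     example_count_mems (rtx x)
     {
       int count = 0;
       for_each_rtx (&x, example_count_mems_1, &count);
       return count;
     }

   Returning -1 from the callback would skip the sub-expressions of the
   current rtx; any other nonzero value aborts the walk and becomes the
   return value of for_each_rtx.  */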

/* Searches X for any reference to REGNO, returning the rtx of the
   reference found if any.  Otherwise, returns NULL_RTX.  */

rtx
regno_use_in (unsigned int regno, rtx x)
{
  const char *fmt;
  int i, j;
  rtx tem;

  if (REG_P (x) && REGNO (x) == regno)
    return x;

  fmt = GET_RTX_FORMAT (GET_CODE (x));
  for (i = GET_RTX_LENGTH (GET_CODE (x)) - 1; i >= 0; i--)
    {
      if (fmt[i] == 'e')
        {
          if ((tem = regno_use_in (regno, XEXP (x, i))))
            return tem;
        }
      else if (fmt[i] == 'E')
        for (j = XVECLEN (x, i) - 1; j >= 0; j--)
          if ((tem = regno_use_in (regno, XVECEXP (x, i, j))))
            return tem;
    }

  return NULL_RTX;
}

/* Return a value indicating whether OP, an operand of a commutative
   operation, is preferred as the first or second operand.  The higher
   the value, the stronger the preference for being the first operand.
   We use negative values to indicate a preference for the first operand
   and positive values for the second operand.  */

int
commutative_operand_precedence (rtx op)
{
  enum rtx_code code = GET_CODE (op);

  /* Constants always become the second operand.  Prefer "nice"
     constants.  */
  if (code == CONST_INT)
    return -8;
  if (code == CONST_DOUBLE)
    return -7;
  op = avoid_constant_pool_reference (op);
  code = GET_CODE (op);

  switch (GET_RTX_CLASS (code))
    {
    case RTX_CONST_OBJ:
      if (code == CONST_INT)
        return -6;
      if (code == CONST_DOUBLE)
        return -5;
      return -4;

    case RTX_EXTRA:
      /* SUBREGs of objects should come second.  */
      if (code == SUBREG && OBJECT_P (SUBREG_REG (op)))
        return -3;
      return 0;

    case RTX_OBJ:
      /* Complex expressions should be the first, so decrease priority
         of objects.  Prefer pointer objects over non pointer objects.  */
      if ((REG_P (op) && REG_POINTER (op))
          || (MEM_P (op) && MEM_POINTER (op)))
        return -1;
      return -2;

    case RTX_COMM_ARITH:
      /* Prefer operands that are themselves commutative to be first.
         This helps to make things linear.  In particular,
         (and (and (reg) (reg)) (not (reg))) is canonical.  */
      return 4;

    case RTX_BIN_ARITH:
      /* If only one operand is a binary expression, it will be the first
         operand.  In particular, (plus (minus (reg) (reg)) (neg (reg)))
         is canonical, although it will usually be further simplified.  */
      return 2;

    case RTX_UNARY:
      /* Then prefer NEG and NOT.  */
      if (code == NEG || code == NOT)
        return 1;

    default:
      return 0;
    }
}

/* Return 1 iff it is necessary to swap the operands of a commutative
   operation in order to canonicalize the expression.  */

bool
swap_commutative_operands_p (rtx x, rtx y)
{
  return (commutative_operand_precedence (x)
          < commutative_operand_precedence (y));
}
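
/* Usage sketch (illustrative).  Callers canonicalize a commutative
   operation before constructing it:

     if (swap_commutative_operands_p (op0, op1))
       {
         rtx tem = op0;
         op0 = op1;
         op1 = tem;
       }

   After the swap the higher-precedence operand comes first, so for
   example a CONST_INT always ends up as the second operand.  */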

/* Return 1 if X is an autoincrement side effect and the register is
   not the stack pointer.  */
int
auto_inc_p (rtx x)
{
  switch (GET_CODE (x))
    {
    case PRE_INC:
    case POST_INC:
    case PRE_DEC:
    case POST_DEC:
    case PRE_MODIFY:
    case POST_MODIFY:
      /* There are no REG_INC notes for SP.  */
      if (XEXP (x, 0) != stack_pointer_rtx)
        return 1;
    default:
      break;
    }
  return 0;
}

/* Return nonzero if IN contains a piece of rtl that has the address LOC.  */
int
loc_mentioned_in_p (rtx *loc, rtx in)
{
  enum rtx_code code;
  const char *fmt;
  int i, j;

  if (!in)
    return 0;

  code = GET_CODE (in);
  fmt = GET_RTX_FORMAT (code);
  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    {
      if (loc == &in->u.fld[i].rt_rtx)
        return 1;
      if (fmt[i] == 'e')
        {
          if (loc_mentioned_in_p (loc, XEXP (in, i)))
            return 1;
        }
      else if (fmt[i] == 'E')
        for (j = XVECLEN (in, i) - 1; j >= 0; j--)
          if (loc_mentioned_in_p (loc, XVECEXP (in, i, j)))
            return 1;
    }
  return 0;
}

/* Helper function for subreg_lsb.  Given a subreg's OUTER_MODE, INNER_MODE,
   and SUBREG_BYTE, return the bit offset where the subreg begins
   (counting from the least significant bit of the operand).  */

unsigned int
subreg_lsb_1 (enum machine_mode outer_mode,
              enum machine_mode inner_mode,
              unsigned int subreg_byte)
{
  unsigned int bitpos;
  unsigned int byte;
  unsigned int word;

  /* A paradoxical subreg begins at bit position 0.  */
  if (GET_MODE_BITSIZE (outer_mode) > GET_MODE_BITSIZE (inner_mode))
    return 0;

  if (WORDS_BIG_ENDIAN != BYTES_BIG_ENDIAN)
    /* If the subreg crosses a word boundary ensure that
       it also begins and ends on a word boundary.  */
    gcc_assert (!((subreg_byte % UNITS_PER_WORD
                   + GET_MODE_SIZE (outer_mode)) > UNITS_PER_WORD
                  && (subreg_byte % UNITS_PER_WORD
                      || GET_MODE_SIZE (outer_mode) % UNITS_PER_WORD)));

  if (WORDS_BIG_ENDIAN)
    word = (GET_MODE_SIZE (inner_mode)
            - (subreg_byte + GET_MODE_SIZE (outer_mode))) / UNITS_PER_WORD;
  else
    word = subreg_byte / UNITS_PER_WORD;
  bitpos = word * BITS_PER_WORD;

  if (BYTES_BIG_ENDIAN)
    byte = (GET_MODE_SIZE (inner_mode)
            - (subreg_byte + GET_MODE_SIZE (outer_mode))) % UNITS_PER_WORD;
  else
    byte = subreg_byte % UNITS_PER_WORD;
  bitpos += byte * BITS_PER_UNIT;

  return bitpos;
}

/* Given a subreg X, return the bit offset where the subreg begins
   (counting from the least significant bit of the reg).  */

unsigned int
subreg_lsb (rtx x)
{
  return subreg_lsb_1 (GET_MODE (x), GET_MODE (SUBREG_REG (x)),
                       SUBREG_BYTE (x));
}
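
/* Worked example (illustrative).  On a little-endian target with
   4-byte words, (subreg:SI (reg:DI ...) 4) gives

     word = 4 / UNITS_PER_WORD = 1, so bitpos = 1 * BITS_PER_WORD = 32
     byte = 4 % UNITS_PER_WORD = 0

   and therefore subreg_lsb == 32: the subreg reads the high half of
   the 64-bit value.  */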

/* Fill in information about a subreg of a hard register.
   xregno - A regno of an inner hard subreg_reg (or what will become one).
   xmode  - The mode of xregno.
   offset - The byte offset.
   ymode  - The mode of a top level SUBREG (or what may become one).
   info   - Pointer to structure to fill in.  */
static void
subreg_get_info (unsigned int xregno, enum machine_mode xmode,
                 unsigned int offset, enum machine_mode ymode,
                 struct subreg_info *info)
{
  int nregs_xmode, nregs_ymode;
  int mode_multiple, nregs_multiple;
  int offset_adj, y_offset, y_offset_adj;
  int regsize_xmode, regsize_ymode;
  bool rknown;

  gcc_assert (xregno < FIRST_PSEUDO_REGISTER);

  rknown = false;

  /* If there are holes in a non-scalar mode in registers, we expect
     that it is made up of its units concatenated together.  */
  if (HARD_REGNO_NREGS_HAS_PADDING (xregno, xmode))
    {
      enum machine_mode xmode_unit;

      nregs_xmode = HARD_REGNO_NREGS_WITH_PADDING (xregno, xmode);
      if (GET_MODE_INNER (xmode) == VOIDmode)
        xmode_unit = xmode;
      else
        xmode_unit = GET_MODE_INNER (xmode);
      gcc_assert (HARD_REGNO_NREGS_HAS_PADDING (xregno, xmode_unit));
      gcc_assert (nregs_xmode
                  == (GET_MODE_NUNITS (xmode)
                      * HARD_REGNO_NREGS_WITH_PADDING (xregno, xmode_unit)));
      gcc_assert (hard_regno_nregs[xregno][xmode]
                  == (hard_regno_nregs[xregno][xmode_unit]
                      * GET_MODE_NUNITS (xmode)));

      /* You can only ask for a SUBREG of a value with holes in the middle
         if you don't cross the holes.  (Such a SUBREG should be done by
         picking a different register class, or doing it in memory if
         necessary.)  An example of a value with holes is XCmode on 32-bit
         x86 with -m128bit-long-double; it's represented in 6 32-bit registers,
         3 for each part, but in memory it's two 128-bit parts.
         Padding is assumed to be at the end (not necessarily the 'high part')
         of each unit.  */
      if ((offset / GET_MODE_SIZE (xmode_unit) + 1
           < GET_MODE_NUNITS (xmode))
          && (offset / GET_MODE_SIZE (xmode_unit)
              != ((offset + GET_MODE_SIZE (ymode) - 1)
                  / GET_MODE_SIZE (xmode_unit))))
        {
          info->representable_p = false;
          rknown = true;
        }
    }
  else
    nregs_xmode = hard_regno_nregs[xregno][xmode];

  nregs_ymode = hard_regno_nregs[xregno][ymode];

  /* Paradoxical subregs are otherwise valid.  */
  if (!rknown
      && offset == 0
      && GET_MODE_SIZE (ymode) > GET_MODE_SIZE (xmode))
    {
      info->representable_p = true;
      /* If this is a big endian paradoxical subreg, which uses more
         actual hard registers than the original register, we must
         return a negative offset so that we find the proper highpart
         of the register.  */
      if (GET_MODE_SIZE (ymode) > UNITS_PER_WORD
          ? WORDS_BIG_ENDIAN : BYTES_BIG_ENDIAN)
        info->offset = nregs_xmode - nregs_ymode;
      else
        info->offset = 0;
      info->nregs = nregs_ymode;
      return;
    }

  /* If registers store different numbers of bits in the different
     modes, we cannot generally form this subreg.  */
  if (!HARD_REGNO_NREGS_HAS_PADDING (xregno, xmode)
      && !HARD_REGNO_NREGS_HAS_PADDING (xregno, ymode)
      && (GET_MODE_SIZE (xmode) % nregs_xmode) == 0
      && (GET_MODE_SIZE (ymode) % nregs_ymode) == 0)
    {
      regsize_xmode = GET_MODE_SIZE (xmode) / nregs_xmode;
      regsize_ymode = GET_MODE_SIZE (ymode) / nregs_ymode;
      if (!rknown && regsize_xmode > regsize_ymode && nregs_ymode > 1)
        {
          info->representable_p = false;
          info->nregs
            = (GET_MODE_SIZE (ymode) + regsize_xmode - 1) / regsize_xmode;
          info->offset = offset / regsize_xmode;
          return;
        }
      if (!rknown && regsize_ymode > regsize_xmode && nregs_xmode > 1)
        {
          info->representable_p = false;
          info->nregs
            = (GET_MODE_SIZE (ymode) + regsize_xmode - 1) / regsize_xmode;
          info->offset = offset / regsize_xmode;
          return;
        }
    }

  /* Lowpart subregs are otherwise valid.  */
  if (!rknown && offset == subreg_lowpart_offset (ymode, xmode))
    {
      info->representable_p = true;
      rknown = true;

      if (offset == 0 || nregs_xmode == nregs_ymode)
        {
          info->offset = 0;
          info->nregs = nregs_ymode;
          return;
        }
    }

  /* This should always pass, otherwise we don't know how to verify
     the constraint.  These conditions may be relaxed but
     subreg_regno_offset would need to be redesigned.  */
  gcc_assert ((GET_MODE_SIZE (xmode) % GET_MODE_SIZE (ymode)) == 0);
  gcc_assert ((nregs_xmode % nregs_ymode) == 0);

  /* The XMODE value can be seen as a vector of NREGS_XMODE
     values.  The subreg must represent a lowpart of a given field.
     Compute what field it is.  */
  offset_adj = offset;
  offset_adj -= subreg_lowpart_offset (ymode,
                                       mode_for_size (GET_MODE_BITSIZE (xmode)
                                                      / nregs_xmode,
                                                      MODE_INT, 0));

  /* Size of ymode must not be greater than the size of xmode.  */
  mode_multiple = GET_MODE_SIZE (xmode) / GET_MODE_SIZE (ymode);
  gcc_assert (mode_multiple != 0);

  y_offset = offset / GET_MODE_SIZE (ymode);
  y_offset_adj = offset_adj / GET_MODE_SIZE (ymode);
  nregs_multiple = nregs_xmode / nregs_ymode;

  gcc_assert ((offset_adj % GET_MODE_SIZE (ymode)) == 0);
  gcc_assert ((mode_multiple % nregs_multiple) == 0);

  if (!rknown)
    {
      info->representable_p
        = (!(y_offset_adj % (mode_multiple / nregs_multiple)));
      rknown = true;
    }
  info->offset = (y_offset / (mode_multiple / nregs_multiple)) * nregs_ymode;
  info->nregs = nregs_ymode;
}

/* This function returns the regno offset of a subreg expression.
   xregno - A regno of an inner hard subreg_reg (or what will become one).
   xmode  - The mode of xregno.
   offset - The byte offset.
   ymode  - The mode of a top level SUBREG (or what may become one).
   RETURN - The regno offset which would be used.  */
unsigned int
subreg_regno_offset (unsigned int xregno, enum machine_mode xmode,
                     unsigned int offset, enum machine_mode ymode)
{
  struct subreg_info info;
  subreg_get_info (xregno, xmode, offset, ymode, &info);
  return info.offset;
}

/* This function returns true when the offset is representable via
   subreg_offset in the given regno.
   xregno - A regno of an inner hard subreg_reg (or what will become one).
   xmode  - The mode of xregno.
   offset - The byte offset.
   ymode  - The mode of a top level SUBREG (or what may become one).
   RETURN - Whether the offset is representable.  */
bool
subreg_offset_representable_p (unsigned int xregno, enum machine_mode xmode,
                               unsigned int offset, enum machine_mode ymode)
{
  struct subreg_info info;
  subreg_get_info (xregno, xmode, offset, ymode, &info);
  return info.representable_p;
}
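
/* Usage sketch (illustrative).  Before building (subreg:YMODE
   (reg:XMODE REGNO) BYTE) for a hard register, a caller can first ask
   whether the subreg is representable and which hard register it
   denotes:

     if (subreg_offset_representable_p (regno, xmode, byte, ymode))
       {
         unsigned int first
           = regno + subreg_regno_offset (regno, xmode, byte, ymode);
         ... the subreg starts at hard register FIRST ...
       }
   */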

/* Return the final regno that a subreg expression refers to.  */
unsigned int
subreg_regno (rtx x)
{
  unsigned int ret;
  rtx subreg = SUBREG_REG (x);
  int regno = REGNO (subreg);

  ret = regno + subreg_regno_offset (regno,
                                     GET_MODE (subreg),
                                     SUBREG_BYTE (x),
                                     GET_MODE (x));
  return ret;
}

/* Return the number of registers that a subreg expression refers
   to.  */
unsigned int
subreg_nregs (rtx x)
{
  struct subreg_info info;
  rtx subreg = SUBREG_REG (x);
  int regno = REGNO (subreg);

  subreg_get_info (regno, GET_MODE (subreg), SUBREG_BYTE (x), GET_MODE (x),
                   &info);
  return info.nregs;
}

struct parms_set_data
{
  int nregs;
  HARD_REG_SET regs;
};

/* Helper function for noticing stores to parameter registers.  */
static void
parms_set (rtx x, rtx pat ATTRIBUTE_UNUSED, void *data)
{
  struct parms_set_data *d = data;
  if (REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER
      && TEST_HARD_REG_BIT (d->regs, REGNO (x)))
    {
      CLEAR_HARD_REG_BIT (d->regs, REGNO (x));
      d->nregs--;
    }
}

/* Look backward for the first parameter to be loaded.
   Note that loads of all parameters will not necessarily be
   found if CSE has eliminated some of them (e.g., an argument
   to the outer function is passed down as a parameter).
   Do not skip BOUNDARY.  */
rtx
find_first_parameter_load (rtx call_insn, rtx boundary)
{
  struct parms_set_data parm;
  rtx p, before, first_set;

  /* Since different machines initialize their parameter registers
     in different orders, assume nothing.  Collect the set of all
     parameter registers.  */
  CLEAR_HARD_REG_SET (parm.regs);
  parm.nregs = 0;
  for (p = CALL_INSN_FUNCTION_USAGE (call_insn); p; p = XEXP (p, 1))
    if (GET_CODE (XEXP (p, 0)) == USE
        && REG_P (XEXP (XEXP (p, 0), 0)))
      {
        gcc_assert (REGNO (XEXP (XEXP (p, 0), 0)) < FIRST_PSEUDO_REGISTER);

        /* We only care about registers which can hold function
           arguments.  */
        if (!FUNCTION_ARG_REGNO_P (REGNO (XEXP (XEXP (p, 0), 0))))
          continue;

        SET_HARD_REG_BIT (parm.regs, REGNO (XEXP (XEXP (p, 0), 0)));
        parm.nregs++;
      }
  before = call_insn;
  first_set = call_insn;

  /* Search backward for the first set of a register in this set.  */
  while (parm.nregs && before != boundary)
    {
      before = PREV_INSN (before);

      /* It is possible that some loads got CSEed from one call to
         another.  Stop in that case.  */
      if (CALL_P (before))
        break;

      /* Our caller must either ensure that we will find all sets
         (in case code has not been optimized yet), or guard against
         possible labels by setting BOUNDARY to the preceding
         CODE_LABEL.  */
      if (LABEL_P (before))
        {
          gcc_assert (before == boundary);
          break;
        }

      if (INSN_P (before))
        {
          int nregs_old = parm.nregs;
          note_stores (PATTERN (before), parms_set, &parm);
          /* If we found something that did not set a parameter reg,
             we're done.  Do not keep going, as that might result
             in hoisting an insn before the setting of a pseudo
             that is used by the hoisted insn.  */
          if (nregs_old != parm.nregs)
            first_set = before;
          else
            break;
        }
    }
  return first_set;
}

/* Return true if we should avoid inserting code between INSN and the
   preceding call instruction.  */

bool
keep_with_call_p (rtx insn)
{
  rtx set;

  if (INSN_P (insn) && (set = single_set (insn)) != NULL)
    {
      if (REG_P (SET_DEST (set))
          && REGNO (SET_DEST (set)) < FIRST_PSEUDO_REGISTER
          && fixed_regs[REGNO (SET_DEST (set))]
          && general_operand (SET_SRC (set), VOIDmode))
        return true;
      if (REG_P (SET_SRC (set))
          && FUNCTION_VALUE_REGNO_P (REGNO (SET_SRC (set)))
          && REG_P (SET_DEST (set))
          && REGNO (SET_DEST (set)) >= FIRST_PSEUDO_REGISTER)
        return true;
      /* There may be a stack pop just after the call and before the store
         of the return register.  Search for the actual store when deciding
         if we can break or not.  */
      if (SET_DEST (set) == stack_pointer_rtx)
        {
          rtx i2 = next_nonnote_insn (insn);
          if (i2 && keep_with_call_p (i2))
            return true;
        }
    }
  return false;
}

/* Return true if LABEL is a target of JUMP_INSN.  This applies only
   to non-complex jumps.  That is, direct unconditional, conditional,
   and tablejumps, but not computed jumps or returns.  It also does
   not apply to the fallthru case of a conditional jump.  */

bool
label_is_jump_target_p (rtx label, rtx jump_insn)
{
  rtx tmp = JUMP_LABEL (jump_insn);

  if (label == tmp)
    return true;

  if (tablejump_p (jump_insn, NULL, &tmp))
    {
      rtvec vec = XVEC (PATTERN (tmp),
                        GET_CODE (PATTERN (tmp)) == ADDR_DIFF_VEC);
      int i, veclen = GET_NUM_ELEM (vec);

      for (i = 0; i < veclen; ++i)
        if (XEXP (RTVEC_ELT (vec, i), 0) == label)
          return true;
    }

  return false;
}

\f
/* Return an estimate of the cost of computing rtx X.
   One use is in cse, to decide which expression to keep in the hash table.
   Another is in rtl generation, to pick the cheapest way to multiply.
   Other uses like the latter are expected in the future.  */

int
rtx_cost (rtx x, enum rtx_code outer_code ATTRIBUTE_UNUSED)
{
  int i, j;
  enum rtx_code code;
  const char *fmt;
  int total;

  if (x == 0)
    return 0;

  /* Compute the default costs of certain things.
     Note that targetm.rtx_costs can override the defaults.  */

  code = GET_CODE (x);
  switch (code)
    {
    case MULT:
      total = COSTS_N_INSNS (5);
      break;
    case DIV:
    case UDIV:
    case MOD:
    case UMOD:
      total = COSTS_N_INSNS (7);
      break;
    case USE:
      /* Used in combine.c as a marker.  */
      total = 0;
      break;
    default:
      total = COSTS_N_INSNS (1);
    }

  switch (code)
    {
    case REG:
      return 0;

    case SUBREG:
      total = 0;
      /* If we can't tie these modes, make this expensive.  The larger
         the mode, the more expensive it is.  */
      if (! MODES_TIEABLE_P (GET_MODE (x), GET_MODE (SUBREG_REG (x))))
        return COSTS_N_INSNS (2
                              + GET_MODE_SIZE (GET_MODE (x)) / UNITS_PER_WORD);
      break;

    default:
      if (targetm.rtx_costs (x, code, outer_code, &total))
        return total;
      break;
    }

  /* Sum the costs of the sub-rtx's, plus cost of this operation,
     which is already in total.  */

  fmt = GET_RTX_FORMAT (code);
  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    if (fmt[i] == 'e')
      total += rtx_cost (XEXP (x, i), code);
    else if (fmt[i] == 'E')
      for (j = 0; j < XVECLEN (x, i); j++)
        total += rtx_cost (XVECEXP (x, i, j), code);

  return total;
}
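
/* Usage sketch (illustrative).  Callers compare candidate expressions
   by cost, typically passing the outer code they will appear in:

     if (rtx_cost (candidate, SET) < rtx_cost (original, SET))
       ... prefer CANDIDATE ...

   Costs are scaled by COSTS_N_INSNS, so the defaults above price a
   MULT at five instructions and a DIV at seven unless the target's
   rtx_costs hook says otherwise.  */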
\f
/* Return the cost of an address expression X.
   Expect that X is a properly formed address reference.  */

int
address_cost (rtx x, enum machine_mode mode)
{
  /* We may be asked for the cost of various unusual addresses, such as
     operands of push instructions.  It is not worthwhile to complicate
     writing of the target hook by such cases.  */

  if (!memory_address_p (mode, x))
    return 1000;

  return targetm.address_cost (x);
}

/* If the target doesn't override, compute the cost as with arithmetic.  */

int
default_address_cost (rtx x)
{
  return rtx_cost (x, MEM);
}
\f

unsigned HOST_WIDE_INT
nonzero_bits (rtx x, enum machine_mode mode)
{
  return cached_nonzero_bits (x, mode, NULL_RTX, VOIDmode, 0);
}

unsigned int
num_sign_bit_copies (rtx x, enum machine_mode mode)
{
  return cached_num_sign_bit_copies (x, mode, NULL_RTX, VOIDmode, 0);
}

/* The function cached_nonzero_bits is a wrapper around nonzero_bits1.
   It avoids exponential behavior in nonzero_bits1 when X has
   identical subexpressions on the first or the second level.  */

static unsigned HOST_WIDE_INT
cached_nonzero_bits (rtx x, enum machine_mode mode, rtx known_x,
                     enum machine_mode known_mode,
                     unsigned HOST_WIDE_INT known_ret)
{
  if (x == known_x && mode == known_mode)
    return known_ret;

  /* Try to find identical subexpressions.  If found call
     nonzero_bits1 on X with the subexpressions as KNOWN_X and the
     precomputed value for the subexpression as KNOWN_RET.  */

  if (ARITHMETIC_P (x))
    {
      rtx x0 = XEXP (x, 0);
      rtx x1 = XEXP (x, 1);

      /* Check the first level.  */
      if (x0 == x1)
        return nonzero_bits1 (x, mode, x0, mode,
                              cached_nonzero_bits (x0, mode, known_x,
                                                   known_mode, known_ret));

      /* Check the second level.  */
      if (ARITHMETIC_P (x0)
          && (x1 == XEXP (x0, 0) || x1 == XEXP (x0, 1)))
        return nonzero_bits1 (x, mode, x1, mode,
                              cached_nonzero_bits (x1, mode, known_x,
                                                   known_mode, known_ret));

      if (ARITHMETIC_P (x1)
          && (x0 == XEXP (x1, 0) || x0 == XEXP (x1, 1)))
        return nonzero_bits1 (x, mode, x0, mode,
                              cached_nonzero_bits (x0, mode, known_x,
                                                   known_mode, known_ret));
    }

  return nonzero_bits1 (x, mode, known_x, known_mode, known_ret);
}
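
/* Usage sketch (illustrative).  Two common ways callers consume
   nonzero_bits:

     if ((nonzero_bits (x, SImode) & 3) == 0)
       ... X is known to be a multiple of 4 ...

     if (nonzero_bits (x, SImode) == 1)
       ... only bit 0 of X can be set, so X is 0 or 1 ...
   */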
3567
3568/* We let num_sign_bit_copies recur into nonzero_bits as that is useful.
3569 We don't let nonzero_bits recur into num_sign_bit_copies, because that
3570 is less useful. We can't allow both, because that results in exponential
3571 run time recursion. There is a nullstone testcase that triggered
3572 this. This macro avoids accidental uses of num_sign_bit_copies. */
3573#define cached_num_sign_bit_copies sorry_i_am_preventing_exponential_behavior
3574
3575/* Given an expression, X, compute which bits in X can be nonzero.
3576 We don't care about bits outside of those defined in MODE.
3577
3578 For most X this is simply GET_MODE_MASK (GET_MODE (MODE)), but if X is
3579 an arithmetic operation, we can do better. */
3580
3581static unsigned HOST_WIDE_INT
3582nonzero_bits1 (rtx x, enum machine_mode mode, rtx known_x,
3583 enum machine_mode known_mode,
3584 unsigned HOST_WIDE_INT known_ret)
3585{
3586 unsigned HOST_WIDE_INT nonzero = GET_MODE_MASK (mode);
3587 unsigned HOST_WIDE_INT inner_nz;
3588 enum rtx_code code;
3589 unsigned int mode_width = GET_MODE_BITSIZE (mode);
3590
3591 /* For floating-point values, assume all bits are needed. */
3592 if (FLOAT_MODE_P (GET_MODE (x)) || FLOAT_MODE_P (mode))
3593 return nonzero;
3594
3595 /* If X is wider than MODE, use its mode instead. */
3596 if (GET_MODE_BITSIZE (GET_MODE (x)) > mode_width)
3597 {
3598 mode = GET_MODE (x);
3599 nonzero = GET_MODE_MASK (mode);
3600 mode_width = GET_MODE_BITSIZE (mode);
3601 }
3602
3603 if (mode_width > HOST_BITS_PER_WIDE_INT)
3604 /* Our only callers in this case look for single bit values. So
3605 just return the mode mask. Those tests will then be false. */
3606 return nonzero;
3607
3608#ifndef WORD_REGISTER_OPERATIONS
3609 /* If MODE is wider than X, but both are a single word for both the host
3610 and target machines, we can compute this from which bits of the
3611 object might be nonzero in its own mode, taking into account the fact
3612 that on many CISC machines, accessing an object in a wider mode
3613 causes the high-order bits to become undefined. So they are
3614 not known to be zero. */
3615
3616 if (GET_MODE (x) != VOIDmode && GET_MODE (x) != mode
3617 && GET_MODE_BITSIZE (GET_MODE (x)) <= BITS_PER_WORD
3618 && GET_MODE_BITSIZE (GET_MODE (x)) <= HOST_BITS_PER_WIDE_INT
3619 && GET_MODE_BITSIZE (mode) > GET_MODE_BITSIZE (GET_MODE (x)))
3620 {
3621 nonzero &= cached_nonzero_bits (x, GET_MODE (x),
3622 known_x, known_mode, known_ret);
3623 nonzero |= GET_MODE_MASK (mode) & ~GET_MODE_MASK (GET_MODE (x));
3624 return nonzero;
3625 }
3626#endif
3627
3628 code = GET_CODE (x);
3629 switch (code)
3630 {
3631 case REG:
3632#if defined(POINTERS_EXTEND_UNSIGNED) && !defined(HAVE_ptr_extend)
3633 /* If pointers extend unsigned and this is a pointer in Pmode, say that
3634 all the bits above ptr_mode are known to be zero. */
3635 if (POINTERS_EXTEND_UNSIGNED && GET_MODE (x) == Pmode
3636 && REG_POINTER (x))
3637 nonzero &= GET_MODE_MASK (ptr_mode);
3638#endif
3639
3640 /* Include declared information about alignment of pointers. */
3641 /* ??? We don't properly preserve REG_POINTER changes across
3642 pointer-to-integer casts, so we can't trust it except for
3643 things that we know must be pointers. See execute/960116-1.c. */
3644 if ((x == stack_pointer_rtx
3645 || x == frame_pointer_rtx
3646 || x == arg_pointer_rtx)
3647 && REGNO_POINTER_ALIGN (REGNO (x)))
3648 {
3649 unsigned HOST_WIDE_INT alignment
3650 = REGNO_POINTER_ALIGN (REGNO (x)) / BITS_PER_UNIT;
3651
3652#ifdef PUSH_ROUNDING
3653 /* If PUSH_ROUNDING is defined, it is possible for the
3654 stack to be momentarily aligned only to that amount,
3655 so we pick the least alignment. */
3656 if (x == stack_pointer_rtx && PUSH_ARGS)
3657 alignment = MIN ((unsigned HOST_WIDE_INT) PUSH_ROUNDING (1),
3658 alignment);
3659#endif
3660
3661 nonzero &= ~(alignment - 1);
3662 }
3663
3664 {
3665 unsigned HOST_WIDE_INT nonzero_for_hook = nonzero;
3666 rtx new = rtl_hooks.reg_nonzero_bits (x, mode, known_x,
3667 known_mode, known_ret,
3668 &nonzero_for_hook);
3669
3670 if (new)
3671 nonzero_for_hook &= cached_nonzero_bits (new, mode, known_x,
3672 known_mode, known_ret);
3673
3674 return nonzero_for_hook;
3675 }
3676
3677 case CONST_INT:
3678#ifdef SHORT_IMMEDIATES_SIGN_EXTEND
3679 /* If X is negative in MODE, sign-extend the value. */
3680 if (INTVAL (x) > 0 && mode_width < BITS_PER_WORD
3681 && 0 != (INTVAL (x) & ((HOST_WIDE_INT) 1 << (mode_width - 1))))
3682 return (INTVAL (x) | ((HOST_WIDE_INT) (-1) << mode_width));
3683#endif
3684
3685 return INTVAL (x);
3686
3687 case MEM:
3688#ifdef LOAD_EXTEND_OP
3689 /* In many, if not most, RISC machines, reading a byte from memory
3690 zeros the rest of the register. Noticing that fact saves a lot
3691 of extra zero-extends. */
3692 if (LOAD_EXTEND_OP (GET_MODE (x)) == ZERO_EXTEND)
3693 nonzero &= GET_MODE_MASK (GET_MODE (x));
3694#endif
3695 break;
3696
3697 case EQ: case NE:
3698 case UNEQ: case LTGT:
3699 case GT: case GTU: case UNGT:
3700 case LT: case LTU: case UNLT:
3701 case GE: case GEU: case UNGE:
3702 case LE: case LEU: case UNLE:
3703 case UNORDERED: case ORDERED:
2f93eea8
PB
3704 /* If this produces an integer result, we know which bits are set.
3705 Code here used to clear bits outside the mode of X, but that is
3706 now done above. */
505ac507
RH
3707 /* Mind that MODE is the mode the caller wants to look at this
3708 operation in, and not the actual operation mode. We can wind
3709 up with (subreg:DI (gt:V4HI x y)), and we don't have anything
3710 that describes the results of a vector compare. */
3711 if (GET_MODE_CLASS (GET_MODE (x)) == MODE_INT
3712 && mode_width <= HOST_BITS_PER_WIDE_INT)
3713 nonzero = STORE_FLAG_VALUE;
3714 break;
3715
3716 case NEG:
3717#if 0
3718 /* Disabled to avoid exponential mutual recursion between nonzero_bits
3719 and num_sign_bit_copies. */
3720 if (num_sign_bit_copies (XEXP (x, 0), GET_MODE (x))
3721 == GET_MODE_BITSIZE (GET_MODE (x)))
3722 nonzero = 1;
3723#endif
3724
3725 if (GET_MODE_SIZE (GET_MODE (x)) < mode_width)
3726 nonzero |= (GET_MODE_MASK (mode) & ~GET_MODE_MASK (GET_MODE (x)));
3727 break;
3728
3729 case ABS:
3730#if 0
3731 /* Disabled to avoid exponential mutual recursion between nonzero_bits
3732 and num_sign_bit_copies. */
3733 if (num_sign_bit_copies (XEXP (x, 0), GET_MODE (x))
3734 == GET_MODE_BITSIZE (GET_MODE (x)))
3735 nonzero = 1;
3736#endif
3737 break;
3738
3739 case TRUNCATE:
3740 nonzero &= (cached_nonzero_bits (XEXP (x, 0), mode,
3741 known_x, known_mode, known_ret)
3742 & GET_MODE_MASK (mode));
3743 break;
3744
3745 case ZERO_EXTEND:
3746 nonzero &= cached_nonzero_bits (XEXP (x, 0), mode,
3747 known_x, known_mode, known_ret);
3748 if (GET_MODE (XEXP (x, 0)) != VOIDmode)
3749 nonzero &= GET_MODE_MASK (GET_MODE (XEXP (x, 0)));
3750 break;
3751
3752 case SIGN_EXTEND:
3753 /* If the sign bit is known clear, this is the same as ZERO_EXTEND.
3754	 Otherwise, show that all the bits in the outer mode but not in
3755	 the inner mode may be nonzero.  */
3756 inner_nz = cached_nonzero_bits (XEXP (x, 0), mode,
3757 known_x, known_mode, known_ret);
3758 if (GET_MODE (XEXP (x, 0)) != VOIDmode)
3759 {
3760 inner_nz &= GET_MODE_MASK (GET_MODE (XEXP (x, 0)));
3761 if (inner_nz
3762 & (((HOST_WIDE_INT) 1
3763 << (GET_MODE_BITSIZE (GET_MODE (XEXP (x, 0))) - 1))))
3764 inner_nz |= (GET_MODE_MASK (mode)
3765 & ~GET_MODE_MASK (GET_MODE (XEXP (x, 0))));
3766 }
3767
3768 nonzero &= inner_nz;
3769 break;
3770
3771 case AND:
3772 nonzero &= cached_nonzero_bits (XEXP (x, 0), mode,
3773 known_x, known_mode, known_ret)
3774 & cached_nonzero_bits (XEXP (x, 1), mode,
3775 known_x, known_mode, known_ret);
3776 break;
3777
3778 case XOR: case IOR:
3779 case UMIN: case UMAX: case SMIN: case SMAX:
3780 {
3781 unsigned HOST_WIDE_INT nonzero0 =
3782 cached_nonzero_bits (XEXP (x, 0), mode,
3783 known_x, known_mode, known_ret);
3784
3785 /* Don't call nonzero_bits for the second time if it cannot change
3786 anything. */
3787 if ((nonzero & nonzero0) != nonzero)
3788 nonzero &= nonzero0
3789 | cached_nonzero_bits (XEXP (x, 1), mode,
3790 known_x, known_mode, known_ret);
3791 }
3792 break;
3793
3794 case PLUS: case MINUS:
3795 case MULT:
3796 case DIV: case UDIV:
3797 case MOD: case UMOD:
3798 /* We can apply the rules of arithmetic to compute the number of
3799 high- and low-order zero bits of these operations. We start by
3800 computing the width (position of the highest-order nonzero bit)
3801 and the number of low-order zero bits for each value. */
3802 {
3803 unsigned HOST_WIDE_INT nz0 =
3804 cached_nonzero_bits (XEXP (x, 0), mode,
3805 known_x, known_mode, known_ret);
3806 unsigned HOST_WIDE_INT nz1 =
3807 cached_nonzero_bits (XEXP (x, 1), mode,
3808 known_x, known_mode, known_ret);
3809 int sign_index = GET_MODE_BITSIZE (GET_MODE (x)) - 1;
3810 int width0 = floor_log2 (nz0) + 1;
3811 int width1 = floor_log2 (nz1) + 1;
3812 int low0 = floor_log2 (nz0 & -nz0);
3813 int low1 = floor_log2 (nz1 & -nz1);
3814 HOST_WIDE_INT op0_maybe_minusp
3815 = (nz0 & ((HOST_WIDE_INT) 1 << sign_index));
3816 HOST_WIDE_INT op1_maybe_minusp
3817 = (nz1 & ((HOST_WIDE_INT) 1 << sign_index));
3818 unsigned int result_width = mode_width;
3819 int result_low = 0;
3820
3821 switch (code)
3822 {
3823 case PLUS:
3824 result_width = MAX (width0, width1) + 1;
3825 result_low = MIN (low0, low1);
3826 break;
3827 case MINUS:
3828 result_low = MIN (low0, low1);
3829 break;
3830 case MULT:
3831 result_width = width0 + width1;
3832 result_low = low0 + low1;
3833 break;
3834 case DIV:
3835 if (width1 == 0)
3836 break;
3837 if (! op0_maybe_minusp && ! op1_maybe_minusp)
3838 result_width = width0;
3839 break;
3840 case UDIV:
3841 if (width1 == 0)
3842 break;
3843 result_width = width0;
3844 break;
3845 case MOD:
3846 if (width1 == 0)
3847 break;
3848 if (! op0_maybe_minusp && ! op1_maybe_minusp)
3849 result_width = MIN (width0, width1);
3850 result_low = MIN (low0, low1);
3851 break;
3852 case UMOD:
3853 if (width1 == 0)
3854 break;
3855 result_width = MIN (width0, width1);
3856 result_low = MIN (low0, low1);
3857 break;
3858 default:
3859	  gcc_unreachable ();
3860 }
3861
3862 if (result_width < mode_width)
3863 nonzero &= ((HOST_WIDE_INT) 1 << result_width) - 1;
3864
3865 if (result_low > 0)
3866 nonzero &= ~(((HOST_WIDE_INT) 1 << result_low) - 1);
3867
3868#ifdef POINTERS_EXTEND_UNSIGNED
3869 /* If pointers extend unsigned and this is an addition or subtraction
3870 to a pointer in Pmode, all the bits above ptr_mode are known to be
3871 zero. */
3872 if (POINTERS_EXTEND_UNSIGNED > 0 && GET_MODE (x) == Pmode
3873 && (code == PLUS || code == MINUS)
3874	  && REG_P (XEXP (x, 0)) && REG_POINTER (XEXP (x, 0)))
3875 nonzero &= GET_MODE_MASK (ptr_mode);
3876#endif
3877 }
3878 break;
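
      /* Editor's sketch, not part of rtlanal.c: a self-contained model
	 of the width/low-zero-bit bookkeeping above, assuming 32-bit
	 values and hypothetical helper names.  E.g. nz0 = 0x0ff0
	 (width 12, 4 trailing zeros) and nz1 = 0x00f0 (width 8, 4
	 trailing zeros) give a PLUS mask of 0x1ff0.  */
#if 0
static unsigned int
example_nonzero_plus (unsigned int nz0, unsigned int nz1)
{
  int width0 = nz0 ? 32 - __builtin_clz (nz0) : 0;  /* floor_log2 + 1 */
  int width1 = nz1 ? 32 - __builtin_clz (nz1) : 0;
  int low0 = nz0 ? __builtin_ctz (nz0) : 0;
  int low1 = nz1 ? __builtin_ctz (nz1) : 0;
  int result_width = (width0 > width1 ? width0 : width1) + 1;  /* carry */
  int result_low = low0 < low1 ? low0 : low1;
  unsigned int nonzero = ~0U;

  if (result_width < 32)
    nonzero &= ((unsigned int) 1 << result_width) - 1;
  if (result_low > 0)
    nonzero &= ~(((unsigned int) 1 << result_low) - 1);
  return nonzero;
}
#endif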
3879
3880 case ZERO_EXTRACT:
3881 if (GET_CODE (XEXP (x, 1)) == CONST_INT
3882 && INTVAL (XEXP (x, 1)) < HOST_BITS_PER_WIDE_INT)
3883 nonzero &= ((HOST_WIDE_INT) 1 << INTVAL (XEXP (x, 1))) - 1;
3884 break;
3885
3886 case SUBREG:
3887 /* If this is a SUBREG formed for a promoted variable that has
3888 been zero-extended, we know that at least the high-order bits
3889 are zero, though others might be too. */
3890
3891 if (SUBREG_PROMOTED_VAR_P (x) && SUBREG_PROMOTED_UNSIGNED_P (x) > 0)
3892 nonzero = GET_MODE_MASK (GET_MODE (x))
3893 & cached_nonzero_bits (SUBREG_REG (x), GET_MODE (x),
3894 known_x, known_mode, known_ret);
3895
3896 /* If the inner mode is a single word for both the host and target
3897 machines, we can compute this from which bits of the inner
3898 object might be nonzero. */
3899 if (GET_MODE_BITSIZE (GET_MODE (SUBREG_REG (x))) <= BITS_PER_WORD
3900 && (GET_MODE_BITSIZE (GET_MODE (SUBREG_REG (x)))
3901 <= HOST_BITS_PER_WIDE_INT))
3902 {
3903 nonzero &= cached_nonzero_bits (SUBREG_REG (x), mode,
3904 known_x, known_mode, known_ret);
3905
3906#if defined (WORD_REGISTER_OPERATIONS) && defined (LOAD_EXTEND_OP)
3907 /* If this is a typical RISC machine, we only have to worry
3908 about the way loads are extended. */
3909 if ((LOAD_EXTEND_OP (GET_MODE (SUBREG_REG (x))) == SIGN_EXTEND
3910 ? (((nonzero
3911 & (((unsigned HOST_WIDE_INT) 1
3912 << (GET_MODE_BITSIZE (GET_MODE (SUBREG_REG (x))) - 1))))
3913 != 0))
3914 : LOAD_EXTEND_OP (GET_MODE (SUBREG_REG (x))) != ZERO_EXTEND)
3915	      || !MEM_P (SUBREG_REG (x)))
3916#endif
3917 {
3918 /* On many CISC machines, accessing an object in a wider mode
3919 causes the high-order bits to become undefined. So they are
3920 not known to be zero. */
3921 if (GET_MODE_SIZE (GET_MODE (x))
3922 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))))
3923 nonzero |= (GET_MODE_MASK (GET_MODE (x))
3924 & ~GET_MODE_MASK (GET_MODE (SUBREG_REG (x))));
3925 }
3926 }
3927 break;
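
      /* Editor's note, an illustrative instance (not in the original):
	 for (subreg:SI (mem:QI addr) 0) on a WORD_REGISTER_OPERATIONS
	 target whose QImode loads zero extend, the upper 24 bits stay
	 known zero; on a typical CISC target they are ORed back in as
	 unknown by the code above.  */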
3928
3929 case ASHIFTRT:
3930 case LSHIFTRT:
3931 case ASHIFT:
3932 case ROTATE:
3933 /* The nonzero bits are in two classes: any bits within MODE
3934 that aren't in GET_MODE (x) are always significant. The rest of the
3935 nonzero bits are those that are significant in the operand of
3936 the shift when shifted the appropriate number of bits. This
3937 shows that high-order bits are cleared by the right shift and
3938 low-order bits by left shifts. */
3939 if (GET_CODE (XEXP (x, 1)) == CONST_INT
3940 && INTVAL (XEXP (x, 1)) >= 0
3941 && INTVAL (XEXP (x, 1)) < HOST_BITS_PER_WIDE_INT)
3942 {
3943 enum machine_mode inner_mode = GET_MODE (x);
3944 unsigned int width = GET_MODE_BITSIZE (inner_mode);
3945 int count = INTVAL (XEXP (x, 1));
3946 unsigned HOST_WIDE_INT mode_mask = GET_MODE_MASK (inner_mode);
3947 unsigned HOST_WIDE_INT op_nonzero =
3948 cached_nonzero_bits (XEXP (x, 0), mode,
3949 known_x, known_mode, known_ret);
3950 unsigned HOST_WIDE_INT inner = op_nonzero & mode_mask;
3951 unsigned HOST_WIDE_INT outer = 0;
3952
3953 if (mode_width > width)
3954 outer = (op_nonzero & nonzero & ~mode_mask);
3955
3956 if (code == LSHIFTRT)
3957 inner >>= count;
3958 else if (code == ASHIFTRT)
3959 {
3960 inner >>= count;
3961
3962 /* If the sign bit may have been nonzero before the shift, we
3963 need to mark all the places it could have been copied to
3964 by the shift as possibly nonzero. */
3965 if (inner & ((HOST_WIDE_INT) 1 << (width - 1 - count)))
3966 inner |= (((HOST_WIDE_INT) 1 << count) - 1) << (width - count);
3967 }
3968 else if (code == ASHIFT)
3969 inner <<= count;
3970 else
3971 inner = ((inner << (count % width)
3972 | (inner >> (width - (count % width)))) & mode_mask);
3973
3974 nonzero &= (outer | inner);
3975 }
3976 break;
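
      /* Editor's sketch, not from rtlanal.c: the right-shift halves of
	 the rule above, assuming a 32-bit value.  E.g. op_nonzero =
	 0xff00 shifted right by 4 gives inner = 0x0ff0; for ASHIFTRT,
	 if the old sign-bit position may be set, the COUNT positions
	 it is copied into are ORed back in.  */
#if 0
static unsigned int
example_nonzero_shiftrt (unsigned int op_nonzero, int count, int arith)
{
  unsigned int inner = op_nonzero >> count;

  if (arith && (inner & ((unsigned int) 1 << (31 - count))))
    inner |= (((unsigned int) 1 << count) - 1) << (32 - count);
  return inner;
}
#endif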
3977
3978 case FFS:
3979 case POPCOUNT:
3980 /* This is at most the number of bits in the mode. */
3981 nonzero = ((HOST_WIDE_INT) 2 << (floor_log2 (mode_width))) - 1;
3982 break;
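
      /* Editor's note (worked instance, not in the original): for a
	 32-bit mode, floor_log2 (32) == 5, so the mask above is
	 (2 << 5) - 1 == 63, enough to cover every result in 0..32.  */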
3983
3984 case CLZ:
3985 /* If CLZ has a known value at zero, then the nonzero bits are
3986 that value, plus the number of bits in the mode minus one. */
3987 if (CLZ_DEFINED_VALUE_AT_ZERO (mode, nonzero))
3988 nonzero |= ((HOST_WIDE_INT) 1 << (floor_log2 (mode_width))) - 1;
3989 else
3990 nonzero = -1;
3991 break;
3992
3993 case CTZ:
3994 /* If CTZ has a known value at zero, then the nonzero bits are
3995 that value, plus the number of bits in the mode minus one. */
3996 if (CTZ_DEFINED_VALUE_AT_ZERO (mode, nonzero))
3997 nonzero |= ((HOST_WIDE_INT) 1 << (floor_log2 (mode_width))) - 1;
3998 else
3999 nonzero = -1;
4000 break;
4001
4002 case PARITY:
4003 nonzero = 1;
4004 break;
4005
4006 case IF_THEN_ELSE:
4007 {
4008 unsigned HOST_WIDE_INT nonzero_true =
4009 cached_nonzero_bits (XEXP (x, 1), mode,
4010 known_x, known_mode, known_ret);
4011
4012 /* Don't call nonzero_bits for the second time if it cannot change
4013 anything. */
4014 if ((nonzero & nonzero_true) != nonzero)
4015 nonzero &= nonzero_true
4016 | cached_nonzero_bits (XEXP (x, 2), mode,
4017 known_x, known_mode, known_ret);
4018 }
4019 break;
4020
4021 default:
4022 break;
4023 }
4024
4025 return nonzero;
4026}
4027
4028/* See the macro definition above. */
4029#undef cached_num_sign_bit_copies
4030
4031\f
4032/* The function cached_num_sign_bit_copies is a wrapper around
4033 num_sign_bit_copies1. It avoids exponential behavior in
4034 num_sign_bit_copies1 when X has identical subexpressions on the
4035 first or the second level. */
4036
4037static unsigned int
4038cached_num_sign_bit_copies (rtx x, enum machine_mode mode, rtx known_x,
4039 enum machine_mode known_mode,
4040 unsigned int known_ret)
4041{
4042 if (x == known_x && mode == known_mode)
4043 return known_ret;
4044
4045      /* Try to find identical subexpressions.  If found, call
4046 num_sign_bit_copies1 on X with the subexpressions as KNOWN_X and
4047 the precomputed value for the subexpression as KNOWN_RET. */
4048
4049 if (ARITHMETIC_P (x))
4050 {
4051 rtx x0 = XEXP (x, 0);
4052 rtx x1 = XEXP (x, 1);
4053
4054 /* Check the first level. */
4055 if (x0 == x1)
4056 return
4057 num_sign_bit_copies1 (x, mode, x0, mode,
4058 cached_num_sign_bit_copies (x0, mode, known_x,
4059 known_mode,
4060 known_ret));
4061
4062 /* Check the second level. */
4063 if (ARITHMETIC_P (x0)
4064 && (x1 == XEXP (x0, 0) || x1 == XEXP (x0, 1)))
4065 return
4066 num_sign_bit_copies1 (x, mode, x1, mode,
4067 cached_num_sign_bit_copies (x1, mode, known_x,
4068 known_mode,
4069 known_ret));
4070
4071 if (ARITHMETIC_P (x1)
4072 && (x0 == XEXP (x1, 0) || x0 == XEXP (x1, 1)))
4073 return
4074 num_sign_bit_copies1 (x, mode, x0, mode,
4075 cached_num_sign_bit_copies (x0, mode, known_x,
4076 known_mode,
4077 known_ret));
4078 }
4079
4080 return num_sign_bit_copies1 (x, mode, known_x, known_mode, known_ret);
4081}
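
/* Editor's sketch, separate from this file: why the KNOWN_X/KNOWN_RET
   threading above matters.  With shared subexpressions such as
   (plus y y), recomputing both operands walks the shared subtree once
   per path, which is exponential in depth; computing it once and
   handing the answer down is linear.  */
#if 0
struct node { struct node *l, *r; };

static int
depth_slow (const struct node *n)
{
  int dl, dr;
  if (n == 0)
    return 0;
  dl = depth_slow (n->l);
  dr = depth_slow (n->r);
  return 1 + (dl > dr ? dl : dr);
}

static int
depth_cached (const struct node *n)
{
  if (n == 0)
    return 0;
  /* The analogue of the X0 == X1 check above: visit a shared child
     only once.  */
  if (n->l == n->r)
    return 1 + depth_cached (n->l);
  {
    int dl = depth_cached (n->l);
    int dr = depth_cached (n->r);
    return 1 + (dl > dr ? dl : dr);
  }
}
#endif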
4082
4083/* Return the number of bits at the high-order end of X that are known to
4084 be equal to the sign bit. X will be used in mode MODE; if MODE is
4085 VOIDmode, X will be used in its own mode. The returned value will always
4086 be between 1 and the number of bits in MODE. */
4087
4088static unsigned int
4089num_sign_bit_copies1 (rtx x, enum machine_mode mode, rtx known_x,
4090 enum machine_mode known_mode,
4091 unsigned int known_ret)
4092{
4093 enum rtx_code code = GET_CODE (x);
4094 unsigned int bitwidth = GET_MODE_BITSIZE (mode);
4095 int num0, num1, result;
4096 unsigned HOST_WIDE_INT nonzero;
4097
4098 /* If we weren't given a mode, use the mode of X. If the mode is still
4099 VOIDmode, we don't know anything. Likewise if one of the modes is
4100 floating-point. */
4101
4102 if (mode == VOIDmode)
4103 mode = GET_MODE (x);
4104
4105 if (mode == VOIDmode || FLOAT_MODE_P (mode) || FLOAT_MODE_P (GET_MODE (x)))
4106 return 1;
4107
4108 /* For a smaller object, just ignore the high bits. */
4109 if (bitwidth < GET_MODE_BITSIZE (GET_MODE (x)))
4110 {
4111 num0 = cached_num_sign_bit_copies (x, GET_MODE (x),
4112 known_x, known_mode, known_ret);
4113 return MAX (1,
4114 num0 - (int) (GET_MODE_BITSIZE (GET_MODE (x)) - bitwidth));
4115 }
4116
4117 if (GET_MODE (x) != VOIDmode && bitwidth > GET_MODE_BITSIZE (GET_MODE (x)))
4118 {
4119#ifndef WORD_REGISTER_OPERATIONS
4120 /* If this machine does not do all register operations on the entire
4121 register and MODE is wider than the mode of X, we can say nothing
4122 at all about the high-order bits. */
4123 return 1;
4124#else
4125 /* Likewise on machines that do, if the mode of the object is smaller
4126 than a word and loads of that size don't sign extend, we can say
4127 nothing about the high order bits. */
4128 if (GET_MODE_BITSIZE (GET_MODE (x)) < BITS_PER_WORD
4129#ifdef LOAD_EXTEND_OP
4130 && LOAD_EXTEND_OP (GET_MODE (x)) != SIGN_EXTEND
4131#endif
4132 )
4133 return 1;
4134#endif
4135 }
4136
4137 switch (code)
4138 {
4139 case REG:
4140
4141#if defined(POINTERS_EXTEND_UNSIGNED) && !defined(HAVE_ptr_extend)
4142 /* If pointers extend signed and this is a pointer in Pmode, say that
4143 all the bits above ptr_mode are known to be sign bit copies. */
4144 if (! POINTERS_EXTEND_UNSIGNED && GET_MODE (x) == Pmode && mode == Pmode
4145 && REG_POINTER (x))
4146 return GET_MODE_BITSIZE (Pmode) - GET_MODE_BITSIZE (ptr_mode) + 1;
4147#endif
4148
4149 {
4150 unsigned int copies_for_hook = 1, copies = 1;
4151 rtx new = rtl_hooks.reg_num_sign_bit_copies (x, mode, known_x,
4152 known_mode, known_ret,
4153 &copies_for_hook);
4154
4155 if (new)
4156 copies = cached_num_sign_bit_copies (new, mode, known_x,
4157 known_mode, known_ret);
4158
4159 if (copies > 1 || copies_for_hook > 1)
4160 return MAX (copies, copies_for_hook);
4161
4162 /* Else, use nonzero_bits to guess num_sign_bit_copies (see below). */
4163 }
4164 break;
4165
4166 case MEM:
4167#ifdef LOAD_EXTEND_OP
4168      /* Some RISC machines sign-extend all loads smaller than a word.  */
4169 if (LOAD_EXTEND_OP (GET_MODE (x)) == SIGN_EXTEND)
4170 return MAX (1, ((int) bitwidth
4171 - (int) GET_MODE_BITSIZE (GET_MODE (x)) + 1));
4172#endif
4173 break;
4174
4175 case CONST_INT:
4176 /* If the constant is negative, take its 1's complement and remask.
4177 Then see how many zero bits we have. */
4178 nonzero = INTVAL (x) & GET_MODE_MASK (mode);
4179 if (bitwidth <= HOST_BITS_PER_WIDE_INT
4180 && (nonzero & ((HOST_WIDE_INT) 1 << (bitwidth - 1))) != 0)
4181 nonzero = (~nonzero) & GET_MODE_MASK (mode);
4182
4183 return (nonzero == 0 ? bitwidth : bitwidth - floor_log2 (nonzero) - 1);
4184
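    /* Editor's check, not part of the original: in a 32-bit mode,
       x = -4 is 0xfffffffc; complement-and-remask gives 3,
       floor_log2 (3) == 1, so 32 - 1 - 1 == 30 copies (bits 31..2 all
       equal the sign bit).  A standalone version, assuming 32-bit
       ints:  */
#if 0
static int
example_sign_copies_const_int (int val)
{
  unsigned int nonzero = (unsigned int) val;
  int log2 = -1;

  if (nonzero & 0x80000000U)
    nonzero = ~nonzero;
  while (nonzero)
    log2++, nonzero >>= 1;	/* floor_log2; stays -1 for zero */
  return 32 - log2 - 1;		/* full bitwidth for 0 and -1 */
}
#endif
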
4185 case SUBREG:
4186 /* If this is a SUBREG for a promoted object that is sign-extended
4187 and we are looking at it in a wider mode, we know that at least the
4188 high-order bits are known to be sign bit copies. */
4189
4190 if (SUBREG_PROMOTED_VAR_P (x) && ! SUBREG_PROMOTED_UNSIGNED_P (x))
4191 {
4192 num0 = cached_num_sign_bit_copies (SUBREG_REG (x), mode,
4193 known_x, known_mode, known_ret);
4194 return MAX ((int) bitwidth
4195 - (int) GET_MODE_BITSIZE (GET_MODE (x)) + 1,
4196 num0);
4197 }
4198
4199 /* For a smaller object, just ignore the high bits. */
4200 if (bitwidth <= GET_MODE_BITSIZE (GET_MODE (SUBREG_REG (x))))
4201 {
4202 num0 = cached_num_sign_bit_copies (SUBREG_REG (x), VOIDmode,
4203 known_x, known_mode, known_ret);
4204 return MAX (1, (num0
4205 - (int) (GET_MODE_BITSIZE (GET_MODE (SUBREG_REG (x)))
4206 - bitwidth)));
4207 }
4208
4209#ifdef WORD_REGISTER_OPERATIONS
4210#ifdef LOAD_EXTEND_OP
4211 /* For paradoxical SUBREGs on machines where all register operations
4212 affect the entire register, just look inside. Note that we are
4213 passing MODE to the recursive call, so the number of sign bit copies
4214 will remain relative to that mode, not the inner mode. */
4215
4216 /* This works only if loads sign extend. Otherwise, if we get a
4217 reload for the inner part, it may be loaded from the stack, and
4218 then we lose all sign bit copies that existed before the store
4219 to the stack. */
4220
4221 if ((GET_MODE_SIZE (GET_MODE (x))
4222 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))))
4223 && LOAD_EXTEND_OP (GET_MODE (SUBREG_REG (x))) == SIGN_EXTEND
4224	      && MEM_P (SUBREG_REG (x)))
4225 return cached_num_sign_bit_copies (SUBREG_REG (x), mode,
4226 known_x, known_mode, known_ret);
4227#endif
4228#endif
4229 break;
4230
4231 case SIGN_EXTRACT:
4232 if (GET_CODE (XEXP (x, 1)) == CONST_INT)
4233 return MAX (1, (int) bitwidth - INTVAL (XEXP (x, 1)));
4234 break;
4235
4236 case SIGN_EXTEND:
4237 return (bitwidth - GET_MODE_BITSIZE (GET_MODE (XEXP (x, 0)))
4238 + cached_num_sign_bit_copies (XEXP (x, 0), VOIDmode,
4239 known_x, known_mode, known_ret));
4240
4241 case TRUNCATE:
4242 /* For a smaller object, just ignore the high bits. */
4243 num0 = cached_num_sign_bit_copies (XEXP (x, 0), VOIDmode,
4244 known_x, known_mode, known_ret);
4245 return MAX (1, (num0 - (int) (GET_MODE_BITSIZE (GET_MODE (XEXP (x, 0)))
4246 - bitwidth)));
4247
4248 case NOT:
4249 return cached_num_sign_bit_copies (XEXP (x, 0), mode,
4250 known_x, known_mode, known_ret);
4251
4252 case ROTATE: case ROTATERT:
4253 /* If we are rotating left by a number of bits less than the number
4254 of sign bit copies, we can just subtract that amount from the
4255 number. */
4256 if (GET_CODE (XEXP (x, 1)) == CONST_INT
4257 && INTVAL (XEXP (x, 1)) >= 0
4258 && INTVAL (XEXP (x, 1)) < (int) bitwidth)
4259 {
4260 num0 = cached_num_sign_bit_copies (XEXP (x, 0), mode,
4261 known_x, known_mode, known_ret);
4262 return MAX (1, num0 - (code == ROTATE ? INTVAL (XEXP (x, 1))
4263 : (int) bitwidth - INTVAL (XEXP (x, 1))));
4264 }
4265 break;
4266
4267 case NEG:
4268 /* In general, this subtracts one sign bit copy. But if the value
4269 is known to be positive, the number of sign bit copies is the
4270 same as that of the input. Finally, if the input has just one bit
4271 that might be nonzero, all the bits are copies of the sign bit. */
4272 num0 = cached_num_sign_bit_copies (XEXP (x, 0), mode,
4273 known_x, known_mode, known_ret);
4274 if (bitwidth > HOST_BITS_PER_WIDE_INT)
4275 return num0 > 1 ? num0 - 1 : 1;
4276
4277 nonzero = nonzero_bits (XEXP (x, 0), mode);
4278 if (nonzero == 1)
4279 return bitwidth;
4280
4281 if (num0 > 1
4282 && (((HOST_WIDE_INT) 1 << (bitwidth - 1)) & nonzero))
4283 num0--;
4284
4285 return num0;
4286
4287 case IOR: case AND: case XOR:
4288 case SMIN: case SMAX: case UMIN: case UMAX:
4289 /* Logical operations will preserve the number of sign-bit copies.
4290 MIN and MAX operations always return one of the operands. */
4291 num0 = cached_num_sign_bit_copies (XEXP (x, 0), mode,
4292 known_x, known_mode, known_ret);
4293 num1 = cached_num_sign_bit_copies (XEXP (x, 1), mode,
4294 known_x, known_mode, known_ret);
4295
4296 /* If num1 is clearing some of the top bits then regardless of
4297 the other term, we are guaranteed to have at least that many
4298 high-order zero bits. */
4299 if (code == AND
4300 && num1 > 1
4301 && bitwidth <= HOST_BITS_PER_WIDE_INT
4302 && GET_CODE (XEXP (x, 1)) == CONST_INT
4303 && !(INTVAL (XEXP (x, 1)) & ((HOST_WIDE_INT) 1 << (bitwidth - 1))))
4304 return num1;
4305
4306 /* Similarly for IOR when setting high-order bits. */
4307 if (code == IOR
4308 && num1 > 1
4309 && bitwidth <= HOST_BITS_PER_WIDE_INT
4310 && GET_CODE (XEXP (x, 1)) == CONST_INT
4311 && (INTVAL (XEXP (x, 1)) & ((HOST_WIDE_INT) 1 << (bitwidth - 1))))
4312 return num1;
4313
4314 return MIN (num0, num1);
4315
4316 case PLUS: case MINUS:
4317 /* For addition and subtraction, we can have a 1-bit carry. However,
4318 if we are subtracting 1 from a positive number, there will not
4319 be such a carry. Furthermore, if the positive number is known to
4320 be 0 or 1, we know the result is either -1 or 0. */
4321
4322 if (code == PLUS && XEXP (x, 1) == constm1_rtx
4323 && bitwidth <= HOST_BITS_PER_WIDE_INT)
4324 {
4325 nonzero = nonzero_bits (XEXP (x, 0), mode);
4326 if ((((HOST_WIDE_INT) 1 << (bitwidth - 1)) & nonzero) == 0)
4327 return (nonzero == 1 || nonzero == 0 ? bitwidth
4328 : bitwidth - floor_log2 (nonzero) - 1);
4329 }
4330
4331 num0 = cached_num_sign_bit_copies (XEXP (x, 0), mode,
4332 known_x, known_mode, known_ret);
4333 num1 = cached_num_sign_bit_copies (XEXP (x, 1), mode,
4334 known_x, known_mode, known_ret);
4335 result = MAX (1, MIN (num0, num1) - 1);
4336
4337#ifdef POINTERS_EXTEND_UNSIGNED
4338 /* If pointers extend signed and this is an addition or subtraction
4339 to a pointer in Pmode, all the bits above ptr_mode are known to be
4340 sign bit copies. */
4341 if (! POINTERS_EXTEND_UNSIGNED && GET_MODE (x) == Pmode
4342 && (code == PLUS || code == MINUS)
4343	  && REG_P (XEXP (x, 0)) && REG_POINTER (XEXP (x, 0)))
4344 result = MAX ((int) (GET_MODE_BITSIZE (Pmode)
4345 - GET_MODE_BITSIZE (ptr_mode) + 1),
4346 result);
4347#endif
4348 return result;
4349
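    /* Editor's note (worked instance, not in the original): two 32-bit
       values with at least 20 sign-bit copies each lie in
       [-(1 << 12), (1 << 12) - 1]; their sum or difference fits in 14
       bits and so keeps at least 19 copies, matching
       MAX (1, MIN (num0, num1) - 1) above.  */
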
4350 case MULT:
4351 /* The number of bits of the product is the sum of the number of
4352	 bits of both terms.  However, unless one of the terms is known
4353 to be positive, we must allow for an additional bit since negating
4354 a negative number can remove one sign bit copy. */
4355
4356 num0 = cached_num_sign_bit_copies (XEXP (x, 0), mode,
4357 known_x, known_mode, known_ret);
4358 num1 = cached_num_sign_bit_copies (XEXP (x, 1), mode,
4359 known_x, known_mode, known_ret);
4360
4361 result = bitwidth - (bitwidth - num0) - (bitwidth - num1);
4362 if (result > 0
4363 && (bitwidth > HOST_BITS_PER_WIDE_INT
4364 || (((nonzero_bits (XEXP (x, 0), mode)
4365 & ((HOST_WIDE_INT) 1 << (bitwidth - 1))) != 0)
4366 && ((nonzero_bits (XEXP (x, 1), mode)
4367 & ((HOST_WIDE_INT) 1 << (bitwidth - 1))) != 0))))
4368 result--;
4369
4370 return MAX (1, result);
4371
4372 case UDIV:
4373 /* The result must be <= the first operand. If the first operand
4374 has the high bit set, we know nothing about the number of sign
4375 bit copies. */
4376 if (bitwidth > HOST_BITS_PER_WIDE_INT)
4377 return 1;
4378 else if ((nonzero_bits (XEXP (x, 0), mode)
4379 & ((HOST_WIDE_INT) 1 << (bitwidth - 1))) != 0)
4380 return 1;
4381 else
4382 return cached_num_sign_bit_copies (XEXP (x, 0), mode,
4383 known_x, known_mode, known_ret);
4384
4385 case UMOD:
4386 /* The result must be <= the second operand. */
4387 return cached_num_sign_bit_copies (XEXP (x, 1), mode,
4388 known_x, known_mode, known_ret);
4389
4390 case DIV:
4391 /* Similar to unsigned division, except that we have to worry about
4392 the case where the divisor is negative, in which case we have
4393 to add 1. */
4394 result = cached_num_sign_bit_copies (XEXP (x, 0), mode,
4395 known_x, known_mode, known_ret);
4396 if (result > 1
4397 && (bitwidth > HOST_BITS_PER_WIDE_INT
4398 || (nonzero_bits (XEXP (x, 1), mode)
4399 & ((HOST_WIDE_INT) 1 << (bitwidth - 1))) != 0))
4400 result--;
4401
4402 return result;
4403
4404 case MOD:
4405 result = cached_num_sign_bit_copies (XEXP (x, 1), mode,
4406 known_x, known_mode, known_ret);
4407 if (result > 1
4408 && (bitwidth > HOST_BITS_PER_WIDE_INT
4409 || (nonzero_bits (XEXP (x, 1), mode)
4410 & ((HOST_WIDE_INT) 1 << (bitwidth - 1))) != 0))
4411 result--;
4412
4413 return result;
4414
4415 case ASHIFTRT:
4416      /* An arithmetic right shift by a constant adds that many bits
4417	 equal to the sign bit.  */
4418 num0 = cached_num_sign_bit_copies (XEXP (x, 0), mode,
4419 known_x, known_mode, known_ret);
4420 if (GET_CODE (XEXP (x, 1)) == CONST_INT
4421 && INTVAL (XEXP (x, 1)) > 0)
4422 num0 = MIN ((int) bitwidth, num0 + INTVAL (XEXP (x, 1)));
4423
4424 return num0;
4425
4426 case ASHIFT:
4427 /* Left shifts destroy copies. */
4428 if (GET_CODE (XEXP (x, 1)) != CONST_INT
4429 || INTVAL (XEXP (x, 1)) < 0
4430 || INTVAL (XEXP (x, 1)) >= (int) bitwidth)
4431 return 1;
4432
4433 num0 = cached_num_sign_bit_copies (XEXP (x, 0), mode,
4434 known_x, known_mode, known_ret);
4435 return MAX (1, num0 - INTVAL (XEXP (x, 1)));
4436
4437 case IF_THEN_ELSE:
4438 num0 = cached_num_sign_bit_copies (XEXP (x, 1), mode,
4439 known_x, known_mode, known_ret);
4440 num1 = cached_num_sign_bit_copies (XEXP (x, 2), mode,
4441 known_x, known_mode, known_ret);
4442 return MIN (num0, num1);
4443
4444 case EQ: case NE: case GE: case GT: case LE: case LT:
4445 case UNEQ: case LTGT: case UNGE: case UNGT: case UNLE: case UNLT:
4446 case GEU: case GTU: case LEU: case LTU:
4447 case UNORDERED: case ORDERED:
4448      /* If STORE_FLAG_VALUE is negative, take its 1's complement and
4449	 remask.  Then see how many zero bits we have.  */
4450 nonzero = STORE_FLAG_VALUE;
4451 if (bitwidth <= HOST_BITS_PER_WIDE_INT
4452 && (nonzero & ((HOST_WIDE_INT) 1 << (bitwidth - 1))) != 0)
4453 nonzero = (~nonzero) & GET_MODE_MASK (mode);
4454
4455 return (nonzero == 0 ? bitwidth : bitwidth - floor_log2 (nonzero) - 1);
4456
4457 default:
4458 break;
4459 }
4460
4461 /* If we haven't been able to figure it out by one of the above rules,
4462 see if some of the high-order bits are known to be zero. If so,
4463 count those bits and return one less than that amount. If we can't
4464 safely compute the mask for this mode, always return BITWIDTH. */
4465
4466 bitwidth = GET_MODE_BITSIZE (mode);
4467 if (bitwidth > HOST_BITS_PER_WIDE_INT)
4468 return 1;
4469
4470 nonzero = nonzero_bits (x, mode);
4471 return nonzero & ((HOST_WIDE_INT) 1 << (bitwidth - 1))
4472 ? 1 : bitwidth - floor_log2 (nonzero) - 1;
4473}
4474
4475/* Calculate the rtx_cost of a single instruction. A return value of
4476 zero indicates an instruction pattern without a known cost. */
4477
4478int
4479insn_rtx_cost (rtx pat)
4480{
4481 int i, cost;
4482 rtx set;
4483
4484 /* Extract the single set rtx from the instruction pattern.
4485 We can't use single_set since we only have the pattern. */
4486 if (GET_CODE (pat) == SET)
4487 set = pat;
4488 else if (GET_CODE (pat) == PARALLEL)
4489 {
4490 set = NULL_RTX;
4491 for (i = 0; i < XVECLEN (pat, 0); i++)
4492 {
4493 rtx x = XVECEXP (pat, 0, i);
4494 if (GET_CODE (x) == SET)
4495 {
4496 if (set)
4497 return 0;
4498 set = x;
4499 }
4500 }
4501 if (!set)
4502 return 0;
4503 }
4504 else
4505 return 0;
4506
4507 cost = rtx_cost (SET_SRC (set), SET);
4508 return cost > 0 ? cost : COSTS_N_INSNS (1);
4509}
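
/* Editor's note, illustrative pattern shapes (hypothetical register
   numbers, not from this file):

     (set (reg:SI 0) (plus:SI (reg:SI 1) (reg:SI 2)))
	-> rtx_cost of the PLUS, at least COSTS_N_INSNS (1)
     (parallel [(set ...) (clobber (reg:CC 17))])
	-> cost of the single SET
     (parallel [(set ...) (set ...)])
	-> 0; with two SETs there is no single known cost  */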
4510
4511/* Given an insn INSN and condition COND, return the condition in a
4512 canonical form to simplify testing by callers. Specifically:
4513
4514 (1) The code will always be a comparison operation (EQ, NE, GT, etc.).
4515 (2) Both operands will be machine operands; (cc0) will have been replaced.
4516 (3) If an operand is a constant, it will be the second operand.
4517 (4) (LE x const) will be replaced with (LT x <const+1>) and similarly
4518 for GE, GEU, and LEU.
4519
4520 If the condition cannot be understood, or is an inequality floating-point
4521 comparison which needs to be reversed, 0 will be returned.
4522
4523   If REVERSE is nonzero, then reverse the condition prior to
   canonicalizing it.
4524
4525 If EARLIEST is nonzero, it is a pointer to a place where the earliest
4526 insn used in locating the condition was found. If a replacement test
4527 of the condition is desired, it should be placed in front of that
4528 insn and we will be sure that the inputs are still valid.
4529
4530 If WANT_REG is nonzero, we wish the condition to be relative to that
4531 register, if possible. Therefore, do not canonicalize the condition
4532 further. If ALLOW_CC_MODE is nonzero, allow the condition returned
4533 to be a compare to a CC mode register.
4534
4535 If VALID_AT_INSN_P, the condition must be valid at both *EARLIEST
4536 and at INSN. */
4537
4538rtx
4539canonicalize_condition (rtx insn, rtx cond, int reverse, rtx *earliest,
4540 rtx want_reg, int allow_cc_mode, int valid_at_insn_p)
4541{
4542 enum rtx_code code;
4543 rtx prev = insn;
4544 rtx set;
4545 rtx tem;
4546 rtx op0, op1;
4547 int reverse_code = 0;
4548 enum machine_mode mode;
4549  basic_block bb = BLOCK_FOR_INSN (insn);
4550
4551 code = GET_CODE (cond);
4552 mode = GET_MODE (cond);
4553 op0 = XEXP (cond, 0);
4554 op1 = XEXP (cond, 1);
4555
4556 if (reverse)
4557 code = reversed_comparison_code (cond, insn);
4558 if (code == UNKNOWN)
4559 return 0;
4560
4561 if (earliest)
4562 *earliest = insn;
4563
4564 /* If we are comparing a register with zero, see if the register is set
4565 in the previous insn to a COMPARE or a comparison operation. Perform
4566 the same tests as a function of STORE_FLAG_VALUE as find_comparison_args
4567     in cse.c.  */
4568
4569 while ((GET_RTX_CLASS (code) == RTX_COMPARE
4570 || GET_RTX_CLASS (code) == RTX_COMM_COMPARE)
4571 && op1 == CONST0_RTX (GET_MODE (op0))
4572 && op0 != want_reg)
4573 {
4574 /* Set nonzero when we find something of interest. */
4575 rtx x = 0;
4576
4577#ifdef HAVE_cc0
4578 /* If comparison with cc0, import actual comparison from compare
4579 insn. */
4580 if (op0 == cc0_rtx)
4581 {
4582 if ((prev = prev_nonnote_insn (prev)) == 0
4583 || !NONJUMP_INSN_P (prev)
4584 || (set = single_set (prev)) == 0
4585 || SET_DEST (set) != cc0_rtx)
4586 return 0;
4587
4588 op0 = SET_SRC (set);
4589 op1 = CONST0_RTX (GET_MODE (op0));
4590 if (earliest)
4591 *earliest = prev;
4592 }
4593#endif
4594
4595 /* If this is a COMPARE, pick up the two things being compared. */
4596 if (GET_CODE (op0) == COMPARE)
4597 {
4598 op1 = XEXP (op0, 1);
4599 op0 = XEXP (op0, 0);
4600 continue;
4601 }
4602 else if (!REG_P (op0))
4603 break;
4604
4605 /* Go back to the previous insn. Stop if it is not an INSN. We also
4606 stop if it isn't a single set or if it has a REG_INC note because
4607 we don't want to bother dealing with it. */
4608
4609 if ((prev = prev_nonnote_insn (prev)) == 0
4610 || !NONJUMP_INSN_P (prev)
4611 || FIND_REG_INC_NOTE (prev, NULL_RTX)
4612 /* In cfglayout mode, there do not have to be labels at the
4613 beginning of a block, or jumps at the end, so the previous
4614	     conditions would not stop us when we reach the bb boundary.  */
4615 || BLOCK_FOR_INSN (prev) != bb)
4616 break;
4617
4618 set = set_of (op0, prev);
4619
4620 if (set
4621 && (GET_CODE (set) != SET
4622 || !rtx_equal_p (SET_DEST (set), op0)))
4623 break;
4624
4625 /* If this is setting OP0, get what it sets it to if it looks
4626 relevant. */
4627 if (set)
4628 {
4629 enum machine_mode inner_mode = GET_MODE (SET_DEST (set));
4630#ifdef FLOAT_STORE_FLAG_VALUE
4631 REAL_VALUE_TYPE fsfv;
4632#endif
4633
4634 /* ??? We may not combine comparisons done in a CCmode with
4635 comparisons not done in a CCmode. This is to aid targets
4636 like Alpha that have an IEEE compliant EQ instruction, and
4637 a non-IEEE compliant BEQ instruction. The use of CCmode is
4638 actually artificial, simply to prevent the combination, but
4639 should not affect other platforms.
4640
4641 However, we must allow VOIDmode comparisons to match either
4642 CCmode or non-CCmode comparison, because some ports have
4643 modeless comparisons inside branch patterns.
4644
4645 ??? This mode check should perhaps look more like the mode check
4646 in simplify_comparison in combine. */
4647
4648 if ((GET_CODE (SET_SRC (set)) == COMPARE
4649 || (((code == NE
4650 || (code == LT
4651 && GET_MODE_CLASS (inner_mode) == MODE_INT
4652 && (GET_MODE_BITSIZE (inner_mode)
4653 <= HOST_BITS_PER_WIDE_INT)
4654 && (STORE_FLAG_VALUE
4655 & ((HOST_WIDE_INT) 1
4656 << (GET_MODE_BITSIZE (inner_mode) - 1))))
4657#ifdef FLOAT_STORE_FLAG_VALUE
4658 || (code == LT
4659		   && SCALAR_FLOAT_MODE_P (inner_mode)
4660 && (fsfv = FLOAT_STORE_FLAG_VALUE (inner_mode),
4661 REAL_VALUE_NEGATIVE (fsfv)))
4662#endif
4663 ))
4664 && COMPARISON_P (SET_SRC (set))))
4665 && (((GET_MODE_CLASS (mode) == MODE_CC)
4666 == (GET_MODE_CLASS (inner_mode) == MODE_CC))
4667 || mode == VOIDmode || inner_mode == VOIDmode))
4668 x = SET_SRC (set);
4669 else if (((code == EQ
4670 || (code == GE
4671 && (GET_MODE_BITSIZE (inner_mode)
4672 <= HOST_BITS_PER_WIDE_INT)
4673 && GET_MODE_CLASS (inner_mode) == MODE_INT
4674 && (STORE_FLAG_VALUE
4675 & ((HOST_WIDE_INT) 1
4676 << (GET_MODE_BITSIZE (inner_mode) - 1))))
4677#ifdef FLOAT_STORE_FLAG_VALUE
4678 || (code == GE
4679		   && SCALAR_FLOAT_MODE_P (inner_mode)
4680 && (fsfv = FLOAT_STORE_FLAG_VALUE (inner_mode),
4681 REAL_VALUE_NEGATIVE (fsfv)))
4682#endif
4683 ))
4684 && COMPARISON_P (SET_SRC (set))
4685 && (((GET_MODE_CLASS (mode) == MODE_CC)
4686 == (GET_MODE_CLASS (inner_mode) == MODE_CC))
4687 || mode == VOIDmode || inner_mode == VOIDmode))
4688
4689 {
4690 reverse_code = 1;
4691 x = SET_SRC (set);
4692 }
4693 else
4694 break;
4695 }
4696
4697 else if (reg_set_p (op0, prev))
4698 /* If this sets OP0, but not directly, we have to give up. */
4699 break;
4700
4701 if (x)
4702 {
4703 /* If the caller is expecting the condition to be valid at INSN,
4704 make sure X doesn't change before INSN. */
4705 if (valid_at_insn_p)
4706 if (modified_in_p (x, prev) || modified_between_p (x, prev, insn))
4707 break;
4708 if (COMPARISON_P (x))
4709 code = GET_CODE (x);
4710 if (reverse_code)
4711 {
4712 code = reversed_comparison_code (x, prev);
4713 if (code == UNKNOWN)
4714 return 0;
4715 reverse_code = 0;
4716 }
4717
4718 op0 = XEXP (x, 0), op1 = XEXP (x, 1);
4719 if (earliest)
4720 *earliest = prev;
4721 }
4722 }
4723
4724 /* If constant is first, put it last. */
4725 if (CONSTANT_P (op0))
4726 code = swap_condition (code), tem = op0, op0 = op1, op1 = tem;
4727
4728 /* If OP0 is the result of a comparison, we weren't able to find what
4729 was really being compared, so fail. */
4730 if (!allow_cc_mode
4731 && GET_MODE_CLASS (GET_MODE (op0)) == MODE_CC)
4732 return 0;
4733
4734 /* Canonicalize any ordered comparison with integers involving equality
4735 if we can do computations in the relevant mode and we do not
4736 overflow. */
4737
4738 if (GET_MODE_CLASS (GET_MODE (op0)) != MODE_CC
4739 && GET_CODE (op1) == CONST_INT
4740 && GET_MODE (op0) != VOIDmode
4741 && GET_MODE_BITSIZE (GET_MODE (op0)) <= HOST_BITS_PER_WIDE_INT)
4742 {
4743 HOST_WIDE_INT const_val = INTVAL (op1);
4744 unsigned HOST_WIDE_INT uconst_val = const_val;
4745 unsigned HOST_WIDE_INT max_val
4746 = (unsigned HOST_WIDE_INT) GET_MODE_MASK (GET_MODE (op0));
4747
4748 switch (code)
4749 {
4750 case LE:
4751 if ((unsigned HOST_WIDE_INT) const_val != max_val >> 1)
4752 code = LT, op1 = gen_int_mode (const_val + 1, GET_MODE (op0));
4753 break;
4754
4755 /* When cross-compiling, const_val might be sign-extended from
4756	 BITS_PER_WORD to HOST_BITS_PER_WIDE_INT.  */
4757 case GE:
4758 if ((HOST_WIDE_INT) (const_val & max_val)
4759 != (((HOST_WIDE_INT) 1
4760 << (GET_MODE_BITSIZE (GET_MODE (op0)) - 1))))
4761 code = GT, op1 = gen_int_mode (const_val - 1, GET_MODE (op0));
4762 break;
4763
4764 case LEU:
4765 if (uconst_val < max_val)
4766 code = LTU, op1 = gen_int_mode (uconst_val + 1, GET_MODE (op0));
4767 break;
4768
4769 case GEU:
4770 if (uconst_val != 0)
4771 code = GTU, op1 = gen_int_mode (uconst_val - 1, GET_MODE (op0));
4772 break;
4773
4774 default:
4775 break;
4776 }
4777 }
4778
4779 /* Never return CC0; return zero instead. */
4780 if (CC0_P (op0))
4781 return 0;
4782
4783 return gen_rtx_fmt_ee (code, VOIDmode, op0, op1);
4784}
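
/* Editor's note, worked instances of rule (4) above (illustrative, not
   from this file), for a QImode register R:

     (le  R (const_int 4))  ->  (lt  R (const_int 5))
     (geu R (const_int 1))  ->  (gtu R (const_int 0))

   while (le R (const_int 127)) is left alone, since 127 is
   max_val >> 1 for QImode and const_val + 1 would overflow.  */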
4785
4786/* Given a jump insn JUMP, return the condition that will cause it to branch
4787 to its JUMP_LABEL. If the condition cannot be understood, or is an
4788 inequality floating-point comparison which needs to be reversed, 0 will
4789 be returned.
4790
4791 If EARLIEST is nonzero, it is a pointer to a place where the earliest
4792 insn used in locating the condition was found. If a replacement test
4793 of the condition is desired, it should be placed in front of that
4794 insn and we will be sure that the inputs are still valid. If EARLIEST
4795 is null, the returned condition will be valid at INSN.
4796
4797 If ALLOW_CC_MODE is nonzero, allow the condition returned to be a
4798 compare CC mode register.
4799
4800 VALID_AT_INSN_P is the same as for canonicalize_condition. */
4801
4802rtx
4803get_condition (rtx jump, rtx *earliest, int allow_cc_mode, int valid_at_insn_p)
4804{
4805 rtx cond;
4806 int reverse;
4807 rtx set;
4808
4809 /* If this is not a standard conditional jump, we can't parse it. */
4810 if (!JUMP_P (jump)
4811 || ! any_condjump_p (jump))
4812 return 0;
4813 set = pc_set (jump);
4814
4815 cond = XEXP (SET_SRC (set), 0);
4816
4817 /* If this branches to JUMP_LABEL when the condition is false, reverse
4818 the condition. */
4819 reverse
4820 = GET_CODE (XEXP (SET_SRC (set), 2)) == LABEL_REF
4821 && XEXP (XEXP (SET_SRC (set), 2), 0) == JUMP_LABEL (jump);
4822
4823 return canonicalize_condition (jump, cond, reverse, earliest, NULL_RTX,
4824 allow_cc_mode, valid_at_insn_p);
4825}
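
/* Editor's note, an illustrative condjump shape (hypothetical operands,
   not from this file):

     (set (pc) (if_then_else (eq (reg:CC 17) (const_int 0))
			     (label_ref 23)
			     (pc)))

   branches when the condition is true.  If the (label_ref ...) sits in
   the else arm instead, REVERSE is set and the condition is reversed
   before canonicalization.  */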
4826
4827/* Initialize the table NUM_SIGN_BIT_COPIES_IN_REP based on
4828 TARGET_MODE_REP_EXTENDED.
4829
4830 Note that we assume that the property of
4831 TARGET_MODE_REP_EXTENDED(B, C) is sticky to the integral modes
4832   TARGET_MODE_REP_EXTENDED (B, C) carries over to all integral modes
4833   narrower than mode B.  I.e., if A is a mode narrower than B, then in
4834   order to be able to operate on it in mode B, mode A needs to
4835   satisfy the requirements set by the representation of mode B.  */
4836static void
4837init_num_sign_bit_copies_in_rep (void)
4838{
4839 enum machine_mode mode, in_mode;
4840
4841 for (in_mode = GET_CLASS_NARROWEST_MODE (MODE_INT); in_mode != VOIDmode;
4842 in_mode = GET_MODE_WIDER_MODE (mode))
4843 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != in_mode;
4844 mode = GET_MODE_WIDER_MODE (mode))
4845 {
4846 enum machine_mode i;
4847
4848 /* Currently, it is assumed that TARGET_MODE_REP_EXTENDED
4849 extends to the next widest mode. */
4850 gcc_assert (targetm.mode_rep_extended (mode, in_mode) == UNKNOWN
4851 || GET_MODE_WIDER_MODE (mode) == in_mode);
4852
4853 /* We are in in_mode. Count how many bits outside of mode
4854 have to be copies of the sign-bit. */
4855 for (i = mode; i != in_mode; i = GET_MODE_WIDER_MODE (i))
4856 {
4857 enum machine_mode wider = GET_MODE_WIDER_MODE (i);
4858
4859 if (targetm.mode_rep_extended (i, wider) == SIGN_EXTEND
4860 /* We can only check sign-bit copies starting from the
4861 top-bit. In order to be able to check the bits we
4862 have already seen we pretend that subsequent bits
4863 have to be sign-bit copies too. */
4864 || num_sign_bit_copies_in_rep [in_mode][mode])
4865 num_sign_bit_copies_in_rep [in_mode][mode]
4866 += GET_MODE_BITSIZE (wider) - GET_MODE_BITSIZE (i);
4867 }
4868 }
4869}
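
/* Editor's note (worked instance under an assumption about a MIPS-like
   64-bit target, not stated in this file): if
   TARGET_MODE_REP_EXTENDED (SImode, DImode) == SIGN_EXTEND, the loops
   above record num_sign_bit_copies_in_rep[DImode][SImode] == 32, so
   truncated_to_mode below demands 32 + 1 == 33 sign-bit copies
   (bits 63..31 all equal) before treating a DImode value as already
   truncated to SImode.  */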
4870
4871/* Suppose that truncation from the machine mode of X to MODE is not a
4872 no-op. See if there is anything special about X so that we can
4873 assume it already contains a truncated value of MODE. */
4874
4875bool
4876truncated_to_mode (enum machine_mode mode, rtx x)
4877{
4878 /* This register has already been used in MODE without explicit
4879 truncation. */
4880 if (REG_P (x) && rtl_hooks.reg_truncated_to_mode (mode, x))
4881 return true;
4882
4883 /* See if we already satisfy the requirements of MODE. If yes we
4884 can just switch to MODE. */
4885 if (num_sign_bit_copies_in_rep[GET_MODE (x)][mode]
4886 && (num_sign_bit_copies (x, GET_MODE (x))
4887 >= num_sign_bit_copies_in_rep[GET_MODE (x)][mode] + 1))
4888 return true;
4889
4890 return false;
4891}
4892\f
4893/* Initialize non_rtx_starting_operands, which is used to speed up
4894 for_each_rtx. */
4895void
4896init_rtlanal (void)
4897{
4898 int i;
4899 for (i = 0; i < NUM_RTX_CODE; i++)
4900 {
4901 const char *format = GET_RTX_FORMAT (i);
4902 const char *first = strpbrk (format, "eEV");
4903 non_rtx_starting_operands[i] = first ? first - format : -1;
4904 }
4905
4906 init_num_sign_bit_copies_in_rep ();
4907}
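
/* Editor's note (worked instances, not in the original): PLUS and SET
   both have rtx format "ee", so their first rtx operand is at index 0;
   CONST_INT has format "w" with no rtx operands, giving -1, which lets
   for_each_rtx skip scanning such nodes entirely.  */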
4908\f
4909/* Check whether this is a constant pool constant. */
4910bool
4911constant_pool_constant_p (rtx x)
4912{
4913 x = avoid_constant_pool_reference (x);
4914 return GET_CODE (x) == CONST_DOUBLE;
4915}
4916