]> git.ipfire.org Git - thirdparty/gcc.git/blame - gcc/rtlanal.c
re PR fortran/54462 (Another "segmentation fault" after an error in COMMON statement...
[thirdparty/gcc.git] / gcc / rtlanal.c
CommitLineData
af082de3 1/* Analyze RTL for GNU compiler.
af841dbd 2 Copyright (C) 1987, 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998,
8840ae2b 3 1999, 2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010,
7bc14a04 4 2011, 2012 Free Software Foundation, Inc.
2c88418c 5
1322177d 6This file is part of GCC.
2c88418c 7
1322177d
LB
8GCC is free software; you can redistribute it and/or modify it under
9the terms of the GNU General Public License as published by the Free
9dcd6f09 10Software Foundation; either version 3, or (at your option) any later
1322177d 11version.
2c88418c 12
1322177d
LB
13GCC is distributed in the hope that it will be useful, but WITHOUT ANY
14WARRANTY; without even the implied warranty of MERCHANTABILITY or
15FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
16for more details.
2c88418c
RS
17
18You should have received a copy of the GNU General Public License
9dcd6f09
NC
19along with GCC; see the file COPYING3. If not see
20<http://www.gnu.org/licenses/>. */
2c88418c
RS
21
22
23#include "config.h"
670ee920 24#include "system.h"
4977bab6
ZW
25#include "coretypes.h"
26#include "tm.h"
718f9c0f 27#include "diagnostic-core.h"
3335f1d9 28#include "hard-reg-set.h"
9f02e6a5 29#include "rtl.h"
bc204393
RH
30#include "insn-config.h"
31#include "recog.h"
f894b69b
PB
32#include "target.h"
33#include "output.h"
91ea4f8d 34#include "tm_p.h"
f5eb5fd0 35#include "flags.h"
66fd46b6 36#include "regs.h"
2f93eea8 37#include "function.h"
6fb5fa3c 38#include "df.h"
7ffb5e78 39#include "tree.h"
5936d944 40#include "emit-rtl.h" /* FIXME: Can go away once crtl is moved to rtl.h. */
2c88418c 41
e2373f95 42/* Forward declarations */
7bc980e1 43static void set_of_1 (rtx, const_rtx, void *);
f7d504c2
KG
44static bool covers_regno_p (const_rtx, unsigned int);
45static bool covers_regno_no_parallel_p (const_rtx, unsigned int);
0c20a65f 46static int rtx_referenced_p_1 (rtx *, void *);
f7d504c2 47static int computed_jump_p_1 (const_rtx);
7bc980e1 48static void parms_set (rtx, const_rtx, void *);
2a1777af 49
fa233e34
KG
50static unsigned HOST_WIDE_INT cached_nonzero_bits (const_rtx, enum machine_mode,
51 const_rtx, enum machine_mode,
2f93eea8 52 unsigned HOST_WIDE_INT);
fa233e34
KG
53static unsigned HOST_WIDE_INT nonzero_bits1 (const_rtx, enum machine_mode,
54 const_rtx, enum machine_mode,
2f93eea8 55 unsigned HOST_WIDE_INT);
fa233e34 56static unsigned int cached_num_sign_bit_copies (const_rtx, enum machine_mode, const_rtx,
2f93eea8
PB
57 enum machine_mode,
58 unsigned int);
fa233e34 59static unsigned int num_sign_bit_copies1 (const_rtx, enum machine_mode, const_rtx,
2f93eea8
PB
60 enum machine_mode, unsigned int);
61
cf94b0fc
PB
62/* Offset of the first 'e', 'E' or 'V' operand for each rtx code, or
63 -1 if a code has no such operand. */
64static int non_rtx_starting_operands[NUM_RTX_CODE];
65
b12cbf2c
AN
66/* Truncation narrows the mode from SOURCE mode to DESTINATION mode.
67 If TARGET_MODE_REP_EXTENDED (DESTINATION, DESTINATION_REP) is
68 SIGN_EXTEND then while narrowing we also have to enforce the
69 representation and sign-extend the value to mode DESTINATION_REP.
70
71 If the value is already sign-extended to DESTINATION_REP mode we
72 can just switch to DESTINATION mode on it. For each pair of
73 integral modes SOURCE and DESTINATION, when truncating from SOURCE
74 to DESTINATION, NUM_SIGN_BIT_COPIES_IN_REP[SOURCE][DESTINATION]
75 contains the number of high-order bits in SOURCE that have to be
76 copies of the sign-bit so that we can do this mode-switch to
77 DESTINATION. */
78
79static unsigned int
80num_sign_bit_copies_in_rep[MAX_MODE_INT + 1][MAX_MODE_INT + 1];
2c88418c
RS
81\f
82/* Return 1 if the value of X is unstable
83 (would be different at a different point in the program).
84 The frame pointer, arg pointer, etc. are considered stable
85 (within one function) and so is anything marked `unchanging'. */
86
87int
f7d504c2 88rtx_unstable_p (const_rtx x)
2c88418c 89{
f7d504c2 90 const RTX_CODE code = GET_CODE (x);
b3694847
SS
91 int i;
92 const char *fmt;
2c88418c 93
ae0fb1b9
JW
94 switch (code)
95 {
96 case MEM:
389fdba0 97 return !MEM_READONLY_P (x) || rtx_unstable_p (XEXP (x, 0));
2c88418c 98
ae0fb1b9 99 case CONST:
d8116890 100 CASE_CONST_ANY:
ae0fb1b9
JW
101 case SYMBOL_REF:
102 case LABEL_REF:
103 return 0;
2c88418c 104
ae0fb1b9
JW
105 case REG:
106 /* As in rtx_varies_p, we have to use the actual rtx, not reg number. */
c0fc376b 107 if (x == frame_pointer_rtx || x == hard_frame_pointer_rtx
3335f1d9 108 /* The arg pointer varies if it is not a fixed register. */
389fdba0 109 || (x == arg_pointer_rtx && fixed_regs[ARG_POINTER_REGNUM]))
c0fc376b 110 return 0;
c0fc376b
RH
111 /* ??? When call-clobbered, the value is stable modulo the restore
112 that must happen after a call. This currently screws up local-alloc
113 into believing that the restore is not needed. */
f8fe0a4a 114 if (!PIC_OFFSET_TABLE_REG_CALL_CLOBBERED && x == pic_offset_table_rtx)
c0fc376b 115 return 0;
c0fc376b 116 return 1;
ae0fb1b9
JW
117
118 case ASM_OPERANDS:
119 if (MEM_VOLATILE_P (x))
120 return 1;
121
5d3cc252 122 /* Fall through. */
ae0fb1b9
JW
123
124 default:
125 break;
126 }
2c88418c
RS
127
128 fmt = GET_RTX_FORMAT (code);
129 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
130 if (fmt[i] == 'e')
9c82ac6b
JW
131 {
132 if (rtx_unstable_p (XEXP (x, i)))
133 return 1;
134 }
135 else if (fmt[i] == 'E')
136 {
137 int j;
138 for (j = 0; j < XVECLEN (x, i); j++)
139 if (rtx_unstable_p (XVECEXP (x, i, j)))
140 return 1;
141 }
142
2c88418c
RS
143 return 0;
144}
145
146/* Return 1 if X has a value that can vary even between two
147 executions of the program. 0 means X can be compared reliably
148 against certain constants or near-constants.
e38fe8e0
BS
149 FOR_ALIAS is nonzero if we are called from alias analysis; if it is
150 zero, we are slightly more conservative.
2c88418c
RS
151 The frame pointer and the arg pointer are considered constant. */
152
4f588890
KG
153bool
154rtx_varies_p (const_rtx x, bool for_alias)
2c88418c 155{
e978d62e 156 RTX_CODE code;
b3694847
SS
157 int i;
158 const char *fmt;
2c88418c 159
e978d62e
PB
160 if (!x)
161 return 0;
162
163 code = GET_CODE (x);
2c88418c
RS
164 switch (code)
165 {
166 case MEM:
389fdba0 167 return !MEM_READONLY_P (x) || rtx_varies_p (XEXP (x, 0), for_alias);
55efb413 168
2c88418c 169 case CONST:
d8116890 170 CASE_CONST_ANY:
2c88418c
RS
171 case SYMBOL_REF:
172 case LABEL_REF:
173 return 0;
174
175 case REG:
176 /* Note that we have to test for the actual rtx used for the frame
177 and arg pointers and not just the register number in case we have
178 eliminated the frame and/or arg pointer and are using it
179 for pseudos. */
c0fc376b 180 if (x == frame_pointer_rtx || x == hard_frame_pointer_rtx
3335f1d9
JL
181 /* The arg pointer varies if it is not a fixed register. */
182 || (x == arg_pointer_rtx && fixed_regs[ARG_POINTER_REGNUM]))
c0fc376b 183 return 0;
e38fe8e0 184 if (x == pic_offset_table_rtx
e38fe8e0
BS
185 /* ??? When call-clobbered, the value is stable modulo the restore
186 that must happen after a call. This currently screws up
187 local-alloc into believing that the restore is not needed, so we
188 must return 0 only if we are called from alias analysis. */
f8fe0a4a 189 && (!PIC_OFFSET_TABLE_REG_CALL_CLOBBERED || for_alias))
e38fe8e0 190 return 0;
c0fc376b 191 return 1;
2c88418c
RS
192
193 case LO_SUM:
194 /* The operand 0 of a LO_SUM is considered constant
e7d96a83
JW
195 (in fact it is related specifically to operand 1)
196 during alias analysis. */
197 return (! for_alias && rtx_varies_p (XEXP (x, 0), for_alias))
198 || rtx_varies_p (XEXP (x, 1), for_alias);
a6a2274a 199
ae0fb1b9
JW
200 case ASM_OPERANDS:
201 if (MEM_VOLATILE_P (x))
202 return 1;
203
5d3cc252 204 /* Fall through. */
ae0fb1b9 205
e9a25f70
JL
206 default:
207 break;
2c88418c
RS
208 }
209
210 fmt = GET_RTX_FORMAT (code);
211 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
212 if (fmt[i] == 'e')
9c82ac6b 213 {
e38fe8e0 214 if (rtx_varies_p (XEXP (x, i), for_alias))
9c82ac6b
JW
215 return 1;
216 }
217 else if (fmt[i] == 'E')
218 {
219 int j;
220 for (j = 0; j < XVECLEN (x, i); j++)
e38fe8e0 221 if (rtx_varies_p (XVECEXP (x, i, j), for_alias))
9c82ac6b
JW
222 return 1;
223 }
224
2c88418c
RS
225 return 0;
226}
227
2358ff91
EB
228/* Return nonzero if the use of X as an address in a MEM can cause a trap.
229 MODE is the mode of the MEM (not that of X) and UNALIGNED_MEMS controls
230 whether nonzero is returned for unaligned memory accesses on strict
231 alignment machines. */
2c88418c 232
2358ff91 233static int
48e8382e
PB
234rtx_addr_can_trap_p_1 (const_rtx x, HOST_WIDE_INT offset, HOST_WIDE_INT size,
235 enum machine_mode mode, bool unaligned_mems)
2c88418c 236{
b3694847 237 enum rtx_code code = GET_CODE (x);
2c88418c 238
48e8382e
PB
239 if (STRICT_ALIGNMENT
240 && unaligned_mems
241 && GET_MODE_SIZE (mode) != 0)
242 {
243 HOST_WIDE_INT actual_offset = offset;
244#ifdef SPARC_STACK_BOUNDARY_HACK
245 /* ??? The SPARC port may claim a STACK_BOUNDARY higher than
246 the real alignment of %sp. However, when it does this, the
247 alignment of %sp+STACK_POINTER_OFFSET is STACK_BOUNDARY. */
248 if (SPARC_STACK_BOUNDARY_HACK
249 && (x == stack_pointer_rtx || x == hard_frame_pointer_rtx))
250 actual_offset -= STACK_POINTER_OFFSET;
251#endif
252
65a74b5d
PB
253 if (actual_offset % GET_MODE_SIZE (mode) != 0)
254 return 1;
48e8382e
PB
255 }
256
2c88418c
RS
257 switch (code)
258 {
259 case SYMBOL_REF:
48e8382e
PB
260 if (SYMBOL_REF_WEAK (x))
261 return 1;
262 if (!CONSTANT_POOL_ADDRESS_P (x))
263 {
264 tree decl;
265 HOST_WIDE_INT decl_size;
266
267 if (offset < 0)
268 return 1;
269 if (size == 0)
270 size = GET_MODE_SIZE (mode);
271 if (size == 0)
272 return offset != 0;
273
274 /* If the size of the access or of the symbol is unknown,
275 assume the worst. */
276 decl = SYMBOL_REF_DECL (x);
277
278 /* Else check that the access is in bounds. TODO: restructure
71c00b5c 279 expr_size/tree_expr_size/int_expr_size and just use the latter. */
48e8382e
PB
280 if (!decl)
281 decl_size = -1;
282 else if (DECL_P (decl) && DECL_SIZE_UNIT (decl))
283 decl_size = (host_integerp (DECL_SIZE_UNIT (decl), 0)
284 ? tree_low_cst (DECL_SIZE_UNIT (decl), 0)
285 : -1);
286 else if (TREE_CODE (decl) == STRING_CST)
287 decl_size = TREE_STRING_LENGTH (decl);
288 else if (TYPE_SIZE_UNIT (TREE_TYPE (decl)))
289 decl_size = int_size_in_bytes (TREE_TYPE (decl));
290 else
291 decl_size = -1;
292
293 return (decl_size <= 0 ? offset != 0 : offset + size > decl_size);
294 }
295
296 return 0;
ff0b6b99 297
2c88418c 298 case LABEL_REF:
2c88418c
RS
299 return 0;
300
301 case REG:
302 /* As in rtx_varies_p, we have to use the actual rtx, not reg number. */
4f73495e
RH
303 if (x == frame_pointer_rtx || x == hard_frame_pointer_rtx
304 || x == stack_pointer_rtx
305 /* The arg pointer varies if it is not a fixed register. */
306 || (x == arg_pointer_rtx && fixed_regs[ARG_POINTER_REGNUM]))
307 return 0;
308 /* All of the virtual frame registers are stack references. */
309 if (REGNO (x) >= FIRST_VIRTUAL_REGISTER
310 && REGNO (x) <= LAST_VIRTUAL_REGISTER)
311 return 0;
312 return 1;
2c88418c
RS
313
314 case CONST:
48e8382e
PB
315 return rtx_addr_can_trap_p_1 (XEXP (x, 0), offset, size,
316 mode, unaligned_mems);
2c88418c
RS
317
318 case PLUS:
2358ff91 319 /* An address is assumed not to trap if:
48e8382e
PB
320 - it is the pic register plus a constant. */
321 if (XEXP (x, 0) == pic_offset_table_rtx && CONSTANT_P (XEXP (x, 1)))
322 return 0;
323
324 /* - or it is an address that can't trap plus a constant integer,
2358ff91
EB
325 with the proper remainder modulo the mode size if we are
326 considering unaligned memory references. */
481683e1 327 if (CONST_INT_P (XEXP (x, 1))
48e8382e
PB
328 && !rtx_addr_can_trap_p_1 (XEXP (x, 0), offset + INTVAL (XEXP (x, 1)),
329 size, mode, unaligned_mems))
2358ff91
EB
330 return 0;
331
332 return 1;
2c88418c
RS
333
334 case LO_SUM:
4f73495e 335 case PRE_MODIFY:
48e8382e
PB
336 return rtx_addr_can_trap_p_1 (XEXP (x, 1), offset, size,
337 mode, unaligned_mems);
4f73495e
RH
338
339 case PRE_DEC:
340 case PRE_INC:
341 case POST_DEC:
342 case POST_INC:
343 case POST_MODIFY:
48e8382e
PB
344 return rtx_addr_can_trap_p_1 (XEXP (x, 0), offset, size,
345 mode, unaligned_mems);
4f73495e 346
e9a25f70
JL
347 default:
348 break;
2c88418c
RS
349 }
350
351 /* If it isn't one of the case above, it can cause a trap. */
352 return 1;
353}
354
2358ff91
EB
355/* Return nonzero if the use of X as an address in a MEM can cause a trap. */
356
357int
f7d504c2 358rtx_addr_can_trap_p (const_rtx x)
2358ff91 359{
48e8382e 360 return rtx_addr_can_trap_p_1 (x, 0, 0, VOIDmode, false);
2358ff91
EB
361}
362
4977bab6
ZW
363/* Return true if X is an address that is known to not be zero. */
364
365bool
f7d504c2 366nonzero_address_p (const_rtx x)
4977bab6 367{
f7d504c2 368 const enum rtx_code code = GET_CODE (x);
4977bab6
ZW
369
370 switch (code)
371 {
372 case SYMBOL_REF:
373 return !SYMBOL_REF_WEAK (x);
374
375 case LABEL_REF:
376 return true;
377
4977bab6
ZW
378 case REG:
379 /* As in rtx_varies_p, we have to use the actual rtx, not reg number. */
380 if (x == frame_pointer_rtx || x == hard_frame_pointer_rtx
381 || x == stack_pointer_rtx
382 || (x == arg_pointer_rtx && fixed_regs[ARG_POINTER_REGNUM]))
383 return true;
384 /* All of the virtual frame registers are stack references. */
385 if (REGNO (x) >= FIRST_VIRTUAL_REGISTER
386 && REGNO (x) <= LAST_VIRTUAL_REGISTER)
387 return true;
388 return false;
389
390 case CONST:
391 return nonzero_address_p (XEXP (x, 0));
392
393 case PLUS:
481683e1 394 if (CONST_INT_P (XEXP (x, 1)))
942d7821 395 return nonzero_address_p (XEXP (x, 0));
4977bab6
ZW
396 /* Handle PIC references. */
397 else if (XEXP (x, 0) == pic_offset_table_rtx
398 && CONSTANT_P (XEXP (x, 1)))
399 return true;
400 return false;
401
402 case PRE_MODIFY:
403 /* Similar to the above; allow positive offsets. Further, since
404 auto-inc is only allowed in memories, the register must be a
405 pointer. */
481683e1 406 if (CONST_INT_P (XEXP (x, 1))
4977bab6
ZW
407 && INTVAL (XEXP (x, 1)) > 0)
408 return true;
409 return nonzero_address_p (XEXP (x, 0));
410
411 case PRE_INC:
412 /* Similarly. Further, the offset is always positive. */
413 return true;
414
415 case PRE_DEC:
416 case POST_DEC:
417 case POST_INC:
418 case POST_MODIFY:
419 return nonzero_address_p (XEXP (x, 0));
420
421 case LO_SUM:
422 return nonzero_address_p (XEXP (x, 1));
423
424 default:
425 break;
426 }
427
428 /* If it isn't one of the case above, might be zero. */
429 return false;
430}
431
a6a2274a 432/* Return 1 if X refers to a memory location whose address
2c88418c 433 cannot be compared reliably with constant addresses,
a6a2274a 434 or if X refers to a BLKmode memory object.
e38fe8e0
BS
435 FOR_ALIAS is nonzero if we are called from alias analysis; if it is
436 zero, we are slightly more conservative. */
2c88418c 437
4f588890
KG
438bool
439rtx_addr_varies_p (const_rtx x, bool for_alias)
2c88418c 440{
b3694847
SS
441 enum rtx_code code;
442 int i;
443 const char *fmt;
2c88418c
RS
444
445 if (x == 0)
446 return 0;
447
448 code = GET_CODE (x);
449 if (code == MEM)
e38fe8e0 450 return GET_MODE (x) == BLKmode || rtx_varies_p (XEXP (x, 0), for_alias);
2c88418c
RS
451
452 fmt = GET_RTX_FORMAT (code);
453 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
454 if (fmt[i] == 'e')
833c0b26 455 {
e38fe8e0 456 if (rtx_addr_varies_p (XEXP (x, i), for_alias))
833c0b26
RK
457 return 1;
458 }
459 else if (fmt[i] == 'E')
460 {
461 int j;
462 for (j = 0; j < XVECLEN (x, i); j++)
e38fe8e0 463 if (rtx_addr_varies_p (XVECEXP (x, i, j), for_alias))
833c0b26
RK
464 return 1;
465 }
2c88418c
RS
466 return 0;
467}
468\f
469/* Return the value of the integer term in X, if one is apparent;
470 otherwise return 0.
471 Only obvious integer terms are detected.
3ef42a0c 472 This is used in cse.c with the `related_value' field. */
2c88418c 473
c166a311 474HOST_WIDE_INT
f7d504c2 475get_integer_term (const_rtx x)
2c88418c
RS
476{
477 if (GET_CODE (x) == CONST)
478 x = XEXP (x, 0);
479
480 if (GET_CODE (x) == MINUS
481683e1 481 && CONST_INT_P (XEXP (x, 1)))
2c88418c
RS
482 return - INTVAL (XEXP (x, 1));
483 if (GET_CODE (x) == PLUS
481683e1 484 && CONST_INT_P (XEXP (x, 1)))
2c88418c
RS
485 return INTVAL (XEXP (x, 1));
486 return 0;
487}
488
489/* If X is a constant, return the value sans apparent integer term;
490 otherwise return 0.
491 Only obvious integer terms are detected. */
492
493rtx
f7d504c2 494get_related_value (const_rtx x)
2c88418c
RS
495{
496 if (GET_CODE (x) != CONST)
497 return 0;
498 x = XEXP (x, 0);
499 if (GET_CODE (x) == PLUS
481683e1 500 && CONST_INT_P (XEXP (x, 1)))
2c88418c
RS
501 return XEXP (x, 0);
502 else if (GET_CODE (x) == MINUS
481683e1 503 && CONST_INT_P (XEXP (x, 1)))
2c88418c
RS
504 return XEXP (x, 0);
505 return 0;
506}
507\f
7ffb5e78
RS
508/* Return true if SYMBOL is a SYMBOL_REF and OFFSET + SYMBOL points
509 to somewhere in the same object or object_block as SYMBOL. */
510
511bool
f7d504c2 512offset_within_block_p (const_rtx symbol, HOST_WIDE_INT offset)
7ffb5e78
RS
513{
514 tree decl;
515
516 if (GET_CODE (symbol) != SYMBOL_REF)
517 return false;
518
519 if (offset == 0)
520 return true;
521
522 if (offset > 0)
523 {
524 if (CONSTANT_POOL_ADDRESS_P (symbol)
525 && offset < (int) GET_MODE_SIZE (get_pool_mode (symbol)))
526 return true;
527
528 decl = SYMBOL_REF_DECL (symbol);
529 if (decl && offset < int_size_in_bytes (TREE_TYPE (decl)))
530 return true;
531 }
532
533 if (SYMBOL_REF_HAS_BLOCK_INFO_P (symbol)
534 && SYMBOL_REF_BLOCK (symbol)
535 && SYMBOL_REF_BLOCK_OFFSET (symbol) >= 0
536 && ((unsigned HOST_WIDE_INT) offset + SYMBOL_REF_BLOCK_OFFSET (symbol)
537 < (unsigned HOST_WIDE_INT) SYMBOL_REF_BLOCK (symbol)->size))
538 return true;
539
540 return false;
541}
542
543/* Split X into a base and a constant offset, storing them in *BASE_OUT
544 and *OFFSET_OUT respectively. */
545
546void
547split_const (rtx x, rtx *base_out, rtx *offset_out)
548{
549 if (GET_CODE (x) == CONST)
550 {
551 x = XEXP (x, 0);
481683e1 552 if (GET_CODE (x) == PLUS && CONST_INT_P (XEXP (x, 1)))
7ffb5e78
RS
553 {
554 *base_out = XEXP (x, 0);
555 *offset_out = XEXP (x, 1);
556 return;
557 }
558 }
559 *base_out = x;
560 *offset_out = const0_rtx;
561}
562\f
4b983fdc
RH
563/* Return the number of places FIND appears within X. If COUNT_DEST is
564 zero, we do not count occurrences inside the destination of a SET. */
565
566int
f7d504c2 567count_occurrences (const_rtx x, const_rtx find, int count_dest)
4b983fdc
RH
568{
569 int i, j;
570 enum rtx_code code;
571 const char *format_ptr;
572 int count;
573
574 if (x == find)
575 return 1;
576
577 code = GET_CODE (x);
578
579 switch (code)
580 {
581 case REG:
d8116890 582 CASE_CONST_ANY:
4b983fdc
RH
583 case SYMBOL_REF:
584 case CODE_LABEL:
585 case PC:
586 case CC0:
587 return 0;
588
2372a062
BS
589 case EXPR_LIST:
590 count = count_occurrences (XEXP (x, 0), find, count_dest);
591 if (XEXP (x, 1))
592 count += count_occurrences (XEXP (x, 1), find, count_dest);
593 return count;
b8698a0f 594
4b983fdc 595 case MEM:
3c0cb5de 596 if (MEM_P (find) && rtx_equal_p (x, find))
4b983fdc
RH
597 return 1;
598 break;
599
600 case SET:
601 if (SET_DEST (x) == find && ! count_dest)
602 return count_occurrences (SET_SRC (x), find, count_dest);
603 break;
604
605 default:
606 break;
607 }
608
609 format_ptr = GET_RTX_FORMAT (code);
610 count = 0;
611
612 for (i = 0; i < GET_RTX_LENGTH (code); i++)
613 {
614 switch (*format_ptr++)
615 {
616 case 'e':
617 count += count_occurrences (XEXP (x, i), find, count_dest);
618 break;
619
620 case 'E':
621 for (j = 0; j < XVECLEN (x, i); j++)
622 count += count_occurrences (XVECEXP (x, i, j), find, count_dest);
623 break;
624 }
625 }
626 return count;
627}
6fb5fa3c 628
7bc14a04
PB
629\f
630/* Return TRUE if OP is a register or subreg of a register that
631 holds an unsigned quantity. Otherwise, return FALSE. */
632
633bool
634unsigned_reg_p (rtx op)
635{
636 if (REG_P (op)
637 && REG_EXPR (op)
638 && TYPE_UNSIGNED (TREE_TYPE (REG_EXPR (op))))
639 return true;
640
641 if (GET_CODE (op) == SUBREG
642 && SUBREG_PROMOTED_UNSIGNED_P (op))
643 return true;
644
645 return false;
646}
647
4b983fdc 648\f
2c88418c
RS
649/* Nonzero if register REG appears somewhere within IN.
650 Also works if REG is not a register; in this case it checks
651 for a subexpression of IN that is Lisp "equal" to REG. */
652
653int
f7d504c2 654reg_mentioned_p (const_rtx reg, const_rtx in)
2c88418c 655{
b3694847
SS
656 const char *fmt;
657 int i;
658 enum rtx_code code;
2c88418c
RS
659
660 if (in == 0)
661 return 0;
662
663 if (reg == in)
664 return 1;
665
666 if (GET_CODE (in) == LABEL_REF)
667 return reg == XEXP (in, 0);
668
669 code = GET_CODE (in);
670
671 switch (code)
672 {
673 /* Compare registers by number. */
674 case REG:
f8cfc6aa 675 return REG_P (reg) && REGNO (in) == REGNO (reg);
2c88418c
RS
676
677 /* These codes have no constituent expressions
678 and are unique. */
679 case SCRATCH:
680 case CC0:
681 case PC:
682 return 0;
683
d8116890 684 CASE_CONST_ANY:
2c88418c
RS
685 /* These are kept unique for a given value. */
686 return 0;
a6a2274a 687
e9a25f70
JL
688 default:
689 break;
2c88418c
RS
690 }
691
692 if (GET_CODE (reg) == code && rtx_equal_p (reg, in))
693 return 1;
694
695 fmt = GET_RTX_FORMAT (code);
696
697 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
698 {
699 if (fmt[i] == 'E')
700 {
b3694847 701 int j;
2c88418c
RS
702 for (j = XVECLEN (in, i) - 1; j >= 0; j--)
703 if (reg_mentioned_p (reg, XVECEXP (in, i, j)))
704 return 1;
705 }
706 else if (fmt[i] == 'e'
707 && reg_mentioned_p (reg, XEXP (in, i)))
708 return 1;
709 }
710 return 0;
711}
712\f
713/* Return 1 if in between BEG and END, exclusive of BEG and END, there is
714 no CODE_LABEL insn. */
715
716int
f7d504c2 717no_labels_between_p (const_rtx beg, const_rtx end)
2c88418c 718{
b3694847 719 rtx p;
978f547f
JH
720 if (beg == end)
721 return 0;
2c88418c 722 for (p = NEXT_INSN (beg); p != end; p = NEXT_INSN (p))
4b4bf941 723 if (LABEL_P (p))
2c88418c
RS
724 return 0;
725 return 1;
726}
727
728/* Nonzero if register REG is used in an insn between
729 FROM_INSN and TO_INSN (exclusive of those two). */
730
731int
f7d504c2 732reg_used_between_p (const_rtx reg, const_rtx from_insn, const_rtx to_insn)
2c88418c 733{
b3694847 734 rtx insn;
2c88418c
RS
735
736 if (from_insn == to_insn)
737 return 0;
738
739 for (insn = NEXT_INSN (from_insn); insn != to_insn; insn = NEXT_INSN (insn))
b5b8b0ac 740 if (NONDEBUG_INSN_P (insn)
8f3e7a26 741 && (reg_overlap_mentioned_p (reg, PATTERN (insn))
76dd5923 742 || (CALL_P (insn) && find_reg_fusage (insn, USE, reg))))
2c88418c
RS
743 return 1;
744 return 0;
745}
746\f
747/* Nonzero if the old value of X, a register, is referenced in BODY. If X
748 is entirely replaced by a new value and the only use is as a SET_DEST,
749 we do not consider it a reference. */
750
751int
f7d504c2 752reg_referenced_p (const_rtx x, const_rtx body)
2c88418c
RS
753{
754 int i;
755
756 switch (GET_CODE (body))
757 {
758 case SET:
759 if (reg_overlap_mentioned_p (x, SET_SRC (body)))
760 return 1;
761
762 /* If the destination is anything other than CC0, PC, a REG or a SUBREG
763 of a REG that occupies all of the REG, the insn references X if
764 it is mentioned in the destination. */
765 if (GET_CODE (SET_DEST (body)) != CC0
766 && GET_CODE (SET_DEST (body)) != PC
f8cfc6aa 767 && !REG_P (SET_DEST (body))
2c88418c 768 && ! (GET_CODE (SET_DEST (body)) == SUBREG
f8cfc6aa 769 && REG_P (SUBREG_REG (SET_DEST (body)))
2c88418c
RS
770 && (((GET_MODE_SIZE (GET_MODE (SUBREG_REG (SET_DEST (body))))
771 + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD)
772 == ((GET_MODE_SIZE (GET_MODE (SET_DEST (body)))
773 + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD)))
774 && reg_overlap_mentioned_p (x, SET_DEST (body)))
775 return 1;
e9a25f70 776 return 0;
2c88418c
RS
777
778 case ASM_OPERANDS:
779 for (i = ASM_OPERANDS_INPUT_LENGTH (body) - 1; i >= 0; i--)
780 if (reg_overlap_mentioned_p (x, ASM_OPERANDS_INPUT (body, i)))
781 return 1;
e9a25f70 782 return 0;
2c88418c
RS
783
784 case CALL:
785 case USE:
14a774a9 786 case IF_THEN_ELSE:
2c88418c
RS
787 return reg_overlap_mentioned_p (x, body);
788
789 case TRAP_IF:
790 return reg_overlap_mentioned_p (x, TRAP_CONDITION (body));
791
21b8482a
JJ
792 case PREFETCH:
793 return reg_overlap_mentioned_p (x, XEXP (body, 0));
794
2ac4fed0
RK
795 case UNSPEC:
796 case UNSPEC_VOLATILE:
2f9fb4c2
R
797 for (i = XVECLEN (body, 0) - 1; i >= 0; i--)
798 if (reg_overlap_mentioned_p (x, XVECEXP (body, 0, i)))
799 return 1;
800 return 0;
801
2c88418c
RS
802 case PARALLEL:
803 for (i = XVECLEN (body, 0) - 1; i >= 0; i--)
804 if (reg_referenced_p (x, XVECEXP (body, 0, i)))
805 return 1;
e9a25f70 806 return 0;
a6a2274a 807
0d3ffb5a 808 case CLOBBER:
3c0cb5de 809 if (MEM_P (XEXP (body, 0)))
0d3ffb5a
GK
810 if (reg_overlap_mentioned_p (x, XEXP (XEXP (body, 0), 0)))
811 return 1;
812 return 0;
813
0c99ec5c
RH
814 case COND_EXEC:
815 if (reg_overlap_mentioned_p (x, COND_EXEC_TEST (body)))
816 return 1;
817 return reg_referenced_p (x, COND_EXEC_CODE (body));
818
e9a25f70
JL
819 default:
820 return 0;
2c88418c 821 }
2c88418c 822}
2c88418c
RS
823\f
824/* Nonzero if register REG is set or clobbered in an insn between
825 FROM_INSN and TO_INSN (exclusive of those two). */
826
827int
ed7a4b4b 828reg_set_between_p (const_rtx reg, const_rtx from_insn, const_rtx to_insn)
2c88418c 829{
ed7a4b4b 830 const_rtx insn;
2c88418c
RS
831
832 if (from_insn == to_insn)
833 return 0;
834
835 for (insn = NEXT_INSN (from_insn); insn != to_insn; insn = NEXT_INSN (insn))
2c3c49de 836 if (INSN_P (insn) && reg_set_p (reg, insn))
2c88418c
RS
837 return 1;
838 return 0;
839}
840
841/* Internals of reg_set_between_p. */
2c88418c 842int
ed7a4b4b 843reg_set_p (const_rtx reg, const_rtx insn)
2c88418c 844{
2c88418c
RS
845 /* We can be passed an insn or part of one. If we are passed an insn,
846 check if a side-effect of the insn clobbers REG. */
4977bab6
ZW
847 if (INSN_P (insn)
848 && (FIND_REG_INC_NOTE (insn, reg)
4b4bf941 849 || (CALL_P (insn)
f8cfc6aa 850 && ((REG_P (reg)
4f1605d2 851 && REGNO (reg) < FIRST_PSEUDO_REGISTER
5da20cfe
RS
852 && overlaps_hard_reg_set_p (regs_invalidated_by_call,
853 GET_MODE (reg), REGNO (reg)))
3c0cb5de 854 || MEM_P (reg)
4977bab6
ZW
855 || find_reg_fusage (insn, CLOBBER, reg)))))
856 return 1;
2c88418c 857
91b2d119 858 return set_of (reg, insn) != NULL_RTX;
2c88418c
RS
859}
860
861/* Similar to reg_set_between_p, but check all registers in X. Return 0
862 only if none of them are modified between START and END. Return 1 if
fa10beec 863 X contains a MEM; this routine does use memory aliasing. */
2c88418c
RS
864
865int
9678086d 866modified_between_p (const_rtx x, const_rtx start, const_rtx end)
2c88418c 867{
9678086d 868 const enum rtx_code code = GET_CODE (x);
6f7d635c 869 const char *fmt;
f8163c92 870 int i, j;
7b52eede
JH
871 rtx insn;
872
873 if (start == end)
874 return 0;
2c88418c
RS
875
876 switch (code)
877 {
d8116890 878 CASE_CONST_ANY:
2c88418c
RS
879 case CONST:
880 case SYMBOL_REF:
881 case LABEL_REF:
882 return 0;
883
884 case PC:
885 case CC0:
886 return 1;
887
888 case MEM:
7b52eede 889 if (modified_between_p (XEXP (x, 0), start, end))
2c88418c 890 return 1;
550b7784
KK
891 if (MEM_READONLY_P (x))
892 return 0;
7b52eede
JH
893 for (insn = NEXT_INSN (start); insn != end; insn = NEXT_INSN (insn))
894 if (memory_modified_in_insn_p (x, insn))
895 return 1;
896 return 0;
2c88418c
RS
897 break;
898
899 case REG:
900 return reg_set_between_p (x, start, end);
a6a2274a 901
e9a25f70
JL
902 default:
903 break;
2c88418c
RS
904 }
905
906 fmt = GET_RTX_FORMAT (code);
907 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
f8163c92
RK
908 {
909 if (fmt[i] == 'e' && modified_between_p (XEXP (x, i), start, end))
910 return 1;
911
d4757e6a 912 else if (fmt[i] == 'E')
f8163c92
RK
913 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
914 if (modified_between_p (XVECEXP (x, i, j), start, end))
915 return 1;
916 }
917
918 return 0;
919}
920
921/* Similar to reg_set_p, but check all registers in X. Return 0 only if none
922 of them are modified in INSN. Return 1 if X contains a MEM; this routine
7b52eede 923 does use memory aliasing. */
f8163c92
RK
924
925int
9678086d 926modified_in_p (const_rtx x, const_rtx insn)
f8163c92 927{
9678086d 928 const enum rtx_code code = GET_CODE (x);
6f7d635c 929 const char *fmt;
f8163c92
RK
930 int i, j;
931
932 switch (code)
933 {
d8116890 934 CASE_CONST_ANY:
f8163c92
RK
935 case CONST:
936 case SYMBOL_REF:
937 case LABEL_REF:
938 return 0;
939
940 case PC:
941 case CC0:
2c88418c
RS
942 return 1;
943
f8163c92 944 case MEM:
7b52eede 945 if (modified_in_p (XEXP (x, 0), insn))
f8163c92 946 return 1;
550b7784
KK
947 if (MEM_READONLY_P (x))
948 return 0;
7b52eede
JH
949 if (memory_modified_in_insn_p (x, insn))
950 return 1;
951 return 0;
f8163c92
RK
952 break;
953
954 case REG:
955 return reg_set_p (x, insn);
e9a25f70
JL
956
957 default:
958 break;
f8163c92
RK
959 }
960
961 fmt = GET_RTX_FORMAT (code);
962 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
963 {
964 if (fmt[i] == 'e' && modified_in_p (XEXP (x, i), insn))
965 return 1;
966
d4757e6a 967 else if (fmt[i] == 'E')
f8163c92
RK
968 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
969 if (modified_in_p (XVECEXP (x, i, j), insn))
970 return 1;
971 }
972
2c88418c
RS
973 return 0;
974}
975\f
91b2d119
JH
976/* Helper function for set_of. */
977struct set_of_data
978 {
7bc980e1
KG
979 const_rtx found;
980 const_rtx pat;
91b2d119
JH
981 };
982
983static void
7bc980e1 984set_of_1 (rtx x, const_rtx pat, void *data1)
91b2d119 985{
7bc980e1
KG
986 struct set_of_data *const data = (struct set_of_data *) (data1);
987 if (rtx_equal_p (x, data->pat)
988 || (!MEM_P (x) && reg_overlap_mentioned_p (data->pat, x)))
989 data->found = pat;
91b2d119
JH
990}
991
992/* Give an INSN, return a SET or CLOBBER expression that does modify PAT
eaec9b3d 993 (either directly or via STRICT_LOW_PART and similar modifiers). */
7bc980e1
KG
994const_rtx
995set_of (const_rtx pat, const_rtx insn)
91b2d119
JH
996{
997 struct set_of_data data;
998 data.found = NULL_RTX;
999 data.pat = pat;
1000 note_stores (INSN_P (insn) ? PATTERN (insn) : insn, set_of_1, &data);
1001 return data.found;
1002}
e2724e63
BS
1003
1004/* This function, called through note_stores, collects sets and
1005 clobbers of hard registers in a HARD_REG_SET, which is pointed to
1006 by DATA. */
1007void
1008record_hard_reg_sets (rtx x, const_rtx pat ATTRIBUTE_UNUSED, void *data)
1009{
1010 HARD_REG_SET *pset = (HARD_REG_SET *)data;
1011 if (REG_P (x) && HARD_REGISTER_P (x))
1012 add_to_hard_reg_set (pset, GET_MODE (x), REGNO (x));
1013}
1014
1015/* Examine INSN, and compute the set of hard registers written by it.
1016 Store it in *PSET. Should only be called after reload. */
1017void
1018find_all_hard_reg_sets (const_rtx insn, HARD_REG_SET *pset)
1019{
1020 rtx link;
1021
1022 CLEAR_HARD_REG_SET (*pset);
1023 note_stores (PATTERN (insn), record_hard_reg_sets, pset);
1024 if (CALL_P (insn))
1025 IOR_HARD_REG_SET (*pset, call_used_reg_set);
1026 for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
1027 if (REG_NOTE_KIND (link) == REG_INC)
1028 record_hard_reg_sets (XEXP (link, 0), NULL, pset);
1029}
1030
1031/* A for_each_rtx subroutine of record_hard_reg_uses. */
1032static int
1033record_hard_reg_uses_1 (rtx *px, void *data)
1034{
1035 rtx x = *px;
1036 HARD_REG_SET *pused = (HARD_REG_SET *)data;
1037
1038 if (REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER)
1039 {
1040 int nregs = hard_regno_nregs[REGNO (x)][GET_MODE (x)];
1041 while (nregs-- > 0)
1042 SET_HARD_REG_BIT (*pused, REGNO (x) + nregs);
1043 }
1044 return 0;
1045}
1046
/* Like record_hard_reg_sets, but called through note_uses.  Walks the
   whole expression at *PX and records every hard register mentioned.  */
void
record_hard_reg_uses (rtx *px, void *data)
{
  for_each_rtx (px, record_hard_reg_uses_1, data);
}
91b2d119 1053\f
2c88418c
RS
/* Given an INSN, return a SET expression if this insn has only a single SET.
   It may also have CLOBBERs, USEs, or SET whose output
   will not be used, which we ignore.  Helper for the single_set macro,
   which handles the common non-PARALLEL patterns itself.  */

rtx
single_set_2 (const_rtx insn, const_rtx pat)
{
  rtx set = NULL;
  /* Nonzero once SET is known to be the live set (either it is the only
     one seen so far, or it survived the REG_UNUSED check below).  */
  int set_verified = 1;
  int i;

  if (GET_CODE (pat) == PARALLEL)
    {
      for (i = 0; i < XVECLEN (pat, 0); i++)
	{
	  rtx sub = XVECEXP (pat, 0, i);
	  switch (GET_CODE (sub))
	    {
	    case USE:
	    case CLOBBER:
	      break;

	    case SET:
	      /* We can consider insns having multiple sets, where all
		 but one are dead as single set insns.  In common case
		 only single set is present in the pattern so we want
		 to avoid checking for REG_UNUSED notes unless necessary.

		 When we reach set first time, we just expect this is
		 the single set we are looking for and only when more
		 sets are found in the insn, we check them.  */
	      if (!set_verified)
		{
		  /* A previously-recorded set whose destination is unused
		     and that has no side effects is dead: drop it.  */
		  if (find_reg_note (insn, REG_UNUSED, SET_DEST (set))
		      && !side_effects_p (set))
		    set = NULL;
		  else
		    set_verified = 1;
		}
	      if (!set)
		set = sub, set_verified = 0;
	      else if (!find_reg_note (insn, REG_UNUSED, SET_DEST (sub))
		       || side_effects_p (sub))
		/* Two live sets: not a single-set insn.  */
		return NULL_RTX;
	      break;

	    default:
	      /* Anything else (e.g. UNSPEC at top level) disqualifies.  */
	      return NULL_RTX;
	    }
	}
    }
  return set;
}
941c63ac
JL
1107
1108/* Given an INSN, return nonzero if it has more than one SET, else return
1109 zero. */
1110
5f7d3786 1111int
f7d504c2 1112multiple_sets (const_rtx insn)
941c63ac 1113{
cae8acdd 1114 int found;
941c63ac 1115 int i;
a6a2274a 1116
941c63ac 1117 /* INSN must be an insn. */
2c3c49de 1118 if (! INSN_P (insn))
941c63ac
JL
1119 return 0;
1120
1121 /* Only a PARALLEL can have multiple SETs. */
1122 if (GET_CODE (PATTERN (insn)) == PARALLEL)
1123 {
1124 for (i = 0, found = 0; i < XVECLEN (PATTERN (insn), 0); i++)
1125 if (GET_CODE (XVECEXP (PATTERN (insn), 0, i)) == SET)
1126 {
1127 /* If we have already found a SET, then return now. */
1128 if (found)
1129 return 1;
1130 else
1131 found = 1;
1132 }
1133 }
a6a2274a 1134
941c63ac
JL
1135 /* Either zero or one SET. */
1136 return 0;
1137}
2c88418c 1138\f
7142e318
JW
/* Return nonzero if the destination of SET equals the source
   and there are no side effects.  */

int
set_noop_p (const_rtx set)
{
  rtx src = SET_SRC (set);
  rtx dst = SET_DEST (set);

  /* (set (pc) (pc)) is the canonical no-op jump.  */
  if (dst == pc_rtx && src == pc_rtx)
    return 1;

  /* A memory-to-memory copy of the same location is a no-op provided
     the address computation has no side effects.  */
  if (MEM_P (dst) && MEM_P (src))
    return rtx_equal_p (dst, src) && !side_effects_p (dst);

  /* Writing a value back into the field it was extracted from is a
     no-op only for a little-endian zero-offset extraction.  */
  if (GET_CODE (dst) == ZERO_EXTRACT)
    return rtx_equal_p (XEXP (dst, 0), src)
	   && ! BYTES_BIG_ENDIAN && XEXP (dst, 2) == const0_rtx
	   && !side_effects_p (src);

  if (GET_CODE (dst) == STRICT_LOW_PART)
    dst = XEXP (dst, 0);

  /* Matching SUBREGs of the same underlying object at the same byte
     offset can be peeled off both sides.  */
  if (GET_CODE (src) == SUBREG && GET_CODE (dst) == SUBREG)
    {
      if (SUBREG_BYTE (src) != SUBREG_BYTE (dst))
	return 0;
      src = SUBREG_REG (src);
      dst = SUBREG_REG (dst);
    }

  return (REG_P (src) && REG_P (dst)
	  && REGNO (src) == REGNO (dst));
}
0005550b
JH
1173\f
/* Return nonzero if an insn consists only of SETs, each of which only sets a
   value to itself.  */

int
noop_move_p (const_rtx insn)
{
  rtx pat = PATTERN (insn);

  /* An insn already marked as a known no-op move.  */
  if (INSN_CODE (insn) == NOOP_MOVE_INSN_CODE)
    return 1;

  /* Insns carrying these notes are useful later on.  */
  if (find_reg_note (insn, REG_EQUAL, NULL_RTX))
    return 0;

  if (GET_CODE (pat) == SET && set_noop_p (pat))
    return 1;

  if (GET_CODE (pat) == PARALLEL)
    {
      int i;
      /* If nothing but SETs of registers to themselves,
	 this insn can also be deleted.  */
      for (i = 0; i < XVECLEN (pat, 0); i++)
	{
	  rtx tem = XVECEXP (pat, 0, i);

	  /* USEs and CLOBBERs do not disqualify a no-op move.  */
	  if (GET_CODE (tem) == USE
	      || GET_CODE (tem) == CLOBBER)
	    continue;

	  if (GET_CODE (tem) != SET || ! set_noop_p (tem))
	    return 0;
	}

      return 1;
    }
  return 0;
}
1213\f
7142e318 1214
63be01fb
JW
/* Return the last thing that X was assigned from before *PINSN.  If VALID_TO
   is not NULL_RTX then verify that the object is not modified up to VALID_TO.
   If the object was modified, if we hit a partial assignment to X, or hit a
   CODE_LABEL first, return X.  If we found an assignment, update *PINSN to
   point to it.  ALLOW_HWREG is set to 1 if hardware registers are allowed to
   be the src.  */

rtx
find_last_value (rtx x, rtx *pinsn, rtx valid_to, int allow_hwreg)
{
  rtx p;

  /* Scan backwards; a CODE_LABEL means control may arrive from
     elsewhere, so stop there.  */
  for (p = PREV_INSN (*pinsn); p && !LABEL_P (p);
       p = PREV_INSN (p))
    if (INSN_P (p))
      {
	rtx set = single_set (p);
	rtx note = find_reg_note (p, REG_EQUAL, NULL_RTX);

	if (set && rtx_equal_p (x, SET_DEST (set)))
	  {
	    rtx src = SET_SRC (set);

	    /* Prefer a REG_EQUAL value when it is a plain expression
	       (not an EXPR_LIST, which REG_EQUAL can also carry).  */
	    if (note && GET_CODE (XEXP (note, 0)) != EXPR_LIST)
	      src = XEXP (note, 0);

	    if ((valid_to == NULL_RTX
		 || ! modified_between_p (src, PREV_INSN (p), valid_to))
		/* Reject hard registers because we don't usually want
		   to use them; we'd rather use a pseudo.  */
		&& (! (REG_P (src)
		      && REGNO (src) < FIRST_PSEUDO_REGISTER) || allow_hwreg))
	      {
		*pinsn = p;
		return src;
	      }
	  }

	/* If set in non-simple way, we don't have a value.  */
	if (reg_set_p (x, p))
	  break;
      }

  return x;
}
2c88418c
RS
1260\f
/* Return nonzero if register in range [REGNO, ENDREGNO)
   appears either explicitly or implicitly in X
   other than being stored into.

   References contained within the substructure at LOC do not count.
   LOC may be zero, meaning don't ignore anything.  */

int
refers_to_regno_p (unsigned int regno, unsigned int endregno, const_rtx x,
		   rtx *loc)
{
  int i;
  unsigned int x_regno;
  RTX_CODE code;
  const char *fmt;

 repeat:
  /* The contents of a REG_NONNEG note is always zero, so we must come here
     upon repeat in case the last REG_NOTE is a REG_NONNEG note.  */
  if (x == 0)
    return 0;

  code = GET_CODE (x);

  switch (code)
    {
    case REG:
      x_regno = REGNO (x);

      /* If we are modifying the stack, frame, or argument pointer, it will
	 clobber a virtual register.  In fact, we could be more precise,
	 but it isn't worth it.  */
      if ((x_regno == STACK_POINTER_REGNUM
#if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
	   || x_regno == ARG_POINTER_REGNUM
#endif
	   || x_regno == FRAME_POINTER_REGNUM)
	  && regno >= FIRST_VIRTUAL_REGISTER && regno <= LAST_VIRTUAL_REGISTER)
	return 1;

      /* Overlap test between [REGNO, ENDREGNO) and X's register span.  */
      return endregno > x_regno && regno < END_REGNO (x);

    case SUBREG:
      /* If this is a SUBREG of a hard reg, we can see exactly which
	 registers are being modified.  Otherwise, handle normally.  */
      if (REG_P (SUBREG_REG (x))
	  && REGNO (SUBREG_REG (x)) < FIRST_PSEUDO_REGISTER)
	{
	  unsigned int inner_regno = subreg_regno (x);
	  unsigned int inner_endregno
	    = inner_regno + (inner_regno < FIRST_PSEUDO_REGISTER
			     ? subreg_nregs (x) : 1);

	  return endregno > inner_regno && regno < inner_endregno;
	}
      break;

    case CLOBBER:
    case SET:
      if (&SET_DEST (x) != loc
	  /* Note setting a SUBREG counts as referring to the REG it is in for
	     a pseudo but not for hard registers since we can
	     treat each word individually.  */
	  && ((GET_CODE (SET_DEST (x)) == SUBREG
	       && loc != &SUBREG_REG (SET_DEST (x))
	       && REG_P (SUBREG_REG (SET_DEST (x)))
	       && REGNO (SUBREG_REG (SET_DEST (x))) >= FIRST_PSEUDO_REGISTER
	       && refers_to_regno_p (regno, endregno,
				     SUBREG_REG (SET_DEST (x)), loc))
	      || (!REG_P (SET_DEST (x))
		  && refers_to_regno_p (regno, endregno, SET_DEST (x), loc))))
	return 1;

      /* A CLOBBER has no source; for a SET, continue with the source
	 (tail-recursion turned into a goto).  */
      if (code == CLOBBER || loc == &SET_SRC (x))
	return 0;
      x = SET_SRC (x);
      goto repeat;

    default:
      break;
    }

  /* X does not match, so try its subexpressions.  */

  fmt = GET_RTX_FORMAT (code);
  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    {
      if (fmt[i] == 'e' && loc != &XEXP (x, i))
	{
	  if (i == 0)
	    {
	      /* Tail-call elimination on the first operand.  */
	      x = XEXP (x, 0);
	      goto repeat;
	    }
	  else
	    if (refers_to_regno_p (regno, endregno, XEXP (x, i), loc))
	      return 1;
	}
      else if (fmt[i] == 'E')
	{
	  int j;
	  for (j = XVECLEN (x, i) - 1; j >= 0; j--)
	    if (loc != &XVECEXP (x, i, j)
		&& refers_to_regno_p (regno, endregno, XVECEXP (x, i, j), loc))
	      return 1;
	}
    }
  return 0;
}
1370
/* Nonzero if modifying X will affect IN.  If X is a register or a SUBREG,
   we check if any register number in X conflicts with the relevant register
   numbers.  If X is a constant, return 0.  If X is a MEM, return 1 iff IN
   contains a MEM (we don't bother checking for memory addresses that can't
   conflict because we expect this to be a rare case.  */

int
reg_overlap_mentioned_p (const_rtx x, const_rtx in)
{
  unsigned int regno, endregno;

  /* If either argument is a constant, then modifying X can not
     affect IN.  Here we look at IN, we can profitably combine
     CONSTANT_P (x) with the switch statement below.  */
  if (CONSTANT_P (in))
    return 0;

 recurse:
  switch (GET_CODE (x))
    {
    case STRICT_LOW_PART:
    case ZERO_EXTRACT:
    case SIGN_EXTRACT:
      /* Overly conservative.  */
      x = XEXP (x, 0);
      goto recurse;

    case SUBREG:
      regno = REGNO (SUBREG_REG (x));
      /* For a hard-register SUBREG, narrow to the exact registers
	 covered by the SUBREG itself.  */
      if (regno < FIRST_PSEUDO_REGISTER)
	regno = subreg_regno (x);
      endregno = regno + (regno < FIRST_PSEUDO_REGISTER
			  ? subreg_nregs (x) : 1);
      goto do_reg;

    case REG:
      regno = REGNO (x);
      endregno = END_REGNO (x);
    do_reg:
      return refers_to_regno_p (regno, endregno, in, (rtx*) 0);

    case MEM:
      {
	const char *fmt;
	int i;

	/* Any MEM in IN may alias X's MEM.  */
	if (MEM_P (in))
	  return 1;

	fmt = GET_RTX_FORMAT (GET_CODE (in));
	for (i = GET_RTX_LENGTH (GET_CODE (in)) - 1; i >= 0; i--)
	  if (fmt[i] == 'e')
	    {
	      if (reg_overlap_mentioned_p (x, XEXP (in, i)))
		return 1;
	    }
	  else if (fmt[i] == 'E')
	    {
	      int j;
	      for (j = XVECLEN (in, i) - 1; j >= 0; --j)
		if (reg_overlap_mentioned_p (x, XVECEXP (in, i, j)))
		  return 1;
	    }

	return 0;
      }

    case SCRATCH:
    case PC:
    case CC0:
      /* These have no register span; fall back to an exact-mention
	 search.  */
      return reg_mentioned_p (x, in);

    case PARALLEL:
      {
	int i;

	/* If any register in here refers to it we return true.  */
	for (i = XVECLEN (x, 0) - 1; i >= 0; i--)
	  if (XEXP (XVECEXP (x, 0, i), 0) != 0
	      && reg_overlap_mentioned_p (XEXP (XVECEXP (x, 0, i), 0), in))
	    return 1;
	return 0;
      }

    default:
      gcc_assert (CONSTANT_P (x));
      return 0;
    }
}
1460\f
/* Call FUN on each register or MEM that is stored into or clobbered by X.
   (X would be the pattern of an insn).  DATA is an arbitrary pointer,
   ignored by note_stores, but passed to FUN.

   FUN receives three arguments:
   1. the REG, MEM, CC0 or PC being stored in or clobbered,
   2. the SET or CLOBBER rtx that does the store,
   3. the pointer DATA provided to note_stores.

   If the item being stored in or clobbered is a SUBREG of a hard register,
   the SUBREG will be passed.  */

void
note_stores (const_rtx x, void (*fun) (rtx, const_rtx, void *), void *data)
{
  int i;

  /* Look through a conditional execution wrapper; the store happens
     (conditionally) to the wrapped destination.  */
  if (GET_CODE (x) == COND_EXEC)
    x = COND_EXEC_CODE (x);

  if (GET_CODE (x) == SET || GET_CODE (x) == CLOBBER)
    {
      rtx dest = SET_DEST (x);

      /* Strip wrappers that only select part of the destination; keep a
	 SUBREG of a hard register, since it identifies the exact regs.  */
      while ((GET_CODE (dest) == SUBREG
	      && (!REG_P (SUBREG_REG (dest))
		  || REGNO (SUBREG_REG (dest)) >= FIRST_PSEUDO_REGISTER))
	     || GET_CODE (dest) == ZERO_EXTRACT
	     || GET_CODE (dest) == STRICT_LOW_PART)
	dest = XEXP (dest, 0);

      /* If we have a PARALLEL, SET_DEST is a list of EXPR_LIST expressions,
	 each of whose first operand is a register.  */
      if (GET_CODE (dest) == PARALLEL)
	{
	  for (i = XVECLEN (dest, 0) - 1; i >= 0; i--)
	    if (XEXP (XVECEXP (dest, 0, i), 0) != 0)
	      (*fun) (XEXP (XVECEXP (dest, 0, i), 0), x, data);
	}
      else
	(*fun) (dest, x, data);
    }

  else if (GET_CODE (x) == PARALLEL)
    for (i = XVECLEN (x, 0) - 1; i >= 0; i--)
      note_stores (XVECEXP (x, 0, i), fun, data);
}
2c88418c 1508\f
e2373f95
RK
/* Like note_stores, but call FUN for each expression that is being
   referenced in PBODY, a pointer to the PATTERN of an insn.  We only call
   FUN for each expression, not any interior subexpressions.  FUN receives a
   pointer to the expression and the DATA passed to this function.

   Note that this is not quite the same test as that done in reg_referenced_p
   since that considers something as being referenced if it is being
   partially set, while we do not.  */

void
note_uses (rtx *pbody, void (*fun) (rtx *, void *), void *data)
{
  rtx body = *pbody;
  int i;

  switch (GET_CODE (body))
    {
    case COND_EXEC:
      /* The test is a use; recurse into the guarded body.  */
      (*fun) (&COND_EXEC_TEST (body), data);
      note_uses (&COND_EXEC_CODE (body), fun, data);
      return;

    case PARALLEL:
      for (i = XVECLEN (body, 0) - 1; i >= 0; i--)
	note_uses (&XVECEXP (body, 0, i), fun, data);
      return;

    case SEQUENCE:
      for (i = XVECLEN (body, 0) - 1; i >= 0; i--)
	note_uses (&PATTERN (XVECEXP (body, 0, i)), fun, data);
      return;

    case USE:
      (*fun) (&XEXP (body, 0), data);
      return;

    case ASM_OPERANDS:
      for (i = ASM_OPERANDS_INPUT_LENGTH (body) - 1; i >= 0; i--)
	(*fun) (&ASM_OPERANDS_INPUT (body, i), data);
      return;

    case TRAP_IF:
      (*fun) (&TRAP_CONDITION (body), data);
      return;

    case PREFETCH:
      (*fun) (&XEXP (body, 0), data);
      return;

    case UNSPEC:
    case UNSPEC_VOLATILE:
      for (i = XVECLEN (body, 0) - 1; i >= 0; i--)
	(*fun) (&XVECEXP (body, 0, i), data);
      return;

    case CLOBBER:
      /* A clobbered MEM still uses its address.  */
      if (MEM_P (XEXP (body, 0)))
	(*fun) (&XEXP (XEXP (body, 0), 0), data);
      return;

    case SET:
      {
	rtx dest = SET_DEST (body);

	/* For sets we replace everything in source plus registers in memory
	   expression in store and operands of a ZERO_EXTRACT.  */
	(*fun) (&SET_SRC (body), data);

	if (GET_CODE (dest) == ZERO_EXTRACT)
	  {
	    (*fun) (&XEXP (dest, 1), data);
	    (*fun) (&XEXP (dest, 2), data);
	  }

	while (GET_CODE (dest) == SUBREG || GET_CODE (dest) == STRICT_LOW_PART)
	  dest = XEXP (dest, 0);

	if (MEM_P (dest))
	  (*fun) (&XEXP (dest, 0), data);
      }
      return;

    default:
      /* All the other possibilities never store.  */
      (*fun) (pbody, data);
      return;
    }
}
1597\f
2c88418c
RS
/* Return nonzero if X's old contents don't survive after INSN.
   This will be true if X is (cc0) or if X is a register and
   X dies in INSN or because INSN entirely sets X.

   "Entirely set" means set directly and not through a SUBREG, or
   ZERO_EXTRACT, so no trace of the old contents remains.
   Likewise, REG_INC does not count.

   REG may be a hard or pseudo reg.  Renumbering is not taken into account,
   but for this use that makes no difference, since regs don't overlap
   during their lifetimes.  Therefore, this function may be used
   at any time after deaths have been computed.

   If REG is a hard reg that occupies multiple machine registers, this
   function will only return 1 if each of those registers will be replaced
   by INSN.  */

int
dead_or_set_p (const_rtx insn, const_rtx x)
{
  unsigned int regno, end_regno;
  unsigned int i;

  /* Can't use cc0_rtx below since this file is used by genattrtab.c.  */
  if (GET_CODE (x) == CC0)
    return 1;

  gcc_assert (REG_P (x));

  /* Every machine register occupied by X must individually be dead
     or entirely set.  */
  regno = REGNO (x);
  end_regno = END_REGNO (x);
  for (i = regno; i < end_regno; i++)
    if (! dead_or_set_regno_p (insn, i))
      return 0;

  return 1;
}
1635
194acded
HPN
/* Return TRUE iff DEST is a register or subreg of a register and
   doesn't change the number of words of the inner register, and any
   part of the register is TEST_REGNO.  */

static bool
covers_regno_no_parallel_p (const_rtx dest, unsigned int test_regno)
{
  unsigned int regno, endregno;

  /* A word-preserving SUBREG still overwrites the whole inner
     register, so look through it.  */
  if (GET_CODE (dest) == SUBREG
      && (((GET_MODE_SIZE (GET_MODE (dest))
	    + UNITS_PER_WORD - 1) / UNITS_PER_WORD)
	  == ((GET_MODE_SIZE (GET_MODE (SUBREG_REG (dest)))
	       + UNITS_PER_WORD - 1) / UNITS_PER_WORD)))
    dest = SUBREG_REG (dest);

  if (!REG_P (dest))
    return false;

  regno = REGNO (dest);
  endregno = END_REGNO (dest);
  return (test_regno >= regno && test_regno < endregno);
}
1659
1660/* Like covers_regno_no_parallel_p, but also handles PARALLELs where
1661 any member matches the covers_regno_no_parallel_p criteria. */
1662
1663static bool
f7d504c2 1664covers_regno_p (const_rtx dest, unsigned int test_regno)
194acded
HPN
1665{
1666 if (GET_CODE (dest) == PARALLEL)
1667 {
1668 /* Some targets place small structures in registers for return
1669 values of functions, and those registers are wrapped in
1670 PARALLELs that we may see as the destination of a SET. */
1671 int i;
1672
1673 for (i = XVECLEN (dest, 0) - 1; i >= 0; i--)
1674 {
1675 rtx inner = XEXP (XVECEXP (dest, 0, i), 0);
1676 if (inner != NULL_RTX
1677 && covers_regno_no_parallel_p (inner, test_regno))
1678 return true;
1679 }
1680
1681 return false;
1682 }
1683 else
1684 return covers_regno_no_parallel_p (dest, test_regno);
1685}
1686
/* Utility function for dead_or_set_p to check an individual register.
   Return nonzero if register TEST_REGNO dies in INSN or is entirely
   set by it.  */

int
dead_or_set_regno_p (const_rtx insn, unsigned int test_regno)
{
  const_rtx pattern;

  /* See if there is a death note for something that includes TEST_REGNO.  */
  if (find_regno_note (insn, REG_DEAD, test_regno))
    return 1;

  /* A call that clobbers the register also kills its old contents.  */
  if (CALL_P (insn)
      && find_regno_fusage (insn, CLOBBER, test_regno))
    return 1;

  pattern = PATTERN (insn);

  /* A conditional store is treated as a store for this purpose.  */
  if (GET_CODE (pattern) == COND_EXEC)
    pattern = COND_EXEC_CODE (pattern);

  if (GET_CODE (pattern) == SET)
    return covers_regno_p (SET_DEST (pattern), test_regno);
  else if (GET_CODE (pattern) == PARALLEL)
    {
      int i;

      for (i = XVECLEN (pattern, 0) - 1; i >= 0; i--)
	{
	  rtx body = XVECEXP (pattern, 0, i);

	  if (GET_CODE (body) == COND_EXEC)
	    body = COND_EXEC_CODE (body);

	  if ((GET_CODE (body) == SET || GET_CODE (body) == CLOBBER)
	      && covers_regno_p (SET_DEST (body), test_regno))
	    return 1;
	}
    }

  return 0;
}
1728
/* Return the reg-note of kind KIND in insn INSN, if there is one.
   If DATUM is nonzero, look for one whose datum is DATUM.  */

rtx
find_reg_note (const_rtx insn, enum reg_note kind, const_rtx datum)
{
  rtx link;

  gcc_checking_assert (insn);

  /* Ignore anything that is not an INSN, JUMP_INSN or CALL_INSN.  */
  if (! INSN_P (insn))
    return 0;
  if (datum == 0)
    {
      /* Match on kind alone.  */
      for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
	if (REG_NOTE_KIND (link) == kind)
	  return link;
      return 0;
    }

  /* Match on kind and (pointer-identical) datum.  */
  for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
    if (REG_NOTE_KIND (link) == kind && datum == XEXP (link, 0))
      return link;
  return 0;
}
1755
/* Return the reg-note of kind KIND in insn INSN which applies to register
   number REGNO, if any.  Return 0 if there is no such reg-note.  Note that
   the REGNO of this NOTE need not be REGNO if REGNO is a hard register;
   it might be the case that the note overlaps REGNO.  */

rtx
find_regno_note (const_rtx insn, enum reg_note kind, unsigned int regno)
{
  rtx link;

  /* Ignore anything that is not an INSN, JUMP_INSN or CALL_INSN.  */
  if (! INSN_P (insn))
    return 0;

  for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
    if (REG_NOTE_KIND (link) == kind
	/* Verify that it is a register, so that scratch and MEM won't cause a
	   problem here.  */
	&& REG_P (XEXP (link, 0))
	/* The note's register span [REGNO(note), END_REGNO(note)) must
	   contain REGNO.  */
	&& REGNO (XEXP (link, 0)) <= regno
	&& END_REGNO (XEXP (link, 0)) > regno)
      return link;
  return 0;
}
8f3e7a26 1780
d9c695ff
RK
/* Return a REG_EQUIV or REG_EQUAL note if insn has only a single set and
   has such a note.  Return NULL otherwise.  */

rtx
find_reg_equal_equiv_note (const_rtx insn)
{
  rtx link;

  if (!INSN_P (insn))
    return 0;

  for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
    if (REG_NOTE_KIND (link) == REG_EQUAL
	|| REG_NOTE_KIND (link) == REG_EQUIV)
      {
	/* FIXME: We should never have REG_EQUAL/REG_EQUIV notes on
	   insns that have multiple sets.  Checking single_set to
	   make sure of this is not the proper check, as explained
	   in the comment in set_unique_reg_note.

	   This should be changed into an assert.  */
	if (GET_CODE (PATTERN (insn)) == PARALLEL && multiple_sets (insn))
	  return 0;
	return link;
      }
  return NULL;
}
1808
2a450639
RS
1809/* Check whether INSN is a single_set whose source is known to be
1810 equivalent to a constant. Return that constant if so, otherwise
1811 return null. */
1812
1813rtx
f7d504c2 1814find_constant_src (const_rtx insn)
2a450639
RS
1815{
1816 rtx note, set, x;
1817
1818 set = single_set (insn);
1819 if (set)
1820 {
1821 x = avoid_constant_pool_reference (SET_SRC (set));
1822 if (CONSTANT_P (x))
1823 return x;
1824 }
1825
1826 note = find_reg_equal_equiv_note (insn);
1827 if (note && CONSTANT_P (XEXP (note, 0)))
1828 return XEXP (note, 0);
1829
1830 return NULL_RTX;
1831}
1832
8f3e7a26
RK
/* Return true if DATUM, or any overlap of DATUM, of kind CODE is found
   in the CALL_INSN_FUNCTION_USAGE information of INSN.  */

int
find_reg_fusage (const_rtx insn, enum rtx_code code, const_rtx datum)
{
  /* If it's not a CALL_INSN, it can't possibly have a
     CALL_INSN_FUNCTION_USAGE field, so don't bother checking.  */
  if (!CALL_P (insn))
    return 0;

  gcc_assert (datum);

  if (!REG_P (datum))
    {
      rtx link;

      /* Non-register datum: require a structural match.  */
      for (link = CALL_INSN_FUNCTION_USAGE (insn);
	   link;
	   link = XEXP (link, 1))
	if (GET_CODE (XEXP (link, 0)) == code
	    && rtx_equal_p (datum, XEXP (XEXP (link, 0), 0)))
	  return 1;
    }
  else
    {
      unsigned int regno = REGNO (datum);

      /* CALL_INSN_FUNCTION_USAGE information cannot contain references
	 to pseudo registers, so don't bother checking.  */

      if (regno < FIRST_PSEUDO_REGISTER)
	{
	  unsigned int end_regno = END_HARD_REGNO (datum);
	  unsigned int i;

	  /* Any overlapping machine register counts.  */
	  for (i = regno; i < end_regno; i++)
	    if (find_regno_fusage (insn, code, i))
	      return 1;
	}
    }

  return 0;
}
1877
/* Return true if REGNO, or any overlap of REGNO, of kind CODE is found
   in the CALL_INSN_FUNCTION_USAGE information of INSN.  */

int
find_regno_fusage (const_rtx insn, enum rtx_code code, unsigned int regno)
{
  rtx link;

  /* CALL_INSN_FUNCTION_USAGE information cannot contain references
     to pseudo registers, so don't bother checking.  */

  if (regno >= FIRST_PSEUDO_REGISTER
      || !CALL_P (insn) )
    return 0;

  for (link = CALL_INSN_FUNCTION_USAGE (insn); link; link = XEXP (link, 1))
    {
      rtx op, reg;

      /* The entry matches when its code is CODE and the register span
	 [REGNO(reg), END_HARD_REGNO(reg)) contains REGNO.  */
      if (GET_CODE (op = XEXP (link, 0)) == code
	  && REG_P (reg = XEXP (op, 0))
	  && REGNO (reg) <= regno
	  && END_HARD_REGNO (reg) > regno)
	return 1;
    }

  return 0;
}
a6a063b8 1906
2c88418c 1907\f
efc0b2bd
ILT
/* Allocate a register note with kind KIND and datum DATUM.  LIST is
   stored as the pointer to the next register note.  Returns the new
   note, which becomes the new head of the chain.  */

rtx
alloc_reg_note (enum reg_note kind, rtx datum, rtx list)
{
  rtx note;

  switch (kind)
    {
    case REG_CC_SETTER:
    case REG_CC_USER:
    case REG_LABEL_TARGET:
    case REG_LABEL_OPERAND:
    case REG_TM:
      /* These types of register notes use an INSN_LIST rather than an
	 EXPR_LIST, so that copying is done right and dumps look
	 better.  */
      note = alloc_INSN_LIST (datum, list);
      /* alloc_INSN_LIST does not record the note kind; set it here.  */
      PUT_REG_NOTE_KIND (note, kind);
      break;

    default:
      note = alloc_EXPR_LIST (kind, datum, list);
      break;
    }

  return note;
}
1937
/* Add register note with kind KIND and datum DATUM to INSN.
   The new note is pushed onto the front of INSN's REG_NOTES chain.  */

void
add_reg_note (rtx insn, enum reg_note kind, rtx datum)
{
  REG_NOTES (insn) = alloc_reg_note (kind, datum, REG_NOTES (insn));
}
1945
2c88418c
RS
/* Remove register note NOTE from the REG_NOTES of INSN.
   A NULL NOTE is silently ignored.  */

void
remove_note (rtx insn, const_rtx note)
{
  rtx link;

  if (note == NULL_RTX)
    return;

  /* Unlink NOTE from the chain, handling the head specially.  */
  if (REG_NOTES (insn) == note)
    REG_NOTES (insn) = XEXP (note, 1);
  else
    for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
      if (XEXP (link, 1) == note)
	{
	  XEXP (link, 1) = XEXP (note, 1);
	  break;
	}

  switch (REG_NOTE_KIND (note))
    {
    case REG_EQUAL:
    case REG_EQUIV:
      /* Equivalence notes feed the dataflow framework; tell it to
	 rescan this insn.  */
      df_notes_rescan (insn);
      break;
    default:
      break;
    }
}
55a98783 1976
7cd689bc
SB
1977/* Remove REG_EQUAL and/or REG_EQUIV notes if INSN has such notes. */
1978
1979void
1980remove_reg_equal_equiv_notes (rtx insn)
1981{
1982 rtx *loc;
1983
1984 loc = &REG_NOTES (insn);
1985 while (*loc)
1986 {
1987 enum reg_note kind = REG_NOTE_KIND (*loc);
1988 if (kind == REG_EQUAL || kind == REG_EQUIV)
1989 *loc = XEXP (*loc, 1);
1990 else
1991 loc = &XEXP (*loc, 1);
1992 }
1993}
885c9b5d
EB
1994
/* Remove all REG_EQUAL and REG_EQUIV notes referring to REGNO.
   Requires the dataflow framework; does nothing when DF is absent.  */

void
remove_reg_equal_equiv_notes_for_regno (unsigned int regno)
{
  df_ref eq_use;

  if (!df)
    return;

  /* This loop is a little tricky.  We cannot just go down the chain because
     it is being modified by some actions in the loop.  So we just iterate
     over the head.  We plan to drain the list anyway.  */
  while ((eq_use = DF_REG_EQ_USE_CHAIN (regno)) != NULL)
    {
      rtx insn = DF_REF_INSN (eq_use);
      rtx note = find_reg_equal_equiv_note (insn);

      /* This assert is generally triggered when someone deletes a REG_EQUAL
	 or REG_EQUIV note by hacking the list manually rather than calling
	 remove_note.  */
      gcc_assert (note);

      /* remove_note triggers a df rescan, which shortens the chain.  */
      remove_note (insn, note);
    }
}
7cd689bc 2021
5f0d2358
RK
2022/* Search LISTP (an EXPR_LIST) for an entry whose first operand is NODE and
2023 return 1 if it is found. A simple equality test is used to determine if
2024 NODE matches. */
2025
2026int
f7d504c2 2027in_expr_list_p (const_rtx listp, const_rtx node)
5f0d2358 2028{
f7d504c2 2029 const_rtx x;
5f0d2358
RK
2030
2031 for (x = listp; x; x = XEXP (x, 1))
2032 if (node == XEXP (x, 0))
2033 return 1;
2034
2035 return 0;
2036}
2037
dd248abd
RK
/* Search LISTP (an EXPR_LIST) for an entry whose first operand is NODE and
   remove that entry from the list if it is found.

   A simple equality test is used to determine if NODE matches.
   Only the first matching entry is removed.  */

void
remove_node_from_expr_list (const_rtx node, rtx *listp)
{
  rtx temp = *listp;
  rtx prev = NULL_RTX;

  while (temp)
    {
      if (node == XEXP (temp, 0))
	{
	  /* Splice the node out of the list.  */
	  if (prev)
	    XEXP (prev, 1) = XEXP (temp, 1);
	  else
	    /* Removing the head: update the caller's list pointer.  */
	    *listp = XEXP (temp, 1);

	  return;
	}

      prev = temp;
      temp = XEXP (temp, 1);
    }
}
2c88418c 2066\f
2b067faf
RS
/* Nonzero if X contains any volatile instructions.  These are instructions
   which may cause unpredictable machine state instructions, and thus no
   instructions should be moved or combined across them.  This includes
   only volatile asms and UNSPEC_VOLATILE instructions.  */

int
volatile_insn_p (const_rtx x)
{
  const RTX_CODE code = GET_CODE (x);
  switch (code)
    {
    /* Leaf and structural codes that can never themselves be volatile.  */
    case LABEL_REF:
    case SYMBOL_REF:
    case CONST:
    CASE_CONST_ANY:
    case CC0:
    case PC:
    case REG:
    case SCRATCH:
    case CLOBBER:
    case ADDR_VEC:
    case ADDR_DIFF_VEC:
    case CALL:
    case MEM:
      return 0;

    case UNSPEC_VOLATILE:
 /* case TRAP_IF: This isn't clear yet.  */
      return 1;

    case ASM_INPUT:
    case ASM_OPERANDS:
      if (MEM_VOLATILE_P (x))
	return 1;
      /* FALLTHRU -- a non-volatile asm still gets its operands scanned.  */

    default:
      break;
    }

  /* Recursively scan the operands of this expression.  */

  {
    const char *const fmt = GET_RTX_FORMAT (code);
    int i;

    for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
      {
	if (fmt[i] == 'e')
	  {
	    if (volatile_insn_p (XEXP (x, i)))
	      return 1;
	  }
	else if (fmt[i] == 'E')
	  {
	    int j;
	    for (j = 0; j < XVECLEN (x, i); j++)
	      if (volatile_insn_p (XVECEXP (x, i, j)))
		return 1;
	  }
      }
  }
  return 0;
}
2130
2c88418c 2131/* Nonzero if X contains any volatile memory references
2ac4fed0 2132 UNSPEC_VOLATILE operations or volatile ASM_OPERANDS expressions. */
2c88418c
RS
2133
2134int
f7d504c2 2135volatile_refs_p (const_rtx x)
2c88418c 2136{
f7d504c2 2137 const RTX_CODE code = GET_CODE (x);
2c88418c
RS
2138 switch (code)
2139 {
2140 case LABEL_REF:
2141 case SYMBOL_REF:
2c88418c 2142 case CONST:
d8116890 2143 CASE_CONST_ANY:
2c88418c
RS
2144 case CC0:
2145 case PC:
2146 case REG:
2147 case SCRATCH:
2148 case CLOBBER:
2c88418c
RS
2149 case ADDR_VEC:
2150 case ADDR_DIFF_VEC:
2151 return 0;
2152
2ac4fed0 2153 case UNSPEC_VOLATILE:
2c88418c
RS
2154 return 1;
2155
2156 case MEM:
4c46ea23 2157 case ASM_INPUT:
2c88418c
RS
2158 case ASM_OPERANDS:
2159 if (MEM_VOLATILE_P (x))
2160 return 1;
e9a25f70
JL
2161
2162 default:
2163 break;
2c88418c
RS
2164 }
2165
2166 /* Recursively scan the operands of this expression. */
2167
2168 {
f7d504c2 2169 const char *const fmt = GET_RTX_FORMAT (code);
b3694847 2170 int i;
a6a2274a 2171
2c88418c
RS
2172 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
2173 {
2174 if (fmt[i] == 'e')
2175 {
2176 if (volatile_refs_p (XEXP (x, i)))
2177 return 1;
2178 }
d4757e6a 2179 else if (fmt[i] == 'E')
2c88418c 2180 {
b3694847 2181 int j;
2c88418c
RS
2182 for (j = 0; j < XVECLEN (x, i); j++)
2183 if (volatile_refs_p (XVECEXP (x, i, j)))
2184 return 1;
2185 }
2186 }
2187 }
2188 return 0;
2189}
2190
2191/* Similar to above, except that it also rejects register pre- and post-
2192 incrementing. */
2193
2194int
f7d504c2 2195side_effects_p (const_rtx x)
2c88418c 2196{
f7d504c2 2197 const RTX_CODE code = GET_CODE (x);
2c88418c
RS
2198 switch (code)
2199 {
2200 case LABEL_REF:
2201 case SYMBOL_REF:
2c88418c 2202 case CONST:
d8116890 2203 CASE_CONST_ANY:
2c88418c
RS
2204 case CC0:
2205 case PC:
2206 case REG:
2207 case SCRATCH:
2c88418c
RS
2208 case ADDR_VEC:
2209 case ADDR_DIFF_VEC:
b5b8b0ac 2210 case VAR_LOCATION:
2c88418c
RS
2211 return 0;
2212
2213 case CLOBBER:
2214 /* Reject CLOBBER with a non-VOID mode. These are made by combine.c
2215 when some combination can't be done. If we see one, don't think
2216 that we can simplify the expression. */
2217 return (GET_MODE (x) != VOIDmode);
2218
2219 case PRE_INC:
2220 case PRE_DEC:
2221 case POST_INC:
2222 case POST_DEC:
1fb9c5cd
MH
2223 case PRE_MODIFY:
2224 case POST_MODIFY:
2c88418c 2225 case CALL:
2ac4fed0 2226 case UNSPEC_VOLATILE:
2c88418c
RS
2227 /* case TRAP_IF: This isn't clear yet. */
2228 return 1;
2229
2230 case MEM:
4c46ea23 2231 case ASM_INPUT:
2c88418c
RS
2232 case ASM_OPERANDS:
2233 if (MEM_VOLATILE_P (x))
2234 return 1;
e9a25f70
JL
2235
2236 default:
2237 break;
2c88418c
RS
2238 }
2239
2240 /* Recursively scan the operands of this expression. */
2241
2242 {
b3694847
SS
2243 const char *fmt = GET_RTX_FORMAT (code);
2244 int i;
a6a2274a 2245
2c88418c
RS
2246 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
2247 {
2248 if (fmt[i] == 'e')
2249 {
2250 if (side_effects_p (XEXP (x, i)))
2251 return 1;
2252 }
d4757e6a 2253 else if (fmt[i] == 'E')
2c88418c 2254 {
b3694847 2255 int j;
2c88418c
RS
2256 for (j = 0; j < XVECLEN (x, i); j++)
2257 if (side_effects_p (XVECEXP (x, i, j)))
2258 return 1;
2259 }
2260 }
2261 }
2262 return 0;
2263}
2264\f
e755fcf5 2265/* Return nonzero if evaluating rtx X might cause a trap.
48e8382e
PB
2266 FLAGS controls how to consider MEMs. A nonzero means the context
2267 of the access may have changed from the original, such that the
2268 address may have become invalid. */
2c88418c 2269
215b063c 2270int
f7d504c2 2271may_trap_p_1 (const_rtx x, unsigned flags)
2c88418c
RS
2272{
2273 int i;
2274 enum rtx_code code;
6f7d635c 2275 const char *fmt;
48e8382e
PB
2276
2277 /* We make no distinction currently, but this function is part of
2278 the internal target-hooks ABI so we keep the parameter as
2279 "unsigned flags". */
2280 bool code_changed = flags != 0;
2c88418c
RS
2281
2282 if (x == 0)
2283 return 0;
2284 code = GET_CODE (x);
2285 switch (code)
2286 {
2287 /* Handle these cases quickly. */
d8116890 2288 CASE_CONST_ANY:
2c88418c
RS
2289 case SYMBOL_REF:
2290 case LABEL_REF:
2291 case CONST:
2292 case PC:
2293 case CC0:
2294 case REG:
2295 case SCRATCH:
2296 return 0;
2297
215b063c 2298 case UNSPEC:
2ac4fed0 2299 case UNSPEC_VOLATILE:
215b063c
PB
2300 return targetm.unspec_may_trap_p (x, flags);
2301
2302 case ASM_INPUT:
2c88418c
RS
2303 case TRAP_IF:
2304 return 1;
2305
22aa60a1
RH
2306 case ASM_OPERANDS:
2307 return MEM_VOLATILE_P (x);
2308
2c88418c
RS
2309 /* Memory ref can trap unless it's a static var or a stack slot. */
2310 case MEM:
d809253a
EB
2311 /* Recognize specific pattern of stack checking probes. */
2312 if (flag_stack_check
2313 && MEM_VOLATILE_P (x)
2314 && XEXP (x, 0) == stack_pointer_rtx)
2315 return 1;
e755fcf5 2316 if (/* MEM_NOTRAP_P only relates to the actual position of the memory
48e8382e
PB
2317 reference; moving it out of context such as when moving code
2318 when optimizing, might cause its address to become invalid. */
2319 code_changed
2320 || !MEM_NOTRAP_P (x))
2321 {
f5541398 2322 HOST_WIDE_INT size = MEM_SIZE_KNOWN_P (x) ? MEM_SIZE (x) : 0;
48e8382e
PB
2323 return rtx_addr_can_trap_p_1 (XEXP (x, 0), 0, size,
2324 GET_MODE (x), code_changed);
2325 }
2326
2327 return 0;
2c88418c
RS
2328
2329 /* Division by a non-constant might trap. */
2330 case DIV:
2331 case MOD:
2332 case UDIV:
2333 case UMOD:
52bfebf0
RS
2334 if (HONOR_SNANS (GET_MODE (x)))
2335 return 1;
3d8bf70f 2336 if (SCALAR_FLOAT_MODE_P (GET_MODE (x)))
f9013075
DE
2337 return flag_trapping_math;
2338 if (!CONSTANT_P (XEXP (x, 1)) || (XEXP (x, 1) == const0_rtx))
2c88418c 2339 return 1;
e9a25f70
JL
2340 break;
2341
b278301b
RK
2342 case EXPR_LIST:
2343 /* An EXPR_LIST is used to represent a function call. This
2344 certainly may trap. */
2345 return 1;
e9a25f70 2346
734508ea
JW
2347 case GE:
2348 case GT:
2349 case LE:
2350 case LT:
19aec195 2351 case LTGT:
55143861 2352 case COMPARE:
734508ea 2353 /* Some floating point comparisons may trap. */
f5eb5fd0
JH
2354 if (!flag_trapping_math)
2355 break;
734508ea
JW
2356 /* ??? There is no machine independent way to check for tests that trap
2357 when COMPARE is used, though many targets do make this distinction.
2358 For instance, sparc uses CCFPE for compares which generate exceptions
2359 and CCFP for compares which do not generate exceptions. */
52bfebf0 2360 if (HONOR_NANS (GET_MODE (x)))
55143861
JJ
2361 return 1;
2362 /* But often the compare has some CC mode, so check operand
2363 modes as well. */
52bfebf0
RS
2364 if (HONOR_NANS (GET_MODE (XEXP (x, 0)))
2365 || HONOR_NANS (GET_MODE (XEXP (x, 1))))
2366 return 1;
2367 break;
2368
2369 case EQ:
2370 case NE:
2371 if (HONOR_SNANS (GET_MODE (x)))
2372 return 1;
2373 /* Often comparison is CC mode, so check operand modes. */
2374 if (HONOR_SNANS (GET_MODE (XEXP (x, 0)))
2375 || HONOR_SNANS (GET_MODE (XEXP (x, 1))))
55143861
JJ
2376 return 1;
2377 break;
2378
22fd5743
FH
2379 case FIX:
2380 /* Conversion of floating point might trap. */
2381 if (flag_trapping_math && HONOR_NANS (GET_MODE (XEXP (x, 0))))
2382 return 1;
2383 break;
2384
05cc23e8
RH
2385 case NEG:
2386 case ABS:
e3947b34 2387 case SUBREG:
05cc23e8
RH
2388 /* These operations don't trap even with floating point. */
2389 break;
2390
2c88418c
RS
2391 default:
2392 /* Any floating arithmetic may trap. */
3d8bf70f 2393 if (SCALAR_FLOAT_MODE_P (GET_MODE (x))
f5eb5fd0 2394 && flag_trapping_math)
2c88418c
RS
2395 return 1;
2396 }
2397
2398 fmt = GET_RTX_FORMAT (code);
2399 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
2400 {
2401 if (fmt[i] == 'e')
2402 {
e755fcf5 2403 if (may_trap_p_1 (XEXP (x, i), flags))
2c88418c
RS
2404 return 1;
2405 }
2406 else if (fmt[i] == 'E')
2407 {
b3694847 2408 int j;
2c88418c 2409 for (j = 0; j < XVECLEN (x, i); j++)
e755fcf5 2410 if (may_trap_p_1 (XVECEXP (x, i, j), flags))
2c88418c
RS
2411 return 1;
2412 }
2413 }
2414 return 0;
2415}
2358ff91
EB
2416
2417/* Return nonzero if evaluating rtx X might cause a trap. */
2418
2419int
f7d504c2 2420may_trap_p (const_rtx x)
2358ff91 2421{
e755fcf5
ZD
2422 return may_trap_p_1 (x, 0);
2423}
2424
c0220ea4 2425/* Same as above, but additionally return nonzero if evaluating rtx X might
2358ff91
EB
2426 cause a fault. We define a fault for the purpose of this function as a
2427 erroneous execution condition that cannot be encountered during the normal
2428 execution of a valid program; the typical example is an unaligned memory
2429 access on a strict alignment machine. The compiler guarantees that it
2430 doesn't generate code that will fault from a valid program, but this
2431 guarantee doesn't mean anything for individual instructions. Consider
2432 the following example:
2433
2434 struct S { int d; union { char *cp; int *ip; }; };
2435
2436 int foo(struct S *s)
2437 {
2438 if (s->d == 1)
2439 return *s->ip;
2440 else
2441 return *s->cp;
2442 }
2443
2444 on a strict alignment machine. In a valid program, foo will never be
2445 invoked on a structure for which d is equal to 1 and the underlying
2446 unique field of the union not aligned on a 4-byte boundary, but the
2447 expression *s->ip might cause a fault if considered individually.
2448
2449 At the RTL level, potentially problematic expressions will almost always
2450 verify may_trap_p; for example, the above dereference can be emitted as
2451 (mem:SI (reg:P)) and this expression is may_trap_p for a generic register.
2452 However, suppose that foo is inlined in a caller that causes s->cp to
2453 point to a local character variable and guarantees that s->d is not set
2454 to 1; foo may have been effectively translated into pseudo-RTL as:
2455
2456 if ((reg:SI) == 1)
2457 (set (reg:SI) (mem:SI (%fp - 7)))
2458 else
2459 (set (reg:QI) (mem:QI (%fp - 7)))
2460
2461 Now (mem:SI (%fp - 7)) is considered as not may_trap_p since it is a
2462 memory reference to a stack slot, but it will certainly cause a fault
2463 on a strict alignment machine. */
2464
2465int
f7d504c2 2466may_trap_or_fault_p (const_rtx x)
2358ff91 2467{
48e8382e 2468 return may_trap_p_1 (x, 1);
2358ff91 2469}
2c88418c
RS
2470\f
2471/* Return nonzero if X contains a comparison that is not either EQ or NE,
2472 i.e., an inequality. */
2473
2474int
f7d504c2 2475inequality_comparisons_p (const_rtx x)
2c88418c 2476{
b3694847
SS
2477 const char *fmt;
2478 int len, i;
f7d504c2 2479 const enum rtx_code code = GET_CODE (x);
2c88418c
RS
2480
2481 switch (code)
2482 {
2483 case REG:
2484 case SCRATCH:
2485 case PC:
2486 case CC0:
d8116890 2487 CASE_CONST_ANY:
2c88418c
RS
2488 case CONST:
2489 case LABEL_REF:
2490 case SYMBOL_REF:
2491 return 0;
2492
2493 case LT:
2494 case LTU:
2495 case GT:
2496 case GTU:
2497 case LE:
2498 case LEU:
2499 case GE:
2500 case GEU:
2501 return 1;
a6a2274a 2502
e9a25f70
JL
2503 default:
2504 break;
2c88418c
RS
2505 }
2506
2507 len = GET_RTX_LENGTH (code);
2508 fmt = GET_RTX_FORMAT (code);
2509
2510 for (i = 0; i < len; i++)
2511 {
2512 if (fmt[i] == 'e')
2513 {
2514 if (inequality_comparisons_p (XEXP (x, i)))
2515 return 1;
2516 }
2517 else if (fmt[i] == 'E')
2518 {
b3694847 2519 int j;
2c88418c
RS
2520 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
2521 if (inequality_comparisons_p (XVECEXP (x, i, j)))
2522 return 1;
2523 }
2524 }
a6a2274a 2525
2c88418c
RS
2526 return 0;
2527}
2528\f
1ed0205e
VM
2529/* Replace any occurrence of FROM in X with TO. The function does
2530 not enter into CONST_DOUBLE for the replace.
2c88418c
RS
2531
2532 Note that copying is not done so X must not be shared unless all copies
2533 are to be modified. */
2534
2535rtx
0c20a65f 2536replace_rtx (rtx x, rtx from, rtx to)
2c88418c 2537{
b3694847
SS
2538 int i, j;
2539 const char *fmt;
2c88418c
RS
2540
2541 if (x == from)
2542 return to;
2543
2544 /* Allow this function to make replacements in EXPR_LISTs. */
2545 if (x == 0)
2546 return 0;
2547
9dd791c8
AO
2548 if (GET_CODE (x) == SUBREG)
2549 {
55d796da 2550 rtx new_rtx = replace_rtx (SUBREG_REG (x), from, to);
9dd791c8 2551
481683e1 2552 if (CONST_INT_P (new_rtx))
9dd791c8 2553 {
55d796da 2554 x = simplify_subreg (GET_MODE (x), new_rtx,
9dd791c8
AO
2555 GET_MODE (SUBREG_REG (x)),
2556 SUBREG_BYTE (x));
41374e13 2557 gcc_assert (x);
9dd791c8
AO
2558 }
2559 else
55d796da 2560 SUBREG_REG (x) = new_rtx;
9dd791c8
AO
2561
2562 return x;
2563 }
2564 else if (GET_CODE (x) == ZERO_EXTEND)
2565 {
55d796da 2566 rtx new_rtx = replace_rtx (XEXP (x, 0), from, to);
9dd791c8 2567
481683e1 2568 if (CONST_INT_P (new_rtx))
9dd791c8
AO
2569 {
2570 x = simplify_unary_operation (ZERO_EXTEND, GET_MODE (x),
55d796da 2571 new_rtx, GET_MODE (XEXP (x, 0)));
41374e13 2572 gcc_assert (x);
9dd791c8
AO
2573 }
2574 else
55d796da 2575 XEXP (x, 0) = new_rtx;
9dd791c8
AO
2576
2577 return x;
2578 }
2579
2c88418c
RS
2580 fmt = GET_RTX_FORMAT (GET_CODE (x));
2581 for (i = GET_RTX_LENGTH (GET_CODE (x)) - 1; i >= 0; i--)
2582 {
2583 if (fmt[i] == 'e')
2584 XEXP (x, i) = replace_rtx (XEXP (x, i), from, to);
2585 else if (fmt[i] == 'E')
2586 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
2587 XVECEXP (x, i, j) = replace_rtx (XVECEXP (x, i, j), from, to);
2588 }
2589
2590 return x;
a6a2274a 2591}
2c88418c 2592\f
39811184 2593/* Replace occurrences of the old label in *X with the new one.
4af16369 2594 DATA is a REPLACE_LABEL_DATA containing the old and new labels. */
39811184
JZ
2595
2596int
0c20a65f 2597replace_label (rtx *x, void *data)
39811184
JZ
2598{
2599 rtx l = *x;
4af16369
JZ
2600 rtx old_label = ((replace_label_data *) data)->r1;
2601 rtx new_label = ((replace_label_data *) data)->r2;
2602 bool update_label_nuses = ((replace_label_data *) data)->update_label_nuses;
39811184
JZ
2603
2604 if (l == NULL_RTX)
2605 return 0;
2606
173cd571
JZ
2607 if (GET_CODE (l) == SYMBOL_REF
2608 && CONSTANT_POOL_ADDRESS_P (l))
4af16369 2609 {
173cd571 2610 rtx c = get_pool_constant (l);
4af16369
JZ
2611 if (rtx_referenced_p (old_label, c))
2612 {
2613 rtx new_c, new_l;
2614 replace_label_data *d = (replace_label_data *) data;
0c20a65f 2615
4af16369
JZ
2616 /* Create a copy of constant C; replace the label inside
2617 but do not update LABEL_NUSES because uses in constant pool
2618 are not counted. */
2619 new_c = copy_rtx (c);
2620 d->update_label_nuses = false;
2621 for_each_rtx (&new_c, replace_label, data);
2622 d->update_label_nuses = update_label_nuses;
2623
2624 /* Add the new constant NEW_C to constant pool and replace
2625 the old reference to constant by new reference. */
173cd571 2626 new_l = XEXP (force_const_mem (get_pool_mode (l), new_c), 0);
4af16369
JZ
2627 *x = replace_rtx (l, l, new_l);
2628 }
2629 return 0;
2630 }
2631
39811184
JZ
2632 /* If this is a JUMP_INSN, then we also need to fix the JUMP_LABEL
2633 field. This is not handled by for_each_rtx because it doesn't
2634 handle unprinted ('0') fields. */
4b4bf941 2635 if (JUMP_P (l) && JUMP_LABEL (l) == old_label)
39811184 2636 JUMP_LABEL (l) = new_label;
39811184 2637
4af16369
JZ
2638 if ((GET_CODE (l) == LABEL_REF
2639 || GET_CODE (l) == INSN_LIST)
2640 && XEXP (l, 0) == old_label)
2641 {
2642 XEXP (l, 0) = new_label;
2643 if (update_label_nuses)
2644 {
2645 ++LABEL_NUSES (new_label);
2646 --LABEL_NUSES (old_label);
2647 }
2648 return 0;
2649 }
39811184
JZ
2650
2651 return 0;
2652}
2653
4af16369
JZ
2654/* When *BODY is equal to X or X is directly referenced by *BODY
2655 return nonzero, thus FOR_EACH_RTX stops traversing and returns nonzero
2656 too, otherwise FOR_EACH_RTX continues traversing *BODY. */
39811184
JZ
2657
2658static int
0c20a65f 2659rtx_referenced_p_1 (rtx *body, void *x)
39811184 2660{
4af16369
JZ
2661 rtx y = (rtx) x;
2662
2663 if (*body == NULL_RTX)
2664 return y == NULL_RTX;
2665
2666 /* Return true if a label_ref *BODY refers to label Y. */
4b4bf941 2667 if (GET_CODE (*body) == LABEL_REF && LABEL_P (y))
4af16369
JZ
2668 return XEXP (*body, 0) == y;
2669
2670 /* If *BODY is a reference to pool constant traverse the constant. */
2671 if (GET_CODE (*body) == SYMBOL_REF
2672 && CONSTANT_POOL_ADDRESS_P (*body))
2673 return rtx_referenced_p (y, get_pool_constant (*body));
2674
2675 /* By default, compare the RTL expressions. */
2676 return rtx_equal_p (*body, y);
39811184
JZ
2677}
2678
4af16369 2679/* Return true if X is referenced in BODY. */
39811184
JZ
2680
2681int
0c20a65f 2682rtx_referenced_p (rtx x, rtx body)
39811184 2683{
4af16369 2684 return for_each_rtx (&body, rtx_referenced_p_1, x);
39811184
JZ
2685}
2686
ee735eef
JZ
2687/* If INSN is a tablejump return true and store the label (before jump table) to
2688 *LABELP and the jump table to *TABLEP. LABELP and TABLEP may be NULL. */
39811184
JZ
2689
2690bool
f7d504c2 2691tablejump_p (const_rtx insn, rtx *labelp, rtx *tablep)
39811184 2692{
ee735eef
JZ
2693 rtx label, table;
2694
dc0ff1c8
BS
2695 if (!JUMP_P (insn))
2696 return false;
2697
2698 label = JUMP_LABEL (insn);
2699 if (label != NULL_RTX && !ANY_RETURN_P (label)
ee735eef 2700 && (table = next_active_insn (label)) != NULL_RTX
481683e1 2701 && JUMP_TABLE_DATA_P (table))
39811184 2702 {
ee735eef
JZ
2703 if (labelp)
2704 *labelp = label;
2705 if (tablep)
2706 *tablep = table;
39811184
JZ
2707 return true;
2708 }
2709 return false;
2710}
2711
fce7e199
RH
2712/* A subroutine of computed_jump_p, return 1 if X contains a REG or MEM or
2713 constant that is not in the constant pool and not in the condition
2714 of an IF_THEN_ELSE. */
2a1777af
JL
2715
2716static int
f7d504c2 2717computed_jump_p_1 (const_rtx x)
2a1777af 2718{
f7d504c2 2719 const enum rtx_code code = GET_CODE (x);
2a1777af 2720 int i, j;
6f7d635c 2721 const char *fmt;
2a1777af
JL
2722
2723 switch (code)
2724 {
2a1777af
JL
2725 case LABEL_REF:
2726 case PC:
2727 return 0;
2728
fce7e199 2729 case CONST:
d8116890 2730 CASE_CONST_ANY:
fce7e199 2731 case SYMBOL_REF:
2a1777af
JL
2732 case REG:
2733 return 1;
2734
2735 case MEM:
2736 return ! (GET_CODE (XEXP (x, 0)) == SYMBOL_REF
2737 && CONSTANT_POOL_ADDRESS_P (XEXP (x, 0)));
2738
2739 case IF_THEN_ELSE:
fce7e199
RH
2740 return (computed_jump_p_1 (XEXP (x, 1))
2741 || computed_jump_p_1 (XEXP (x, 2)));
1d300e19
KG
2742
2743 default:
2744 break;
2a1777af
JL
2745 }
2746
2747 fmt = GET_RTX_FORMAT (code);
2748 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
2749 {
2750 if (fmt[i] == 'e'
fce7e199 2751 && computed_jump_p_1 (XEXP (x, i)))
2a1777af
JL
2752 return 1;
2753
d4757e6a 2754 else if (fmt[i] == 'E')
2a1777af 2755 for (j = 0; j < XVECLEN (x, i); j++)
fce7e199 2756 if (computed_jump_p_1 (XVECEXP (x, i, j)))
2a1777af
JL
2757 return 1;
2758 }
2759
2760 return 0;
2761}
2762
2763/* Return nonzero if INSN is an indirect jump (aka computed jump).
2764
2765 Tablejumps and casesi insns are not considered indirect jumps;
4eb00163 2766 we can recognize them by a (use (label_ref)). */
2a1777af
JL
2767
2768int
f7d504c2 2769computed_jump_p (const_rtx insn)
2a1777af
JL
2770{
2771 int i;
4b4bf941 2772 if (JUMP_P (insn))
2a1777af
JL
2773 {
2774 rtx pat = PATTERN (insn);
2a1777af 2775
cf7c4aa6
HPN
2776 /* If we have a JUMP_LABEL set, we're not a computed jump. */
2777 if (JUMP_LABEL (insn) != NULL)
f759eb8b 2778 return 0;
cf7c4aa6
HPN
2779
2780 if (GET_CODE (pat) == PARALLEL)
2a1777af
JL
2781 {
2782 int len = XVECLEN (pat, 0);
2783 int has_use_labelref = 0;
2784
2785 for (i = len - 1; i >= 0; i--)
2786 if (GET_CODE (XVECEXP (pat, 0, i)) == USE
2787 && (GET_CODE (XEXP (XVECEXP (pat, 0, i), 0))
2788 == LABEL_REF))
2789 has_use_labelref = 1;
2790
2791 if (! has_use_labelref)
2792 for (i = len - 1; i >= 0; i--)
2793 if (GET_CODE (XVECEXP (pat, 0, i)) == SET
2794 && SET_DEST (XVECEXP (pat, 0, i)) == pc_rtx
fce7e199 2795 && computed_jump_p_1 (SET_SRC (XVECEXP (pat, 0, i))))
2a1777af
JL
2796 return 1;
2797 }
2798 else if (GET_CODE (pat) == SET
2799 && SET_DEST (pat) == pc_rtx
fce7e199 2800 && computed_jump_p_1 (SET_SRC (pat)))
2a1777af
JL
2801 return 1;
2802 }
2803 return 0;
2804}
ccc2d6d0 2805
cf94b0fc
PB
2806/* Optimized loop of for_each_rtx, trying to avoid useless recursive
2807 calls. Processes the subexpressions of EXP and passes them to F. */
2808static int
2809for_each_rtx_1 (rtx exp, int n, rtx_function f, void *data)
2810{
2811 int result, i, j;
2812 const char *format = GET_RTX_FORMAT (GET_CODE (exp));
2813 rtx *x;
2814
2815 for (; format[n] != '\0'; n++)
2816 {
2817 switch (format[n])
2818 {
2819 case 'e':
2820 /* Call F on X. */
2821 x = &XEXP (exp, n);
2822 result = (*f) (x, data);
2823 if (result == -1)
2824 /* Do not traverse sub-expressions. */
2825 continue;
2826 else if (result != 0)
2827 /* Stop the traversal. */
2828 return result;
b8698a0f 2829
cf94b0fc
PB
2830 if (*x == NULL_RTX)
2831 /* There are no sub-expressions. */
2832 continue;
b8698a0f 2833
cf94b0fc
PB
2834 i = non_rtx_starting_operands[GET_CODE (*x)];
2835 if (i >= 0)
2836 {
2837 result = for_each_rtx_1 (*x, i, f, data);
2838 if (result != 0)
2839 return result;
2840 }
2841 break;
2842
2843 case 'V':
2844 case 'E':
2845 if (XVEC (exp, n) == 0)
2846 continue;
2847 for (j = 0; j < XVECLEN (exp, n); ++j)
2848 {
2849 /* Call F on X. */
2850 x = &XVECEXP (exp, n, j);
2851 result = (*f) (x, data);
2852 if (result == -1)
2853 /* Do not traverse sub-expressions. */
2854 continue;
2855 else if (result != 0)
2856 /* Stop the traversal. */
2857 return result;
b8698a0f 2858
cf94b0fc
PB
2859 if (*x == NULL_RTX)
2860 /* There are no sub-expressions. */
2861 continue;
b8698a0f 2862
cf94b0fc
PB
2863 i = non_rtx_starting_operands[GET_CODE (*x)];
2864 if (i >= 0)
2865 {
2866 result = for_each_rtx_1 (*x, i, f, data);
2867 if (result != 0)
2868 return result;
2869 }
2870 }
2871 break;
2872
2873 default:
2874 /* Nothing to do. */
2875 break;
2876 }
2877 }
2878
2879 return 0;
2880}
2881
ccc2d6d0
MM
2882/* Traverse X via depth-first search, calling F for each
2883 sub-expression (including X itself). F is also passed the DATA.
2884 If F returns -1, do not traverse sub-expressions, but continue
2885 traversing the rest of the tree. If F ever returns any other
40f03658 2886 nonzero value, stop the traversal, and return the value returned
ccc2d6d0
MM
2887 by F. Otherwise, return 0. This function does not traverse inside
2888 tree structure that contains RTX_EXPRs, or into sub-expressions
2889 whose format code is `0' since it is not known whether or not those
2890 codes are actually RTL.
2891
2892 This routine is very general, and could (should?) be used to
2893 implement many of the other routines in this file. */
2894
ae0b51ef 2895int
0c20a65f 2896for_each_rtx (rtx *x, rtx_function f, void *data)
ccc2d6d0
MM
2897{
2898 int result;
ccc2d6d0
MM
2899 int i;
2900
2901 /* Call F on X. */
b987f237 2902 result = (*f) (x, data);
ccc2d6d0
MM
2903 if (result == -1)
2904 /* Do not traverse sub-expressions. */
2905 return 0;
2906 else if (result != 0)
2907 /* Stop the traversal. */
2908 return result;
2909
2910 if (*x == NULL_RTX)
2911 /* There are no sub-expressions. */
2912 return 0;
2913
cf94b0fc
PB
2914 i = non_rtx_starting_operands[GET_CODE (*x)];
2915 if (i < 0)
2916 return 0;
ccc2d6d0 2917
cf94b0fc 2918 return for_each_rtx_1 (*x, i, f, data);
ccc2d6d0 2919}
3ec2b590 2920
4deef538
AO
2921\f
2922
2923/* Data structure that holds the internal state communicated between
2924 for_each_inc_dec, for_each_inc_dec_find_mem and
2925 for_each_inc_dec_find_inc_dec. */
2926
2927struct for_each_inc_dec_ops {
2928 /* The function to be called for each autoinc operation found. */
2929 for_each_inc_dec_fn fn;
2930 /* The opaque argument to be passed to it. */
2931 void *arg;
2932 /* The MEM we're visiting, if any. */
2933 rtx mem;
2934};
2935
2936static int for_each_inc_dec_find_mem (rtx *r, void *d);
2937
2938/* Find PRE/POST-INC/DEC/MODIFY operations within *R, extract the
2939 operands of the equivalent add insn and pass the result to the
2940 operator specified by *D. */
2941
2942static int
2943for_each_inc_dec_find_inc_dec (rtx *r, void *d)
2944{
2945 rtx x = *r;
2946 struct for_each_inc_dec_ops *data = (struct for_each_inc_dec_ops *)d;
cf94b0fc 2947
4deef538
AO
2948 switch (GET_CODE (x))
2949 {
2950 case PRE_INC:
2951 case POST_INC:
2952 {
2953 int size = GET_MODE_SIZE (GET_MODE (data->mem));
2954 rtx r1 = XEXP (x, 0);
2955 rtx c = gen_int_mode (size, GET_MODE (r1));
2956 return data->fn (data->mem, x, r1, r1, c, data->arg);
2957 }
2958
2959 case PRE_DEC:
2960 case POST_DEC:
2961 {
2962 int size = GET_MODE_SIZE (GET_MODE (data->mem));
2963 rtx r1 = XEXP (x, 0);
2964 rtx c = gen_int_mode (-size, GET_MODE (r1));
2965 return data->fn (data->mem, x, r1, r1, c, data->arg);
2966 }
2967
2968 case PRE_MODIFY:
2969 case POST_MODIFY:
2970 {
2971 rtx r1 = XEXP (x, 0);
2972 rtx add = XEXP (x, 1);
2973 return data->fn (data->mem, x, r1, add, NULL, data->arg);
2974 }
2975
2976 case MEM:
2977 {
2978 rtx save = data->mem;
2979 int ret = for_each_inc_dec_find_mem (r, d);
2980 data->mem = save;
2981 return ret;
2982 }
2983
2984 default:
2985 return 0;
2986 }
2987}
2988
2989/* If *R is a MEM, find PRE/POST-INC/DEC/MODIFY operations within its
2990 address, extract the operands of the equivalent add insn and pass
2991 the result to the operator specified by *D. */
2992
2993static int
2994for_each_inc_dec_find_mem (rtx *r, void *d)
2995{
2996 rtx x = *r;
2997 if (x != NULL_RTX && MEM_P (x))
2998 {
2999 struct for_each_inc_dec_ops *data = (struct for_each_inc_dec_ops *) d;
3000 int result;
3001
3002 data->mem = x;
3003
3004 result = for_each_rtx (&XEXP (x, 0), for_each_inc_dec_find_inc_dec,
3005 data);
3006 if (result)
3007 return result;
3008
3009 return -1;
3010 }
3011 return 0;
3012}
3013
3014/* Traverse *X looking for MEMs, and for autoinc operations within
3015 them. For each such autoinc operation found, call FN, passing it
3016 the innermost enclosing MEM, the operation itself, the RTX modified
3017 by the operation, two RTXs (the second may be NULL) that, once
3018 added, represent the value to be held by the modified RTX
3019 afterwards, and ARG. FN is to return -1 to skip looking for other
3020 autoinc operations within the visited operation, 0 to continue the
3021 traversal, or any other value to have it returned to the caller of
3022 for_each_inc_dec. */
3023
3024int
3025for_each_inc_dec (rtx *x,
3026 for_each_inc_dec_fn fn,
3027 void *arg)
3028{
3029 struct for_each_inc_dec_ops data;
3030
3031 data.fn = fn;
3032 data.arg = arg;
3033 data.mem = NULL;
3034
3035 return for_each_rtx (x, for_each_inc_dec_find_mem, &data);
3036}
3037
3038\f
777b1b71
RH
3039/* Searches X for any reference to REGNO, returning the rtx of the
3040 reference found if any. Otherwise, returns NULL_RTX. */
3041
3042rtx
0c20a65f 3043regno_use_in (unsigned int regno, rtx x)
777b1b71 3044{
b3694847 3045 const char *fmt;
777b1b71
RH
3046 int i, j;
3047 rtx tem;
3048
f8cfc6aa 3049 if (REG_P (x) && REGNO (x) == regno)
777b1b71
RH
3050 return x;
3051
3052 fmt = GET_RTX_FORMAT (GET_CODE (x));
3053 for (i = GET_RTX_LENGTH (GET_CODE (x)) - 1; i >= 0; i--)
3054 {
3055 if (fmt[i] == 'e')
3056 {
3057 if ((tem = regno_use_in (regno, XEXP (x, i))))
3058 return tem;
3059 }
3060 else if (fmt[i] == 'E')
3061 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
3062 if ((tem = regno_use_in (regno , XVECEXP (x, i, j))))
3063 return tem;
3064 }
3065
3066 return NULL_RTX;
3067}
2dfa9a87 3068
e5c56fd9
JH
3069/* Return a value indicating whether OP, an operand of a commutative
3070 operation, is preferred as the first or second operand. The higher
3071 the value, the stronger the preference for being the first operand.
3072 We use negative values to indicate a preference for the first operand
3073 and positive values for the second operand. */
3074
9b3bd424 3075int
0c20a65f 3076commutative_operand_precedence (rtx op)
e5c56fd9 3077{
e3d6e740 3078 enum rtx_code code = GET_CODE (op);
b8698a0f 3079
e5c56fd9 3080 /* Constants always come the second operand. Prefer "nice" constants. */
e3d6e740 3081 if (code == CONST_INT)
7e0b4eae 3082 return -8;
e3d6e740 3083 if (code == CONST_DOUBLE)
7e0b4eae 3084 return -7;
091a3ac7
CF
3085 if (code == CONST_FIXED)
3086 return -7;
9ce79a7a 3087 op = avoid_constant_pool_reference (op);
79b82df3 3088 code = GET_CODE (op);
ec8e098d
PB
3089
3090 switch (GET_RTX_CLASS (code))
3091 {
3092 case RTX_CONST_OBJ:
3093 if (code == CONST_INT)
7e0b4eae 3094 return -6;
ec8e098d 3095 if (code == CONST_DOUBLE)
7e0b4eae 3096 return -5;
091a3ac7
CF
3097 if (code == CONST_FIXED)
3098 return -5;
7e0b4eae 3099 return -4;
ec8e098d
PB
3100
3101 case RTX_EXTRA:
3102 /* SUBREGs of objects should come second. */
3103 if (code == SUBREG && OBJECT_P (SUBREG_REG (op)))
7e0b4eae 3104 return -3;
6fb5fa3c 3105 return 0;
ec8e098d
PB
3106
3107 case RTX_OBJ:
3108 /* Complex expressions should be the first, so decrease priority
7e0b4eae
PB
3109 of objects. Prefer pointer objects over non pointer objects. */
3110 if ((REG_P (op) && REG_POINTER (op))
3111 || (MEM_P (op) && MEM_POINTER (op)))
3112 return -1;
3113 return -2;
ec8e098d
PB
3114
3115 case RTX_COMM_ARITH:
3116 /* Prefer operands that are themselves commutative to be first.
3117 This helps to make things linear. In particular,
3118 (and (and (reg) (reg)) (not (reg))) is canonical. */
3119 return 4;
3120
3121 case RTX_BIN_ARITH:
3122 /* If only one operand is a binary expression, it will be the first
3123 operand. In particular, (plus (minus (reg) (reg)) (neg (reg)))
3124 is canonical, although it will usually be further simplified. */
3125 return 2;
b8698a0f 3126
ec8e098d
PB
3127 case RTX_UNARY:
3128 /* Then prefer NEG and NOT. */
3129 if (code == NEG || code == NOT)
3130 return 1;
e5c56fd9 3131
ec8e098d
PB
3132 default:
3133 return 0;
3134 }
e5c56fd9
JH
3135}
3136
f63d1bf7 3137/* Return 1 iff it is necessary to swap operands of commutative operation
e5c56fd9
JH
3138 in order to canonicalize expression. */
3139
7e0b4eae 3140bool
0c20a65f 3141swap_commutative_operands_p (rtx x, rtx y)
e5c56fd9 3142{
9b3bd424
RH
3143 return (commutative_operand_precedence (x)
3144 < commutative_operand_precedence (y));
e5c56fd9 3145}
2dfa9a87
MH
3146
3147/* Return 1 if X is an autoincrement side effect and the register is
3148 not the stack pointer. */
3149int
f7d504c2 3150auto_inc_p (const_rtx x)
2dfa9a87
MH
3151{
3152 switch (GET_CODE (x))
3153 {
3154 case PRE_INC:
3155 case POST_INC:
3156 case PRE_DEC:
3157 case POST_DEC:
3158 case PRE_MODIFY:
3159 case POST_MODIFY:
3160 /* There are no REG_INC notes for SP. */
3161 if (XEXP (x, 0) != stack_pointer_rtx)
3162 return 1;
3163 default:
3164 break;
3165 }
3166 return 0;
3167}
3b10cf4b 3168
f9da5064 3169/* Return nonzero if IN contains a piece of rtl that has the address LOC. */
db7ba742 3170int
f7d504c2 3171loc_mentioned_in_p (rtx *loc, const_rtx in)
db7ba742 3172{
a52b023a
PB
3173 enum rtx_code code;
3174 const char *fmt;
db7ba742
R
3175 int i, j;
3176
a52b023a
PB
3177 if (!in)
3178 return 0;
3179
3180 code = GET_CODE (in);
3181 fmt = GET_RTX_FORMAT (code);
db7ba742
R
3182 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
3183 {
db7ba742
R
3184 if (fmt[i] == 'e')
3185 {
e0651058 3186 if (loc == &XEXP (in, i) || loc_mentioned_in_p (loc, XEXP (in, i)))
db7ba742
R
3187 return 1;
3188 }
3189 else if (fmt[i] == 'E')
3190 for (j = XVECLEN (in, i) - 1; j >= 0; j--)
e0651058
AO
3191 if (loc == &XVECEXP (in, i, j)
3192 || loc_mentioned_in_p (loc, XVECEXP (in, i, j)))
db7ba742
R
3193 return 1;
3194 }
3195 return 0;
3196}
/* Helper function for subreg_lsb.  Given a subreg's OUTER_MODE, INNER_MODE,
   and SUBREG_BYTE, return the bit offset where the subreg begins
   (counting from the least significant bit of the operand).  */

unsigned int
subreg_lsb_1 (enum machine_mode outer_mode,
	      enum machine_mode inner_mode,
	      unsigned int subreg_byte)
{
  unsigned int bitpos;
  unsigned int byte;
  unsigned int word;

  /* A paradoxical subreg begins at bit position 0.  */
  if (GET_MODE_PRECISION (outer_mode) > GET_MODE_PRECISION (inner_mode))
    return 0;

  if (WORDS_BIG_ENDIAN != BYTES_BIG_ENDIAN)
    /* If the subreg crosses a word boundary ensure that
       it also begins and ends on a word boundary.  */
    gcc_assert (!((subreg_byte % UNITS_PER_WORD
		  + GET_MODE_SIZE (outer_mode)) > UNITS_PER_WORD
		  && (subreg_byte % UNITS_PER_WORD
		      || GET_MODE_SIZE (outer_mode) % UNITS_PER_WORD)));

  /* Find the word within the inner value that holds the low end of the
     subreg.  On big-endian word machines the byte offset counts from
     the most significant word, so mirror it.  */
  if (WORDS_BIG_ENDIAN)
    word = (GET_MODE_SIZE (inner_mode)
	    - (subreg_byte + GET_MODE_SIZE (outer_mode))) / UNITS_PER_WORD;
  else
    word = subreg_byte / UNITS_PER_WORD;
  bitpos = word * BITS_PER_WORD;

  /* Likewise find the byte within that word, mirroring for
     big-endian byte order.  */
  if (BYTES_BIG_ENDIAN)
    byte = (GET_MODE_SIZE (inner_mode)
	    - (subreg_byte + GET_MODE_SIZE (outer_mode))) % UNITS_PER_WORD;
  else
    byte = subreg_byte % UNITS_PER_WORD;
  bitpos += byte * BITS_PER_UNIT;

  return bitpos;
}
3239
bb51e270
RS
3240/* Given a subreg X, return the bit offset where the subreg begins
3241 (counting from the least significant bit of the reg). */
3242
3243unsigned int
f7d504c2 3244subreg_lsb (const_rtx x)
bb51e270
RS
3245{
3246 return subreg_lsb_1 (GET_MODE (x), GET_MODE (SUBREG_REG (x)),
3247 SUBREG_BYTE (x));
3248}
/* Fill in information about a subreg of a hard register.
   xregno - A regno of an inner hard subreg_reg (or what will become one).
   xmode  - The mode of xregno.
   offset - The byte offset.
   ymode  - The mode of a top level SUBREG (or what may become one).
   info   - Pointer to structure to fill in.

   On return, info->representable_p says whether the subreg can be
   represented as a simple register adjustment, info->offset is the
   hard-register offset to add to XREGNO, and info->nregs is the number
   of hard registers the subreg occupies.  */
void
subreg_get_info (unsigned int xregno, enum machine_mode xmode,
		 unsigned int offset, enum machine_mode ymode,
		 struct subreg_info *info)
{
  int nregs_xmode, nregs_ymode;
  int mode_multiple, nregs_multiple;
  int offset_adj, y_offset, y_offset_adj;
  int regsize_xmode, regsize_ymode;
  bool rknown;		/* True once info->representable_p is decided.  */

  gcc_assert (xregno < FIRST_PSEUDO_REGISTER);

  rknown = false;

  /* If there are holes in a non-scalar mode in registers, we expect
     that it is made up of its units concatenated together.  */
  if (HARD_REGNO_NREGS_HAS_PADDING (xregno, xmode))
    {
      enum machine_mode xmode_unit;

      nregs_xmode = HARD_REGNO_NREGS_WITH_PADDING (xregno, xmode);
      if (GET_MODE_INNER (xmode) == VOIDmode)
	xmode_unit = xmode;
      else
	xmode_unit = GET_MODE_INNER (xmode);
      gcc_assert (HARD_REGNO_NREGS_HAS_PADDING (xregno, xmode_unit));
      gcc_assert (nregs_xmode
		  == (GET_MODE_NUNITS (xmode)
		      * HARD_REGNO_NREGS_WITH_PADDING (xregno, xmode_unit)));
      gcc_assert (hard_regno_nregs[xregno][xmode]
		  == (hard_regno_nregs[xregno][xmode_unit]
		      * GET_MODE_NUNITS (xmode)));

      /* You can only ask for a SUBREG of a value with holes in the middle
	 if you don't cross the holes.  (Such a SUBREG should be done by
	 picking a different register class, or doing it in memory if
	 necessary.)  An example of a value with holes is XCmode on 32-bit
	 x86 with -m128bit-long-double; it's represented in 6 32-bit registers,
	 3 for each part, but in memory it's two 128-bit parts.
	 Padding is assumed to be at the end (not necessarily the 'high part')
	 of each unit.  */
      if ((offset / GET_MODE_SIZE (xmode_unit) + 1
	   < GET_MODE_NUNITS (xmode))
	  && (offset / GET_MODE_SIZE (xmode_unit)
	      != ((offset + GET_MODE_SIZE (ymode) - 1)
		  / GET_MODE_SIZE (xmode_unit))))
	{
	  info->representable_p = false;
	  rknown = true;
	}
    }
  else
    nregs_xmode = hard_regno_nregs[xregno][xmode];

  nregs_ymode = hard_regno_nregs[xregno][ymode];

  /* Paradoxical subregs are otherwise valid.  */
  if (!rknown
      && offset == 0
      && GET_MODE_PRECISION (ymode) > GET_MODE_PRECISION (xmode))
    {
      info->representable_p = true;
      /* If this is a big endian paradoxical subreg, which uses more
	 actual hard registers than the original register, we must
	 return a negative offset so that we find the proper highpart
	 of the register.  */
      if (GET_MODE_SIZE (ymode) > UNITS_PER_WORD
	  ? REG_WORDS_BIG_ENDIAN : BYTES_BIG_ENDIAN)
	info->offset = nregs_xmode - nregs_ymode;
      else
	info->offset = 0;
      info->nregs = nregs_ymode;
      return;
    }

  /* If registers store different numbers of bits in the different
     modes, we cannot generally form this subreg.  */
  if (!HARD_REGNO_NREGS_HAS_PADDING (xregno, xmode)
      && !HARD_REGNO_NREGS_HAS_PADDING (xregno, ymode)
      && (GET_MODE_SIZE (xmode) % nregs_xmode) == 0
      && (GET_MODE_SIZE (ymode) % nregs_ymode) == 0)
    {
      /* Bytes held by a single hard register in each mode.  */
      regsize_xmode = GET_MODE_SIZE (xmode) / nregs_xmode;
      regsize_ymode = GET_MODE_SIZE (ymode) / nregs_ymode;
      if (!rknown && regsize_xmode > regsize_ymode && nregs_ymode > 1)
	{
	  info->representable_p = false;
	  info->nregs
	    = (GET_MODE_SIZE (ymode) + regsize_xmode - 1) / regsize_xmode;
	  info->offset = offset / regsize_xmode;
	  return;
	}
      if (!rknown && regsize_ymode > regsize_xmode && nregs_xmode > 1)
	{
	  info->representable_p = false;
	  info->nregs
	    = (GET_MODE_SIZE (ymode) + regsize_xmode - 1) / regsize_xmode;
	  info->offset = offset / regsize_xmode;
	  return;
	}
    }

  /* Lowpart subregs are otherwise valid.  */
  if (!rknown && offset == subreg_lowpart_offset (ymode, xmode))
    {
      info->representable_p = true;
      rknown = true;

      if (offset == 0 || nregs_xmode == nregs_ymode)
	{
	  info->offset = 0;
	  info->nregs = nregs_ymode;
	  return;
	}
    }

  /* This should always pass, otherwise we don't know how to verify
     the constraint.  These conditions may be relaxed but
     subreg_regno_offset would need to be redesigned.  */
  gcc_assert ((GET_MODE_SIZE (xmode) % GET_MODE_SIZE (ymode)) == 0);
  gcc_assert ((nregs_xmode % nregs_ymode) == 0);

  /* When words and registers disagree on endianness, remap OFFSET so
     the word-level part is mirrored while the within-word part is
     kept.  */
  if (WORDS_BIG_ENDIAN != REG_WORDS_BIG_ENDIAN
      && GET_MODE_SIZE (xmode) > UNITS_PER_WORD)
    {
      HOST_WIDE_INT xsize = GET_MODE_SIZE (xmode);
      HOST_WIDE_INT ysize = GET_MODE_SIZE (ymode);
      HOST_WIDE_INT off_low = offset & (ysize - 1);
      HOST_WIDE_INT off_high = offset & ~(ysize - 1);
      offset = (xsize - ysize - off_high) | off_low;
    }
  /* The XMODE value can be seen as a vector of NREGS_XMODE
     values.  The subreg must represent a lowpart of given field.
     Compute what field it is.  */
  offset_adj = offset;
  offset_adj -= subreg_lowpart_offset (ymode,
				       mode_for_size (GET_MODE_BITSIZE (xmode)
						      / nregs_xmode,
						      MODE_INT, 0));

  /* Size of ymode must not be greater than the size of xmode.  */
  mode_multiple = GET_MODE_SIZE (xmode) / GET_MODE_SIZE (ymode);
  gcc_assert (mode_multiple != 0);

  y_offset = offset / GET_MODE_SIZE (ymode);
  y_offset_adj = offset_adj / GET_MODE_SIZE (ymode);
  nregs_multiple = nregs_xmode / nregs_ymode;

  gcc_assert ((offset_adj % GET_MODE_SIZE (ymode)) == 0);
  gcc_assert ((mode_multiple % nregs_multiple) == 0);

  /* Representable iff the adjusted offset lands on a register
     boundary of the implied field decomposition.  */
  if (!rknown)
    {
      info->representable_p = (!(y_offset_adj % (mode_multiple / nregs_multiple)));
      rknown = true;
    }
  info->offset = (y_offset / (mode_multiple / nregs_multiple)) * nregs_ymode;
  info->nregs = nregs_ymode;
}
3416
3417/* This function returns the regno offset of a subreg expression.
3418 xregno - A regno of an inner hard subreg_reg (or what will become one).
3419 xmode - The mode of xregno.
3420 offset - The byte offset.
3421 ymode - The mode of a top level SUBREG (or what may become one).
3422 RETURN - The regno offset which would be used. */
3423unsigned int
3424subreg_regno_offset (unsigned int xregno, enum machine_mode xmode,
3425 unsigned int offset, enum machine_mode ymode)
3426{
3427 struct subreg_info info;
3428 subreg_get_info (xregno, xmode, offset, ymode, &info);
3429 return info.offset;
3430}
3431
3432/* This function returns true when the offset is representable via
3433 subreg_offset in the given regno.
3434 xregno - A regno of an inner hard subreg_reg (or what will become one).
3435 xmode - The mode of xregno.
3436 offset - The byte offset.
3437 ymode - The mode of a top level SUBREG (or what may become one).
3438 RETURN - Whether the offset is representable. */
3439bool
3440subreg_offset_representable_p (unsigned int xregno, enum machine_mode xmode,
3441 unsigned int offset, enum machine_mode ymode)
3442{
3443 struct subreg_info info;
3444 subreg_get_info (xregno, xmode, offset, ymode, &info);
05cee290 3445 return info.representable_p;
04c5580f
JH
3446}
/* Return the number of a YMODE register to which

       (subreg:YMODE (reg:XMODE XREGNO) OFFSET)

   can be simplified.  Return -1 if the subreg can't be simplified.

   XREGNO is a hard register number.  */

int
simplify_subreg_regno (unsigned int xregno, enum machine_mode xmode,
		       unsigned int offset, enum machine_mode ymode)
{
  struct subreg_info info;
  unsigned int yregno;

#ifdef CANNOT_CHANGE_MODE_CLASS
  /* Give the backend a chance to disallow the mode change.  */
  if (GET_MODE_CLASS (xmode) != MODE_COMPLEX_INT
      && GET_MODE_CLASS (xmode) != MODE_COMPLEX_FLOAT
      && REG_CANNOT_CHANGE_MODE_P (xregno, xmode, ymode))
    return -1;
#endif

  /* We shouldn't simplify stack-related registers.  */
  if ((!reload_completed || frame_pointer_needed)
      && xregno == FRAME_POINTER_REGNUM)
    return -1;

  if (FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
      && xregno == ARG_POINTER_REGNUM)
    return -1;

  if (xregno == STACK_POINTER_REGNUM)
    return -1;

  /* Try to get the register offset.  */
  subreg_get_info (xregno, xmode, offset, ymode, &info);
  if (!info.representable_p)
    return -1;

  /* Make sure that the offsetted register value is in range.  */
  yregno = xregno + info.offset;
  if (!HARD_REGISTER_NUM_P (yregno))
    return -1;

  /* See whether (reg:YMODE YREGNO) is valid.

     ??? We allow invalid registers if (reg:XMODE XREGNO) is also invalid.
     This is a kludge to work around how complex FP arguments are passed
     on IA-64 and should be fixed.  See PR target/49226.  */
  if (!HARD_REGNO_MODE_OK (yregno, ymode)
      && HARD_REGNO_MODE_OK (xregno, xmode))
    return -1;

  return (int) yregno;
}
3504
dc297297 3505/* Return the final regno that a subreg expression refers to. */
a6a2274a 3506unsigned int
f7d504c2 3507subreg_regno (const_rtx x)
ddef6bc7
JJ
3508{
3509 unsigned int ret;
3510 rtx subreg = SUBREG_REG (x);
3511 int regno = REGNO (subreg);
3512
a6a2274a
KH
3513 ret = regno + subreg_regno_offset (regno,
3514 GET_MODE (subreg),
ddef6bc7
JJ
3515 SUBREG_BYTE (x),
3516 GET_MODE (x));
3517 return ret;
3518
3519}
f1f4e530
JM
3520
3521/* Return the number of registers that a subreg expression refers
3522 to. */
3523unsigned int
f7d504c2 3524subreg_nregs (const_rtx x)
ba49cb7b
KZ
3525{
3526 return subreg_nregs_with_regno (REGNO (SUBREG_REG (x)), x);
3527}
3528
3529/* Return the number of registers that a subreg REG with REGNO
3530 expression refers to. This is a copy of the rtlanal.c:subreg_nregs
3531 changed so that the regno can be passed in. */
3532
3533unsigned int
3534subreg_nregs_with_regno (unsigned int regno, const_rtx x)
f1f4e530
JM
3535{
3536 struct subreg_info info;
3537 rtx subreg = SUBREG_REG (x);
f1f4e530
JM
3538
3539 subreg_get_info (regno, GET_MODE (subreg), SUBREG_BYTE (x), GET_MODE (x),
3540 &info);
3541 return info.nregs;
3542}
3543
ba49cb7b 3544
/* Bookkeeping for find_first_parameter_load: the set of parameter
   hard registers still waiting for their defining store.  */
struct parms_set_data
{
  int nregs;		/* Number of registers still set in REGS.  */
  HARD_REG_SET regs;	/* Parameter registers not yet seen stored.  */
};
3550
3551/* Helper function for noticing stores to parameter registers. */
3552static void
7bc980e1 3553parms_set (rtx x, const_rtx pat ATTRIBUTE_UNUSED, void *data)
833366d6 3554{
1634b18f 3555 struct parms_set_data *const d = (struct parms_set_data *) data;
833366d6
JH
3556 if (REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER
3557 && TEST_HARD_REG_BIT (d->regs, REGNO (x)))
3558 {
3559 CLEAR_HARD_REG_BIT (d->regs, REGNO (x));
3560 d->nregs--;
3561 }
3562}
/* Look backward for first parameter to be loaded.
   Note that loads of all parameters will not necessarily be
   found if CSE has eliminated some of them (e.g., an argument
   to the outer function is passed down as a parameter).
   Do not skip BOUNDARY.

   CALL_INSN is the call whose parameter setup we are scanning for;
   the return value is the earliest insn found that sets one of the
   call's parameter registers (or CALL_INSN itself if none).  */
rtx
find_first_parameter_load (rtx call_insn, rtx boundary)
{
  struct parms_set_data parm;
  rtx p, before, first_set;

  /* Since different machines initialize their parameter registers
     in different orders, assume nothing.  Collect the set of all
     parameter registers.  */
  CLEAR_HARD_REG_SET (parm.regs);
  parm.nregs = 0;
  for (p = CALL_INSN_FUNCTION_USAGE (call_insn); p; p = XEXP (p, 1))
    if (GET_CODE (XEXP (p, 0)) == USE
	&& REG_P (XEXP (XEXP (p, 0), 0)))
      {
	gcc_assert (REGNO (XEXP (XEXP (p, 0), 0)) < FIRST_PSEUDO_REGISTER);

	/* We only care about registers which can hold function
	   arguments.  */
	if (!FUNCTION_ARG_REGNO_P (REGNO (XEXP (XEXP (p, 0), 0))))
	  continue;

	SET_HARD_REG_BIT (parm.regs, REGNO (XEXP (XEXP (p, 0), 0)));
	parm.nregs++;
      }
  before = call_insn;
  first_set = call_insn;

  /* Search backward for the first set of a register in this set.  */
  while (parm.nregs && before != boundary)
    {
      before = PREV_INSN (before);

      /* It is possible that some loads got CSEed from one call to
	 another.  Stop in that case.  */
      if (CALL_P (before))
	break;

      /* Our caller needs either ensure that we will find all sets
	 (in case code has not been optimized yet), or take care
	 for possible labels in a way by setting boundary to preceding
	 CODE_LABEL.  */
      if (LABEL_P (before))
	{
	  gcc_assert (before == boundary);
	  break;
	}

      if (INSN_P (before))
	{
	  int nregs_old = parm.nregs;
	  note_stores (PATTERN (before), parms_set, &parm);
	  /* If we found something that did not set a parameter reg,
	     we're done.  Do not keep going, as that might result
	     in hoisting an insn before the setting of a pseudo
	     that is used by the hoisted insn. */
	  if (nregs_old != parm.nregs)
	    first_set = before;
	  else
	    break;
	}
    }
  return first_set;
}
3dec4024 3633
14b493d6 3634/* Return true if we should avoid inserting code between INSN and preceding
3dec4024
JH
3635 call instruction. */
3636
3637bool
9678086d 3638keep_with_call_p (const_rtx insn)
3dec4024
JH
3639{
3640 rtx set;
3641
3642 if (INSN_P (insn) && (set = single_set (insn)) != NULL)
3643 {
f8cfc6aa 3644 if (REG_P (SET_DEST (set))
5df533b3 3645 && REGNO (SET_DEST (set)) < FIRST_PSEUDO_REGISTER
3dec4024
JH
3646 && fixed_regs[REGNO (SET_DEST (set))]
3647 && general_operand (SET_SRC (set), VOIDmode))
3648 return true;
f8cfc6aa 3649 if (REG_P (SET_SRC (set))
82f81f18 3650 && targetm.calls.function_value_regno_p (REGNO (SET_SRC (set)))
f8cfc6aa 3651 && REG_P (SET_DEST (set))
3dec4024
JH
3652 && REGNO (SET_DEST (set)) >= FIRST_PSEUDO_REGISTER)
3653 return true;
bc204393
RH
3654 /* There may be a stack pop just after the call and before the store
3655 of the return register. Search for the actual store when deciding
3656 if we can break or not. */
3dec4024
JH
3657 if (SET_DEST (set) == stack_pointer_rtx)
3658 {
75547801 3659 /* This CONST_CAST is okay because next_nonnote_insn just
4e9b57fa 3660 returns its argument and we assign it to a const_rtx
75547801 3661 variable. */
b1d5455a 3662 const_rtx i2 = next_nonnote_insn (CONST_CAST_RTX(insn));
bc204393 3663 if (i2 && keep_with_call_p (i2))
3dec4024
JH
3664 return true;
3665 }
3666 }
3667 return false;
3668}
71d2c5bd 3669
432f982f
JH
3670/* Return true if LABEL is a target of JUMP_INSN. This applies only
3671 to non-complex jumps. That is, direct unconditional, conditional,
3672 and tablejumps, but not computed jumps or returns. It also does
3673 not apply to the fallthru case of a conditional jump. */
3674
3675bool
f7d504c2 3676label_is_jump_target_p (const_rtx label, const_rtx jump_insn)
432f982f
JH
3677{
3678 rtx tmp = JUMP_LABEL (jump_insn);
3679
3680 if (label == tmp)
3681 return true;
3682
3683 if (tablejump_p (jump_insn, NULL, &tmp))
3684 {
3685 rtvec vec = XVEC (PATTERN (tmp),
3686 GET_CODE (PATTERN (tmp)) == ADDR_DIFF_VEC);
3687 int i, veclen = GET_NUM_ELEM (vec);
3688
3689 for (i = 0; i < veclen; ++i)
3690 if (XEXP (RTVEC_ELT (vec, i), 0) == label)
3691 return true;
3692 }
3693
cb2f563b
HPN
3694 if (find_reg_note (jump_insn, REG_LABEL_TARGET, label))
3695 return true;
3696
432f982f
JH
3697 return false;
3698}
/* Return an estimate of the cost of computing rtx X.
   One use is in cse, to decide which expression to keep in the hash table.
   Another is in rtl generation, to pick the cheapest way to multiply.
   Other uses like the latter are expected in the future.

   X appears as operand OPNO in an expression with code OUTER_CODE.
   SPEED specifies whether costs optimized for speed or size should
   be returned.  */

int
rtx_cost (rtx x, enum rtx_code outer_code, int opno, bool speed)
{
  int i, j;
  enum rtx_code code;
  const char *fmt;
  int total;
  int factor;

  if (x == 0)
    return 0;

  /* A size N times larger than UNITS_PER_WORD likely needs N times as
     many insns, taking N times as long.  */
  factor = GET_MODE_SIZE (GET_MODE (x)) / UNITS_PER_WORD;
  if (factor == 0)
    factor = 1;

  /* Compute the default costs of certain things.
     Note that targetm.rtx_costs can override the defaults.  */

  code = GET_CODE (x);
  switch (code)
    {
    case MULT:
      /* Multiplication has time-complexity O(N*N), where N is the
	 number of units (translated from digits) when using
	 schoolbook long multiplication.  */
      total = factor * factor * COSTS_N_INSNS (5);
      break;
    case DIV:
    case UDIV:
    case MOD:
    case UMOD:
      /* Similarly, complexity for schoolbook long division.  */
      total = factor * factor * COSTS_N_INSNS (7);
      break;
    case USE:
      /* Used in combine.c as a marker.  */
      total = 0;
      break;
    case SET:
      /* A SET doesn't have a mode, so let's look at the SET_DEST to get
	 the mode for the factor.  */
      factor = GET_MODE_SIZE (GET_MODE (SET_DEST (x))) / UNITS_PER_WORD;
      if (factor == 0)
	factor = 1;
      /* Pass through.  */
      /* FALLTHRU: intentional fall-through into the default cost.  */
    default:
      total = factor * COSTS_N_INSNS (1);
    }

  switch (code)
    {
    case REG:
      /* Registers are free.  */
      return 0;

    case SUBREG:
      total = 0;
      /* If we can't tie these modes, make this expensive.  The larger
	 the mode, the more expensive it is.  */
      if (! MODES_TIEABLE_P (GET_MODE (x), GET_MODE (SUBREG_REG (x))))
	return COSTS_N_INSNS (2 + factor);
      break;

    default:
      /* Let the target override the default cost computed above.  */
      if (targetm.rtx_costs (x, code, outer_code, opno, &total, speed))
	return total;
      break;
    }

  /* Sum the costs of the sub-rtx's, plus cost of this operation,
     which is already in total.  */

  fmt = GET_RTX_FORMAT (code);
  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    if (fmt[i] == 'e')
      total += rtx_cost (XEXP (x, i), code, i, speed);
    else if (fmt[i] == 'E')
      for (j = 0; j < XVECLEN (x, i); j++)
	total += rtx_cost (XVECEXP (x, i, j), code, i, speed);

  return total;
}
22939744
BS
3794
3795/* Fill in the structure C with information about both speed and size rtx
68f932c4 3796 costs for X, which is operand OPNO in an expression with code OUTER. */
22939744
BS
3797
3798void
68f932c4
RS
3799get_full_rtx_cost (rtx x, enum rtx_code outer, int opno,
3800 struct full_rtx_costs *c)
22939744 3801{
68f932c4
RS
3802 c->speed = rtx_cost (x, outer, opno, true);
3803 c->size = rtx_cost (x, outer, opno, false);
22939744
BS
3804}
3805
f894b69b
PB
3806\f
3807/* Return cost of address expression X.
b8698a0f 3808 Expect that X is properly formed address reference.
f40751dd
JH
3809
3810 SPEED parameter specify whether costs optimized for speed or size should
3811 be returned. */
f894b69b
PB
3812
3813int
09e881c9 3814address_cost (rtx x, enum machine_mode mode, addr_space_t as, bool speed)
f894b69b 3815{
f894b69b
PB
3816 /* We may be asked for cost of various unusual addresses, such as operands
3817 of push instruction. It is not worthwhile to complicate writing
3818 of the target hook by such cases. */
3819
09e881c9 3820 if (!memory_address_addr_space_p (mode, x, as))
f894b69b
PB
3821 return 1000;
3822
f40751dd 3823 return targetm.address_cost (x, speed);
f894b69b
PB
3824}
3825
3826/* If the target doesn't override, compute the cost as with arithmetic. */
3827
3828int
f40751dd 3829default_address_cost (rtx x, bool speed)
f894b69b 3830{
68f932c4 3831 return rtx_cost (x, MEM, 0, speed);
f894b69b 3832}
2f93eea8
PB
3833\f
3834
3835unsigned HOST_WIDE_INT
fa233e34 3836nonzero_bits (const_rtx x, enum machine_mode mode)
2f93eea8
PB
3837{
3838 return cached_nonzero_bits (x, mode, NULL_RTX, VOIDmode, 0);
3839}
3840
3841unsigned int
fa233e34 3842num_sign_bit_copies (const_rtx x, enum machine_mode mode)
2f93eea8
PB
3843{
3844 return cached_num_sign_bit_copies (x, mode, NULL_RTX, VOIDmode, 0);
3845}
/* The function cached_nonzero_bits is a wrapper around nonzero_bits1.
   It avoids exponential behavior in nonzero_bits1 when X has
   identical subexpressions on the first or the second level.

   KNOWN_X/KNOWN_MODE/KNOWN_RET carry one memoized result: if X and
   MODE match them, KNOWN_RET is returned immediately.  */

static unsigned HOST_WIDE_INT
cached_nonzero_bits (const_rtx x, enum machine_mode mode, const_rtx known_x,
		     enum machine_mode known_mode,
		     unsigned HOST_WIDE_INT known_ret)
{
  /* Cache hit: this exact (X, MODE) was already computed.  */
  if (x == known_x && mode == known_mode)
    return known_ret;

  /* Try to find identical subexpressions.  If found call
     nonzero_bits1 on X with the subexpressions as KNOWN_X and the
     precomputed value for the subexpression as KNOWN_RET.  */

  if (ARITHMETIC_P (x))
    {
      rtx x0 = XEXP (x, 0);
      rtx x1 = XEXP (x, 1);

      /* Check the first level.  */
      if (x0 == x1)
	return nonzero_bits1 (x, mode, x0, mode,
			      cached_nonzero_bits (x0, mode, known_x,
						   known_mode, known_ret));

      /* Check the second level.  */
      if (ARITHMETIC_P (x0)
	  && (x1 == XEXP (x0, 0) || x1 == XEXP (x0, 1)))
	return nonzero_bits1 (x, mode, x1, mode,
			      cached_nonzero_bits (x1, mode, known_x,
						   known_mode, known_ret));

      if (ARITHMETIC_P (x1)
	  && (x0 == XEXP (x1, 0) || x0 == XEXP (x1, 1)))
	return nonzero_bits1 (x, mode, x0, mode,
			      cached_nonzero_bits (x0, mode, known_x,
						   known_mode, known_ret));
    }

  /* No sharing found; fall back to the plain recursive computation.  */
  return nonzero_bits1 (x, mode, known_x, known_mode, known_ret);
}
3890
3891/* We let num_sign_bit_copies recur into nonzero_bits as that is useful.
3892 We don't let nonzero_bits recur into num_sign_bit_copies, because that
3893 is less useful. We can't allow both, because that results in exponential
3894 run time recursion. There is a nullstone testcase that triggered
3895 this. This macro avoids accidental uses of num_sign_bit_copies. */
3896#define cached_num_sign_bit_copies sorry_i_am_preventing_exponential_behavior
3897
3898/* Given an expression, X, compute which bits in X can be nonzero.
3899 We don't care about bits outside of those defined in MODE.
3900
3901 For most X this is simply GET_MODE_MASK (GET_MODE (MODE)), but if X is
3902 an arithmetic operation, we can do better. */
3903
3904static unsigned HOST_WIDE_INT
fa233e34 3905nonzero_bits1 (const_rtx x, enum machine_mode mode, const_rtx known_x,
2f93eea8
PB
3906 enum machine_mode known_mode,
3907 unsigned HOST_WIDE_INT known_ret)
3908{
3909 unsigned HOST_WIDE_INT nonzero = GET_MODE_MASK (mode);
3910 unsigned HOST_WIDE_INT inner_nz;
3911 enum rtx_code code;
2d0c270f 3912 enum machine_mode inner_mode;
5511bc5a 3913 unsigned int mode_width = GET_MODE_PRECISION (mode);
2f93eea8 3914
ff596cd2
RL
3915 /* For floating-point and vector values, assume all bits are needed. */
3916 if (FLOAT_MODE_P (GET_MODE (x)) || FLOAT_MODE_P (mode)
3917 || VECTOR_MODE_P (GET_MODE (x)) || VECTOR_MODE_P (mode))
2f93eea8
PB
3918 return nonzero;
3919
3920 /* If X is wider than MODE, use its mode instead. */
5511bc5a 3921 if (GET_MODE_PRECISION (GET_MODE (x)) > mode_width)
2f93eea8
PB
3922 {
3923 mode = GET_MODE (x);
3924 nonzero = GET_MODE_MASK (mode);
5511bc5a 3925 mode_width = GET_MODE_PRECISION (mode);
2f93eea8
PB
3926 }
3927
3928 if (mode_width > HOST_BITS_PER_WIDE_INT)
3929 /* Our only callers in this case look for single bit values. So
3930 just return the mode mask. Those tests will then be false. */
3931 return nonzero;
3932
3933#ifndef WORD_REGISTER_OPERATIONS
3934 /* If MODE is wider than X, but both are a single word for both the host
3935 and target machines, we can compute this from which bits of the
3936 object might be nonzero in its own mode, taking into account the fact
3937 that on many CISC machines, accessing an object in a wider mode
3938 causes the high-order bits to become undefined. So they are
3939 not known to be zero. */
3940
3941 if (GET_MODE (x) != VOIDmode && GET_MODE (x) != mode
5511bc5a
BS
3942 && GET_MODE_PRECISION (GET_MODE (x)) <= BITS_PER_WORD
3943 && GET_MODE_PRECISION (GET_MODE (x)) <= HOST_BITS_PER_WIDE_INT
3944 && GET_MODE_PRECISION (mode) > GET_MODE_PRECISION (GET_MODE (x)))
2f93eea8
PB
3945 {
3946 nonzero &= cached_nonzero_bits (x, GET_MODE (x),
3947 known_x, known_mode, known_ret);
3948 nonzero |= GET_MODE_MASK (mode) & ~GET_MODE_MASK (GET_MODE (x));
3949 return nonzero;
3950 }
3951#endif
3952
3953 code = GET_CODE (x);
3954 switch (code)
3955 {
3956 case REG:
3957#if defined(POINTERS_EXTEND_UNSIGNED) && !defined(HAVE_ptr_extend)
3958 /* If pointers extend unsigned and this is a pointer in Pmode, say that
3959 all the bits above ptr_mode are known to be zero. */
5932a4d4 3960 /* As we do not know which address space the pointer is referring to,
d4ebfa65
BE
3961 we can do this only if the target does not support different pointer
3962 or address modes depending on the address space. */
3963 if (target_default_pointer_address_modes_p ()
3964 && POINTERS_EXTEND_UNSIGNED && GET_MODE (x) == Pmode
2f93eea8
PB
3965 && REG_POINTER (x))
3966 nonzero &= GET_MODE_MASK (ptr_mode);
3967#endif
3968
3969 /* Include declared information about alignment of pointers. */
3970 /* ??? We don't properly preserve REG_POINTER changes across
3971 pointer-to-integer casts, so we can't trust it except for
3972 things that we know must be pointers. See execute/960116-1.c. */
3973 if ((x == stack_pointer_rtx
3974 || x == frame_pointer_rtx
3975 || x == arg_pointer_rtx)
3976 && REGNO_POINTER_ALIGN (REGNO (x)))
3977 {
3978 unsigned HOST_WIDE_INT alignment
3979 = REGNO_POINTER_ALIGN (REGNO (x)) / BITS_PER_UNIT;
3980
3981#ifdef PUSH_ROUNDING
3982 /* If PUSH_ROUNDING is defined, it is possible for the
3983 stack to be momentarily aligned only to that amount,
3984 so we pick the least alignment. */
3985 if (x == stack_pointer_rtx && PUSH_ARGS)
3986 alignment = MIN ((unsigned HOST_WIDE_INT) PUSH_ROUNDING (1),
3987 alignment);
3988#endif
3989
3990 nonzero &= ~(alignment - 1);
3991 }
3992
3993 {
3994 unsigned HOST_WIDE_INT nonzero_for_hook = nonzero;
55d796da 3995 rtx new_rtx = rtl_hooks.reg_nonzero_bits (x, mode, known_x,
2f93eea8
PB
3996 known_mode, known_ret,
3997 &nonzero_for_hook);
3998
55d796da
KG
3999 if (new_rtx)
4000 nonzero_for_hook &= cached_nonzero_bits (new_rtx, mode, known_x,
2f93eea8
PB
4001 known_mode, known_ret);
4002
4003 return nonzero_for_hook;
4004 }
4005
4006 case CONST_INT:
4007#ifdef SHORT_IMMEDIATES_SIGN_EXTEND
4008 /* If X is negative in MODE, sign-extend the value. */
c04fc4f0
EB
4009 if (INTVAL (x) > 0
4010 && mode_width < BITS_PER_WORD
4011 && (UINTVAL (x) & ((unsigned HOST_WIDE_INT) 1 << (mode_width - 1)))
4012 != 0)
4013 return UINTVAL (x) | ((unsigned HOST_WIDE_INT) (-1) << mode_width);
2f93eea8
PB
4014#endif
4015
c04fc4f0 4016 return UINTVAL (x);
2f93eea8
PB
4017
4018 case MEM:
4019#ifdef LOAD_EXTEND_OP
4020 /* In many, if not most, RISC machines, reading a byte from memory
4021 zeros the rest of the register. Noticing that fact saves a lot
4022 of extra zero-extends. */
4023 if (LOAD_EXTEND_OP (GET_MODE (x)) == ZERO_EXTEND)
4024 nonzero &= GET_MODE_MASK (GET_MODE (x));
4025#endif
4026 break;
4027
4028 case EQ: case NE:
4029 case UNEQ: case LTGT:
4030 case GT: case GTU: case UNGT:
4031 case LT: case LTU: case UNLT:
4032 case GE: case GEU: case UNGE:
4033 case LE: case LEU: case UNLE:
4034 case UNORDERED: case ORDERED:
2f93eea8
PB
4035 /* If this produces an integer result, we know which bits are set.
4036 Code here used to clear bits outside the mode of X, but that is
4037 now done above. */
b8698a0f
L
4038 /* Mind that MODE is the mode the caller wants to look at this
4039 operation in, and not the actual operation mode. We can wind
505ac507
RH
4040 up with (subreg:DI (gt:V4HI x y)), and we don't have anything
4041 that describes the results of a vector compare. */
4042 if (GET_MODE_CLASS (GET_MODE (x)) == MODE_INT
2f93eea8
PB
4043 && mode_width <= HOST_BITS_PER_WIDE_INT)
4044 nonzero = STORE_FLAG_VALUE;
4045 break;
4046
4047 case NEG:
4048#if 0
4049 /* Disabled to avoid exponential mutual recursion between nonzero_bits
4050 and num_sign_bit_copies. */
4051 if (num_sign_bit_copies (XEXP (x, 0), GET_MODE (x))
5511bc5a 4052 == GET_MODE_PRECISION (GET_MODE (x)))
2f93eea8
PB
4053 nonzero = 1;
4054#endif
4055
86cdf393 4056 if (GET_MODE_PRECISION (GET_MODE (x)) < mode_width)
2f93eea8
PB
4057 nonzero |= (GET_MODE_MASK (mode) & ~GET_MODE_MASK (GET_MODE (x)));
4058 break;
4059
4060 case ABS:
4061#if 0
4062 /* Disabled to avoid exponential mutual recursion between nonzero_bits
4063 and num_sign_bit_copies. */
4064 if (num_sign_bit_copies (XEXP (x, 0), GET_MODE (x))
5511bc5a 4065 == GET_MODE_PRECISION (GET_MODE (x)))
2f93eea8
PB
4066 nonzero = 1;
4067#endif
4068 break;
4069
4070 case TRUNCATE:
4071 nonzero &= (cached_nonzero_bits (XEXP (x, 0), mode,
4072 known_x, known_mode, known_ret)
4073 & GET_MODE_MASK (mode));
4074 break;
4075
4076 case ZERO_EXTEND:
4077 nonzero &= cached_nonzero_bits (XEXP (x, 0), mode,
4078 known_x, known_mode, known_ret);
4079 if (GET_MODE (XEXP (x, 0)) != VOIDmode)
4080 nonzero &= GET_MODE_MASK (GET_MODE (XEXP (x, 0)));
4081 break;
4082
4083 case SIGN_EXTEND:
4084 /* If the sign bit is known clear, this is the same as ZERO_EXTEND.
4085 Otherwise, show all the bits in the outer mode but not the inner
4086 may be nonzero. */
4087 inner_nz = cached_nonzero_bits (XEXP (x, 0), mode,
4088 known_x, known_mode, known_ret);
4089 if (GET_MODE (XEXP (x, 0)) != VOIDmode)
4090 {
4091 inner_nz &= GET_MODE_MASK (GET_MODE (XEXP (x, 0)));
2d0c270f 4092 if (val_signbit_known_set_p (GET_MODE (XEXP (x, 0)), inner_nz))
2f93eea8
PB
4093 inner_nz |= (GET_MODE_MASK (mode)
4094 & ~GET_MODE_MASK (GET_MODE (XEXP (x, 0))));
4095 }
4096
4097 nonzero &= inner_nz;
4098 break;
4099
4100 case AND:
4101 nonzero &= cached_nonzero_bits (XEXP (x, 0), mode,
4102 known_x, known_mode, known_ret)
4103 & cached_nonzero_bits (XEXP (x, 1), mode,
4104 known_x, known_mode, known_ret);
4105 break;
4106
4107 case XOR: case IOR:
4108 case UMIN: case UMAX: case SMIN: case SMAX:
4109 {
c04fc4f0
EB
4110 unsigned HOST_WIDE_INT nonzero0
4111 = cached_nonzero_bits (XEXP (x, 0), mode,
4112 known_x, known_mode, known_ret);
2f93eea8
PB
4113
4114 /* Don't call nonzero_bits for the second time if it cannot change
4115 anything. */
4116 if ((nonzero & nonzero0) != nonzero)
4117 nonzero &= nonzero0
4118 | cached_nonzero_bits (XEXP (x, 1), mode,
4119 known_x, known_mode, known_ret);
4120 }
4121 break;
4122
4123 case PLUS: case MINUS:
4124 case MULT:
4125 case DIV: case UDIV:
4126 case MOD: case UMOD:
4127 /* We can apply the rules of arithmetic to compute the number of
4128 high- and low-order zero bits of these operations. We start by
4129 computing the width (position of the highest-order nonzero bit)
4130 and the number of low-order zero bits for each value. */
4131 {
c04fc4f0
EB
4132 unsigned HOST_WIDE_INT nz0
4133 = cached_nonzero_bits (XEXP (x, 0), mode,
4134 known_x, known_mode, known_ret);
4135 unsigned HOST_WIDE_INT nz1
4136 = cached_nonzero_bits (XEXP (x, 1), mode,
4137 known_x, known_mode, known_ret);
5511bc5a 4138 int sign_index = GET_MODE_PRECISION (GET_MODE (x)) - 1;
2f93eea8
PB
4139 int width0 = floor_log2 (nz0) + 1;
4140 int width1 = floor_log2 (nz1) + 1;
4141 int low0 = floor_log2 (nz0 & -nz0);
4142 int low1 = floor_log2 (nz1 & -nz1);
c04fc4f0
EB
4143 unsigned HOST_WIDE_INT op0_maybe_minusp
4144 = nz0 & ((unsigned HOST_WIDE_INT) 1 << sign_index);
4145 unsigned HOST_WIDE_INT op1_maybe_minusp
4146 = nz1 & ((unsigned HOST_WIDE_INT) 1 << sign_index);
2f93eea8
PB
4147 unsigned int result_width = mode_width;
4148 int result_low = 0;
4149
4150 switch (code)
4151 {
4152 case PLUS:
4153 result_width = MAX (width0, width1) + 1;
4154 result_low = MIN (low0, low1);
4155 break;
4156 case MINUS:
4157 result_low = MIN (low0, low1);
4158 break;
4159 case MULT:
4160 result_width = width0 + width1;
4161 result_low = low0 + low1;
4162 break;
4163 case DIV:
4164 if (width1 == 0)
4165 break;
c04fc4f0 4166 if (!op0_maybe_minusp && !op1_maybe_minusp)
2f93eea8
PB
4167 result_width = width0;
4168 break;
4169 case UDIV:
4170 if (width1 == 0)
4171 break;
4172 result_width = width0;
4173 break;
4174 case MOD:
4175 if (width1 == 0)
4176 break;
c04fc4f0 4177 if (!op0_maybe_minusp && !op1_maybe_minusp)
2f93eea8
PB
4178 result_width = MIN (width0, width1);
4179 result_low = MIN (low0, low1);
4180 break;
4181 case UMOD:
4182 if (width1 == 0)
4183 break;
4184 result_width = MIN (width0, width1);
4185 result_low = MIN (low0, low1);
4186 break;
4187 default:
41374e13 4188 gcc_unreachable ();
2f93eea8
PB
4189 }
4190
4191 if (result_width < mode_width)
c04fc4f0 4192 nonzero &= ((unsigned HOST_WIDE_INT) 1 << result_width) - 1;
2f93eea8
PB
4193
4194 if (result_low > 0)
c04fc4f0 4195 nonzero &= ~(((unsigned HOST_WIDE_INT) 1 << result_low) - 1);
2f93eea8
PB
4196 }
4197 break;
4198
4199 case ZERO_EXTRACT:
481683e1 4200 if (CONST_INT_P (XEXP (x, 1))
2f93eea8 4201 && INTVAL (XEXP (x, 1)) < HOST_BITS_PER_WIDE_INT)
c04fc4f0 4202 nonzero &= ((unsigned HOST_WIDE_INT) 1 << INTVAL (XEXP (x, 1))) - 1;
2f93eea8
PB
4203 break;
4204
4205 case SUBREG:
4206 /* If this is a SUBREG formed for a promoted variable that has
4207 been zero-extended, we know that at least the high-order bits
4208 are zero, though others might be too. */
4209
4210 if (SUBREG_PROMOTED_VAR_P (x) && SUBREG_PROMOTED_UNSIGNED_P (x) > 0)
4211 nonzero = GET_MODE_MASK (GET_MODE (x))
4212 & cached_nonzero_bits (SUBREG_REG (x), GET_MODE (x),
4213 known_x, known_mode, known_ret);
4214
2d0c270f 4215 inner_mode = GET_MODE (SUBREG_REG (x));
2f93eea8
PB
4216 /* If the inner mode is a single word for both the host and target
4217 machines, we can compute this from which bits of the inner
4218 object might be nonzero. */
5511bc5a
BS
4219 if (GET_MODE_PRECISION (inner_mode) <= BITS_PER_WORD
4220 && (GET_MODE_PRECISION (inner_mode) <= HOST_BITS_PER_WIDE_INT))
2f93eea8
PB
4221 {
4222 nonzero &= cached_nonzero_bits (SUBREG_REG (x), mode,
4223 known_x, known_mode, known_ret);
4224
4225#if defined (WORD_REGISTER_OPERATIONS) && defined (LOAD_EXTEND_OP)
4226 /* If this is a typical RISC machine, we only have to worry
4227 about the way loads are extended. */
2d0c270f
BS
4228 if ((LOAD_EXTEND_OP (inner_mode) == SIGN_EXTEND
4229 ? val_signbit_known_set_p (inner_mode, nonzero)
4230 : LOAD_EXTEND_OP (inner_mode) != ZERO_EXTEND)
3c0cb5de 4231 || !MEM_P (SUBREG_REG (x)))
2f93eea8
PB
4232#endif
4233 {
4234 /* On many CISC machines, accessing an object in a wider mode
4235 causes the high-order bits to become undefined. So they are
4236 not known to be zero. */
5511bc5a
BS
4237 if (GET_MODE_PRECISION (GET_MODE (x))
4238 > GET_MODE_PRECISION (inner_mode))
2f93eea8 4239 nonzero |= (GET_MODE_MASK (GET_MODE (x))
2d0c270f 4240 & ~GET_MODE_MASK (inner_mode));
2f93eea8
PB
4241 }
4242 }
4243 break;
4244
4245 case ASHIFTRT:
4246 case LSHIFTRT:
4247 case ASHIFT:
4248 case ROTATE:
4249 /* The nonzero bits are in two classes: any bits within MODE
4250 that aren't in GET_MODE (x) are always significant. The rest of the
4251 nonzero bits are those that are significant in the operand of
4252 the shift when shifted the appropriate number of bits. This
4253 shows that high-order bits are cleared by the right shift and
4254 low-order bits by left shifts. */
481683e1 4255 if (CONST_INT_P (XEXP (x, 1))
2f93eea8 4256 && INTVAL (XEXP (x, 1)) >= 0
39b2ac74 4257 && INTVAL (XEXP (x, 1)) < HOST_BITS_PER_WIDE_INT
5511bc5a 4258 && INTVAL (XEXP (x, 1)) < GET_MODE_PRECISION (GET_MODE (x)))
2f93eea8
PB
4259 {
4260 enum machine_mode inner_mode = GET_MODE (x);
5511bc5a 4261 unsigned int width = GET_MODE_PRECISION (inner_mode);
2f93eea8
PB
4262 int count = INTVAL (XEXP (x, 1));
4263 unsigned HOST_WIDE_INT mode_mask = GET_MODE_MASK (inner_mode);
c04fc4f0
EB
4264 unsigned HOST_WIDE_INT op_nonzero
4265 = cached_nonzero_bits (XEXP (x, 0), mode,
4266 known_x, known_mode, known_ret);
2f93eea8
PB
4267 unsigned HOST_WIDE_INT inner = op_nonzero & mode_mask;
4268 unsigned HOST_WIDE_INT outer = 0;
4269
4270 if (mode_width > width)
4271 outer = (op_nonzero & nonzero & ~mode_mask);
4272
4273 if (code == LSHIFTRT)
4274 inner >>= count;
4275 else if (code == ASHIFTRT)
4276 {
4277 inner >>= count;
4278
4279 /* If the sign bit may have been nonzero before the shift, we
4280 need to mark all the places it could have been copied to
4281 by the shift as possibly nonzero. */
c04fc4f0
EB
4282 if (inner & ((unsigned HOST_WIDE_INT) 1 << (width - 1 - count)))
4283 inner |= (((unsigned HOST_WIDE_INT) 1 << count) - 1)
4284 << (width - count);
2f93eea8
PB
4285 }
4286 else if (code == ASHIFT)
4287 inner <<= count;
4288 else
4289 inner = ((inner << (count % width)
4290 | (inner >> (width - (count % width)))) & mode_mask);
4291
4292 nonzero &= (outer | inner);
4293 }
4294 break;
4295
4296 case FFS:
4297 case POPCOUNT:
4298 /* This is at most the number of bits in the mode. */
c04fc4f0 4299 nonzero = ((unsigned HOST_WIDE_INT) 2 << (floor_log2 (mode_width))) - 1;
2f93eea8
PB
4300 break;
4301
4302 case CLZ:
4303 /* If CLZ has a known value at zero, then the nonzero bits are
4304 that value, plus the number of bits in the mode minus one. */
4305 if (CLZ_DEFINED_VALUE_AT_ZERO (mode, nonzero))
c04fc4f0
EB
4306 nonzero
4307 |= ((unsigned HOST_WIDE_INT) 1 << (floor_log2 (mode_width))) - 1;
2f93eea8
PB
4308 else
4309 nonzero = -1;
4310 break;
4311
4312 case CTZ:
4313 /* If CTZ has a known value at zero, then the nonzero bits are
4314 that value, plus the number of bits in the mode minus one. */
4315 if (CTZ_DEFINED_VALUE_AT_ZERO (mode, nonzero))
c04fc4f0
EB
4316 nonzero
4317 |= ((unsigned HOST_WIDE_INT) 1 << (floor_log2 (mode_width))) - 1;
2f93eea8
PB
4318 else
4319 nonzero = -1;
4320 break;
4321
8840ae2b
JJ
4322 case CLRSB:
4323 /* This is at most the number of bits in the mode minus 1. */
4324 nonzero = ((unsigned HOST_WIDE_INT) 1 << (floor_log2 (mode_width))) - 1;
4325 break;
4326
2f93eea8
PB
4327 case PARITY:
4328 nonzero = 1;
4329 break;
4330
4331 case IF_THEN_ELSE:
4332 {
c04fc4f0
EB
4333 unsigned HOST_WIDE_INT nonzero_true
4334 = cached_nonzero_bits (XEXP (x, 1), mode,
4335 known_x, known_mode, known_ret);
2f93eea8
PB
4336
4337 /* Don't call nonzero_bits for the second time if it cannot change
4338 anything. */
4339 if ((nonzero & nonzero_true) != nonzero)
4340 nonzero &= nonzero_true
4341 | cached_nonzero_bits (XEXP (x, 2), mode,
4342 known_x, known_mode, known_ret);
4343 }
4344 break;
4345
4346 default:
4347 break;
4348 }
4349
4350 return nonzero;
4351}
4352
4353/* See the macro definition above. */
4354#undef cached_num_sign_bit_copies
4355
4356\f
4357/* The function cached_num_sign_bit_copies is a wrapper around
4358 num_sign_bit_copies1. It avoids exponential behavior in
4359 num_sign_bit_copies1 when X has identical subexpressions on the
4360 first or the second level. */
4361
4362static unsigned int
fa233e34 4363cached_num_sign_bit_copies (const_rtx x, enum machine_mode mode, const_rtx known_x,
2f93eea8
PB
4364 enum machine_mode known_mode,
4365 unsigned int known_ret)
4366{
4367 if (x == known_x && mode == known_mode)
4368 return known_ret;
4369
4370 /* Try to find identical subexpressions. If found call
4371 num_sign_bit_copies1 on X with the subexpressions as KNOWN_X and
4372 the precomputed value for the subexpression as KNOWN_RET. */
4373
4374 if (ARITHMETIC_P (x))
4375 {
4376 rtx x0 = XEXP (x, 0);
4377 rtx x1 = XEXP (x, 1);
4378
4379 /* Check the first level. */
4380 if (x0 == x1)
4381 return
4382 num_sign_bit_copies1 (x, mode, x0, mode,
4383 cached_num_sign_bit_copies (x0, mode, known_x,
4384 known_mode,
4385 known_ret));
4386
4387 /* Check the second level. */
4388 if (ARITHMETIC_P (x0)
4389 && (x1 == XEXP (x0, 0) || x1 == XEXP (x0, 1)))
4390 return
4391 num_sign_bit_copies1 (x, mode, x1, mode,
4392 cached_num_sign_bit_copies (x1, mode, known_x,
4393 known_mode,
4394 known_ret));
4395
4396 if (ARITHMETIC_P (x1)
4397 && (x0 == XEXP (x1, 0) || x0 == XEXP (x1, 1)))
4398 return
4399 num_sign_bit_copies1 (x, mode, x0, mode,
4400 cached_num_sign_bit_copies (x0, mode, known_x,
4401 known_mode,
4402 known_ret));
4403 }
4404
4405 return num_sign_bit_copies1 (x, mode, known_x, known_mode, known_ret);
4406}
4407
/* Return the number of bits at the high-order end of X that are known to
   be equal to the sign bit.  X will be used in mode MODE; if MODE is
   VOIDmode, X will be used in its own mode.  The returned value will always
   be between 1 and the number of bits in MODE.

   KNOWN_X / KNOWN_MODE / KNOWN_RET carry a precomputed result for a shared
   subexpression (see cached_num_sign_bit_copies) and are threaded through
   every recursive call.  */

static unsigned int
num_sign_bit_copies1 (const_rtx x, enum machine_mode mode, const_rtx known_x,
		      enum machine_mode known_mode,
		      unsigned int known_ret)
{
  enum rtx_code code = GET_CODE (x);
  /* NOTE(review): BITWIDTH is taken from the caller's MODE *before* the
     VOIDmode substitution below, so for a VOIDmode call it starts out 0
     (conservative; the "smaller object" branch then returns 1).  It is
     recomputed from MODE after the switch.  Verify against upstream if
     this looks surprising.  */
  unsigned int bitwidth = GET_MODE_PRECISION (mode);
  int num0, num1, result;
  unsigned HOST_WIDE_INT nonzero;

  /* If we weren't given a mode, use the mode of X.  If the mode is still
     VOIDmode, we don't know anything.  Likewise if one of the modes is
     floating-point or a vector mode.  */

  if (mode == VOIDmode)
    mode = GET_MODE (x);

  if (mode == VOIDmode || FLOAT_MODE_P (mode) || FLOAT_MODE_P (GET_MODE (x))
      || VECTOR_MODE_P (GET_MODE (x)) || VECTOR_MODE_P (mode))
    return 1;

  /* For a smaller object, just ignore the high bits.  */
  if (bitwidth < GET_MODE_PRECISION (GET_MODE (x)))
    {
      num0 = cached_num_sign_bit_copies (x, GET_MODE (x),
					 known_x, known_mode, known_ret);
      return MAX (1,
		  num0 - (int) (GET_MODE_PRECISION (GET_MODE (x)) - bitwidth));
    }

  if (GET_MODE (x) != VOIDmode && bitwidth > GET_MODE_PRECISION (GET_MODE (x)))
    {
#ifndef WORD_REGISTER_OPERATIONS
      /* If this machine does not do all register operations on the entire
	 register and MODE is wider than the mode of X, we can say nothing
	 at all about the high-order bits.  */
      return 1;
#else
      /* Likewise on machines that do, if the mode of the object is smaller
	 than a word and loads of that size don't sign extend, we can say
	 nothing about the high order bits.  */
      if (GET_MODE_PRECISION (GET_MODE (x)) < BITS_PER_WORD
#ifdef LOAD_EXTEND_OP
	  && LOAD_EXTEND_OP (GET_MODE (x)) != SIGN_EXTEND
#endif
	  )
	return 1;
#endif
    }

  switch (code)
    {
    case REG:

#if defined(POINTERS_EXTEND_UNSIGNED) && !defined(HAVE_ptr_extend)
      /* If pointers extend signed and this is a pointer in Pmode, say that
	 all the bits above ptr_mode are known to be sign bit copies.  */
      /* As we do not know which address space the pointer is referring to,
	 we can do this only if the target does not support different pointer
	 or address modes depending on the address space.  */
      if (target_default_pointer_address_modes_p ()
	  && ! POINTERS_EXTEND_UNSIGNED && GET_MODE (x) == Pmode
	  && mode == Pmode && REG_POINTER (x))
	return GET_MODE_PRECISION (Pmode) - GET_MODE_PRECISION (ptr_mode) + 1;
#endif

      {
	/* Ask the target (via rtl_hooks) what it knows about this register;
	   it may also return a replacement expression to analyze.  */
	unsigned int copies_for_hook = 1, copies = 1;
	rtx new_rtx = rtl_hooks.reg_num_sign_bit_copies (x, mode, known_x,
						     known_mode, known_ret,
						     &copies_for_hook);

	if (new_rtx)
	  copies = cached_num_sign_bit_copies (new_rtx, mode, known_x,
					       known_mode, known_ret);

	if (copies > 1 || copies_for_hook > 1)
	  return MAX (copies, copies_for_hook);

	/* Else, use nonzero_bits to guess num_sign_bit_copies (see below).  */
      }
      break;

    case MEM:
#ifdef LOAD_EXTEND_OP
      /* Some RISC machines sign-extend all loads of smaller than a word.  */
      if (LOAD_EXTEND_OP (GET_MODE (x)) == SIGN_EXTEND)
	return MAX (1, ((int) bitwidth
			- (int) GET_MODE_PRECISION (GET_MODE (x)) + 1));
#endif
      break;

    case CONST_INT:
      /* If the constant is negative, take its 1's complement and remask.
	 Then see how many zero bits we have.  */
      nonzero = UINTVAL (x) & GET_MODE_MASK (mode);
      if (bitwidth <= HOST_BITS_PER_WIDE_INT
	  && (nonzero & ((unsigned HOST_WIDE_INT) 1 << (bitwidth - 1))) != 0)
	nonzero = (~nonzero) & GET_MODE_MASK (mode);

      return (nonzero == 0 ? bitwidth : bitwidth - floor_log2 (nonzero) - 1);

    case SUBREG:
      /* If this is a SUBREG for a promoted object that is sign-extended
	 and we are looking at it in a wider mode, we know that at least the
	 high-order bits are known to be sign bit copies.  */

      if (SUBREG_PROMOTED_VAR_P (x) && ! SUBREG_PROMOTED_UNSIGNED_P (x))
	{
	  num0 = cached_num_sign_bit_copies (SUBREG_REG (x), mode,
					     known_x, known_mode, known_ret);
	  return MAX ((int) bitwidth
		      - (int) GET_MODE_PRECISION (GET_MODE (x)) + 1,
		      num0);
	}

      /* For a smaller object, just ignore the high bits.  */
      if (bitwidth <= GET_MODE_PRECISION (GET_MODE (SUBREG_REG (x))))
	{
	  num0 = cached_num_sign_bit_copies (SUBREG_REG (x), VOIDmode,
					     known_x, known_mode, known_ret);
	  return MAX (1, (num0
			  - (int) (GET_MODE_PRECISION (GET_MODE (SUBREG_REG (x)))
				   - bitwidth)));
	}

#ifdef WORD_REGISTER_OPERATIONS
#ifdef LOAD_EXTEND_OP
      /* For paradoxical SUBREGs on machines where all register operations
	 affect the entire register, just look inside.  Note that we are
	 passing MODE to the recursive call, so the number of sign bit copies
	 will remain relative to that mode, not the inner mode.  */

      /* This works only if loads sign extend.  Otherwise, if we get a
	 reload for the inner part, it may be loaded from the stack, and
	 then we lose all sign bit copies that existed before the store
	 to the stack.  */

      if (paradoxical_subreg_p (x)
	  && LOAD_EXTEND_OP (GET_MODE (SUBREG_REG (x))) == SIGN_EXTEND
	  && MEM_P (SUBREG_REG (x)))
	return cached_num_sign_bit_copies (SUBREG_REG (x), mode,
					   known_x, known_mode, known_ret);
#endif
#endif
      break;

    case SIGN_EXTRACT:
      /* The low INTVAL bits come from the extraction; everything above is
	 a copy of the extracted sign bit.  */
      if (CONST_INT_P (XEXP (x, 1)))
	return MAX (1, (int) bitwidth - INTVAL (XEXP (x, 1)));
      break;

    case SIGN_EXTEND:
      /* Copies from the inner value, plus one copy for every bit the
	 extension adds.  */
      return (bitwidth - GET_MODE_PRECISION (GET_MODE (XEXP (x, 0)))
	      + cached_num_sign_bit_copies (XEXP (x, 0), VOIDmode,
					    known_x, known_mode, known_ret));

    case TRUNCATE:
      /* For a smaller object, just ignore the high bits.  */
      num0 = cached_num_sign_bit_copies (XEXP (x, 0), VOIDmode,
					 known_x, known_mode, known_ret);
      return MAX (1, (num0 - (int) (GET_MODE_PRECISION (GET_MODE (XEXP (x, 0)))
				    - bitwidth)));

    case NOT:
      /* Complementing does not change the number of sign bit copies.  */
      return cached_num_sign_bit_copies (XEXP (x, 0), mode,
					 known_x, known_mode, known_ret);

    case ROTATE: case ROTATERT:
      /* If we are rotating left by a number of bits less than the number
	 of sign bit copies, we can just subtract that amount from the
	 number.  (ROTATERT by N is treated as ROTATE by width - N.)  */
      if (CONST_INT_P (XEXP (x, 1))
	  && INTVAL (XEXP (x, 1)) >= 0
	  && INTVAL (XEXP (x, 1)) < (int) bitwidth)
	{
	  num0 = cached_num_sign_bit_copies (XEXP (x, 0), mode,
					     known_x, known_mode, known_ret);
	  return MAX (1, num0 - (code == ROTATE ? INTVAL (XEXP (x, 1))
				 : (int) bitwidth - INTVAL (XEXP (x, 1))));
	}
      break;

    case NEG:
      /* In general, this subtracts one sign bit copy.  But if the value
	 is known to be positive, the number of sign bit copies is the
	 same as that of the input.  Finally, if the input has just one bit
	 that might be nonzero, all the bits are copies of the sign bit.  */
      num0 = cached_num_sign_bit_copies (XEXP (x, 0), mode,
					 known_x, known_mode, known_ret);
      if (bitwidth > HOST_BITS_PER_WIDE_INT)
	return num0 > 1 ? num0 - 1 : 1;

      nonzero = nonzero_bits (XEXP (x, 0), mode);
      if (nonzero == 1)
	return bitwidth;

      if (num0 > 1
	  && (((unsigned HOST_WIDE_INT) 1 << (bitwidth - 1)) & nonzero))
	num0--;

      return num0;

    case IOR: case AND: case XOR:
    case SMIN: case SMAX: case UMIN: case UMAX:
      /* Logical operations will preserve the number of sign-bit copies.
	 MIN and MAX operations always return one of the operands.  */
      num0 = cached_num_sign_bit_copies (XEXP (x, 0), mode,
					 known_x, known_mode, known_ret);
      num1 = cached_num_sign_bit_copies (XEXP (x, 1), mode,
					 known_x, known_mode, known_ret);

      /* If num1 is clearing some of the top bits then regardless of
	 the other term, we are guaranteed to have at least that many
	 high-order zero bits.  */
      if (code == AND
	  && num1 > 1
	  && bitwidth <= HOST_BITS_PER_WIDE_INT
	  && CONST_INT_P (XEXP (x, 1))
	  && (UINTVAL (XEXP (x, 1))
	      & ((unsigned HOST_WIDE_INT) 1 << (bitwidth - 1))) == 0)
	return num1;

      /* Similarly for IOR when setting high-order bits.  */
      if (code == IOR
	  && num1 > 1
	  && bitwidth <= HOST_BITS_PER_WIDE_INT
	  && CONST_INT_P (XEXP (x, 1))
	  && (UINTVAL (XEXP (x, 1))
	      & ((unsigned HOST_WIDE_INT) 1 << (bitwidth - 1))) != 0)
	return num1;

      return MIN (num0, num1);

    case PLUS: case MINUS:
      /* For addition and subtraction, we can have a 1-bit carry.  However,
	 if we are subtracting 1 from a positive number, there will not
	 be such a carry.  Furthermore, if the positive number is known to
	 be 0 or 1, we know the result is either -1 or 0.  */

      if (code == PLUS && XEXP (x, 1) == constm1_rtx
	  && bitwidth <= HOST_BITS_PER_WIDE_INT)
	{
	  nonzero = nonzero_bits (XEXP (x, 0), mode);
	  if ((((unsigned HOST_WIDE_INT) 1 << (bitwidth - 1)) & nonzero) == 0)
	    return (nonzero == 1 || nonzero == 0 ? bitwidth
		    : bitwidth - floor_log2 (nonzero) - 1);
	}

      num0 = cached_num_sign_bit_copies (XEXP (x, 0), mode,
					 known_x, known_mode, known_ret);
      num1 = cached_num_sign_bit_copies (XEXP (x, 1), mode,
					 known_x, known_mode, known_ret);
      result = MAX (1, MIN (num0, num1) - 1);

      return result;

    case MULT:
      /* The number of bits of the product is the sum of the number of
	 bits of both terms.  However, unless one of the terms if known
	 to be positive, we must allow for an additional bit since negating
	 a negative number can remove one sign bit copy.  */

      num0 = cached_num_sign_bit_copies (XEXP (x, 0), mode,
					 known_x, known_mode, known_ret);
      num1 = cached_num_sign_bit_copies (XEXP (x, 1), mode,
					 known_x, known_mode, known_ret);

      result = bitwidth - (bitwidth - num0) - (bitwidth - num1);
      if (result > 0
	  && (bitwidth > HOST_BITS_PER_WIDE_INT
	      || (((nonzero_bits (XEXP (x, 0), mode)
		    & ((unsigned HOST_WIDE_INT) 1 << (bitwidth - 1))) != 0)
		  && ((nonzero_bits (XEXP (x, 1), mode)
		       & ((unsigned HOST_WIDE_INT) 1 << (bitwidth - 1)))
		      != 0))))
	result--;

      return MAX (1, result);

    case UDIV:
      /* The result must be <= the first operand.  If the first operand
	 has the high bit set, we know nothing about the number of sign
	 bit copies.  */
      if (bitwidth > HOST_BITS_PER_WIDE_INT)
	return 1;
      else if ((nonzero_bits (XEXP (x, 0), mode)
		& ((unsigned HOST_WIDE_INT) 1 << (bitwidth - 1))) != 0)
	return 1;
      else
	return cached_num_sign_bit_copies (XEXP (x, 0), mode,
					   known_x, known_mode, known_ret);

    case UMOD:
      /* The result must be <= the second operand.  If the second operand
	 has (or just might have) the high bit set, we know nothing about
	 the number of sign bit copies.  */
      if (bitwidth > HOST_BITS_PER_WIDE_INT)
	return 1;
      else if ((nonzero_bits (XEXP (x, 1), mode)
		& ((unsigned HOST_WIDE_INT) 1 << (bitwidth - 1))) != 0)
	return 1;
      else
	return cached_num_sign_bit_copies (XEXP (x, 1), mode,
					   known_x, known_mode, known_ret);

    case DIV:
      /* Similar to unsigned division, except that we have to worry about
	 the case where the divisor is negative, in which case we have
	 to add 1.  */
      result = cached_num_sign_bit_copies (XEXP (x, 0), mode,
					   known_x, known_mode, known_ret);
      if (result > 1
	  && (bitwidth > HOST_BITS_PER_WIDE_INT
	      || (nonzero_bits (XEXP (x, 1), mode)
		  & ((unsigned HOST_WIDE_INT) 1 << (bitwidth - 1))) != 0))
	result--;

      return result;

    case MOD:
      /* As for DIV, but bounded by the second operand.  */
      result = cached_num_sign_bit_copies (XEXP (x, 1), mode,
					   known_x, known_mode, known_ret);
      if (result > 1
	  && (bitwidth > HOST_BITS_PER_WIDE_INT
	      || (nonzero_bits (XEXP (x, 1), mode)
		  & ((unsigned HOST_WIDE_INT) 1 << (bitwidth - 1))) != 0))
	result--;

      return result;

    case ASHIFTRT:
      /* Shifts by a constant add to the number of bits equal to the
	 sign bit.  */
      num0 = cached_num_sign_bit_copies (XEXP (x, 0), mode,
					 known_x, known_mode, known_ret);
      if (CONST_INT_P (XEXP (x, 1))
	  && INTVAL (XEXP (x, 1)) > 0
	  && INTVAL (XEXP (x, 1)) < GET_MODE_PRECISION (GET_MODE (x)))
	num0 = MIN ((int) bitwidth, num0 + INTVAL (XEXP (x, 1)));

      return num0;

    case ASHIFT:
      /* Left shifts destroy copies.  */
      if (!CONST_INT_P (XEXP (x, 1))
	  || INTVAL (XEXP (x, 1)) < 0
	  || INTVAL (XEXP (x, 1)) >= (int) bitwidth
	  || INTVAL (XEXP (x, 1)) >= GET_MODE_PRECISION (GET_MODE (x)))
	return 1;

      num0 = cached_num_sign_bit_copies (XEXP (x, 0), mode,
					 known_x, known_mode, known_ret);
      return MAX (1, num0 - INTVAL (XEXP (x, 1)));

    case IF_THEN_ELSE:
      /* Both arms contribute; the result can promise no more than the
	 weaker of the two.  */
      num0 = cached_num_sign_bit_copies (XEXP (x, 1), mode,
					 known_x, known_mode, known_ret);
      num1 = cached_num_sign_bit_copies (XEXP (x, 2), mode,
					 known_x, known_mode, known_ret);
      return MIN (num0, num1);

    case EQ: case NE: case GE: case GT: case LE: case LT:
    case UNEQ: case LTGT: case UNGE: case UNGT: case UNLE: case UNLT:
    case GEU: case GTU: case LEU: case LTU:
    case UNORDERED: case ORDERED:
      /* A comparison yields STORE_FLAG_VALUE or 0.  If STORE_FLAG_VALUE
	 is negative, take its 1's complement and remask.  Then see how
	 many zero bits we have.  */
      nonzero = STORE_FLAG_VALUE;
      if (bitwidth <= HOST_BITS_PER_WIDE_INT
	  && (nonzero & ((unsigned HOST_WIDE_INT) 1 << (bitwidth - 1))) != 0)
	nonzero = (~nonzero) & GET_MODE_MASK (mode);

      return (nonzero == 0 ? bitwidth : bitwidth - floor_log2 (nonzero) - 1);

    default:
      break;
    }

  /* If we haven't been able to figure it out by one of the above rules,
     see if some of the high-order bits are known to be zero.  If so,
     count those bits and return one less than that amount.  If we can't
     safely compute the mask for this mode, always return BITWIDTH.  */

  bitwidth = GET_MODE_PRECISION (mode);
  if (bitwidth > HOST_BITS_PER_WIDE_INT)
    return 1;

  nonzero = nonzero_bits (x, mode);
  return nonzero & ((unsigned HOST_WIDE_INT) 1 << (bitwidth - 1))
	 ? 1 : bitwidth - floor_log2 (nonzero) - 1;
}
6fd21094
RS
4805
4806/* Calculate the rtx_cost of a single instruction. A return value of
4807 zero indicates an instruction pattern without a known cost. */
4808
4809int
f40751dd 4810insn_rtx_cost (rtx pat, bool speed)
6fd21094
RS
4811{
4812 int i, cost;
4813 rtx set;
4814
4815 /* Extract the single set rtx from the instruction pattern.
4816 We can't use single_set since we only have the pattern. */
4817 if (GET_CODE (pat) == SET)
4818 set = pat;
4819 else if (GET_CODE (pat) == PARALLEL)
4820 {
4821 set = NULL_RTX;
4822 for (i = 0; i < XVECLEN (pat, 0); i++)
4823 {
4824 rtx x = XVECEXP (pat, 0, i);
4825 if (GET_CODE (x) == SET)
4826 {
4827 if (set)
4828 return 0;
4829 set = x;
4830 }
4831 }
4832 if (!set)
4833 return 0;
4834 }
4835 else
4836 return 0;
4837
5e8f01f4 4838 cost = set_src_cost (SET_SRC (set), speed);
6fd21094
RS
4839 return cost > 0 ? cost : COSTS_N_INSNS (1);
4840}
75473b02
SB
4841
4842/* Given an insn INSN and condition COND, return the condition in a
4843 canonical form to simplify testing by callers. Specifically:
4844
4845 (1) The code will always be a comparison operation (EQ, NE, GT, etc.).
4846 (2) Both operands will be machine operands; (cc0) will have been replaced.
4847 (3) If an operand is a constant, it will be the second operand.
4848 (4) (LE x const) will be replaced with (LT x <const+1>) and similarly
4849 for GE, GEU, and LEU.
4850
4851 If the condition cannot be understood, or is an inequality floating-point
4852 comparison which needs to be reversed, 0 will be returned.
4853
4854 If REVERSE is nonzero, then reverse the condition prior to canonizing it.
4855
4856 If EARLIEST is nonzero, it is a pointer to a place where the earliest
4857 insn used in locating the condition was found. If a replacement test
4858 of the condition is desired, it should be placed in front of that
4859 insn and we will be sure that the inputs are still valid.
4860
4861 If WANT_REG is nonzero, we wish the condition to be relative to that
4862 register, if possible. Therefore, do not canonicalize the condition
b8698a0f 4863 further. If ALLOW_CC_MODE is nonzero, allow the condition returned
75473b02
SB
4864 to be a compare to a CC mode register.
4865
4866 If VALID_AT_INSN_P, the condition must be valid at both *EARLIEST
4867 and at INSN. */
4868
4869rtx
4870canonicalize_condition (rtx insn, rtx cond, int reverse, rtx *earliest,
4871 rtx want_reg, int allow_cc_mode, int valid_at_insn_p)
4872{
4873 enum rtx_code code;
4874 rtx prev = insn;
f7d504c2 4875 const_rtx set;
75473b02
SB
4876 rtx tem;
4877 rtx op0, op1;
4878 int reverse_code = 0;
4879 enum machine_mode mode;
569f8d98 4880 basic_block bb = BLOCK_FOR_INSN (insn);
75473b02
SB
4881
4882 code = GET_CODE (cond);
4883 mode = GET_MODE (cond);
4884 op0 = XEXP (cond, 0);
4885 op1 = XEXP (cond, 1);
4886
4887 if (reverse)
4888 code = reversed_comparison_code (cond, insn);
4889 if (code == UNKNOWN)
4890 return 0;
4891
4892 if (earliest)
4893 *earliest = insn;
4894
4895 /* If we are comparing a register with zero, see if the register is set
4896 in the previous insn to a COMPARE or a comparison operation. Perform
4897 the same tests as a function of STORE_FLAG_VALUE as find_comparison_args
4898 in cse.c */
4899
4900 while ((GET_RTX_CLASS (code) == RTX_COMPARE
4901 || GET_RTX_CLASS (code) == RTX_COMM_COMPARE)
4902 && op1 == CONST0_RTX (GET_MODE (op0))
4903 && op0 != want_reg)
4904 {
4905 /* Set nonzero when we find something of interest. */
4906 rtx x = 0;
4907
4908#ifdef HAVE_cc0
4909 /* If comparison with cc0, import actual comparison from compare
4910 insn. */
4911 if (op0 == cc0_rtx)
4912 {
4913 if ((prev = prev_nonnote_insn (prev)) == 0
4914 || !NONJUMP_INSN_P (prev)
4915 || (set = single_set (prev)) == 0
4916 || SET_DEST (set) != cc0_rtx)
4917 return 0;
4918
4919 op0 = SET_SRC (set);
4920 op1 = CONST0_RTX (GET_MODE (op0));
4921 if (earliest)
4922 *earliest = prev;
4923 }
4924#endif
4925
4926 /* If this is a COMPARE, pick up the two things being compared. */
4927 if (GET_CODE (op0) == COMPARE)
4928 {
4929 op1 = XEXP (op0, 1);
4930 op0 = XEXP (op0, 0);
4931 continue;
4932 }
4933 else if (!REG_P (op0))
4934 break;
4935
4936 /* Go back to the previous insn. Stop if it is not an INSN. We also
4937 stop if it isn't a single set or if it has a REG_INC note because
4938 we don't want to bother dealing with it. */
4939
f0fc0803 4940 prev = prev_nonnote_nondebug_insn (prev);
b5b8b0ac
AO
4941
4942 if (prev == 0
75473b02 4943 || !NONJUMP_INSN_P (prev)
569f8d98
ZD
4944 || FIND_REG_INC_NOTE (prev, NULL_RTX)
4945 /* In cfglayout mode, there do not have to be labels at the
4946 beginning of a block, or jumps at the end, so the previous
4947 conditions would not stop us when we reach bb boundary. */
4948 || BLOCK_FOR_INSN (prev) != bb)
75473b02
SB
4949 break;
4950
4951 set = set_of (op0, prev);
4952
4953 if (set
4954 && (GET_CODE (set) != SET
4955 || !rtx_equal_p (SET_DEST (set), op0)))
4956 break;
4957
4958 /* If this is setting OP0, get what it sets it to if it looks
4959 relevant. */
4960 if (set)
4961 {
4962 enum machine_mode inner_mode = GET_MODE (SET_DEST (set));
4963#ifdef FLOAT_STORE_FLAG_VALUE
4964 REAL_VALUE_TYPE fsfv;
4965#endif
4966
4967 /* ??? We may not combine comparisons done in a CCmode with
4968 comparisons not done in a CCmode. This is to aid targets
4969 like Alpha that have an IEEE compliant EQ instruction, and
4970 a non-IEEE compliant BEQ instruction. The use of CCmode is
4971 actually artificial, simply to prevent the combination, but
4972 should not affect other platforms.
4973
4974 However, we must allow VOIDmode comparisons to match either
4975 CCmode or non-CCmode comparison, because some ports have
4976 modeless comparisons inside branch patterns.
4977
4978 ??? This mode check should perhaps look more like the mode check
4979 in simplify_comparison in combine. */
4980
4981 if ((GET_CODE (SET_SRC (set)) == COMPARE
4982 || (((code == NE
4983 || (code == LT
2d0c270f
BS
4984 && val_signbit_known_set_p (inner_mode,
4985 STORE_FLAG_VALUE))
75473b02
SB
4986#ifdef FLOAT_STORE_FLAG_VALUE
4987 || (code == LT
3d8bf70f 4988 && SCALAR_FLOAT_MODE_P (inner_mode)
75473b02
SB
4989 && (fsfv = FLOAT_STORE_FLAG_VALUE (inner_mode),
4990 REAL_VALUE_NEGATIVE (fsfv)))
4991#endif
4992 ))
4993 && COMPARISON_P (SET_SRC (set))))
4994 && (((GET_MODE_CLASS (mode) == MODE_CC)
4995 == (GET_MODE_CLASS (inner_mode) == MODE_CC))
4996 || mode == VOIDmode || inner_mode == VOIDmode))
4997 x = SET_SRC (set);
4998 else if (((code == EQ
4999 || (code == GE
2d0c270f
BS
5000 && val_signbit_known_set_p (inner_mode,
5001 STORE_FLAG_VALUE))
75473b02
SB
5002#ifdef FLOAT_STORE_FLAG_VALUE
5003 || (code == GE
3d8bf70f 5004 && SCALAR_FLOAT_MODE_P (inner_mode)
75473b02
SB
5005 && (fsfv = FLOAT_STORE_FLAG_VALUE (inner_mode),
5006 REAL_VALUE_NEGATIVE (fsfv)))
5007#endif
5008 ))
5009 && COMPARISON_P (SET_SRC (set))
5010 && (((GET_MODE_CLASS (mode) == MODE_CC)
5011 == (GET_MODE_CLASS (inner_mode) == MODE_CC))
5012 || mode == VOIDmode || inner_mode == VOIDmode))
5013
5014 {
5015 reverse_code = 1;
5016 x = SET_SRC (set);
5017 }
5018 else
5019 break;
5020 }
5021
5022 else if (reg_set_p (op0, prev))
5023 /* If this sets OP0, but not directly, we have to give up. */
5024 break;
5025
5026 if (x)
5027 {
5028 /* If the caller is expecting the condition to be valid at INSN,
5029 make sure X doesn't change before INSN. */
5030 if (valid_at_insn_p)
5031 if (modified_in_p (x, prev) || modified_between_p (x, prev, insn))
5032 break;
5033 if (COMPARISON_P (x))
5034 code = GET_CODE (x);
5035 if (reverse_code)
5036 {
5037 code = reversed_comparison_code (x, prev);
5038 if (code == UNKNOWN)
5039 return 0;
5040 reverse_code = 0;
5041 }
5042
5043 op0 = XEXP (x, 0), op1 = XEXP (x, 1);
5044 if (earliest)
5045 *earliest = prev;
5046 }
5047 }
5048
5049 /* If constant is first, put it last. */
5050 if (CONSTANT_P (op0))
5051 code = swap_condition (code), tem = op0, op0 = op1, op1 = tem;
5052
5053 /* If OP0 is the result of a comparison, we weren't able to find what
5054 was really being compared, so fail. */
5055 if (!allow_cc_mode
5056 && GET_MODE_CLASS (GET_MODE (op0)) == MODE_CC)
5057 return 0;
5058
5059 /* Canonicalize any ordered comparison with integers involving equality
5060 if we can do computations in the relevant mode and we do not
5061 overflow. */
5062
5063 if (GET_MODE_CLASS (GET_MODE (op0)) != MODE_CC
481683e1 5064 && CONST_INT_P (op1)
75473b02 5065 && GET_MODE (op0) != VOIDmode
5511bc5a 5066 && GET_MODE_PRECISION (GET_MODE (op0)) <= HOST_BITS_PER_WIDE_INT)
75473b02
SB
5067 {
5068 HOST_WIDE_INT const_val = INTVAL (op1);
5069 unsigned HOST_WIDE_INT uconst_val = const_val;
5070 unsigned HOST_WIDE_INT max_val
5071 = (unsigned HOST_WIDE_INT) GET_MODE_MASK (GET_MODE (op0));
5072
5073 switch (code)
5074 {
5075 case LE:
5076 if ((unsigned HOST_WIDE_INT) const_val != max_val >> 1)
5077 code = LT, op1 = gen_int_mode (const_val + 1, GET_MODE (op0));
5078 break;
5079
5080 /* When cross-compiling, const_val might be sign-extended from
5081 BITS_PER_WORD to HOST_BITS_PER_WIDE_INT */
5082 case GE:
c04fc4f0
EB
5083 if ((const_val & max_val)
5084 != ((unsigned HOST_WIDE_INT) 1
5511bc5a 5085 << (GET_MODE_PRECISION (GET_MODE (op0)) - 1)))
75473b02
SB
5086 code = GT, op1 = gen_int_mode (const_val - 1, GET_MODE (op0));
5087 break;
5088
5089 case LEU:
5090 if (uconst_val < max_val)
5091 code = LTU, op1 = gen_int_mode (uconst_val + 1, GET_MODE (op0));
5092 break;
5093
5094 case GEU:
5095 if (uconst_val != 0)
5096 code = GTU, op1 = gen_int_mode (uconst_val - 1, GET_MODE (op0));
5097 break;
5098
5099 default:
5100 break;
5101 }
5102 }
5103
5104 /* Never return CC0; return zero instead. */
5105 if (CC0_P (op0))
5106 return 0;
5107
5108 return gen_rtx_fmt_ee (code, VOIDmode, op0, op1);
5109}
5110
5111/* Given a jump insn JUMP, return the condition that will cause it to branch
5112 to its JUMP_LABEL. If the condition cannot be understood, or is an
5113 inequality floating-point comparison which needs to be reversed, 0 will
5114 be returned.
5115
5116 If EARLIEST is nonzero, it is a pointer to a place where the earliest
5117 insn used in locating the condition was found. If a replacement test
5118 of the condition is desired, it should be placed in front of that
5119 insn and we will be sure that the inputs are still valid. If EARLIEST
5120 is null, the returned condition will be valid at INSN.
5121
5122 If ALLOW_CC_MODE is nonzero, allow the condition returned to be a
5123 compare CC mode register.
5124
5125 VALID_AT_INSN_P is the same as for canonicalize_condition. */
5126
5127rtx
5128get_condition (rtx jump, rtx *earliest, int allow_cc_mode, int valid_at_insn_p)
5129{
5130 rtx cond;
5131 int reverse;
5132 rtx set;
5133
5134 /* If this is not a standard conditional jump, we can't parse it. */
5135 if (!JUMP_P (jump)
5136 || ! any_condjump_p (jump))
5137 return 0;
5138 set = pc_set (jump);
5139
5140 cond = XEXP (SET_SRC (set), 0);
5141
5142 /* If this branches to JUMP_LABEL when the condition is false, reverse
5143 the condition. */
5144 reverse
5145 = GET_CODE (XEXP (SET_SRC (set), 2)) == LABEL_REF
5146 && XEXP (XEXP (SET_SRC (set), 2), 0) == JUMP_LABEL (jump);
5147
5148 return canonicalize_condition (jump, cond, reverse, earliest, NULL_RTX,
5149 allow_cc_mode, valid_at_insn_p);
5150}
5151
b12cbf2c
AN
5152/* Initialize the table NUM_SIGN_BIT_COPIES_IN_REP based on
5153 TARGET_MODE_REP_EXTENDED.
5154
5155 Note that we assume that the property of
5156 TARGET_MODE_REP_EXTENDED(B, C) is sticky to the integral modes
5157 narrower than mode B. I.e., if A is a mode narrower than B then in
5158 order to be able to operate on it in mode B, mode A needs to
5159 satisfy the requirements set by the representation of mode B. */
5160
5161static void
5162init_num_sign_bit_copies_in_rep (void)
5163{
5164 enum machine_mode mode, in_mode;
5165
5166 for (in_mode = GET_CLASS_NARROWEST_MODE (MODE_INT); in_mode != VOIDmode;
5167 in_mode = GET_MODE_WIDER_MODE (mode))
5168 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != in_mode;
5169 mode = GET_MODE_WIDER_MODE (mode))
5170 {
5171 enum machine_mode i;
5172
5173 /* Currently, it is assumed that TARGET_MODE_REP_EXTENDED
5174 extends to the next widest mode. */
5175 gcc_assert (targetm.mode_rep_extended (mode, in_mode) == UNKNOWN
5176 || GET_MODE_WIDER_MODE (mode) == in_mode);
5177
5178 /* We are in in_mode. Count how many bits outside of mode
5179 have to be copies of the sign-bit. */
5180 for (i = mode; i != in_mode; i = GET_MODE_WIDER_MODE (i))
5181 {
5182 enum machine_mode wider = GET_MODE_WIDER_MODE (i);
5183
5184 if (targetm.mode_rep_extended (i, wider) == SIGN_EXTEND
5185 /* We can only check sign-bit copies starting from the
5186 top-bit. In order to be able to check the bits we
5187 have already seen we pretend that subsequent bits
5188 have to be sign-bit copies too. */
5189 || num_sign_bit_copies_in_rep [in_mode][mode])
5190 num_sign_bit_copies_in_rep [in_mode][mode]
5511bc5a 5191 += GET_MODE_PRECISION (wider) - GET_MODE_PRECISION (i);
b12cbf2c
AN
5192 }
5193 }
5194}
5195
d3b72690
PB
5196/* Suppose that truncation from the machine mode of X to MODE is not a
5197 no-op. See if there is anything special about X so that we can
5198 assume it already contains a truncated value of MODE. */
5199
5200bool
fa233e34 5201truncated_to_mode (enum machine_mode mode, const_rtx x)
d3b72690 5202{
b12cbf2c
AN
5203 /* This register has already been used in MODE without explicit
5204 truncation. */
5205 if (REG_P (x) && rtl_hooks.reg_truncated_to_mode (mode, x))
5206 return true;
5207
5208 /* See if we already satisfy the requirements of MODE. If yes we
5209 can just switch to MODE. */
5210 if (num_sign_bit_copies_in_rep[GET_MODE (x)][mode]
5211 && (num_sign_bit_copies (x, GET_MODE (x))
5212 >= num_sign_bit_copies_in_rep[GET_MODE (x)][mode] + 1))
5213 return true;
d3b72690 5214
b12cbf2c
AN
5215 return false;
5216}
cf94b0fc
PB
5217\f
5218/* Initialize non_rtx_starting_operands, which is used to speed up
5219 for_each_rtx. */
5220void
5221init_rtlanal (void)
5222{
5223 int i;
5224 for (i = 0; i < NUM_RTX_CODE; i++)
5225 {
5226 const char *format = GET_RTX_FORMAT (i);
5227 const char *first = strpbrk (format, "eEV");
5228 non_rtx_starting_operands[i] = first ? first - format : -1;
5229 }
b12cbf2c
AN
5230
5231 init_num_sign_bit_copies_in_rep ();
cf94b0fc 5232}
3d8504ac
RS
5233\f
5234/* Check whether this is a constant pool constant. */
5235bool
5236constant_pool_constant_p (rtx x)
5237{
5238 x = avoid_constant_pool_reference (x);
48175537 5239 return CONST_DOUBLE_P (x);
3d8504ac 5240}
842e098c
AN
5241\f
5242/* If M is a bitmask that selects a field of low-order bits within an item but
5243 not the entire word, return the length of the field. Return -1 otherwise.
5244 M is used in machine mode MODE. */
5245
5246int
5247low_bitmask_len (enum machine_mode mode, unsigned HOST_WIDE_INT m)
5248{
5249 if (mode != VOIDmode)
5250 {
5511bc5a 5251 if (GET_MODE_PRECISION (mode) > HOST_BITS_PER_WIDE_INT)
842e098c
AN
5252 return -1;
5253 m &= GET_MODE_MASK (mode);
5254 }
5255
5256 return exact_log2 (m + 1);
5257}
372d6395
RS
5258
5259/* Return the mode of MEM's address. */
5260
5261enum machine_mode
5262get_address_mode (rtx mem)
5263{
5264 enum machine_mode mode;
5265
5266 gcc_assert (MEM_P (mem));
5267 mode = GET_MODE (XEXP (mem, 0));
5268 if (mode != VOIDmode)
5269 return mode;
5270 return targetm.addr_space.address_mode (MEM_ADDR_SPACE (mem));
5271}
ca3f2950
SB
5272\f
5273/* Split up a CONST_DOUBLE or integer constant rtx
5274 into two rtx's for single words,
5275 storing in *FIRST the word that comes first in memory in the target
5276 and in *SECOND the other. */
5277
5278void
5279split_double (rtx value, rtx *first, rtx *second)
5280{
5281 if (CONST_INT_P (value))
5282 {
5283 if (HOST_BITS_PER_WIDE_INT >= (2 * BITS_PER_WORD))
5284 {
5285 /* In this case the CONST_INT holds both target words.
5286 Extract the bits from it into two word-sized pieces.
5287 Sign extend each half to HOST_WIDE_INT. */
5288 unsigned HOST_WIDE_INT low, high;
5289 unsigned HOST_WIDE_INT mask, sign_bit, sign_extend;
5290 unsigned bits_per_word = BITS_PER_WORD;
5291
5292 /* Set sign_bit to the most significant bit of a word. */
5293 sign_bit = 1;
5294 sign_bit <<= bits_per_word - 1;
5295
5296 /* Set mask so that all bits of the word are set. We could
5297 have used 1 << BITS_PER_WORD instead of basing the
5298 calculation on sign_bit. However, on machines where
5299 HOST_BITS_PER_WIDE_INT == BITS_PER_WORD, it could cause a
5300 compiler warning, even though the code would never be
5301 executed. */
5302 mask = sign_bit << 1;
5303 mask--;
5304
5305 /* Set sign_extend as any remaining bits. */
5306 sign_extend = ~mask;
5307
5308 /* Pick the lower word and sign-extend it. */
5309 low = INTVAL (value);
5310 low &= mask;
5311 if (low & sign_bit)
5312 low |= sign_extend;
5313
5314 /* Pick the higher word, shifted to the least significant
5315 bits, and sign-extend it. */
5316 high = INTVAL (value);
5317 high >>= bits_per_word - 1;
5318 high >>= 1;
5319 high &= mask;
5320 if (high & sign_bit)
5321 high |= sign_extend;
5322
5323 /* Store the words in the target machine order. */
5324 if (WORDS_BIG_ENDIAN)
5325 {
5326 *first = GEN_INT (high);
5327 *second = GEN_INT (low);
5328 }
5329 else
5330 {
5331 *first = GEN_INT (low);
5332 *second = GEN_INT (high);
5333 }
5334 }
5335 else
5336 {
5337 /* The rule for using CONST_INT for a wider mode
5338 is that we regard the value as signed.
5339 So sign-extend it. */
5340 rtx high = (INTVAL (value) < 0 ? constm1_rtx : const0_rtx);
5341 if (WORDS_BIG_ENDIAN)
5342 {
5343 *first = high;
5344 *second = value;
5345 }
5346 else
5347 {
5348 *first = value;
5349 *second = high;
5350 }
5351 }
5352 }
48175537 5353 else if (!CONST_DOUBLE_P (value))
ca3f2950
SB
5354 {
5355 if (WORDS_BIG_ENDIAN)
5356 {
5357 *first = const0_rtx;
5358 *second = value;
5359 }
5360 else
5361 {
5362 *first = value;
5363 *second = const0_rtx;
5364 }
5365 }
5366 else if (GET_MODE (value) == VOIDmode
5367 /* This is the old way we did CONST_DOUBLE integers. */
5368 || GET_MODE_CLASS (GET_MODE (value)) == MODE_INT)
5369 {
5370 /* In an integer, the words are defined as most and least significant.
5371 So order them by the target's convention. */
5372 if (WORDS_BIG_ENDIAN)
5373 {
5374 *first = GEN_INT (CONST_DOUBLE_HIGH (value));
5375 *second = GEN_INT (CONST_DOUBLE_LOW (value));
5376 }
5377 else
5378 {
5379 *first = GEN_INT (CONST_DOUBLE_LOW (value));
5380 *second = GEN_INT (CONST_DOUBLE_HIGH (value));
5381 }
5382 }
5383 else
5384 {
5385 REAL_VALUE_TYPE r;
5386 long l[2];
5387 REAL_VALUE_FROM_CONST_DOUBLE (r, value);
5388
5389 /* Note, this converts the REAL_VALUE_TYPE to the target's
5390 format, splits up the floating point double and outputs
5391 exactly 32 bits of it into each of l[0] and l[1] --
5392 not necessarily BITS_PER_WORD bits. */
5393 REAL_VALUE_TO_TARGET_DOUBLE (r, l);
5394
5395 /* If 32 bits is an entire word for the target, but not for the host,
5396 then sign-extend on the host so that the number will look the same
5397 way on the host that it would on the target. See for instance
5398 simplify_unary_operation. The #if is needed to avoid compiler
5399 warnings. */
5400
5401#if HOST_BITS_PER_LONG > 32
5402 if (BITS_PER_WORD < HOST_BITS_PER_LONG && BITS_PER_WORD == 32)
5403 {
5404 if (l[0] & ((long) 1 << 31))
5405 l[0] |= ((long) (-1) << 32);
5406 if (l[1] & ((long) 1 << 31))
5407 l[1] |= ((long) (-1) << 32);
5408 }
5409#endif
5410
5411 *first = GEN_INT (l[0]);
5412 *second = GEN_INT (l[1]);
5413 }
5414}
5415