/* Analyze RTL for GNU compiler.
   Copyright (C) 1987, 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998,
   1999, 2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009
   Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */


#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "toplev.h"
#include "rtl.h"
#include "hard-reg-set.h"
#include "insn-config.h"
#include "recog.h"
#include "target.h"
#include "output.h"
#include "tm_p.h"
#include "flags.h"
#include "regs.h"
#include "function.h"
#include "df.h"
#include "tree.h"

/* Forward declarations */
static void set_of_1 (rtx, const_rtx, void *);
static bool covers_regno_p (const_rtx, unsigned int);
static bool covers_regno_no_parallel_p (const_rtx, unsigned int);
static int rtx_referenced_p_1 (rtx *, void *);
static int computed_jump_p_1 (const_rtx);
static void parms_set (rtx, const_rtx, void *);

static unsigned HOST_WIDE_INT cached_nonzero_bits (const_rtx, enum machine_mode,
                                                   const_rtx, enum machine_mode,
                                                   unsigned HOST_WIDE_INT);
static unsigned HOST_WIDE_INT nonzero_bits1 (const_rtx, enum machine_mode,
                                             const_rtx, enum machine_mode,
                                             unsigned HOST_WIDE_INT);
static unsigned int cached_num_sign_bit_copies (const_rtx, enum machine_mode, const_rtx,
                                                enum machine_mode,
                                                unsigned int);
static unsigned int num_sign_bit_copies1 (const_rtx, enum machine_mode, const_rtx,
                                          enum machine_mode, unsigned int);

/* Offset of the first 'e', 'E' or 'V' operand for each rtx code, or
   -1 if a code has no such operand.  */
static int non_rtx_starting_operands[NUM_RTX_CODE];

/* Bit flags that specify the machine subtype we are compiling for.
   Bits are tested using macros TARGET_... defined in the tm.h file
   and set by `-m...' switches.  Must be defined in rtlanal.c.  */

int target_flags;

/* Truncation narrows the mode from SOURCE mode to DESTINATION mode.
   If TARGET_MODE_REP_EXTENDED (DESTINATION, DESTINATION_REP) is
   SIGN_EXTEND then while narrowing we also have to enforce the
   representation and sign-extend the value to mode DESTINATION_REP.

   If the value is already sign-extended to DESTINATION_REP mode we
   can just switch to DESTINATION mode on it.  For each pair of
   integral modes SOURCE and DESTINATION, when truncating from SOURCE
   to DESTINATION, NUM_SIGN_BIT_COPIES_IN_REP[SOURCE][DESTINATION]
   contains the number of high-order bits in SOURCE that have to be
   copies of the sign-bit so that we can do this mode-switch to
   DESTINATION.  */

static unsigned int
num_sign_bit_copies_in_rep[MAX_MODE_INT + 1][MAX_MODE_INT + 1];
\f
/* Return 1 if the value of X is unstable
   (would be different at a different point in the program).
   The frame pointer, arg pointer, etc. are considered stable
   (within one function) and so is anything marked `unchanging'.  */

int
rtx_unstable_p (const_rtx x)
{
  const RTX_CODE code = GET_CODE (x);
  int i;
  const char *fmt;

  switch (code)
    {
    case MEM:
      return !MEM_READONLY_P (x) || rtx_unstable_p (XEXP (x, 0));

    case CONST:
    case CONST_INT:
    case CONST_DOUBLE:
    case CONST_FIXED:
    case CONST_VECTOR:
    case SYMBOL_REF:
    case LABEL_REF:
      return 0;

    case REG:
      /* As in rtx_varies_p, we have to use the actual rtx, not reg number.  */
      if (x == frame_pointer_rtx || x == hard_frame_pointer_rtx
          /* The arg pointer varies if it is not a fixed register.  */
          || (x == arg_pointer_rtx && fixed_regs[ARG_POINTER_REGNUM]))
        return 0;
#ifndef PIC_OFFSET_TABLE_REG_CALL_CLOBBERED
      /* ??? When call-clobbered, the value is stable modulo the restore
         that must happen after a call.  This currently screws up local-alloc
         into believing that the restore is not needed.  */
      if (x == pic_offset_table_rtx)
        return 0;
#endif
      return 1;

    case ASM_OPERANDS:
      if (MEM_VOLATILE_P (x))
        return 1;

      /* Fall through.  */

    default:
      break;
    }

  fmt = GET_RTX_FORMAT (code);
  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    if (fmt[i] == 'e')
      {
        if (rtx_unstable_p (XEXP (x, i)))
          return 1;
      }
    else if (fmt[i] == 'E')
      {
        int j;
        for (j = 0; j < XVECLEN (x, i); j++)
          if (rtx_unstable_p (XVECEXP (x, i, j)))
            return 1;
      }

  return 0;
}
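
/* For example (an illustrative sketch, derived from the cases above):
   rtx_unstable_p (frame_pointer_rtx) is 0, since the frame pointer is
   stable within a function; an ordinary pseudo such as (reg:SI 100)
   yields 1, because it may be reassigned between two program points;
   and a MEM is stable only if it is MEM_READONLY_P and its address is
   itself stable.  */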

/* Return 1 if X has a value that can vary even between two
   executions of the program.  0 means X can be compared reliably
   against certain constants or near-constants.
   FOR_ALIAS is nonzero if we are called from alias analysis; if it is
   zero, we are slightly more conservative.
   The frame pointer and the arg pointer are considered constant.  */

bool
rtx_varies_p (const_rtx x, bool for_alias)
{
  RTX_CODE code;
  int i;
  const char *fmt;

  if (!x)
    return 0;

  code = GET_CODE (x);
  switch (code)
    {
    case MEM:
      return !MEM_READONLY_P (x) || rtx_varies_p (XEXP (x, 0), for_alias);

    case CONST:
    case CONST_INT:
    case CONST_DOUBLE:
    case CONST_FIXED:
    case CONST_VECTOR:
    case SYMBOL_REF:
    case LABEL_REF:
      return 0;

    case REG:
      /* Note that we have to test for the actual rtx used for the frame
         and arg pointers and not just the register number in case we have
         eliminated the frame and/or arg pointer and are using it
         for pseudos.  */
      if (x == frame_pointer_rtx || x == hard_frame_pointer_rtx
          /* The arg pointer varies if it is not a fixed register.  */
          || (x == arg_pointer_rtx && fixed_regs[ARG_POINTER_REGNUM]))
        return 0;
      if (x == pic_offset_table_rtx
#ifdef PIC_OFFSET_TABLE_REG_CALL_CLOBBERED
          /* ??? When call-clobbered, the value is stable modulo the restore
             that must happen after a call.  This currently screws up
             local-alloc into believing that the restore is not needed, so we
             must return 0 only if we are called from alias analysis.  */
          && for_alias
#endif
          )
        return 0;
      return 1;

    case LO_SUM:
      /* The operand 0 of a LO_SUM is considered constant
         (in fact it is related specifically to operand 1)
         during alias analysis.  */
      return (! for_alias && rtx_varies_p (XEXP (x, 0), for_alias))
             || rtx_varies_p (XEXP (x, 1), for_alias);

    case ASM_OPERANDS:
      if (MEM_VOLATILE_P (x))
        return 1;

      /* Fall through.  */

    default:
      break;
    }

  fmt = GET_RTX_FORMAT (code);
  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    if (fmt[i] == 'e')
      {
        if (rtx_varies_p (XEXP (x, i), for_alias))
          return 1;
      }
    else if (fmt[i] == 'E')
      {
        int j;
        for (j = 0; j < XVECLEN (x, i); j++)
          if (rtx_varies_p (XVECEXP (x, i, j), for_alias))
            return 1;
      }

  return 0;
}
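
/* For example (illustrative sketch): (symbol_ref "x") never varies,
   so rtx_varies_p returns 0 for it, while
   (plus:SI (reg:SI 100) (const_int 4)) varies because the pseudo
   does.  The FOR_ALIAS distinction matters for LO_SUM and for the PIC
   register, which alias analysis may treat as constant.  */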

/* Return nonzero if the use of X as an address in a MEM can cause a trap.
   MODE is the mode of the MEM (not that of X) and UNALIGNED_MEMS controls
   whether nonzero is returned for unaligned memory accesses on strict
   alignment machines.  */

static int
rtx_addr_can_trap_p_1 (const_rtx x, HOST_WIDE_INT offset, HOST_WIDE_INT size,
                       enum machine_mode mode, bool unaligned_mems)
{
  enum rtx_code code = GET_CODE (x);

  if (STRICT_ALIGNMENT
      && unaligned_mems
      && GET_MODE_SIZE (mode) != 0)
    {
      HOST_WIDE_INT actual_offset = offset;
#ifdef SPARC_STACK_BOUNDARY_HACK
      /* ??? The SPARC port may claim a STACK_BOUNDARY higher than
         the real alignment of %sp.  However, when it does this, the
         alignment of %sp+STACK_POINTER_OFFSET is STACK_BOUNDARY.  */
      if (SPARC_STACK_BOUNDARY_HACK
          && (x == stack_pointer_rtx || x == hard_frame_pointer_rtx))
        actual_offset -= STACK_POINTER_OFFSET;
#endif

      if (actual_offset % GET_MODE_SIZE (mode) != 0)
        return 1;
    }

  switch (code)
    {
    case SYMBOL_REF:
      if (SYMBOL_REF_WEAK (x))
        return 1;
      if (!CONSTANT_POOL_ADDRESS_P (x))
        {
          tree decl;
          HOST_WIDE_INT decl_size;

          if (offset < 0)
            return 1;
          if (size == 0)
            size = GET_MODE_SIZE (mode);
          if (size == 0)
            return offset != 0;

          /* If the size of the access or of the symbol is unknown,
             assume the worst.  */
          decl = SYMBOL_REF_DECL (x);

          /* Else check that the access is in bounds.  TODO: restructure
             expr_size/tree_expr_size/int_expr_size and just use the latter.  */
          if (!decl)
            decl_size = -1;
          else if (DECL_P (decl) && DECL_SIZE_UNIT (decl))
            decl_size = (host_integerp (DECL_SIZE_UNIT (decl), 0)
                         ? tree_low_cst (DECL_SIZE_UNIT (decl), 0)
                         : -1);
          else if (TREE_CODE (decl) == STRING_CST)
            decl_size = TREE_STRING_LENGTH (decl);
          else if (TYPE_SIZE_UNIT (TREE_TYPE (decl)))
            decl_size = int_size_in_bytes (TREE_TYPE (decl));
          else
            decl_size = -1;

          return (decl_size <= 0 ? offset != 0 : offset + size > decl_size);
        }

      return 0;

    case LABEL_REF:
      return 0;

    case REG:
      /* As in rtx_varies_p, we have to use the actual rtx, not reg number.  */
      if (x == frame_pointer_rtx || x == hard_frame_pointer_rtx
          || x == stack_pointer_rtx
          /* The arg pointer varies if it is not a fixed register.  */
          || (x == arg_pointer_rtx && fixed_regs[ARG_POINTER_REGNUM]))
        return 0;
      /* All of the virtual frame registers are stack references.  */
      if (REGNO (x) >= FIRST_VIRTUAL_REGISTER
          && REGNO (x) <= LAST_VIRTUAL_REGISTER)
        return 0;
      return 1;

    case CONST:
      return rtx_addr_can_trap_p_1 (XEXP (x, 0), offset, size,
                                    mode, unaligned_mems);

    case PLUS:
      /* An address is assumed not to trap if:
         - it is the pic register plus a constant.  */
      if (XEXP (x, 0) == pic_offset_table_rtx && CONSTANT_P (XEXP (x, 1)))
        return 0;

      /* - or it is an address that can't trap plus a constant integer,
           with the proper remainder modulo the mode size if we are
           considering unaligned memory references.  */
      if (CONST_INT_P (XEXP (x, 1))
          && !rtx_addr_can_trap_p_1 (XEXP (x, 0), offset + INTVAL (XEXP (x, 1)),
                                     size, mode, unaligned_mems))
        return 0;

      return 1;

    case LO_SUM:
    case PRE_MODIFY:
      return rtx_addr_can_trap_p_1 (XEXP (x, 1), offset, size,
                                    mode, unaligned_mems);

    case PRE_DEC:
    case PRE_INC:
    case POST_DEC:
    case POST_INC:
    case POST_MODIFY:
      return rtx_addr_can_trap_p_1 (XEXP (x, 0), offset, size,
                                    mode, unaligned_mems);

    default:
      break;
    }

  /* If it isn't one of the cases above, it can cause a trap.  */
  return 1;
}

/* Return nonzero if the use of X as an address in a MEM can cause a trap.  */

int
rtx_addr_can_trap_p (const_rtx x)
{
  return rtx_addr_can_trap_p_1 (x, 0, 0, VOIDmode, false);
}
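
/* For example (illustrative sketch): (plus:SI stack_pointer_rtx
   (const_int 8)) cannot trap, since the stack pointer is one of the
   known-safe bases; a SYMBOL_REF of a weak symbol can trap, because
   it may resolve to address zero; and a bare pseudo register used as
   an address is conservatively assumed to trap.  */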

/* Return true if X is an address that is known to not be zero.  */

bool
nonzero_address_p (const_rtx x)
{
  const enum rtx_code code = GET_CODE (x);

  switch (code)
    {
    case SYMBOL_REF:
      return !SYMBOL_REF_WEAK (x);

    case LABEL_REF:
      return true;

    case REG:
      /* As in rtx_varies_p, we have to use the actual rtx, not reg number.  */
      if (x == frame_pointer_rtx || x == hard_frame_pointer_rtx
          || x == stack_pointer_rtx
          || (x == arg_pointer_rtx && fixed_regs[ARG_POINTER_REGNUM]))
        return true;
      /* All of the virtual frame registers are stack references.  */
      if (REGNO (x) >= FIRST_VIRTUAL_REGISTER
          && REGNO (x) <= LAST_VIRTUAL_REGISTER)
        return true;
      return false;

    case CONST:
      return nonzero_address_p (XEXP (x, 0));

    case PLUS:
      if (CONST_INT_P (XEXP (x, 1)))
        return nonzero_address_p (XEXP (x, 0));
      /* Handle PIC references.  */
      else if (XEXP (x, 0) == pic_offset_table_rtx
               && CONSTANT_P (XEXP (x, 1)))
        return true;
      return false;

    case PRE_MODIFY:
      /* Similar to the above; allow positive offsets.  Further, since
         auto-inc is only allowed in memories, the register must be a
         pointer.  */
      if (CONST_INT_P (XEXP (x, 1))
          && INTVAL (XEXP (x, 1)) > 0)
        return true;
      return nonzero_address_p (XEXP (x, 0));

    case PRE_INC:
      /* Similarly.  Further, the offset is always positive.  */
      return true;

    case PRE_DEC:
    case POST_DEC:
    case POST_INC:
    case POST_MODIFY:
      return nonzero_address_p (XEXP (x, 0));

    case LO_SUM:
      return nonzero_address_p (XEXP (x, 1));

    default:
      break;
    }

  /* If it isn't one of the cases above, it might be zero.  */
  return false;
}
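
/* For example (illustrative sketch): (symbol_ref "foo") is known
   nonzero unless the symbol is weak; stack_pointer_rtx is always
   nonzero; and a PRE_INC address is nonzero because the offset
   applied is always positive.  */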

/* Return 1 if X refers to a memory location whose address
   cannot be compared reliably with constant addresses,
   or if X refers to a BLKmode memory object.
   FOR_ALIAS is nonzero if we are called from alias analysis; if it is
   zero, we are slightly more conservative.  */

bool
rtx_addr_varies_p (const_rtx x, bool for_alias)
{
  enum rtx_code code;
  int i;
  const char *fmt;

  if (x == 0)
    return 0;

  code = GET_CODE (x);
  if (code == MEM)
    return GET_MODE (x) == BLKmode || rtx_varies_p (XEXP (x, 0), for_alias);

  fmt = GET_RTX_FORMAT (code);
  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    if (fmt[i] == 'e')
      {
        if (rtx_addr_varies_p (XEXP (x, i), for_alias))
          return 1;
      }
    else if (fmt[i] == 'E')
      {
        int j;
        for (j = 0; j < XVECLEN (x, i); j++)
          if (rtx_addr_varies_p (XVECEXP (x, i, j), for_alias))
            return 1;
      }
  return 0;
}
\f
/* Return the value of the integer term in X, if one is apparent;
   otherwise return 0.
   Only obvious integer terms are detected.
   This is used in cse.c with the `related_value' field.  */

HOST_WIDE_INT
get_integer_term (const_rtx x)
{
  if (GET_CODE (x) == CONST)
    x = XEXP (x, 0);

  if (GET_CODE (x) == MINUS
      && CONST_INT_P (XEXP (x, 1)))
    return - INTVAL (XEXP (x, 1));
  if (GET_CODE (x) == PLUS
      && CONST_INT_P (XEXP (x, 1)))
    return INTVAL (XEXP (x, 1));
  return 0;
}

/* If X is a constant, return the value sans apparent integer term;
   otherwise return 0.
   Only obvious integer terms are detected.  */

rtx
get_related_value (const_rtx x)
{
  if (GET_CODE (x) != CONST)
    return 0;
  x = XEXP (x, 0);
  if (GET_CODE (x) == PLUS
      && CONST_INT_P (XEXP (x, 1)))
    return XEXP (x, 0);
  else if (GET_CODE (x) == MINUS
           && CONST_INT_P (XEXP (x, 1)))
    return XEXP (x, 0);
  return 0;
}
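
/* Worked example: for (const (plus (symbol_ref "tbl") (const_int 12))),
   get_integer_term returns 12 and get_related_value returns the
   (symbol_ref "tbl") operand, so together they split the constant
   into a base and an offset.  */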
\f
/* Return true if SYMBOL is a SYMBOL_REF and OFFSET + SYMBOL points
   to somewhere in the same object or object_block as SYMBOL.  */

bool
offset_within_block_p (const_rtx symbol, HOST_WIDE_INT offset)
{
  tree decl;

  if (GET_CODE (symbol) != SYMBOL_REF)
    return false;

  if (offset == 0)
    return true;

  if (offset > 0)
    {
      if (CONSTANT_POOL_ADDRESS_P (symbol)
          && offset < (int) GET_MODE_SIZE (get_pool_mode (symbol)))
        return true;

      decl = SYMBOL_REF_DECL (symbol);
      if (decl && offset < int_size_in_bytes (TREE_TYPE (decl)))
        return true;
    }

  if (SYMBOL_REF_HAS_BLOCK_INFO_P (symbol)
      && SYMBOL_REF_BLOCK (symbol)
      && SYMBOL_REF_BLOCK_OFFSET (symbol) >= 0
      && ((unsigned HOST_WIDE_INT) offset + SYMBOL_REF_BLOCK_OFFSET (symbol)
          < (unsigned HOST_WIDE_INT) SYMBOL_REF_BLOCK (symbol)->size))
    return true;

  return false;
}

/* Split X into a base and a constant offset, storing them in *BASE_OUT
   and *OFFSET_OUT respectively.  */

void
split_const (rtx x, rtx *base_out, rtx *offset_out)
{
  if (GET_CODE (x) == CONST)
    {
      x = XEXP (x, 0);
      if (GET_CODE (x) == PLUS && CONST_INT_P (XEXP (x, 1)))
        {
          *base_out = XEXP (x, 0);
          *offset_out = XEXP (x, 1);
          return;
        }
    }
  *base_out = x;
  *offset_out = const0_rtx;
}
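
/* Usage sketch:

     rtx base, offset;
     split_const (x, &base, &offset);

   For x = (const (plus (symbol_ref "s") (const_int 4))) this yields
   base = (symbol_ref "s") and offset = (const_int 4); for any other
   form, base is X itself and offset is const0_rtx.  */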
\f
/* Return the number of places FIND appears within X.  If COUNT_DEST is
   zero, we do not count occurrences inside the destination of a SET.  */

int
count_occurrences (const_rtx x, const_rtx find, int count_dest)
{
  int i, j;
  enum rtx_code code;
  const char *format_ptr;
  int count;

  if (x == find)
    return 1;

  code = GET_CODE (x);

  switch (code)
    {
    case REG:
    case CONST_INT:
    case CONST_DOUBLE:
    case CONST_FIXED:
    case CONST_VECTOR:
    case SYMBOL_REF:
    case CODE_LABEL:
    case PC:
    case CC0:
      return 0;

    case EXPR_LIST:
      count = count_occurrences (XEXP (x, 0), find, count_dest);
      if (XEXP (x, 1))
        count += count_occurrences (XEXP (x, 1), find, count_dest);
      return count;

    case MEM:
      if (MEM_P (find) && rtx_equal_p (x, find))
        return 1;
      break;

    case SET:
      if (SET_DEST (x) == find && ! count_dest)
        return count_occurrences (SET_SRC (x), find, count_dest);
      break;

    default:
      break;
    }

  format_ptr = GET_RTX_FORMAT (code);
  count = 0;

  for (i = 0; i < GET_RTX_LENGTH (code); i++)
    {
      switch (*format_ptr++)
        {
        case 'e':
          count += count_occurrences (XEXP (x, i), find, count_dest);
          break;

        case 'E':
          for (j = 0; j < XVECLEN (x, i); j++)
            count += count_occurrences (XVECEXP (x, i, j), find, count_dest);
          break;
        }
    }
  return count;
}

\f
/* Nonzero if register REG appears somewhere within IN.
   Also works if REG is not a register; in this case it checks
   for a subexpression of IN that is Lisp "equal" to REG.  */

int
reg_mentioned_p (const_rtx reg, const_rtx in)
{
  const char *fmt;
  int i;
  enum rtx_code code;

  if (in == 0)
    return 0;

  if (reg == in)
    return 1;

  if (GET_CODE (in) == LABEL_REF)
    return reg == XEXP (in, 0);

  code = GET_CODE (in);

  switch (code)
    {
      /* Compare registers by number.  */
    case REG:
      return REG_P (reg) && REGNO (in) == REGNO (reg);

      /* These codes have no constituent expressions
         and are unique.  */
    case SCRATCH:
    case CC0:
    case PC:
      return 0;

    case CONST_INT:
    case CONST_VECTOR:
    case CONST_DOUBLE:
    case CONST_FIXED:
      /* These are kept unique for a given value.  */
      return 0;

    default:
      break;
    }

  if (GET_CODE (reg) == code && rtx_equal_p (reg, in))
    return 1;

  fmt = GET_RTX_FORMAT (code);

  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    {
      if (fmt[i] == 'E')
        {
          int j;
          for (j = XVECLEN (in, i) - 1; j >= 0; j--)
            if (reg_mentioned_p (reg, XVECEXP (in, i, j)))
              return 1;
        }
      else if (fmt[i] == 'e'
               && reg_mentioned_p (reg, XEXP (in, i)))
        return 1;
    }
  return 0;
}
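
/* For example (illustrative sketch): in
   (set (reg:SI 101) (plus:SI (reg:SI 100) (const_int 1)))
   both pseudos are mentioned, since uses and sets alike count;
   contrast reg_referenced_p below, which ignores a register that is
   only a full SET_DEST.  */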
\f
/* Return 1 if in between BEG and END, exclusive of BEG and END, there is
   no CODE_LABEL insn.  */

int
no_labels_between_p (const_rtx beg, const_rtx end)
{
  rtx p;
  if (beg == end)
    return 0;
  for (p = NEXT_INSN (beg); p != end; p = NEXT_INSN (p))
    if (LABEL_P (p))
      return 0;
  return 1;
}

/* Nonzero if register REG is used in an insn between
   FROM_INSN and TO_INSN (exclusive of those two).  */

int
reg_used_between_p (const_rtx reg, const_rtx from_insn, const_rtx to_insn)
{
  rtx insn;

  if (from_insn == to_insn)
    return 0;

  for (insn = NEXT_INSN (from_insn); insn != to_insn; insn = NEXT_INSN (insn))
    if (NONDEBUG_INSN_P (insn)
        && (reg_overlap_mentioned_p (reg, PATTERN (insn))
            || (CALL_P (insn) && find_reg_fusage (insn, USE, reg))))
      return 1;
  return 0;
}
\f
/* Nonzero if the old value of X, a register, is referenced in BODY.  If X
   is entirely replaced by a new value and the only use is as a SET_DEST,
   we do not consider it a reference.  */

int
reg_referenced_p (const_rtx x, const_rtx body)
{
  int i;

  switch (GET_CODE (body))
    {
    case SET:
      if (reg_overlap_mentioned_p (x, SET_SRC (body)))
        return 1;

      /* If the destination is anything other than CC0, PC, a REG or a SUBREG
         of a REG that occupies all of the REG, the insn references X if
         it is mentioned in the destination.  */
      if (GET_CODE (SET_DEST (body)) != CC0
          && GET_CODE (SET_DEST (body)) != PC
          && !REG_P (SET_DEST (body))
          && ! (GET_CODE (SET_DEST (body)) == SUBREG
                && REG_P (SUBREG_REG (SET_DEST (body)))
                && (((GET_MODE_SIZE (GET_MODE (SUBREG_REG (SET_DEST (body))))
                      + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD)
                    == ((GET_MODE_SIZE (GET_MODE (SET_DEST (body)))
                         + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD)))
          && reg_overlap_mentioned_p (x, SET_DEST (body)))
        return 1;
      return 0;

    case ASM_OPERANDS:
      for (i = ASM_OPERANDS_INPUT_LENGTH (body) - 1; i >= 0; i--)
        if (reg_overlap_mentioned_p (x, ASM_OPERANDS_INPUT (body, i)))
          return 1;
      return 0;

    case CALL:
    case USE:
    case IF_THEN_ELSE:
      return reg_overlap_mentioned_p (x, body);

    case TRAP_IF:
      return reg_overlap_mentioned_p (x, TRAP_CONDITION (body));

    case PREFETCH:
      return reg_overlap_mentioned_p (x, XEXP (body, 0));

    case UNSPEC:
    case UNSPEC_VOLATILE:
      for (i = XVECLEN (body, 0) - 1; i >= 0; i--)
        if (reg_overlap_mentioned_p (x, XVECEXP (body, 0, i)))
          return 1;
      return 0;

    case PARALLEL:
      for (i = XVECLEN (body, 0) - 1; i >= 0; i--)
        if (reg_referenced_p (x, XVECEXP (body, 0, i)))
          return 1;
      return 0;

    case CLOBBER:
      if (MEM_P (XEXP (body, 0)))
        if (reg_overlap_mentioned_p (x, XEXP (XEXP (body, 0), 0)))
          return 1;
      return 0;

    case COND_EXEC:
      if (reg_overlap_mentioned_p (x, COND_EXEC_TEST (body)))
        return 1;
      return reg_referenced_p (x, COND_EXEC_CODE (body));

    default:
      return 0;
    }
}
\f
/* Nonzero if register REG is set or clobbered in an insn between
   FROM_INSN and TO_INSN (exclusive of those two).  */

int
reg_set_between_p (const_rtx reg, const_rtx from_insn, const_rtx to_insn)
{
  const_rtx insn;

  if (from_insn == to_insn)
    return 0;

  for (insn = NEXT_INSN (from_insn); insn != to_insn; insn = NEXT_INSN (insn))
    if (INSN_P (insn) && reg_set_p (reg, insn))
      return 1;
  return 0;
}

/* Internals of reg_set_between_p.  */
int
reg_set_p (const_rtx reg, const_rtx insn)
{
  /* We can be passed an insn or part of one.  If we are passed an insn,
     check if a side-effect of the insn clobbers REG.  */
  if (INSN_P (insn)
      && (FIND_REG_INC_NOTE (insn, reg)
          || (CALL_P (insn)
              && ((REG_P (reg)
                   && REGNO (reg) < FIRST_PSEUDO_REGISTER
                   && overlaps_hard_reg_set_p (regs_invalidated_by_call,
                                               GET_MODE (reg), REGNO (reg)))
                  || MEM_P (reg)
                  || find_reg_fusage (insn, CLOBBER, reg)))))
    return 1;

  return set_of (reg, insn) != NULL_RTX;
}

/* Similar to reg_set_between_p, but check all registers in X.  Return 0
   only if none of them are modified between START and END.  Return 1 if
   X contains a MEM; this routine does use memory aliasing.  */

int
modified_between_p (const_rtx x, const_rtx start, const_rtx end)
{
  const enum rtx_code code = GET_CODE (x);
  const char *fmt;
  int i, j;
  rtx insn;

  if (start == end)
    return 0;

  switch (code)
    {
    case CONST_INT:
    case CONST_DOUBLE:
    case CONST_FIXED:
    case CONST_VECTOR:
    case CONST:
    case SYMBOL_REF:
    case LABEL_REF:
      return 0;

    case PC:
    case CC0:
      return 1;

    case MEM:
      if (modified_between_p (XEXP (x, 0), start, end))
        return 1;
      if (MEM_READONLY_P (x))
        return 0;
      for (insn = NEXT_INSN (start); insn != end; insn = NEXT_INSN (insn))
        if (memory_modified_in_insn_p (x, insn))
          return 1;
      return 0;

    case REG:
      return reg_set_between_p (x, start, end);

    default:
      break;
    }

  fmt = GET_RTX_FORMAT (code);
  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    {
      if (fmt[i] == 'e' && modified_between_p (XEXP (x, i), start, end))
        return 1;

      else if (fmt[i] == 'E')
        for (j = XVECLEN (x, i) - 1; j >= 0; j--)
          if (modified_between_p (XVECEXP (x, i, j), start, end))
            return 1;
    }

  return 0;
}

/* Similar to reg_set_p, but check all registers in X.  Return 0 only if none
   of them are modified in INSN.  Return 1 if X contains a MEM; this routine
   does use memory aliasing.  */

int
modified_in_p (const_rtx x, const_rtx insn)
{
  const enum rtx_code code = GET_CODE (x);
  const char *fmt;
  int i, j;

  switch (code)
    {
    case CONST_INT:
    case CONST_DOUBLE:
    case CONST_FIXED:
    case CONST_VECTOR:
    case CONST:
    case SYMBOL_REF:
    case LABEL_REF:
      return 0;

    case PC:
    case CC0:
      return 1;

    case MEM:
      if (modified_in_p (XEXP (x, 0), insn))
        return 1;
      if (MEM_READONLY_P (x))
        return 0;
      if (memory_modified_in_insn_p (x, insn))
        return 1;
      return 0;

    case REG:
      return reg_set_p (x, insn);

    default:
      break;
    }

  fmt = GET_RTX_FORMAT (code);
  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    {
      if (fmt[i] == 'e' && modified_in_p (XEXP (x, i), insn))
        return 1;

      else if (fmt[i] == 'E')
        for (j = XVECLEN (x, i) - 1; j >= 0; j--)
          if (modified_in_p (XVECEXP (x, i, j), insn))
            return 1;
    }

  return 0;
}
\f
/* Helper function for set_of.  */
struct set_of_data
  {
    const_rtx found;
    const_rtx pat;
  };

static void
set_of_1 (rtx x, const_rtx pat, void *data1)
{
  struct set_of_data *const data = (struct set_of_data *) (data1);
  if (rtx_equal_p (x, data->pat)
      || (!MEM_P (x) && reg_overlap_mentioned_p (data->pat, x)))
    data->found = pat;
}

/* Given an INSN, return a SET or CLOBBER expression that does modify PAT
   (either directly or via STRICT_LOW_PART and similar modifiers).  */
const_rtx
set_of (const_rtx pat, const_rtx insn)
{
  struct set_of_data data;
  data.found = NULL_RTX;
  data.pat = pat;
  note_stores (INSN_P (insn) ? PATTERN (insn) : insn, set_of_1, &data);
  return data.found;
}
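
/* Usage sketch: for an insn whose pattern is
   (parallel [(set (reg:SI 1) ...) (clobber (reg:SI 2))]),
   set_of (operand, insn) returns the inner SET for register 1, the
   CLOBBER for register 2, and NULL_RTX for an untouched register.  */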
\f
/* Given an INSN, return a SET expression if this insn has only a single SET.
   It may also have CLOBBERs, USEs, or SETs whose output
   will not be used, which we ignore.  */

rtx
single_set_2 (const_rtx insn, const_rtx pat)
{
  rtx set = NULL;
  int set_verified = 1;
  int i;

  if (GET_CODE (pat) == PARALLEL)
    {
      for (i = 0; i < XVECLEN (pat, 0); i++)
        {
          rtx sub = XVECEXP (pat, 0, i);
          switch (GET_CODE (sub))
            {
            case USE:
            case CLOBBER:
              break;

            case SET:
              /* We can consider insns having multiple sets, where all
                 but one are dead, as single set insns.  In the common
                 case only a single set is present in the pattern, so we
                 want to avoid checking for REG_UNUSED notes unless
                 necessary.

                 When we reach the first set, we just expect it to be
                 the single set we are looking for; only when more sets
                 are found in the insn do we check them.  */
              if (!set_verified)
                {
                  if (find_reg_note (insn, REG_UNUSED, SET_DEST (set))
                      && !side_effects_p (set))
                    set = NULL;
                  else
                    set_verified = 1;
                }
              if (!set)
                set = sub, set_verified = 0;
              else if (!find_reg_note (insn, REG_UNUSED, SET_DEST (sub))
                       || side_effects_p (sub))
                return NULL_RTX;
              break;

            default:
              return NULL_RTX;
            }
        }
    }
  return set;
}
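
/* For example (illustrative sketch):
   (parallel [(set (reg:SI 1) (reg:SI 2)) (clobber (reg:CC 17))])
   still counts as a single set, so single_set returns the inner SET;
   two SETs whose results are both live make it return NULL_RTX.  */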

/* Given an INSN, return nonzero if it has more than one SET, else return
   zero.  */

int
multiple_sets (const_rtx insn)
{
  int found;
  int i;

  /* INSN must be an insn.  */
  if (! INSN_P (insn))
    return 0;

  /* Only a PARALLEL can have multiple SETs.  */
  if (GET_CODE (PATTERN (insn)) == PARALLEL)
    {
      for (i = 0, found = 0; i < XVECLEN (PATTERN (insn), 0); i++)
        if (GET_CODE (XVECEXP (PATTERN (insn), 0, i)) == SET)
          {
            /* If we have already found a SET, then return now.  */
            if (found)
              return 1;
            else
              found = 1;
          }
    }

  /* Either zero or one SET.  */
  return 0;
}
\f
/* Return nonzero if the destination of SET equals the source
   and there are no side effects.  */

int
set_noop_p (const_rtx set)
{
  rtx src = SET_SRC (set);
  rtx dst = SET_DEST (set);

  if (dst == pc_rtx && src == pc_rtx)
    return 1;

  if (MEM_P (dst) && MEM_P (src))
    return rtx_equal_p (dst, src) && !side_effects_p (dst);

  if (GET_CODE (dst) == ZERO_EXTRACT)
    return rtx_equal_p (XEXP (dst, 0), src)
           && ! BYTES_BIG_ENDIAN && XEXP (dst, 2) == const0_rtx
           && !side_effects_p (src);

  if (GET_CODE (dst) == STRICT_LOW_PART)
    dst = XEXP (dst, 0);

  if (GET_CODE (src) == SUBREG && GET_CODE (dst) == SUBREG)
    {
      if (SUBREG_BYTE (src) != SUBREG_BYTE (dst))
        return 0;
      src = SUBREG_REG (src);
      dst = SUBREG_REG (dst);
    }

  return (REG_P (src) && REG_P (dst)
          && REGNO (src) == REGNO (dst));
}
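
/* For example, (set (reg:SI 100) (reg:SI 100)) is a no-op, while
   (set (reg:SI 100) (reg:SI 101)) is not; a volatile MEM on either
   side also disqualifies the set via the side_effects_p checks.  */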

/* Return nonzero if an insn consists only of SETs, each of which only sets a
   value to itself.  */

int
noop_move_p (const_rtx insn)
{
  rtx pat = PATTERN (insn);

  if (INSN_CODE (insn) == NOOP_MOVE_INSN_CODE)
    return 1;

  /* Insns carrying these notes are useful later on.  */
  if (find_reg_note (insn, REG_EQUAL, NULL_RTX))
    return 0;

  if (GET_CODE (pat) == SET && set_noop_p (pat))
    return 1;

  if (GET_CODE (pat) == PARALLEL)
    {
      int i;
      /* If nothing but SETs of registers to themselves,
         this insn can also be deleted.  */
      for (i = 0; i < XVECLEN (pat, 0); i++)
        {
          rtx tem = XVECEXP (pat, 0, i);

          if (GET_CODE (tem) == USE
              || GET_CODE (tem) == CLOBBER)
            continue;

          if (GET_CODE (tem) != SET || ! set_noop_p (tem))
            return 0;
        }

      return 1;
    }
  return 0;
}
\f

/* Return the last thing that X was assigned from before *PINSN.  If VALID_TO
   is not NULL_RTX then verify that the object is not modified up to VALID_TO.
   If the object was modified, if we hit a partial assignment to X, or hit a
   CODE_LABEL first, return X.  If we found an assignment, update *PINSN to
   point to it.  ALLOW_HWREG is set to 1 if hardware registers are allowed to
   be the src.  */

rtx
find_last_value (rtx x, rtx *pinsn, rtx valid_to, int allow_hwreg)
{
  rtx p;

  for (p = PREV_INSN (*pinsn); p && !LABEL_P (p);
       p = PREV_INSN (p))
    if (INSN_P (p))
      {
        rtx set = single_set (p);
        rtx note = find_reg_note (p, REG_EQUAL, NULL_RTX);

        if (set && rtx_equal_p (x, SET_DEST (set)))
          {
            rtx src = SET_SRC (set);

            if (note && GET_CODE (XEXP (note, 0)) != EXPR_LIST)
              src = XEXP (note, 0);

            if ((valid_to == NULL_RTX
                 || ! modified_between_p (src, PREV_INSN (p), valid_to))
                /* Reject hard registers because we don't usually want
                   to use them; we'd rather use a pseudo.  */
                && (! (REG_P (src)
                       && REGNO (src) < FIRST_PSEUDO_REGISTER) || allow_hwreg))
              {
                *pinsn = p;
                return src;
              }
          }

        /* If set in non-simple way, we don't have a value.  */
        if (reg_set_p (x, p))
          break;
      }

  return x;
}
\f
/* Return nonzero if register in range [REGNO, ENDREGNO)
   appears either explicitly or implicitly in X
   other than being stored into.

   References contained within the substructure at LOC do not count.
   LOC may be zero, meaning don't ignore anything.  */

int
refers_to_regno_p (unsigned int regno, unsigned int endregno, const_rtx x,
                   rtx *loc)
{
  int i;
  unsigned int x_regno;
  RTX_CODE code;
  const char *fmt;

 repeat:
  /* The contents of a REG_NONNEG note are always zero, so we must come here
     upon repeat in case the last REG_NOTE is a REG_NONNEG note.  */
  if (x == 0)
    return 0;

  code = GET_CODE (x);

  switch (code)
    {
    case REG:
      x_regno = REGNO (x);

      /* If we are modifying the stack, frame, or argument pointer, it will
         clobber a virtual register.  In fact, we could be more precise,
         but it isn't worth it.  */
      if ((x_regno == STACK_POINTER_REGNUM
#if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
           || x_regno == ARG_POINTER_REGNUM
#endif
           || x_regno == FRAME_POINTER_REGNUM)
          && regno >= FIRST_VIRTUAL_REGISTER && regno <= LAST_VIRTUAL_REGISTER)
        return 1;

      return endregno > x_regno && regno < END_REGNO (x);

    case SUBREG:
      /* If this is a SUBREG of a hard reg, we can see exactly which
         registers are being modified.  Otherwise, handle normally.  */
      if (REG_P (SUBREG_REG (x))
          && REGNO (SUBREG_REG (x)) < FIRST_PSEUDO_REGISTER)
        {
          unsigned int inner_regno = subreg_regno (x);
          unsigned int inner_endregno
            = inner_regno + (inner_regno < FIRST_PSEUDO_REGISTER
                             ? subreg_nregs (x) : 1);

          return endregno > inner_regno && regno < inner_endregno;
        }
      break;

    case CLOBBER:
    case SET:
      if (&SET_DEST (x) != loc
          /* Note setting a SUBREG counts as referring to the REG it is in for
             a pseudo but not for hard registers since we can
             treat each word individually.  */
          && ((GET_CODE (SET_DEST (x)) == SUBREG
               && loc != &SUBREG_REG (SET_DEST (x))
               && REG_P (SUBREG_REG (SET_DEST (x)))
               && REGNO (SUBREG_REG (SET_DEST (x))) >= FIRST_PSEUDO_REGISTER
               && refers_to_regno_p (regno, endregno,
                                     SUBREG_REG (SET_DEST (x)), loc))
              || (!REG_P (SET_DEST (x))
                  && refers_to_regno_p (regno, endregno, SET_DEST (x), loc))))
        return 1;

      if (code == CLOBBER || loc == &SET_SRC (x))
        return 0;
      x = SET_SRC (x);
      goto repeat;

    default:
      break;
    }

  /* X does not match, so try its subexpressions.  */

  fmt = GET_RTX_FORMAT (code);
  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    {
      if (fmt[i] == 'e' && loc != &XEXP (x, i))
        {
          if (i == 0)
            {
              x = XEXP (x, 0);
              goto repeat;
            }
          else
            if (refers_to_regno_p (regno, endregno, XEXP (x, i), loc))
              return 1;
        }
      else if (fmt[i] == 'E')
        {
          int j;
          for (j = XVECLEN (x, i) - 1; j >= 0; j--)
            if (loc != &XVECEXP (x, i, j)
                && refers_to_regno_p (regno, endregno, XVECEXP (x, i, j), loc))
              return 1;
        }
    }
  return 0;
}

/* Nonzero if modifying X will affect IN.  If X is a register or a SUBREG,
   we check if any register number in X conflicts with the relevant register
   numbers.  If X is a constant, return 0.  If X is a MEM, return 1 iff IN
   contains a MEM (we don't bother checking for memory addresses that can't
   conflict because we expect this to be a rare case).  */

int
reg_overlap_mentioned_p (const_rtx x, const_rtx in)
{
  unsigned int regno, endregno;

  /* If either argument is a constant, then modifying X cannot
     affect IN.  Here we look at IN; we can profitably combine
     CONSTANT_P (x) with the switch statement below.  */
  if (CONSTANT_P (in))
    return 0;

 recurse:
  switch (GET_CODE (x))
    {
    case STRICT_LOW_PART:
    case ZERO_EXTRACT:
    case SIGN_EXTRACT:
      /* Overly conservative.  */
      x = XEXP (x, 0);
      goto recurse;

    case SUBREG:
      regno = REGNO (SUBREG_REG (x));
      if (regno < FIRST_PSEUDO_REGISTER)
        regno = subreg_regno (x);
      endregno = regno + (regno < FIRST_PSEUDO_REGISTER
                          ? subreg_nregs (x) : 1);
      goto do_reg;

    case REG:
      regno = REGNO (x);
      endregno = END_REGNO (x);
    do_reg:
      return refers_to_regno_p (regno, endregno, in, (rtx*) 0);

    case MEM:
      {
        const char *fmt;
        int i;

        if (MEM_P (in))
          return 1;

        fmt = GET_RTX_FORMAT (GET_CODE (in));
        for (i = GET_RTX_LENGTH (GET_CODE (in)) - 1; i >= 0; i--)
          if (fmt[i] == 'e')
            {
              if (reg_overlap_mentioned_p (x, XEXP (in, i)))
                return 1;
            }
          else if (fmt[i] == 'E')
            {
              int j;
              for (j = XVECLEN (in, i) - 1; j >= 0; --j)
                if (reg_overlap_mentioned_p (x, XVECEXP (in, i, j)))
                  return 1;
            }

        return 0;
      }

    case SCRATCH:
    case PC:
    case CC0:
      return reg_mentioned_p (x, in);

    case PARALLEL:
      {
        int i;

        /* If any register in here refers to it we return true.  */
        for (i = XVECLEN (x, 0) - 1; i >= 0; i--)
          if (XEXP (XVECEXP (x, 0, i), 0) != 0
              && reg_overlap_mentioned_p (XEXP (XVECEXP (x, 0, i), 0), in))
            return 1;
        return 0;
      }

    default:
      gcc_assert (CONSTANT_P (x));
      return 0;
    }
}
\f
/* Call FUN on each register or MEM that is stored into or clobbered by X.
   (X would be the pattern of an insn).  DATA is an arbitrary pointer,
   ignored by note_stores, but passed to FUN.

   FUN receives three arguments:
   1. the REG, MEM, CC0 or PC being stored in or clobbered,
   2. the SET or CLOBBER rtx that does the store,
   3. the pointer DATA provided to note_stores.

   If the item being stored in or clobbered is a SUBREG of a hard register,
   the SUBREG will be passed.  */

void
note_stores (const_rtx x, void (*fun) (rtx, const_rtx, void *), void *data)
{
  int i;

  if (GET_CODE (x) == COND_EXEC)
    x = COND_EXEC_CODE (x);

  if (GET_CODE (x) == SET || GET_CODE (x) == CLOBBER)
    {
      rtx dest = SET_DEST (x);

      while ((GET_CODE (dest) == SUBREG
              && (!REG_P (SUBREG_REG (dest))
                  || REGNO (SUBREG_REG (dest)) >= FIRST_PSEUDO_REGISTER))
             || GET_CODE (dest) == ZERO_EXTRACT
             || GET_CODE (dest) == STRICT_LOW_PART)
        dest = XEXP (dest, 0);

      /* If we have a PARALLEL, SET_DEST is a list of EXPR_LIST expressions,
         each of whose first operand is a register.  */
      if (GET_CODE (dest) == PARALLEL)
        {
          for (i = XVECLEN (dest, 0) - 1; i >= 0; i--)
            if (XEXP (XVECEXP (dest, 0, i), 0) != 0)
              (*fun) (XEXP (XVECEXP (dest, 0, i), 0), x, data);
        }
      else
        (*fun) (dest, x, data);
    }

  else if (GET_CODE (x) == PARALLEL)
    for (i = XVECLEN (x, 0) - 1; i >= 0; i--)
      note_stores (XVECEXP (x, 0, i), fun, data);
}
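
/* Usage sketch (record_store here is a hypothetical callback, not a
   function defined elsewhere):

     static void
     record_store (rtx dest, const_rtx setter, void *data)
     {
       ... DEST is each REG, MEM, CC0 or PC stored or clobbered,
           SETTER the SET or CLOBBER performing the store ...
     }

     note_stores (PATTERN (insn), record_store, NULL);  */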
\f
/* Like note_stores, but call FUN for each expression that is being
   referenced in PBODY, a pointer to the PATTERN of an insn.  We only call
   FUN for each expression, not any interior subexpressions.  FUN receives a
   pointer to the expression and the DATA passed to this function.

   Note that this is not quite the same test as that done in reg_referenced_p
   since that considers something as being referenced if it is being
   partially set, while we do not.  */

void
note_uses (rtx *pbody, void (*fun) (rtx *, void *), void *data)
{
  rtx body = *pbody;
  int i;

  switch (GET_CODE (body))
    {
    case COND_EXEC:
      (*fun) (&COND_EXEC_TEST (body), data);
      note_uses (&COND_EXEC_CODE (body), fun, data);
      return;

    case PARALLEL:
      for (i = XVECLEN (body, 0) - 1; i >= 0; i--)
        note_uses (&XVECEXP (body, 0, i), fun, data);
      return;

    case SEQUENCE:
      for (i = XVECLEN (body, 0) - 1; i >= 0; i--)
        note_uses (&PATTERN (XVECEXP (body, 0, i)), fun, data);
      return;

    case USE:
      (*fun) (&XEXP (body, 0), data);
      return;

    case ASM_OPERANDS:
      for (i = ASM_OPERANDS_INPUT_LENGTH (body) - 1; i >= 0; i--)
        (*fun) (&ASM_OPERANDS_INPUT (body, i), data);
      return;

    case TRAP_IF:
      (*fun) (&TRAP_CONDITION (body), data);
      return;

    case PREFETCH:
      (*fun) (&XEXP (body, 0), data);
      return;

    case UNSPEC:
    case UNSPEC_VOLATILE:
      for (i = XVECLEN (body, 0) - 1; i >= 0; i--)
        (*fun) (&XVECEXP (body, 0, i), data);
      return;

    case CLOBBER:
      if (MEM_P (XEXP (body, 0)))
        (*fun) (&XEXP (XEXP (body, 0), 0), data);
      return;

    case SET:
      {
        rtx dest = SET_DEST (body);

        /* For sets we replace everything in source plus registers in memory
           expression in store and operands of a ZERO_EXTRACT.  */
        (*fun) (&SET_SRC (body), data);

        if (GET_CODE (dest) == ZERO_EXTRACT)
          {
            (*fun) (&XEXP (dest, 1), data);
            (*fun) (&XEXP (dest, 2), data);
          }

        while (GET_CODE (dest) == SUBREG || GET_CODE (dest) == STRICT_LOW_PART)
          dest = XEXP (dest, 0);

        if (MEM_P (dest))
          (*fun) (&XEXP (dest, 0), data);
      }
      return;

    default:
      /* All the other possibilities never store.  */
      (*fun) (pbody, data);
      return;
    }
}
\f
/* Return nonzero if X's old contents don't survive after INSN.
   This will be true if X is (cc0) or if X is a register and
   X dies in INSN or because INSN entirely sets X.

   "Entirely set" means set directly and not through a SUBREG or
   ZERO_EXTRACT, so no trace of the old contents remains.
   Likewise, REG_INC does not count.

   REG may be a hard or pseudo reg.  Renumbering is not taken into account,
   but for this use that makes no difference, since regs don't overlap
   during their lifetimes.  Therefore, this function may be used
   at any time after deaths have been computed.

   If REG is a hard reg that occupies multiple machine registers, this
   function will only return 1 if each of those registers will be replaced
   by INSN.  */

int
dead_or_set_p (const_rtx insn, const_rtx x)
{
  unsigned int regno, end_regno;
  unsigned int i;

  /* Can't use cc0_rtx below since this file is used by genattrtab.c.  */
  if (GET_CODE (x) == CC0)
    return 1;

  gcc_assert (REG_P (x));

  regno = REGNO (x);
  end_regno = END_REGNO (x);
  for (i = regno; i < end_regno; i++)
    if (! dead_or_set_regno_p (insn, i))
      return 0;

  return 1;
}

/* Return TRUE iff DEST is a register or subreg of a register, doesn't
   change the number of words of the inner register, and any part of the
   register is TEST_REGNO.  */

static bool
covers_regno_no_parallel_p (const_rtx dest, unsigned int test_regno)
{
  unsigned int regno, endregno;

  if (GET_CODE (dest) == SUBREG
      && (((GET_MODE_SIZE (GET_MODE (dest))
            + UNITS_PER_WORD - 1) / UNITS_PER_WORD)
          == ((GET_MODE_SIZE (GET_MODE (SUBREG_REG (dest)))
               + UNITS_PER_WORD - 1) / UNITS_PER_WORD)))
    dest = SUBREG_REG (dest);

  if (!REG_P (dest))
    return false;

  regno = REGNO (dest);
  endregno = END_REGNO (dest);
  return (test_regno >= regno && test_regno < endregno);
}

/* Like covers_regno_no_parallel_p, but also handles PARALLELs where
   any member matches the covers_regno_no_parallel_p criteria.  */

static bool
covers_regno_p (const_rtx dest, unsigned int test_regno)
{
  if (GET_CODE (dest) == PARALLEL)
    {
      /* Some targets place small structures in registers for return
         values of functions, and those registers are wrapped in
         PARALLELs that we may see as the destination of a SET.  */
      int i;

      for (i = XVECLEN (dest, 0) - 1; i >= 0; i--)
        {
          rtx inner = XEXP (XVECEXP (dest, 0, i), 0);
          if (inner != NULL_RTX
              && covers_regno_no_parallel_p (inner, test_regno))
            return true;
        }

      return false;
    }
  else
    return covers_regno_no_parallel_p (dest, test_regno);
}

/* Utility function for dead_or_set_p to check an individual register.  */

int
dead_or_set_regno_p (const_rtx insn, unsigned int test_regno)
{
  const_rtx pattern;

  /* See if there is a death note for something that includes TEST_REGNO.  */
  if (find_regno_note (insn, REG_DEAD, test_regno))
    return 1;

  if (CALL_P (insn)
      && find_regno_fusage (insn, CLOBBER, test_regno))
    return 1;

  pattern = PATTERN (insn);

  if (GET_CODE (pattern) == COND_EXEC)
    pattern = COND_EXEC_CODE (pattern);

  if (GET_CODE (pattern) == SET)
    return covers_regno_p (SET_DEST (pattern), test_regno);
  else if (GET_CODE (pattern) == PARALLEL)
    {
      int i;

      for (i = XVECLEN (pattern, 0) - 1; i >= 0; i--)
        {
          rtx body = XVECEXP (pattern, 0, i);

          if (GET_CODE (body) == COND_EXEC)
            body = COND_EXEC_CODE (body);

          if ((GET_CODE (body) == SET || GET_CODE (body) == CLOBBER)
              && covers_regno_p (SET_DEST (body), test_regno))
            return 1;
        }
    }

  return 0;
}

/* Return the reg-note of kind KIND in insn INSN, if there is one.
   If DATUM is nonzero, look for one whose datum is DATUM.  */

rtx
find_reg_note (const_rtx insn, enum reg_note kind, const_rtx datum)
{
  rtx link;

  gcc_assert (insn);

  /* Ignore anything that is not an INSN, JUMP_INSN or CALL_INSN.  */
  if (! INSN_P (insn))
    return 0;
  if (datum == 0)
    {
      for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
        if (REG_NOTE_KIND (link) == kind)
          return link;
      return 0;
    }

  for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
    if (REG_NOTE_KIND (link) == kind && datum == XEXP (link, 0))
      return link;
  return 0;
}
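
/* Usage sketch:

     rtx note = find_reg_note (insn, REG_EQUAL, NULL_RTX);
     if (note)
       ... XEXP (note, 0) is the noted value ...

   Passing NULL_RTX as DATUM matches any note of the requested kind.  */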

/* Return the reg-note of kind KIND in insn INSN which applies to register
   number REGNO, if any.  Return 0 if there is no such reg-note.  Note that
   the REGNO of this NOTE need not be REGNO if REGNO is a hard register;
   it might be the case that the note overlaps REGNO.  */

rtx
find_regno_note (const_rtx insn, enum reg_note kind, unsigned int regno)
{
  rtx link;

  /* Ignore anything that is not an INSN, JUMP_INSN or CALL_INSN.  */
  if (! INSN_P (insn))
    return 0;

  for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
    if (REG_NOTE_KIND (link) == kind
        /* Verify that it is a register, so that scratch and MEM won't cause a
           problem here.  */
        && REG_P (XEXP (link, 0))
        && REGNO (XEXP (link, 0)) <= regno
        && END_REGNO (XEXP (link, 0)) > regno)
      return link;
  return 0;
}

/* Return a REG_EQUIV or REG_EQUAL note if insn has only a single set and
   has such a note.  */

rtx
find_reg_equal_equiv_note (const_rtx insn)
{
  rtx link;

  if (!INSN_P (insn))
    return 0;

  for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
    if (REG_NOTE_KIND (link) == REG_EQUAL
        || REG_NOTE_KIND (link) == REG_EQUIV)
      {
        /* FIXME: We should never have REG_EQUAL/REG_EQUIV notes on
           insns that have multiple sets.  Checking single_set to
           make sure of this is not the proper check, as explained
           in the comment in set_unique_reg_note.

           This should be changed into an assert.  */
        if (GET_CODE (PATTERN (insn)) == PARALLEL && multiple_sets (insn))
          return 0;
        return link;
      }
  return NULL;
}

/* Check whether INSN is a single_set whose source is known to be
   equivalent to a constant.  Return that constant if so, otherwise
   return null.  */

rtx
find_constant_src (const_rtx insn)
{
  rtx note, set, x;

  set = single_set (insn);
  if (set)
    {
      x = avoid_constant_pool_reference (SET_SRC (set));
      if (CONSTANT_P (x))
        return x;
    }

  note = find_reg_equal_equiv_note (insn);
  if (note && CONSTANT_P (XEXP (note, 0)))
    return XEXP (note, 0);

  return NULL_RTX;
}
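
/* For example, for an insn (set (reg:SI 100) (const_int 42)),
   find_constant_src returns (const_int 42); it also succeeds when the
   constant is known only through a REG_EQUAL or REG_EQUIV note.  */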
1791
8f3e7a26
RK
1792/* Return true if DATUM, or any overlap of DATUM, of kind CODE is found
1793 in the CALL_INSN_FUNCTION_USAGE information of INSN. */
1794
1795int
f7d504c2 1796find_reg_fusage (const_rtx insn, enum rtx_code code, const_rtx datum)
8f3e7a26
RK
1797{
1798 /* If it's not a CALL_INSN, it can't possibly have a
1799 CALL_INSN_FUNCTION_USAGE field, so don't bother checking. */
4b4bf941 1800 if (!CALL_P (insn))
8f3e7a26
RK
1801 return 0;
1802
41374e13 1803 gcc_assert (datum);
8f3e7a26 1804
f8cfc6aa 1805 if (!REG_P (datum))
8f3e7a26 1806 {
b3694847 1807 rtx link;
8f3e7a26
RK
1808
1809 for (link = CALL_INSN_FUNCTION_USAGE (insn);
a6a2274a 1810 link;
8f3e7a26 1811 link = XEXP (link, 1))
a6a2274a 1812 if (GET_CODE (XEXP (link, 0)) == code
cc863bea 1813 && rtx_equal_p (datum, XEXP (XEXP (link, 0), 0)))
a6a2274a 1814 return 1;
8f3e7a26
RK
1815 }
1816 else
1817 {
770ae6cc 1818 unsigned int regno = REGNO (datum);
8f3e7a26
RK
1819
1820 /* CALL_INSN_FUNCTION_USAGE information cannot contain references
1821 to pseudo registers, so don't bother checking. */
1822
1823 if (regno < FIRST_PSEUDO_REGISTER)
a6a2274a 1824 {
09e18274 1825 unsigned int end_regno = END_HARD_REGNO (datum);
770ae6cc 1826 unsigned int i;
8f3e7a26
RK
1827
1828 for (i = regno; i < end_regno; i++)
1829 if (find_regno_fusage (insn, code, i))
1830 return 1;
a6a2274a 1831 }
8f3e7a26
RK
1832 }
1833
1834 return 0;
1835}
1836
1837/* Return true if REGNO, or any overlap of REGNO, of kind CODE is found
1838 in the CALL_INSN_FUNCTION_USAGE information of INSN. */
1839
1840int
f7d504c2 1841find_regno_fusage (const_rtx insn, enum rtx_code code, unsigned int regno)
8f3e7a26 1842{
b3694847 1843 rtx link;
8f3e7a26
RK
1844
1845 /* CALL_INSN_FUNCTION_USAGE information cannot contain references
1846 to pseudo registers, so don't bother checking. */
1847
1848 if (regno >= FIRST_PSEUDO_REGISTER
4b4bf941 1849 || !CALL_P (insn) )
8f3e7a26
RK
1850 return 0;
1851
1852 for (link = CALL_INSN_FUNCTION_USAGE (insn); link; link = XEXP (link, 1))
83ab3839 1853 {
770ae6cc 1854 rtx op, reg;
83ab3839
RH
1855
1856 if (GET_CODE (op = XEXP (link, 0)) == code
f8cfc6aa 1857 && REG_P (reg = XEXP (op, 0))
09e18274
RS
1858 && REGNO (reg) <= regno
1859 && END_HARD_REGNO (reg) > regno)
83ab3839
RH
1860 return 1;
1861 }
8f3e7a26
RK
1862
1863 return 0;
1864}
a6a063b8 1865
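/* Usage sketch (hypothetical caller): test whether CALL_INSN explicitly
   uses hard register REG as an argument.  find_reg_fusage transparently
   handles multi-word hard registers by checking each register the value
   occupies.  */
static bool
call_uses_reg_p (rtx call_insn, rtx reg)
{
  return find_reg_fusage (call_insn, USE, reg) != 0;
}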
2c88418c 1866\f
efc0b2bd
ILT
1867/* Allocate a register note with kind KIND and datum DATUM. LIST is
1868 stored as the pointer to the next register note. */
65c5f2a6 1869
efc0b2bd
ILT
1870rtx
1871alloc_reg_note (enum reg_note kind, rtx datum, rtx list)
65c5f2a6
ILT
1872{
1873 rtx note;
1874
1875 switch (kind)
1876 {
1877 case REG_CC_SETTER:
1878 case REG_CC_USER:
1879 case REG_LABEL_TARGET:
1880 case REG_LABEL_OPERAND:
1881 /* These types of register notes use an INSN_LIST rather than an
1882 EXPR_LIST, so that copying is done right and dumps look
1883 better. */
efc0b2bd 1884 note = alloc_INSN_LIST (datum, list);
65c5f2a6
ILT
1885 PUT_REG_NOTE_KIND (note, kind);
1886 break;
1887
1888 default:
efc0b2bd 1889 note = alloc_EXPR_LIST (kind, datum, list);
65c5f2a6
ILT
1890 break;
1891 }
1892
efc0b2bd
ILT
1893 return note;
1894}
1895
1896/* Add register note with kind KIND and datum DATUM to INSN. */
1897
1898void
1899add_reg_note (rtx insn, enum reg_note kind, rtx datum)
1900{
1901 REG_NOTES (insn) = alloc_reg_note (kind, datum, REG_NOTES (insn));
65c5f2a6
ILT
1902}
1903
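/* Usage sketch (INSN and VAL are hypothetical): record that the value
   set by INSN is equivalent to VAL.  Note that set_unique_reg_note is
   the usual higher-level entry point, since it also rejects multiple
   sets and replaces an existing note.  */
add_reg_note (insn, REG_EQUAL, copy_rtx (val));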
2c88418c
RS
1904/* Remove register note NOTE from the REG_NOTES of INSN. */
1905
1906void
f7d504c2 1907remove_note (rtx insn, const_rtx note)
2c88418c 1908{
b3694847 1909 rtx link;
2c88418c 1910
49c3bb12
RH
1911 if (note == NULL_RTX)
1912 return;
1913
2c88418c 1914 if (REG_NOTES (insn) == note)
6fb5fa3c
DB
1915 REG_NOTES (insn) = XEXP (note, 1);
1916 else
1917 for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
1918 if (XEXP (link, 1) == note)
1919 {
1920 XEXP (link, 1) = XEXP (note, 1);
1921 break;
1922 }
1923
1924 switch (REG_NOTE_KIND (note))
2c88418c 1925 {
6fb5fa3c
DB
1926 case REG_EQUAL:
1927 case REG_EQUIV:
1928 df_notes_rescan (insn);
1929 break;
1930 default:
1931 break;
2c88418c 1932 }
2c88418c 1933}
55a98783 1934
7cd689bc
SB
1935/* Remove REG_EQUAL and/or REG_EQUIV notes if INSN has such notes. */
1936
1937void
1938remove_reg_equal_equiv_notes (rtx insn)
1939{
1940 rtx *loc;
1941
1942 loc = &REG_NOTES (insn);
1943 while (*loc)
1944 {
1945 enum reg_note kind = REG_NOTE_KIND (*loc);
1946 if (kind == REG_EQUAL || kind == REG_EQUIV)
1947 *loc = XEXP (*loc, 1);
1948 else
1949 loc = &XEXP (*loc, 1);
1950 }
1951}
1952
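/* Usage sketch (hypothetical transformation): once INSN is changed to
   compute a different value, equivalence notes describing the old value
   are stale and must be dropped.  */
if (validate_change (insn, &SET_SRC (set), new_src, 0))
  remove_reg_equal_equiv_notes (insn);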
5f0d2358
RK
1953/* Search LISTP (an EXPR_LIST) for an entry whose first operand is NODE and
1954 return 1 if it is found. A simple equality test is used to determine if
1955 NODE matches. */
1956
1957int
f7d504c2 1958in_expr_list_p (const_rtx listp, const_rtx node)
5f0d2358 1959{
f7d504c2 1960 const_rtx x;
5f0d2358
RK
1961
1962 for (x = listp; x; x = XEXP (x, 1))
1963 if (node == XEXP (x, 0))
1964 return 1;
1965
1966 return 0;
1967}
1968
dd248abd
RK
1969/* Search LISTP (an EXPR_LIST) for an entry whose first operand is NODE and
1970 remove that entry from the list if it is found.
55a98783 1971
dd248abd 1972 A simple equality test is used to determine if NODE matches. */
55a98783
JL
1973
1974void
f7d504c2 1975remove_node_from_expr_list (const_rtx node, rtx *listp)
55a98783
JL
1976{
1977 rtx temp = *listp;
1978 rtx prev = NULL_RTX;
1979
1980 while (temp)
1981 {
1982 if (node == XEXP (temp, 0))
1983 {
1984 /* Splice the node out of the list. */
1985 if (prev)
1986 XEXP (prev, 1) = XEXP (temp, 1);
1987 else
1988 *listp = XEXP (temp, 1);
1989
1990 return;
1991 }
dd248abd
RK
1992
1993 prev = temp;
55a98783
JL
1994 temp = XEXP (temp, 1);
1995 }
1996}
2c88418c 1997\f
2b067faf
RS
1998/* Nonzero if X contains any volatile instructions. These are instructions
 1999 which may cause unpredictable machine state, and thus no
2000 instructions should be moved or combined across them. This includes
2001 only volatile asms and UNSPEC_VOLATILE instructions. */
2002
2003int
f7d504c2 2004volatile_insn_p (const_rtx x)
2b067faf 2005{
f7d504c2 2006 const RTX_CODE code = GET_CODE (x);
2b067faf
RS
2007 switch (code)
2008 {
2009 case LABEL_REF:
2010 case SYMBOL_REF:
2011 case CONST_INT:
2012 case CONST:
2013 case CONST_DOUBLE:
091a3ac7 2014 case CONST_FIXED:
69ef87e2 2015 case CONST_VECTOR:
2b067faf
RS
2016 case CC0:
2017 case PC:
2018 case REG:
2019 case SCRATCH:
2020 case CLOBBER:
2b067faf
RS
2021 case ADDR_VEC:
2022 case ADDR_DIFF_VEC:
2023 case CALL:
2024 case MEM:
2025 return 0;
2026
2027 case UNSPEC_VOLATILE:
2028 /* case TRAP_IF: This isn't clear yet. */
2029 return 1;
2030
4c46ea23 2031 case ASM_INPUT:
2b067faf
RS
2032 case ASM_OPERANDS:
2033 if (MEM_VOLATILE_P (x))
2034 return 1;
e9a25f70
JL
2035
2036 default:
2037 break;
2b067faf
RS
2038 }
2039
2040 /* Recursively scan the operands of this expression. */
2041
2042 {
f7d504c2 2043 const char *const fmt = GET_RTX_FORMAT (code);
b3694847 2044 int i;
a6a2274a 2045
2b067faf
RS
2046 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
2047 {
2048 if (fmt[i] == 'e')
2049 {
31001f72 2050 if (volatile_insn_p (XEXP (x, i)))
2b067faf
RS
2051 return 1;
2052 }
d4757e6a 2053 else if (fmt[i] == 'E')
2b067faf 2054 {
b3694847 2055 int j;
2b067faf 2056 for (j = 0; j < XVECLEN (x, i); j++)
31001f72 2057 if (volatile_insn_p (XVECEXP (x, i, j)))
2b067faf
RS
2058 return 1;
2059 }
2060 }
2061 }
2062 return 0;
2063}
2064
2c88418c 2065/* Nonzero if X contains any volatile memory references
2ac4fed0 2066 UNSPEC_VOLATILE operations or volatile ASM_OPERANDS expressions. */
2c88418c
RS
2067
2068int
f7d504c2 2069volatile_refs_p (const_rtx x)
2c88418c 2070{
f7d504c2 2071 const RTX_CODE code = GET_CODE (x);
2c88418c
RS
2072 switch (code)
2073 {
2074 case LABEL_REF:
2075 case SYMBOL_REF:
2076 case CONST_INT:
2077 case CONST:
2078 case CONST_DOUBLE:
091a3ac7 2079 case CONST_FIXED:
69ef87e2 2080 case CONST_VECTOR:
2c88418c
RS
2081 case CC0:
2082 case PC:
2083 case REG:
2084 case SCRATCH:
2085 case CLOBBER:
2c88418c
RS
2086 case ADDR_VEC:
2087 case ADDR_DIFF_VEC:
2088 return 0;
2089
2ac4fed0 2090 case UNSPEC_VOLATILE:
2c88418c
RS
2091 return 1;
2092
2093 case MEM:
4c46ea23 2094 case ASM_INPUT:
2c88418c
RS
2095 case ASM_OPERANDS:
2096 if (MEM_VOLATILE_P (x))
2097 return 1;
e9a25f70
JL
2098
2099 default:
2100 break;
2c88418c
RS
2101 }
2102
2103 /* Recursively scan the operands of this expression. */
2104
2105 {
f7d504c2 2106 const char *const fmt = GET_RTX_FORMAT (code);
b3694847 2107 int i;
a6a2274a 2108
2c88418c
RS
2109 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
2110 {
2111 if (fmt[i] == 'e')
2112 {
2113 if (volatile_refs_p (XEXP (x, i)))
2114 return 1;
2115 }
d4757e6a 2116 else if (fmt[i] == 'E')
2c88418c 2117 {
b3694847 2118 int j;
2c88418c
RS
2119 for (j = 0; j < XVECLEN (x, i); j++)
2120 if (volatile_refs_p (XVECEXP (x, i, j)))
2121 return 1;
2122 }
2123 }
2124 }
2125 return 0;
2126}
2127
2128/* Similar to above, except that it also rejects register pre- and post-
2129 incrementing. */
2130
2131int
f7d504c2 2132side_effects_p (const_rtx x)
2c88418c 2133{
f7d504c2 2134 const RTX_CODE code = GET_CODE (x);
2c88418c
RS
2135 switch (code)
2136 {
2137 case LABEL_REF:
2138 case SYMBOL_REF:
2139 case CONST_INT:
2140 case CONST:
2141 case CONST_DOUBLE:
091a3ac7 2142 case CONST_FIXED:
69ef87e2 2143 case CONST_VECTOR:
2c88418c
RS
2144 case CC0:
2145 case PC:
2146 case REG:
2147 case SCRATCH:
2c88418c
RS
2148 case ADDR_VEC:
2149 case ADDR_DIFF_VEC:
b5b8b0ac 2150 case VAR_LOCATION:
2c88418c
RS
2151 return 0;
2152
2153 case CLOBBER:
2154 /* Reject CLOBBER with a non-VOID mode. These are made by combine.c
2155 when some combination can't be done. If we see one, don't think
2156 that we can simplify the expression. */
2157 return (GET_MODE (x) != VOIDmode);
2158
2159 case PRE_INC:
2160 case PRE_DEC:
2161 case POST_INC:
2162 case POST_DEC:
1fb9c5cd
MH
2163 case PRE_MODIFY:
2164 case POST_MODIFY:
2c88418c 2165 case CALL:
2ac4fed0 2166 case UNSPEC_VOLATILE:
2c88418c
RS
2167 /* case TRAP_IF: This isn't clear yet. */
2168 return 1;
2169
2170 case MEM:
4c46ea23 2171 case ASM_INPUT:
2c88418c
RS
2172 case ASM_OPERANDS:
2173 if (MEM_VOLATILE_P (x))
2174 return 1;
e9a25f70
JL
2175
2176 default:
2177 break;
2c88418c
RS
2178 }
2179
2180 /* Recursively scan the operands of this expression. */
2181
2182 {
b3694847
SS
2183 const char *fmt = GET_RTX_FORMAT (code);
2184 int i;
a6a2274a 2185
2c88418c
RS
2186 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
2187 {
2188 if (fmt[i] == 'e')
2189 {
2190 if (side_effects_p (XEXP (x, i)))
2191 return 1;
2192 }
d4757e6a 2193 else if (fmt[i] == 'E')
2c88418c 2194 {
b3694847 2195 int j;
2c88418c
RS
2196 for (j = 0; j < XVECLEN (x, i); j++)
2197 if (side_effects_p (XVECEXP (x, i, j)))
2198 return 1;
2199 }
2200 }
2201 }
2202 return 0;
2203}
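/* The three predicates above form a hierarchy, sketched here with a
   hypothetical dead-code check: volatile_insn_p rejects only volatile
   asms and UNSPEC_VOLATILEs; volatile_refs_p additionally rejects
   volatile MEMs; side_effects_p additionally rejects autoincrements,
   CALLs and non-VOIDmode CLOBBERs.  Deleting an insn whose result is
   known to be unused requires the strongest test.  */
if (!side_effects_p (PATTERN (insn)))
  delete_insn (insn);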
2204\f
e755fcf5 2205/* Return nonzero if evaluating rtx X might cause a trap.
48e8382e
PB
 2206 FLAGS controls how to consider MEMs.  A nonzero value means the context
2207 of the access may have changed from the original, such that the
2208 address may have become invalid. */
2c88418c 2209
215b063c 2210int
f7d504c2 2211may_trap_p_1 (const_rtx x, unsigned flags)
2c88418c
RS
2212{
2213 int i;
2214 enum rtx_code code;
6f7d635c 2215 const char *fmt;
48e8382e
PB
2216
2217 /* We make no distinction currently, but this function is part of
2218 the internal target-hooks ABI so we keep the parameter as
2219 "unsigned flags". */
2220 bool code_changed = flags != 0;
2c88418c
RS
2221
2222 if (x == 0)
2223 return 0;
2224 code = GET_CODE (x);
2225 switch (code)
2226 {
2227 /* Handle these cases quickly. */
2228 case CONST_INT:
2229 case CONST_DOUBLE:
091a3ac7 2230 case CONST_FIXED:
69ef87e2 2231 case CONST_VECTOR:
2c88418c
RS
2232 case SYMBOL_REF:
2233 case LABEL_REF:
2234 case CONST:
2235 case PC:
2236 case CC0:
2237 case REG:
2238 case SCRATCH:
2239 return 0;
2240
215b063c 2241 case UNSPEC:
2ac4fed0 2242 case UNSPEC_VOLATILE:
215b063c
PB
2243 return targetm.unspec_may_trap_p (x, flags);
2244
2245 case ASM_INPUT:
2c88418c
RS
2246 case TRAP_IF:
2247 return 1;
2248
22aa60a1
RH
2249 case ASM_OPERANDS:
2250 return MEM_VOLATILE_P (x);
2251
2c88418c
RS
2252 /* Memory ref can trap unless it's a static var or a stack slot. */
2253 case MEM:
d809253a
EB
2254 /* Recognize specific pattern of stack checking probes. */
2255 if (flag_stack_check
2256 && MEM_VOLATILE_P (x)
2257 && XEXP (x, 0) == stack_pointer_rtx)
2258 return 1;
e755fcf5 2259 if (/* MEM_NOTRAP_P only relates to the actual position of the memory
48e8382e
PB
2260 reference; moving it out of context such as when moving code
2261 when optimizing, might cause its address to become invalid. */
2262 code_changed
2263 || !MEM_NOTRAP_P (x))
2264 {
2265 HOST_WIDE_INT size = MEM_SIZE (x) ? INTVAL (MEM_SIZE (x)) : 0;
2266 return rtx_addr_can_trap_p_1 (XEXP (x, 0), 0, size,
2267 GET_MODE (x), code_changed);
2268 }
2269
2270 return 0;
2c88418c
RS
2271
2272 /* Division by a non-constant might trap. */
2273 case DIV:
2274 case MOD:
2275 case UDIV:
2276 case UMOD:
52bfebf0
RS
2277 if (HONOR_SNANS (GET_MODE (x)))
2278 return 1;
3d8bf70f 2279 if (SCALAR_FLOAT_MODE_P (GET_MODE (x)))
f9013075
DE
2280 return flag_trapping_math;
2281 if (!CONSTANT_P (XEXP (x, 1)) || (XEXP (x, 1) == const0_rtx))
2c88418c 2282 return 1;
e9a25f70
JL
2283 break;
2284
b278301b
RK
2285 case EXPR_LIST:
2286 /* An EXPR_LIST is used to represent a function call. This
2287 certainly may trap. */
2288 return 1;
e9a25f70 2289
734508ea
JW
2290 case GE:
2291 case GT:
2292 case LE:
2293 case LT:
19aec195 2294 case LTGT:
55143861 2295 case COMPARE:
734508ea 2296 /* Some floating point comparisons may trap. */
f5eb5fd0
JH
2297 if (!flag_trapping_math)
2298 break;
734508ea
JW
2299 /* ??? There is no machine independent way to check for tests that trap
2300 when COMPARE is used, though many targets do make this distinction.
2301 For instance, sparc uses CCFPE for compares which generate exceptions
2302 and CCFP for compares which do not generate exceptions. */
52bfebf0 2303 if (HONOR_NANS (GET_MODE (x)))
55143861
JJ
2304 return 1;
2305 /* But often the compare has some CC mode, so check operand
2306 modes as well. */
52bfebf0
RS
2307 if (HONOR_NANS (GET_MODE (XEXP (x, 0)))
2308 || HONOR_NANS (GET_MODE (XEXP (x, 1))))
2309 return 1;
2310 break;
2311
2312 case EQ:
2313 case NE:
2314 if (HONOR_SNANS (GET_MODE (x)))
2315 return 1;
2316 /* Often comparison is CC mode, so check operand modes. */
2317 if (HONOR_SNANS (GET_MODE (XEXP (x, 0)))
2318 || HONOR_SNANS (GET_MODE (XEXP (x, 1))))
55143861
JJ
2319 return 1;
2320 break;
2321
22fd5743
FH
2322 case FIX:
2323 /* Conversion of floating point might trap. */
2324 if (flag_trapping_math && HONOR_NANS (GET_MODE (XEXP (x, 0))))
2325 return 1;
2326 break;
2327
05cc23e8
RH
2328 case NEG:
2329 case ABS:
e3947b34 2330 case SUBREG:
05cc23e8
RH
2331 /* These operations don't trap even with floating point. */
2332 break;
2333
2c88418c
RS
2334 default:
2335 /* Any floating arithmetic may trap. */
3d8bf70f 2336 if (SCALAR_FLOAT_MODE_P (GET_MODE (x))
f5eb5fd0 2337 && flag_trapping_math)
2c88418c
RS
2338 return 1;
2339 }
2340
2341 fmt = GET_RTX_FORMAT (code);
2342 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
2343 {
2344 if (fmt[i] == 'e')
2345 {
e755fcf5 2346 if (may_trap_p_1 (XEXP (x, i), flags))
2c88418c
RS
2347 return 1;
2348 }
2349 else if (fmt[i] == 'E')
2350 {
b3694847 2351 int j;
2c88418c 2352 for (j = 0; j < XVECLEN (x, i); j++)
e755fcf5 2353 if (may_trap_p_1 (XVECEXP (x, i, j), flags))
2c88418c
RS
2354 return 1;
2355 }
2356 }
2357 return 0;
2358}
2358ff91
EB
2359
2360/* Return nonzero if evaluating rtx X might cause a trap. */
2361
2362int
f7d504c2 2363may_trap_p (const_rtx x)
2358ff91 2364{
e755fcf5
ZD
2365 return may_trap_p_1 (x, 0);
2366}
2367
c0220ea4 2368/* Same as above, but additionally return nonzero if evaluating rtx X might
2358ff91
EB
 2369 cause a fault.  We define a fault for the purpose of this function as an
2370 erroneous execution condition that cannot be encountered during the normal
2371 execution of a valid program; the typical example is an unaligned memory
2372 access on a strict alignment machine. The compiler guarantees that it
2373 doesn't generate code that will fault from a valid program, but this
2374 guarantee doesn't mean anything for individual instructions. Consider
2375 the following example:
2376
2377 struct S { int d; union { char *cp; int *ip; }; };
2378
2379 int foo(struct S *s)
2380 {
2381 if (s->d == 1)
2382 return *s->ip;
2383 else
2384 return *s->cp;
2385 }
2386
2387 on a strict alignment machine. In a valid program, foo will never be
2388 invoked on a structure for which d is equal to 1 and the underlying
2389 unique field of the union not aligned on a 4-byte boundary, but the
2390 expression *s->ip might cause a fault if considered individually.
2391
2392 At the RTL level, potentially problematic expressions will almost always
 2393 satisfy may_trap_p; for example, the above dereference can be emitted as
2394 (mem:SI (reg:P)) and this expression is may_trap_p for a generic register.
2395 However, suppose that foo is inlined in a caller that causes s->cp to
2396 point to a local character variable and guarantees that s->d is not set
2397 to 1; foo may have been effectively translated into pseudo-RTL as:
2398
2399 if ((reg:SI) == 1)
2400 (set (reg:SI) (mem:SI (%fp - 7)))
2401 else
2402 (set (reg:QI) (mem:QI (%fp - 7)))
2403
2404 Now (mem:SI (%fp - 7)) is considered as not may_trap_p since it is a
2405 memory reference to a stack slot, but it will certainly cause a fault
2406 on a strict alignment machine. */
2407
2408int
f7d504c2 2409may_trap_or_fault_p (const_rtx x)
2358ff91 2410{
48e8382e 2411 return may_trap_p_1 (x, 1);
2358ff91 2412}
2c88418c
RS
2413\f
2414/* Return nonzero if X contains a comparison that is not either EQ or NE,
2415 i.e., an inequality. */
2416
2417int
f7d504c2 2418inequality_comparisons_p (const_rtx x)
2c88418c 2419{
b3694847
SS
2420 const char *fmt;
2421 int len, i;
f7d504c2 2422 const enum rtx_code code = GET_CODE (x);
2c88418c
RS
2423
2424 switch (code)
2425 {
2426 case REG:
2427 case SCRATCH:
2428 case PC:
2429 case CC0:
2430 case CONST_INT:
2431 case CONST_DOUBLE:
091a3ac7 2432 case CONST_FIXED:
69ef87e2 2433 case CONST_VECTOR:
2c88418c
RS
2434 case CONST:
2435 case LABEL_REF:
2436 case SYMBOL_REF:
2437 return 0;
2438
2439 case LT:
2440 case LTU:
2441 case GT:
2442 case GTU:
2443 case LE:
2444 case LEU:
2445 case GE:
2446 case GEU:
2447 return 1;
a6a2274a 2448
e9a25f70
JL
2449 default:
2450 break;
2c88418c
RS
2451 }
2452
2453 len = GET_RTX_LENGTH (code);
2454 fmt = GET_RTX_FORMAT (code);
2455
2456 for (i = 0; i < len; i++)
2457 {
2458 if (fmt[i] == 'e')
2459 {
2460 if (inequality_comparisons_p (XEXP (x, i)))
2461 return 1;
2462 }
2463 else if (fmt[i] == 'E')
2464 {
b3694847 2465 int j;
2c88418c
RS
2466 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
2467 if (inequality_comparisons_p (XVECEXP (x, i, j)))
2468 return 1;
2469 }
2470 }
a6a2274a 2471
2c88418c
RS
2472 return 0;
2473}
2474\f
1ed0205e
VM
2475/* Replace any occurrence of FROM in X with TO. The function does
 2476 not descend into CONST_DOUBLEs.
2c88418c
RS
2477
2478 Note that copying is not done so X must not be shared unless all copies
2479 are to be modified. */
2480
2481rtx
0c20a65f 2482replace_rtx (rtx x, rtx from, rtx to)
2c88418c 2483{
b3694847
SS
2484 int i, j;
2485 const char *fmt;
2c88418c 2486
1ed0205e 2487 /* The following prevents loops occurrence when we change MEM in
dc297297 2488 CONST_DOUBLE onto the same CONST_DOUBLE. */
1ed0205e
VM
2489 if (x != 0 && GET_CODE (x) == CONST_DOUBLE)
2490 return x;
2491
2c88418c
RS
2492 if (x == from)
2493 return to;
2494
2495 /* Allow this function to make replacements in EXPR_LISTs. */
2496 if (x == 0)
2497 return 0;
2498
9dd791c8
AO
2499 if (GET_CODE (x) == SUBREG)
2500 {
55d796da 2501 rtx new_rtx = replace_rtx (SUBREG_REG (x), from, to);
9dd791c8 2502
481683e1 2503 if (CONST_INT_P (new_rtx))
9dd791c8 2504 {
55d796da 2505 x = simplify_subreg (GET_MODE (x), new_rtx,
9dd791c8
AO
2506 GET_MODE (SUBREG_REG (x)),
2507 SUBREG_BYTE (x));
41374e13 2508 gcc_assert (x);
9dd791c8
AO
2509 }
2510 else
55d796da 2511 SUBREG_REG (x) = new_rtx;
9dd791c8
AO
2512
2513 return x;
2514 }
2515 else if (GET_CODE (x) == ZERO_EXTEND)
2516 {
55d796da 2517 rtx new_rtx = replace_rtx (XEXP (x, 0), from, to);
9dd791c8 2518
481683e1 2519 if (CONST_INT_P (new_rtx))
9dd791c8
AO
2520 {
2521 x = simplify_unary_operation (ZERO_EXTEND, GET_MODE (x),
55d796da 2522 new_rtx, GET_MODE (XEXP (x, 0)));
41374e13 2523 gcc_assert (x);
9dd791c8
AO
2524 }
2525 else
55d796da 2526 XEXP (x, 0) = new_rtx;
9dd791c8
AO
2527
2528 return x;
2529 }
2530
2c88418c
RS
2531 fmt = GET_RTX_FORMAT (GET_CODE (x));
2532 for (i = GET_RTX_LENGTH (GET_CODE (x)) - 1; i >= 0; i--)
2533 {
2534 if (fmt[i] == 'e')
2535 XEXP (x, i) = replace_rtx (XEXP (x, i), from, to);
2536 else if (fmt[i] == 'E')
2537 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
2538 XVECEXP (x, i, j) = replace_rtx (XVECEXP (x, i, j), from, to);
2539 }
2540
2541 return x;
a6a2274a 2542}
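/* Usage sketch (FROM_REG and TO_RTX are hypothetical): because
   replace_rtx modifies X in place, substitute into a copy whenever the
   original pattern might be shared.  */
rtx pat = replace_rtx (copy_rtx (PATTERN (insn)), from_reg, to_rtx);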
2c88418c 2543\f
39811184 2544/* Replace occurrences of the old label in *X with the new one.
4af16369 2545 DATA is a REPLACE_LABEL_DATA containing the old and new labels. */
39811184
JZ
2546
2547int
0c20a65f 2548replace_label (rtx *x, void *data)
39811184
JZ
2549{
2550 rtx l = *x;
4af16369
JZ
2551 rtx old_label = ((replace_label_data *) data)->r1;
2552 rtx new_label = ((replace_label_data *) data)->r2;
2553 bool update_label_nuses = ((replace_label_data *) data)->update_label_nuses;
39811184
JZ
2554
2555 if (l == NULL_RTX)
2556 return 0;
2557
173cd571
JZ
2558 if (GET_CODE (l) == SYMBOL_REF
2559 && CONSTANT_POOL_ADDRESS_P (l))
4af16369 2560 {
173cd571 2561 rtx c = get_pool_constant (l);
4af16369
JZ
2562 if (rtx_referenced_p (old_label, c))
2563 {
2564 rtx new_c, new_l;
2565 replace_label_data *d = (replace_label_data *) data;
0c20a65f 2566
4af16369
JZ
2567 /* Create a copy of constant C; replace the label inside
2568 but do not update LABEL_NUSES because uses in constant pool
2569 are not counted. */
2570 new_c = copy_rtx (c);
2571 d->update_label_nuses = false;
2572 for_each_rtx (&new_c, replace_label, data);
2573 d->update_label_nuses = update_label_nuses;
2574
2575 /* Add the new constant NEW_C to constant pool and replace
2576 the old reference to constant by new reference. */
173cd571 2577 new_l = XEXP (force_const_mem (get_pool_mode (l), new_c), 0);
4af16369
JZ
2578 *x = replace_rtx (l, l, new_l);
2579 }
2580 return 0;
2581 }
2582
39811184
JZ
2583 /* If this is a JUMP_INSN, then we also need to fix the JUMP_LABEL
2584 field. This is not handled by for_each_rtx because it doesn't
2585 handle unprinted ('0') fields. */
4b4bf941 2586 if (JUMP_P (l) && JUMP_LABEL (l) == old_label)
39811184 2587 JUMP_LABEL (l) = new_label;
39811184 2588
4af16369
JZ
2589 if ((GET_CODE (l) == LABEL_REF
2590 || GET_CODE (l) == INSN_LIST)
2591 && XEXP (l, 0) == old_label)
2592 {
2593 XEXP (l, 0) = new_label;
2594 if (update_label_nuses)
2595 {
2596 ++LABEL_NUSES (new_label);
2597 --LABEL_NUSES (old_label);
2598 }
2599 return 0;
2600 }
39811184
JZ
2601
2602 return 0;
2603}
2604
4af16369
JZ
 2605/* Return nonzero when *BODY is equal to X or directly references X,
 2606 in which case FOR_EACH_RTX stops traversing and returns nonzero too;
 2607 otherwise FOR_EACH_RTX continues traversing *BODY. */
39811184
JZ
2608
2609static int
0c20a65f 2610rtx_referenced_p_1 (rtx *body, void *x)
39811184 2611{
4af16369
JZ
2612 rtx y = (rtx) x;
2613
2614 if (*body == NULL_RTX)
2615 return y == NULL_RTX;
2616
2617 /* Return true if a label_ref *BODY refers to label Y. */
4b4bf941 2618 if (GET_CODE (*body) == LABEL_REF && LABEL_P (y))
4af16369
JZ
2619 return XEXP (*body, 0) == y;
2620
2621 /* If *BODY is a reference to pool constant traverse the constant. */
2622 if (GET_CODE (*body) == SYMBOL_REF
2623 && CONSTANT_POOL_ADDRESS_P (*body))
2624 return rtx_referenced_p (y, get_pool_constant (*body));
2625
2626 /* By default, compare the RTL expressions. */
2627 return rtx_equal_p (*body, y);
39811184
JZ
2628}
2629
4af16369 2630/* Return true if X is referenced in BODY. */
39811184
JZ
2631
2632int
0c20a65f 2633rtx_referenced_p (rtx x, rtx body)
39811184 2634{
4af16369 2635 return for_each_rtx (&body, rtx_referenced_p_1, x);
39811184
JZ
2636}
2637
ee735eef
JZ
 2638/* If INSN is a tablejump, return true and store the label (which precedes
 2639 the jump table) in *LABELP and the table in *TABLEP. Both may be NULL. */
39811184
JZ
2640
2641bool
f7d504c2 2642tablejump_p (const_rtx insn, rtx *labelp, rtx *tablep)
39811184 2643{
ee735eef
JZ
2644 rtx label, table;
2645
4b4bf941 2646 if (JUMP_P (insn)
ee735eef
JZ
2647 && (label = JUMP_LABEL (insn)) != NULL_RTX
2648 && (table = next_active_insn (label)) != NULL_RTX
481683e1 2649 && JUMP_TABLE_DATA_P (table))
39811184 2650 {
ee735eef
JZ
2651 if (labelp)
2652 *labelp = label;
2653 if (tablep)
2654 *tablep = table;
39811184
JZ
2655 return true;
2656 }
2657 return false;
2658}
2659
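/* Usage sketch (mark_label is a hypothetical hook): walk the targets of
   a tablejump, much as label_is_jump_target_p does further below.  */
rtx label, table;
if (tablejump_p (insn, &label, &table))
  {
    rtvec vec = XVEC (PATTERN (table),
                      GET_CODE (PATTERN (table)) == ADDR_DIFF_VEC);
    int i;
    for (i = 0; i < GET_NUM_ELEM (vec); i++)
      mark_label (XEXP (RTVEC_ELT (vec, i), 0));
  }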
fce7e199
RH
2660/* A subroutine of computed_jump_p, return 1 if X contains a REG or MEM or
2661 constant that is not in the constant pool and not in the condition
2662 of an IF_THEN_ELSE. */
2a1777af
JL
2663
2664static int
f7d504c2 2665computed_jump_p_1 (const_rtx x)
2a1777af 2666{
f7d504c2 2667 const enum rtx_code code = GET_CODE (x);
2a1777af 2668 int i, j;
6f7d635c 2669 const char *fmt;
2a1777af
JL
2670
2671 switch (code)
2672 {
2a1777af
JL
2673 case LABEL_REF:
2674 case PC:
2675 return 0;
2676
fce7e199
RH
2677 case CONST:
2678 case CONST_INT:
2679 case CONST_DOUBLE:
091a3ac7 2680 case CONST_FIXED:
69ef87e2 2681 case CONST_VECTOR:
fce7e199 2682 case SYMBOL_REF:
2a1777af
JL
2683 case REG:
2684 return 1;
2685
2686 case MEM:
2687 return ! (GET_CODE (XEXP (x, 0)) == SYMBOL_REF
2688 && CONSTANT_POOL_ADDRESS_P (XEXP (x, 0)));
2689
2690 case IF_THEN_ELSE:
fce7e199
RH
2691 return (computed_jump_p_1 (XEXP (x, 1))
2692 || computed_jump_p_1 (XEXP (x, 2)));
1d300e19
KG
2693
2694 default:
2695 break;
2a1777af
JL
2696 }
2697
2698 fmt = GET_RTX_FORMAT (code);
2699 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
2700 {
2701 if (fmt[i] == 'e'
fce7e199 2702 && computed_jump_p_1 (XEXP (x, i)))
2a1777af
JL
2703 return 1;
2704
d4757e6a 2705 else if (fmt[i] == 'E')
2a1777af 2706 for (j = 0; j < XVECLEN (x, i); j++)
fce7e199 2707 if (computed_jump_p_1 (XVECEXP (x, i, j)))
2a1777af
JL
2708 return 1;
2709 }
2710
2711 return 0;
2712}
2713
2714/* Return nonzero if INSN is an indirect jump (aka computed jump).
2715
2716 Tablejumps and casesi insns are not considered indirect jumps;
4eb00163 2717 we can recognize them by a (use (label_ref)). */
2a1777af
JL
2718
2719int
f7d504c2 2720computed_jump_p (const_rtx insn)
2a1777af
JL
2721{
2722 int i;
4b4bf941 2723 if (JUMP_P (insn))
2a1777af
JL
2724 {
2725 rtx pat = PATTERN (insn);
2a1777af 2726
cf7c4aa6
HPN
2727 /* If we have a JUMP_LABEL set, we're not a computed jump. */
2728 if (JUMP_LABEL (insn) != NULL)
f759eb8b 2729 return 0;
cf7c4aa6
HPN
2730
2731 if (GET_CODE (pat) == PARALLEL)
2a1777af
JL
2732 {
2733 int len = XVECLEN (pat, 0);
2734 int has_use_labelref = 0;
2735
2736 for (i = len - 1; i >= 0; i--)
2737 if (GET_CODE (XVECEXP (pat, 0, i)) == USE
2738 && (GET_CODE (XEXP (XVECEXP (pat, 0, i), 0))
2739 == LABEL_REF))
2740 has_use_labelref = 1;
2741
2742 if (! has_use_labelref)
2743 for (i = len - 1; i >= 0; i--)
2744 if (GET_CODE (XVECEXP (pat, 0, i)) == SET
2745 && SET_DEST (XVECEXP (pat, 0, i)) == pc_rtx
fce7e199 2746 && computed_jump_p_1 (SET_SRC (XVECEXP (pat, 0, i))))
2a1777af
JL
2747 return 1;
2748 }
2749 else if (GET_CODE (pat) == SET
2750 && SET_DEST (pat) == pc_rtx
fce7e199 2751 && computed_jump_p_1 (SET_SRC (pat)))
2a1777af
JL
2752 return 1;
2753 }
2754 return 0;
2755}
ccc2d6d0 2756
cf94b0fc
PB
2757/* Optimized loop of for_each_rtx, trying to avoid useless recursive
2758 calls. Processes the subexpressions of EXP and passes them to F. */
2759static int
2760for_each_rtx_1 (rtx exp, int n, rtx_function f, void *data)
2761{
2762 int result, i, j;
2763 const char *format = GET_RTX_FORMAT (GET_CODE (exp));
2764 rtx *x;
2765
2766 for (; format[n] != '\0'; n++)
2767 {
2768 switch (format[n])
2769 {
2770 case 'e':
2771 /* Call F on X. */
2772 x = &XEXP (exp, n);
2773 result = (*f) (x, data);
2774 if (result == -1)
2775 /* Do not traverse sub-expressions. */
2776 continue;
2777 else if (result != 0)
2778 /* Stop the traversal. */
2779 return result;
b8698a0f 2780
cf94b0fc
PB
2781 if (*x == NULL_RTX)
2782 /* There are no sub-expressions. */
2783 continue;
b8698a0f 2784
cf94b0fc
PB
2785 i = non_rtx_starting_operands[GET_CODE (*x)];
2786 if (i >= 0)
2787 {
2788 result = for_each_rtx_1 (*x, i, f, data);
2789 if (result != 0)
2790 return result;
2791 }
2792 break;
2793
2794 case 'V':
2795 case 'E':
2796 if (XVEC (exp, n) == 0)
2797 continue;
2798 for (j = 0; j < XVECLEN (exp, n); ++j)
2799 {
2800 /* Call F on X. */
2801 x = &XVECEXP (exp, n, j);
2802 result = (*f) (x, data);
2803 if (result == -1)
2804 /* Do not traverse sub-expressions. */
2805 continue;
2806 else if (result != 0)
2807 /* Stop the traversal. */
2808 return result;
b8698a0f 2809
cf94b0fc
PB
2810 if (*x == NULL_RTX)
2811 /* There are no sub-expressions. */
2812 continue;
b8698a0f 2813
cf94b0fc
PB
2814 i = non_rtx_starting_operands[GET_CODE (*x)];
2815 if (i >= 0)
2816 {
2817 result = for_each_rtx_1 (*x, i, f, data);
2818 if (result != 0)
2819 return result;
2820 }
2821 }
2822 break;
2823
2824 default:
2825 /* Nothing to do. */
2826 break;
2827 }
2828 }
2829
2830 return 0;
2831}
2832
ccc2d6d0
MM
2833/* Traverse X via depth-first search, calling F for each
2834 sub-expression (including X itself). F is also passed the DATA.
2835 If F returns -1, do not traverse sub-expressions, but continue
2836 traversing the rest of the tree. If F ever returns any other
40f03658 2837 nonzero value, stop the traversal, and return the value returned
ccc2d6d0
MM
2838 by F. Otherwise, return 0. This function does not traverse inside
2839 tree structure that contains RTX_EXPRs, or into sub-expressions
2840 whose format code is `0' since it is not known whether or not those
2841 codes are actually RTL.
2842
2843 This routine is very general, and could (should?) be used to
2844 implement many of the other routines in this file. */
2845
ae0b51ef 2846int
0c20a65f 2847for_each_rtx (rtx *x, rtx_function f, void *data)
ccc2d6d0
MM
2848{
2849 int result;
ccc2d6d0
MM
2850 int i;
2851
2852 /* Call F on X. */
b987f237 2853 result = (*f) (x, data);
ccc2d6d0
MM
2854 if (result == -1)
2855 /* Do not traverse sub-expressions. */
2856 return 0;
2857 else if (result != 0)
2858 /* Stop the traversal. */
2859 return result;
2860
2861 if (*x == NULL_RTX)
2862 /* There are no sub-expressions. */
2863 return 0;
2864
cf94b0fc
PB
2865 i = non_rtx_starting_operands[GET_CODE (*x)];
2866 if (i < 0)
2867 return 0;
ccc2d6d0 2868
cf94b0fc 2869 return for_each_rtx_1 (*x, i, f, data);
ccc2d6d0 2870}
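/* Usage sketch (both function names are hypothetical): a callback that
   counts the MEMs in an expression.  Returning -1 from the callback
   would skip the sub-expressions of *X; any other nonzero value aborts
   the walk.  */
static int
count_mems_1 (rtx *x, void *data)
{
  if (*x && MEM_P (*x))
    ++*(int *) data;
  return 0;
}

static int
count_mems (rtx x)
{
  int n = 0;
  for_each_rtx (&x, count_mems_1, &n);
  return n;
}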
3ec2b590 2871
cf94b0fc 2872
777b1b71
RH
2873/* Searches X for any reference to REGNO, returning the rtx of the
2874 reference found if any. Otherwise, returns NULL_RTX. */
2875
2876rtx
0c20a65f 2877regno_use_in (unsigned int regno, rtx x)
777b1b71 2878{
b3694847 2879 const char *fmt;
777b1b71
RH
2880 int i, j;
2881 rtx tem;
2882
f8cfc6aa 2883 if (REG_P (x) && REGNO (x) == regno)
777b1b71
RH
2884 return x;
2885
2886 fmt = GET_RTX_FORMAT (GET_CODE (x));
2887 for (i = GET_RTX_LENGTH (GET_CODE (x)) - 1; i >= 0; i--)
2888 {
2889 if (fmt[i] == 'e')
2890 {
2891 if ((tem = regno_use_in (regno, XEXP (x, i))))
2892 return tem;
2893 }
2894 else if (fmt[i] == 'E')
2895 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
2896 if ((tem = regno_use_in (regno , XVECEXP (x, i, j))))
2897 return tem;
2898 }
2899
2900 return NULL_RTX;
2901}
2dfa9a87 2902
e5c56fd9
JH
2903/* Return a value indicating whether OP, an operand of a commutative
2904 operation, is preferred as the first or second operand. The higher
2905 the value, the stronger the preference for being the first operand.
2906 We use negative values to indicate a preference for the first operand
2907 and positive values for the second operand. */
2908
9b3bd424 2909int
0c20a65f 2910commutative_operand_precedence (rtx op)
e5c56fd9 2911{
e3d6e740 2912 enum rtx_code code = GET_CODE (op);
b8698a0f 2913
e5c56fd9 2914 /* Constants always come second. Prefer "nice" constants. */
e3d6e740 2915 if (code == CONST_INT)
7e0b4eae 2916 return -8;
e3d6e740 2917 if (code == CONST_DOUBLE)
7e0b4eae 2918 return -7;
091a3ac7
CF
2919 if (code == CONST_FIXED)
2920 return -7;
9ce79a7a 2921 op = avoid_constant_pool_reference (op);
79b82df3 2922 code = GET_CODE (op);
ec8e098d
PB
2923
2924 switch (GET_RTX_CLASS (code))
2925 {
2926 case RTX_CONST_OBJ:
2927 if (code == CONST_INT)
7e0b4eae 2928 return -6;
ec8e098d 2929 if (code == CONST_DOUBLE)
7e0b4eae 2930 return -5;
091a3ac7
CF
2931 if (code == CONST_FIXED)
2932 return -5;
7e0b4eae 2933 return -4;
ec8e098d
PB
2934
2935 case RTX_EXTRA:
2936 /* SUBREGs of objects should come second. */
2937 if (code == SUBREG && OBJECT_P (SUBREG_REG (op)))
7e0b4eae 2938 return -3;
6fb5fa3c 2939 return 0;
ec8e098d
PB
2940
2941 case RTX_OBJ:
 2942 /* Complex expressions should come first, so decrease the priority
7e0b4eae
PB
 2943 of objects. Prefer pointer objects over non-pointer objects. */
2944 if ((REG_P (op) && REG_POINTER (op))
2945 || (MEM_P (op) && MEM_POINTER (op)))
2946 return -1;
2947 return -2;
ec8e098d
PB
2948
2949 case RTX_COMM_ARITH:
2950 /* Prefer operands that are themselves commutative to be first.
2951 This helps to make things linear. In particular,
2952 (and (and (reg) (reg)) (not (reg))) is canonical. */
2953 return 4;
2954
2955 case RTX_BIN_ARITH:
2956 /* If only one operand is a binary expression, it will be the first
2957 operand. In particular, (plus (minus (reg) (reg)) (neg (reg)))
2958 is canonical, although it will usually be further simplified. */
2959 return 2;
b8698a0f 2960
ec8e098d
PB
2961 case RTX_UNARY:
2962 /* Then prefer NEG and NOT. */
2963 if (code == NEG || code == NOT)
2964 return 1;
e5c56fd9 2965
ec8e098d
PB
2966 default:
2967 return 0;
2968 }
e5c56fd9
JH
2969}
2970
f63d1bf7 2971/* Return 1 iff it is necessary to swap the operands of a commutative operation
e5c56fd9
JH
 2972 in order to canonicalize the expression. */
2973
7e0b4eae 2974bool
0c20a65f 2975swap_commutative_operands_p (rtx x, rtx y)
e5c56fd9 2976{
9b3bd424
RH
2977 return (commutative_operand_precedence (x)
2978 < commutative_operand_precedence (y));
e5c56fd9 2979}
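/* Usage sketch: callers such as simplify_gen_binary canonicalize the
   operands of a commutative operation roughly like this before emitting
   it.  */
if (swap_commutative_operands_p (op0, op1))
  {
    rtx tem = op0;
    op0 = op1;
    op1 = tem;
  }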
2dfa9a87
MH
2980
2981/* Return 1 if X is an autoincrement side effect and the register is
2982 not the stack pointer. */
2983int
f7d504c2 2984auto_inc_p (const_rtx x)
2dfa9a87
MH
2985{
2986 switch (GET_CODE (x))
2987 {
2988 case PRE_INC:
2989 case POST_INC:
2990 case PRE_DEC:
2991 case POST_DEC:
2992 case PRE_MODIFY:
2993 case POST_MODIFY:
2994 /* There are no REG_INC notes for SP. */
2995 if (XEXP (x, 0) != stack_pointer_rtx)
2996 return 1;
2997 default:
2998 break;
2999 }
3000 return 0;
3001}
3b10cf4b 3002
f9da5064 3003/* Return nonzero if IN contains a piece of rtl that has the address LOC. */
db7ba742 3004int
f7d504c2 3005loc_mentioned_in_p (rtx *loc, const_rtx in)
db7ba742 3006{
a52b023a
PB
3007 enum rtx_code code;
3008 const char *fmt;
db7ba742
R
3009 int i, j;
3010
a52b023a
PB
3011 if (!in)
3012 return 0;
3013
3014 code = GET_CODE (in);
3015 fmt = GET_RTX_FORMAT (code);
db7ba742
R
3016 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
3017 {
db7ba742
R
3018 if (fmt[i] == 'e')
3019 {
e0651058 3020 if (loc == &XEXP (in, i) || loc_mentioned_in_p (loc, XEXP (in, i)))
db7ba742
R
3021 return 1;
3022 }
3023 else if (fmt[i] == 'E')
3024 for (j = XVECLEN (in, i) - 1; j >= 0; j--)
e0651058
AO
3025 if (loc == &XVECEXP (in, i, j)
3026 || loc_mentioned_in_p (loc, XVECEXP (in, i, j)))
db7ba742
R
3027 return 1;
3028 }
3029 return 0;
3030}
ddef6bc7 3031
bb51e270
RS
3032/* Helper function for subreg_lsb. Given a subreg's OUTER_MODE, INNER_MODE,
3033 and SUBREG_BYTE, return the bit offset where the subreg begins
3034 (counting from the least significant bit of the operand). */
33aceff2
JW
3035
3036unsigned int
bb51e270
RS
3037subreg_lsb_1 (enum machine_mode outer_mode,
3038 enum machine_mode inner_mode,
3039 unsigned int subreg_byte)
33aceff2 3040{
33aceff2
JW
3041 unsigned int bitpos;
3042 unsigned int byte;
3043 unsigned int word;
3044
3045 /* A paradoxical subreg begins at bit position 0. */
bb51e270 3046 if (GET_MODE_BITSIZE (outer_mode) > GET_MODE_BITSIZE (inner_mode))
33aceff2
JW
3047 return 0;
3048
3049 if (WORDS_BIG_ENDIAN != BYTES_BIG_ENDIAN)
3050 /* If the subreg crosses a word boundary ensure that
3051 it also begins and ends on a word boundary. */
41374e13
NS
3052 gcc_assert (!((subreg_byte % UNITS_PER_WORD
3053 + GET_MODE_SIZE (outer_mode)) > UNITS_PER_WORD
3054 && (subreg_byte % UNITS_PER_WORD
3055 || GET_MODE_SIZE (outer_mode) % UNITS_PER_WORD)));
33aceff2
JW
3056
3057 if (WORDS_BIG_ENDIAN)
3058 word = (GET_MODE_SIZE (inner_mode)
bb51e270 3059 - (subreg_byte + GET_MODE_SIZE (outer_mode))) / UNITS_PER_WORD;
33aceff2 3060 else
bb51e270 3061 word = subreg_byte / UNITS_PER_WORD;
33aceff2
JW
3062 bitpos = word * BITS_PER_WORD;
3063
3064 if (BYTES_BIG_ENDIAN)
3065 byte = (GET_MODE_SIZE (inner_mode)
bb51e270 3066 - (subreg_byte + GET_MODE_SIZE (outer_mode))) % UNITS_PER_WORD;
33aceff2 3067 else
bb51e270 3068 byte = subreg_byte % UNITS_PER_WORD;
33aceff2
JW
3069 bitpos += byte * BITS_PER_UNIT;
3070
3071 return bitpos;
3072}
3073
bb51e270
RS
3074/* Given a subreg X, return the bit offset where the subreg begins
3075 (counting from the least significant bit of the reg). */
3076
3077unsigned int
f7d504c2 3078subreg_lsb (const_rtx x)
bb51e270
RS
3079{
3080 return subreg_lsb_1 (GET_MODE (x), GET_MODE (SUBREG_REG (x)),
3081 SUBREG_BYTE (x));
3082}
3083
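/* Worked example: on a little-endian target, (subreg:SI (reg:DI r) 4)
   has SUBREG_BYTE 4 and therefore starts at bit 4 * BITS_PER_UNIT == 32,
   the high word of the DImode value.  On a big-endian target the same
   byte offset yields bit 0, the low word, since the least significant
   bytes live at the higher addresses there.  */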
f1f4e530 3084/* Fill in information about a subreg of a hard register.
ddef6bc7
JJ
3085 xregno - A regno of an inner hard subreg_reg (or what will become one).
3086 xmode - The mode of xregno.
3087 offset - The byte offset.
3088 ymode - The mode of a top level SUBREG (or what may become one).
f1f4e530 3089 info - Pointer to structure to fill in. */
c619e982 3090void
f1f4e530
JM
3091subreg_get_info (unsigned int xregno, enum machine_mode xmode,
3092 unsigned int offset, enum machine_mode ymode,
3093 struct subreg_info *info)
04c5580f 3094{
8521c414 3095 int nregs_xmode, nregs_ymode;
04c5580f 3096 int mode_multiple, nregs_multiple;
f1f4e530 3097 int offset_adj, y_offset, y_offset_adj;
8521c414 3098 int regsize_xmode, regsize_ymode;
f1f4e530 3099 bool rknown;
04c5580f 3100
41374e13 3101 gcc_assert (xregno < FIRST_PSEUDO_REGISTER);
04c5580f 3102
f1f4e530
JM
3103 rknown = false;
3104
dd79bb7e
GK
3105 /* If there are holes in a non-scalar mode in registers, we expect
3106 that it is made up of its units concatenated together. */
8521c414 3107 if (HARD_REGNO_NREGS_HAS_PADDING (xregno, xmode))
dd79bb7e 3108 {
8521c414
JM
3109 enum machine_mode xmode_unit;
3110
3111 nregs_xmode = HARD_REGNO_NREGS_WITH_PADDING (xregno, xmode);
3112 if (GET_MODE_INNER (xmode) == VOIDmode)
3113 xmode_unit = xmode;
3114 else
3115 xmode_unit = GET_MODE_INNER (xmode);
3116 gcc_assert (HARD_REGNO_NREGS_HAS_PADDING (xregno, xmode_unit));
3117 gcc_assert (nregs_xmode
3118 == (GET_MODE_NUNITS (xmode)
3119 * HARD_REGNO_NREGS_WITH_PADDING (xregno, xmode_unit)));
3120 gcc_assert (hard_regno_nregs[xregno][xmode]
3121 == (hard_regno_nregs[xregno][xmode_unit]
3122 * GET_MODE_NUNITS (xmode)));
dd79bb7e
GK
3123
3124 /* You can only ask for a SUBREG of a value with holes in the middle
3125 if you don't cross the holes. (Such a SUBREG should be done by
3126 picking a different register class, or doing it in memory if
3127 necessary.) An example of a value with holes is XCmode on 32-bit
3128 x86 with -m128bit-long-double; it's represented in 6 32-bit registers,
b8698a0f 3129 3 for each part, but in memory it's two 128-bit parts.
dd79bb7e
GK
3130 Padding is assumed to be at the end (not necessarily the 'high part')
3131 of each unit. */
b8698a0f 3132 if ((offset / GET_MODE_SIZE (xmode_unit) + 1
8521c414
JM
3133 < GET_MODE_NUNITS (xmode))
3134 && (offset / GET_MODE_SIZE (xmode_unit)
dd79bb7e 3135 != ((offset + GET_MODE_SIZE (ymode) - 1)
8521c414 3136 / GET_MODE_SIZE (xmode_unit))))
f1f4e530
JM
3137 {
3138 info->representable_p = false;
3139 rknown = true;
3140 }
dd79bb7e
GK
3141 }
3142 else
3143 nregs_xmode = hard_regno_nregs[xregno][xmode];
b8698a0f 3144
66fd46b6 3145 nregs_ymode = hard_regno_nregs[xregno][ymode];
04c5580f 3146
dd79bb7e 3147 /* Paradoxical subregs are otherwise valid. */
f1f4e530
JM
3148 if (!rknown
3149 && offset == 0
3150 && GET_MODE_SIZE (ymode) > GET_MODE_SIZE (xmode))
3151 {
3152 info->representable_p = true;
3153 /* If this is a big endian paradoxical subreg, which uses more
3154 actual hard registers than the original register, we must
3155 return a negative offset so that we find the proper highpart
3156 of the register. */
3157 if (GET_MODE_SIZE (ymode) > UNITS_PER_WORD
3158 ? WORDS_BIG_ENDIAN : BYTES_BIG_ENDIAN)
3159 info->offset = nregs_xmode - nregs_ymode;
3160 else
3161 info->offset = 0;
3162 info->nregs = nregs_ymode;
3163 return;
3164 }
04c5580f 3165
8521c414
JM
3166 /* If registers store different numbers of bits in the different
3167 modes, we cannot generally form this subreg. */
f1f4e530 3168 if (!HARD_REGNO_NREGS_HAS_PADDING (xregno, xmode)
5f7fc2b8
JM
3169 && !HARD_REGNO_NREGS_HAS_PADDING (xregno, ymode)
3170 && (GET_MODE_SIZE (xmode) % nregs_xmode) == 0
3171 && (GET_MODE_SIZE (ymode) % nregs_ymode) == 0)
f1f4e530
JM
3172 {
3173 regsize_xmode = GET_MODE_SIZE (xmode) / nregs_xmode;
f1f4e530 3174 regsize_ymode = GET_MODE_SIZE (ymode) / nregs_ymode;
f1f4e530
JM
3175 if (!rknown && regsize_xmode > regsize_ymode && nregs_ymode > 1)
3176 {
3177 info->representable_p = false;
3178 info->nregs
3179 = (GET_MODE_SIZE (ymode) + regsize_xmode - 1) / regsize_xmode;
3180 info->offset = offset / regsize_xmode;
3181 return;
3182 }
3183 if (!rknown && regsize_ymode > regsize_xmode && nregs_xmode > 1)
3184 {
3185 info->representable_p = false;
3186 info->nregs
3187 = (GET_MODE_SIZE (ymode) + regsize_xmode - 1) / regsize_xmode;
3188 info->offset = offset / regsize_xmode;
3189 return;
3190 }
3191 }
8521c414 3192
dd79bb7e 3193 /* Lowpart subregs are otherwise valid. */
f1f4e530
JM
3194 if (!rknown && offset == subreg_lowpart_offset (ymode, xmode))
3195 {
3196 info->representable_p = true;
3197 rknown = true;
a446b4e8
JM
3198
3199 if (offset == 0 || nregs_xmode == nregs_ymode)
3200 {
3201 info->offset = 0;
3202 info->nregs = nregs_ymode;
3203 return;
3204 }
f1f4e530 3205 }
04c5580f 3206
dd79bb7e
GK
3207 /* This should always pass, otherwise we don't know how to verify
3208 the constraint. These conditions may be relaxed but
3209 subreg_regno_offset would need to be redesigned. */
41374e13 3210 gcc_assert ((GET_MODE_SIZE (xmode) % GET_MODE_SIZE (ymode)) == 0);
41374e13 3211 gcc_assert ((nregs_xmode % nregs_ymode) == 0);
04c5580f 3212
b20b352b 3213 /* The XMODE value can be seen as a vector of NREGS_XMODE
dcc24678 3214 values. The subreg must represent a lowpart of a given field.
04c5580f 3215 Compute what field it is. */
f1f4e530
JM
3216 offset_adj = offset;
3217 offset_adj -= subreg_lowpart_offset (ymode,
3218 mode_for_size (GET_MODE_BITSIZE (xmode)
3219 / nregs_xmode,
3220 MODE_INT, 0));
04c5580f 3221
dd79bb7e 3222 /* Size of ymode must not be greater than the size of xmode. */
04c5580f 3223 mode_multiple = GET_MODE_SIZE (xmode) / GET_MODE_SIZE (ymode);
41374e13 3224 gcc_assert (mode_multiple != 0);
04c5580f
JH
3225
3226 y_offset = offset / GET_MODE_SIZE (ymode);
f1f4e530
JM
3227 y_offset_adj = offset_adj / GET_MODE_SIZE (ymode);
3228 nregs_multiple = nregs_xmode / nregs_ymode;
41374e13 3229
f1f4e530 3230 gcc_assert ((offset_adj % GET_MODE_SIZE (ymode)) == 0);
41374e13
NS
3231 gcc_assert ((mode_multiple % nregs_multiple) == 0);
3232
f1f4e530
JM
3233 if (!rknown)
3234 {
3235 info->representable_p = (!(y_offset_adj % (mode_multiple / nregs_multiple)));
3236 rknown = true;
3237 }
3238 info->offset = (y_offset / (mode_multiple / nregs_multiple)) * nregs_ymode;
3239 info->nregs = nregs_ymode;
3240}
3241
3242/* This function returns the regno offset of a subreg expression.
3243 xregno - A regno of an inner hard subreg_reg (or what will become one).
3244 xmode - The mode of xregno.
3245 offset - The byte offset.
3246 ymode - The mode of a top level SUBREG (or what may become one).
3247 RETURN - The regno offset which would be used. */
3248unsigned int
3249subreg_regno_offset (unsigned int xregno, enum machine_mode xmode,
3250 unsigned int offset, enum machine_mode ymode)
3251{
3252 struct subreg_info info;
3253 subreg_get_info (xregno, xmode, offset, ymode, &info);
3254 return info.offset;
3255}
3256
3257/* This function returns true when the offset is representable via
3258 subreg_offset in the given regno.
3259 xregno - A regno of an inner hard subreg_reg (or what will become one).
3260 xmode - The mode of xregno.
3261 offset - The byte offset.
3262 ymode - The mode of a top level SUBREG (or what may become one).
3263 RETURN - Whether the offset is representable. */
3264bool
3265subreg_offset_representable_p (unsigned int xregno, enum machine_mode xmode,
3266 unsigned int offset, enum machine_mode ymode)
3267{
3268 struct subreg_info info;
3269 subreg_get_info (xregno, xmode, offset, ymode, &info);
05cee290 3270 return info.representable_p;
04c5580f
JH
3271}
3272
eef302d2
RS
3273/* Return the number of a YMODE register to which
3274
3275 (subreg:YMODE (reg:XMODE XREGNO) OFFSET)
3276
3277 can be simplified. Return -1 if the subreg can't be simplified.
3278
3279 XREGNO is a hard register number. */
3280
3281int
3282simplify_subreg_regno (unsigned int xregno, enum machine_mode xmode,
3283 unsigned int offset, enum machine_mode ymode)
3284{
3285 struct subreg_info info;
3286 unsigned int yregno;
3287
3288#ifdef CANNOT_CHANGE_MODE_CLASS
3289 /* Give the backend a chance to disallow the mode change. */
3290 if (GET_MODE_CLASS (xmode) != MODE_COMPLEX_INT
3291 && GET_MODE_CLASS (xmode) != MODE_COMPLEX_FLOAT
3292 && REG_CANNOT_CHANGE_MODE_P (xregno, xmode, ymode))
3293 return -1;
3294#endif
3295
3296 /* We shouldn't simplify stack-related registers. */
3297 if ((!reload_completed || frame_pointer_needed)
3298 && (xregno == FRAME_POINTER_REGNUM
3299 || xregno == HARD_FRAME_POINTER_REGNUM))
3300 return -1;
3301
3302 if (FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
3303 && xregno == ARG_POINTER_REGNUM)
3304 return -1;
3305
3306 if (xregno == STACK_POINTER_REGNUM)
3307 return -1;
3308
3309 /* Try to get the register offset. */
3310 subreg_get_info (xregno, xmode, offset, ymode, &info);
3311 if (!info.representable_p)
3312 return -1;
3313
3314 /* Make sure that the offsetted register value is in range. */
3315 yregno = xregno + info.offset;
3316 if (!HARD_REGISTER_NUM_P (yregno))
3317 return -1;
3318
3319 /* See whether (reg:YMODE YREGNO) is valid.
3320
3321 ??? We allow invalid registers if (reg:XMODE XREGNO) is also invalid.
3322 This is a kludge to work around how float/complex arguments are passed
3323 on 32-bit SPARC and should be fixed. */
3324 if (!HARD_REGNO_MODE_OK (yregno, ymode)
3325 && HARD_REGNO_MODE_OK (xregno, xmode))
3326 return -1;
3327
3328 return (int) yregno;
3329}
3330
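/* Usage sketch (hypothetical simplification of a hard-register SUBREG X,
   in the spirit of what reload does): */
int yregno = simplify_subreg_regno (REGNO (SUBREG_REG (x)),
                                    GET_MODE (SUBREG_REG (x)),
                                    SUBREG_BYTE (x), GET_MODE (x));
if (yregno >= 0)
  x = gen_rtx_REG (GET_MODE (x), yregno);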
dc297297 3331/* Return the final regno that a subreg expression refers to. */
a6a2274a 3332unsigned int
f7d504c2 3333subreg_regno (const_rtx x)
ddef6bc7
JJ
3334{
3335 unsigned int ret;
3336 rtx subreg = SUBREG_REG (x);
3337 int regno = REGNO (subreg);
3338
a6a2274a
KH
3339 ret = regno + subreg_regno_offset (regno,
3340 GET_MODE (subreg),
ddef6bc7
JJ
3341 SUBREG_BYTE (x),
3342 GET_MODE (x));
3343 return ret;
3344
3345}
f1f4e530
JM
3346
3347/* Return the number of registers that a subreg expression refers
3348 to. */
3349unsigned int
f7d504c2 3350subreg_nregs (const_rtx x)
ba49cb7b
KZ
3351{
3352 return subreg_nregs_with_regno (REGNO (SUBREG_REG (x)), x);
3353}
3354
3355/* Return the number of registers that a subreg REG with REGNO
3356 expression refers to. This is a copy of the rtlanal.c:subreg_nregs
3357 changed so that the regno can be passed in. */
3358
3359unsigned int
3360subreg_nregs_with_regno (unsigned int regno, const_rtx x)
f1f4e530
JM
3361{
3362 struct subreg_info info;
3363 rtx subreg = SUBREG_REG (x);
f1f4e530
JM
3364
3365 subreg_get_info (regno, GET_MODE (subreg), SUBREG_BYTE (x), GET_MODE (x),
3366 &info);
3367 return info.nregs;
3368}
3369
ba49cb7b 3370
833366d6
JH
3371struct parms_set_data
3372{
3373 int nregs;
3374 HARD_REG_SET regs;
3375};
3376
3377/* Helper function for noticing stores to parameter registers. */
3378static void
7bc980e1 3379parms_set (rtx x, const_rtx pat ATTRIBUTE_UNUSED, void *data)
833366d6 3380{
1634b18f 3381 struct parms_set_data *const d = (struct parms_set_data *) data;
833366d6
JH
3382 if (REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER
3383 && TEST_HARD_REG_BIT (d->regs, REGNO (x)))
3384 {
3385 CLEAR_HARD_REG_BIT (d->regs, REGNO (x));
3386 d->nregs--;
3387 }
3388}
3389
a6a2274a 3390/* Look backward for the first parameter to be loaded.
b2df20b4
DJ
3391 Note that loads of all parameters will not necessarily be
3392 found if CSE has eliminated some of them (e.g., an argument
3393 to the outer function is passed down as a parameter).
833366d6
JH
3394 Do not skip BOUNDARY. */
3395rtx
0c20a65f 3396find_first_parameter_load (rtx call_insn, rtx boundary)
833366d6
JH
3397{
3398 struct parms_set_data parm;
b2df20b4 3399 rtx p, before, first_set;
833366d6
JH
3400
3401 /* Since different machines initialize their parameter registers
3402 in different orders, assume nothing. Collect the set of all
3403 parameter registers. */
3404 CLEAR_HARD_REG_SET (parm.regs);
3405 parm.nregs = 0;
3406 for (p = CALL_INSN_FUNCTION_USAGE (call_insn); p; p = XEXP (p, 1))
3407 if (GET_CODE (XEXP (p, 0)) == USE
f8cfc6aa 3408 && REG_P (XEXP (XEXP (p, 0), 0)))
833366d6 3409 {
41374e13 3410 gcc_assert (REGNO (XEXP (XEXP (p, 0), 0)) < FIRST_PSEUDO_REGISTER);
833366d6
JH
3411
3412 /* We only care about registers which can hold function
3413 arguments. */
3414 if (!FUNCTION_ARG_REGNO_P (REGNO (XEXP (XEXP (p, 0), 0))))
3415 continue;
3416
3417 SET_HARD_REG_BIT (parm.regs, REGNO (XEXP (XEXP (p, 0), 0)));
3418 parm.nregs++;
3419 }
3420 before = call_insn;
b2df20b4 3421 first_set = call_insn;
833366d6
JH
3422
3423 /* Search backward for the first set of a register in this set. */
3424 while (parm.nregs && before != boundary)
3425 {
3426 before = PREV_INSN (before);
3427
3428 /* It is possible that some loads got CSEed from one call to
3429 another. Stop in that case. */
4b4bf941 3430 if (CALL_P (before))
833366d6
JH
3431 break;
3432
dbc1a163 3433 /* Our caller must either ensure that we will find all sets
833366d6 3434 (in case code has not been optimized yet), or take care
eaec9b3d 3435 of possible labels by setting BOUNDARY to the preceding
833366d6 3436 CODE_LABEL. */
4b4bf941 3437 if (LABEL_P (before))
dbc1a163 3438 {
41374e13 3439 gcc_assert (before == boundary);
dbc1a163
RH
3440 break;
3441 }
833366d6 3442
0d025d43 3443 if (INSN_P (before))
b2df20b4
DJ
3444 {
3445 int nregs_old = parm.nregs;
3446 note_stores (PATTERN (before), parms_set, &parm);
3447 /* If we found something that did not set a parameter reg,
3448 we're done. Do not keep going, as that might result
3449 in hoisting an insn before the setting of a pseudo
3450 that is used by the hoisted insn. */
3451 if (nregs_old != parm.nregs)
3452 first_set = before;
3453 else
3454 break;
3455 }
833366d6 3456 }
b2df20b4 3457 return first_set;
833366d6 3458}
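/* Usage sketch (hypothetical): when moving or splitting code around
   CALL_INSN, keep the parameter loads glued to the call by treating
   everything from this insn up to the call as one sequence.  */
rtx first = find_first_parameter_load (call_insn, BB_HEAD (bb));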
3dec4024 3459
14b493d6 3460/* Return true if we should avoid inserting code between INSN and the preceding
3dec4024
JH
3461 call instruction. */
3462
3463bool
9678086d 3464keep_with_call_p (const_rtx insn)
3dec4024
JH
3465{
3466 rtx set;
3467
3468 if (INSN_P (insn) && (set = single_set (insn)) != NULL)
3469 {
f8cfc6aa 3470 if (REG_P (SET_DEST (set))
5df533b3 3471 && REGNO (SET_DEST (set)) < FIRST_PSEUDO_REGISTER
3dec4024
JH
3472 && fixed_regs[REGNO (SET_DEST (set))]
3473 && general_operand (SET_SRC (set), VOIDmode))
3474 return true;
f8cfc6aa 3475 if (REG_P (SET_SRC (set))
82f81f18 3476 && targetm.calls.function_value_regno_p (REGNO (SET_SRC (set)))
f8cfc6aa 3477 && REG_P (SET_DEST (set))
3dec4024
JH
3478 && REGNO (SET_DEST (set)) >= FIRST_PSEUDO_REGISTER)
3479 return true;
bc204393
RH
3480 /* There may be a stack pop just after the call and before the store
3481 of the return register. Search for the actual store when deciding
3482 if we can break or not. */
3dec4024
JH
3483 if (SET_DEST (set) == stack_pointer_rtx)
3484 {
75547801 3485 /* This CONST_CAST is okay because next_nonnote_insn just
4e9b57fa 3486 returns its argument and we assign it to a const_rtx
75547801 3487 variable. */
b1d5455a 3488 const_rtx i2 = next_nonnote_insn (CONST_CAST_RTX(insn));
bc204393 3489 if (i2 && keep_with_call_p (i2))
3dec4024
JH
3490 return true;
3491 }
3492 }
3493 return false;
3494}
71d2c5bd 3495
432f982f
JH
3496/* Return true if LABEL is a target of JUMP_INSN. This applies only
3497 to non-complex jumps. That is, direct unconditional, conditional,
3498 and tablejumps, but not computed jumps or returns. It also does
3499 not apply to the fallthru case of a conditional jump. */
3500
3501bool
f7d504c2 3502label_is_jump_target_p (const_rtx label, const_rtx jump_insn)
432f982f
JH
3503{
3504 rtx tmp = JUMP_LABEL (jump_insn);
3505
3506 if (label == tmp)
3507 return true;
3508
3509 if (tablejump_p (jump_insn, NULL, &tmp))
3510 {
3511 rtvec vec = XVEC (PATTERN (tmp),
3512 GET_CODE (PATTERN (tmp)) == ADDR_DIFF_VEC);
3513 int i, veclen = GET_NUM_ELEM (vec);
3514
3515 for (i = 0; i < veclen; ++i)
3516 if (XEXP (RTVEC_ELT (vec, i), 0) == label)
3517 return true;
3518 }
3519
cb2f563b
HPN
3520 if (find_reg_note (jump_insn, REG_LABEL_TARGET, label))
3521 return true;
3522
432f982f
JH
3523 return false;
3524}
3525
f894b69b
PB
3526\f
3527/* Return an estimate of the cost of computing rtx X.
3528 One use is in cse, to decide which expression to keep in the hash table.
3529 Another is in rtl generation, to pick the cheapest way to multiply.
b8698a0f 3530 Other uses like the latter are expected in the future.
f40751dd
JH
3531
 3532 The SPEED parameter specifies whether costs optimized for speed or size should
3533 be returned. */
f894b69b
PB
3534
3535int
f40751dd 3536rtx_cost (rtx x, enum rtx_code outer_code ATTRIBUTE_UNUSED, bool speed)
f894b69b
PB
3537{
3538 int i, j;
3539 enum rtx_code code;
3540 const char *fmt;
3541 int total;
3542
3543 if (x == 0)
3544 return 0;
3545
3546 /* Compute the default costs of certain things.
3547 Note that targetm.rtx_costs can override the defaults. */
3548
3549 code = GET_CODE (x);
3550 switch (code)
3551 {
3552 case MULT:
3553 total = COSTS_N_INSNS (5);
3554 break;
3555 case DIV:
3556 case UDIV:
3557 case MOD:
3558 case UMOD:
3559 total = COSTS_N_INSNS (7);
3560 break;
3561 case USE:
db3edc20 3562 /* Used in combine.c as a marker. */
f894b69b
PB
3563 total = 0;
3564 break;
3565 default:
3566 total = COSTS_N_INSNS (1);
3567 }
3568
3569 switch (code)
3570 {
3571 case REG:
3572 return 0;
3573
3574 case SUBREG:
edb81165 3575 total = 0;
f894b69b
PB
3576 /* If we can't tie these modes, make this expensive. The larger
3577 the mode, the more expensive it is. */
3578 if (! MODES_TIEABLE_P (GET_MODE (x), GET_MODE (SUBREG_REG (x))))
3579 return COSTS_N_INSNS (2
3580 + GET_MODE_SIZE (GET_MODE (x)) / UNITS_PER_WORD);
3581 break;
3582
3583 default:
f40751dd 3584 if (targetm.rtx_costs (x, code, outer_code, &total, speed))
f894b69b
PB
3585 return total;
3586 break;
3587 }
3588
3589 /* Sum the costs of the sub-rtx's, plus cost of this operation,
3590 which is already in total. */
3591
3592 fmt = GET_RTX_FORMAT (code);
3593 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
3594 if (fmt[i] == 'e')
f40751dd 3595 total += rtx_cost (XEXP (x, i), code, speed);
f894b69b
PB
3596 else if (fmt[i] == 'E')
3597 for (j = 0; j < XVECLEN (x, i); j++)
f40751dd 3598 total += rtx_cost (XVECEXP (x, i, j), code, speed);
f894b69b
PB
3599
3600 return total;
3601}
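/* Usage sketch (use_new_form is hypothetical): choose the cheaper of
   two equivalent expressions, much as cse and combine do; SET serves as
   a neutral outer code here.  */
bool speed = optimize_insn_for_speed_p ();
if (rtx_cost (new_rtx, SET, speed) < rtx_cost (old_rtx, SET, speed))
  use_new_form ();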
3602\f
 3603/* Return the cost of address expression X.
b8698a0f 3604 Expect that X is a properly formed address reference.
f40751dd
JH
3605
 3606 The SPEED parameter specifies whether costs optimized for speed or size should
3607 be returned. */
f894b69b
PB
3608
3609int
09e881c9 3610address_cost (rtx x, enum machine_mode mode, addr_space_t as, bool speed)
f894b69b 3611{
f894b69b
PB
 3612 /* We may be asked for the cost of various unusual addresses, such as operands
 3613 of push instructions. It is not worthwhile to complicate the writing
 3614 of the target hook for such cases. */
3615
09e881c9 3616 if (!memory_address_addr_space_p (mode, x, as))
f894b69b
PB
3617 return 1000;
3618
f40751dd 3619 return targetm.address_cost (x, speed);
f894b69b
PB
3620}
3621
3622/* If the target doesn't override, compute the cost as with arithmetic. */
3623
3624int
f40751dd 3625default_address_cost (rtx x, bool speed)
f894b69b 3626{
f40751dd 3627 return rtx_cost (x, MEM, speed);
f894b69b 3628}
2f93eea8
PB
3629\f
3630
3631unsigned HOST_WIDE_INT
fa233e34 3632nonzero_bits (const_rtx x, enum machine_mode mode)
2f93eea8
PB
3633{
3634 return cached_nonzero_bits (x, mode, NULL_RTX, VOIDmode, 0);
3635}
3636
3637unsigned int
fa233e34 3638num_sign_bit_copies (const_rtx x, enum machine_mode mode)
2f93eea8
PB
3639{
3640 return cached_num_sign_bit_copies (x, mode, NULL_RTX, VOIDmode, 0);
3641}
3642
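/* Usage sketch: prove a value non-negative by showing that its sign bit
   is always clear.  The shift is only valid when MODE is no wider than
   HOST_BITS_PER_WIDE_INT, hence the guard.  */
bool known_nonneg
  = (GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
     && (nonzero_bits (x, mode)
         & ((unsigned HOST_WIDE_INT) 1
            << (GET_MODE_BITSIZE (mode) - 1))) == 0);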
3643/* The function cached_nonzero_bits is a wrapper around nonzero_bits1.
3644 It avoids exponential behavior in nonzero_bits1 when X has
3645 identical subexpressions on the first or the second level. */
3646
3647static unsigned HOST_WIDE_INT
fa233e34 3648cached_nonzero_bits (const_rtx x, enum machine_mode mode, const_rtx known_x,
2f93eea8
PB
3649 enum machine_mode known_mode,
3650 unsigned HOST_WIDE_INT known_ret)
3651{
3652 if (x == known_x && mode == known_mode)
3653 return known_ret;
3654
3655 /* Try to find identical subexpressions. If found call
3656 nonzero_bits1 on X with the subexpressions as KNOWN_X and the
3657 precomputed value for the subexpression as KNOWN_RET. */
3658
3659 if (ARITHMETIC_P (x))
3660 {
3661 rtx x0 = XEXP (x, 0);
3662 rtx x1 = XEXP (x, 1);
3663
3664 /* Check the first level. */
3665 if (x0 == x1)
3666 return nonzero_bits1 (x, mode, x0, mode,
3667 cached_nonzero_bits (x0, mode, known_x,
3668 known_mode, known_ret));
3669
3670 /* Check the second level. */
3671 if (ARITHMETIC_P (x0)
3672 && (x1 == XEXP (x0, 0) || x1 == XEXP (x0, 1)))
3673 return nonzero_bits1 (x, mode, x1, mode,
3674 cached_nonzero_bits (x1, mode, known_x,
3675 known_mode, known_ret));
3676
3677 if (ARITHMETIC_P (x1)
3678 && (x0 == XEXP (x1, 0) || x0 == XEXP (x1, 1)))
3679 return nonzero_bits1 (x, mode, x0, mode,
3680 cached_nonzero_bits (x0, mode, known_x,
3681 known_mode, known_ret));
3682 }
3683
3684 return nonzero_bits1 (x, mode, known_x, known_mode, known_ret);
3685}
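/* Editorial example (illustrative, not part of the original file): for
   (plus:SI (reg:SI 60) (reg:SI 60)) the two operands are the same rtx,
   so the wrapper above hands nonzero_bits1 the operand's value computed
   once as KNOWN_X/KNOWN_RET instead of recursing into the identical
   subexpression twice.  */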
3686
3687/* We let num_sign_bit_copies recur into nonzero_bits as that is useful.
3688 We don't let nonzero_bits recur into num_sign_bit_copies, because that
3689 is less useful. We can't allow both, because that results in exponential
3690 run time recursion. There is a nullstone testcase that triggered
3691 this. This macro avoids accidental uses of num_sign_bit_copies. */
3692#define cached_num_sign_bit_copies sorry_i_am_preventing_exponential_behavior
3693
3694/* Given an expression, X, compute which bits in X can be nonzero.
3695 We don't care about bits outside of those defined in MODE.
3696
3697    For most X this is simply GET_MODE_MASK (MODE), but if X is
3698    an arithmetic operation, we can do better.  */
3699
3700static unsigned HOST_WIDE_INT
3701 nonzero_bits1 (const_rtx x, enum machine_mode mode, const_rtx known_x,
3702 enum machine_mode known_mode,
3703 unsigned HOST_WIDE_INT known_ret)
3704{
3705 unsigned HOST_WIDE_INT nonzero = GET_MODE_MASK (mode);
3706 unsigned HOST_WIDE_INT inner_nz;
3707 enum rtx_code code;
3708 unsigned int mode_width = GET_MODE_BITSIZE (mode);
3709
3710 /* For floating-point and vector values, assume all bits are needed. */
3711 if (FLOAT_MODE_P (GET_MODE (x)) || FLOAT_MODE_P (mode)
3712 || VECTOR_MODE_P (GET_MODE (x)) || VECTOR_MODE_P (mode))
3713 return nonzero;
3714
3715 /* If X is wider than MODE, use its mode instead. */
3716 if (GET_MODE_BITSIZE (GET_MODE (x)) > mode_width)
3717 {
3718 mode = GET_MODE (x);
3719 nonzero = GET_MODE_MASK (mode);
3720 mode_width = GET_MODE_BITSIZE (mode);
3721 }
3722
3723 if (mode_width > HOST_BITS_PER_WIDE_INT)
3724 /* Our only callers in this case look for single bit values. So
3725 just return the mode mask. Those tests will then be false. */
3726 return nonzero;
3727
3728#ifndef WORD_REGISTER_OPERATIONS
3729 /* If MODE is wider than X, but both are a single word for both the host
3730 and target machines, we can compute this from which bits of the
3731 object might be nonzero in its own mode, taking into account the fact
3732 that on many CISC machines, accessing an object in a wider mode
3733 causes the high-order bits to become undefined. So they are
3734 not known to be zero. */
3735
3736 if (GET_MODE (x) != VOIDmode && GET_MODE (x) != mode
3737 && GET_MODE_BITSIZE (GET_MODE (x)) <= BITS_PER_WORD
3738 && GET_MODE_BITSIZE (GET_MODE (x)) <= HOST_BITS_PER_WIDE_INT
3739 && GET_MODE_BITSIZE (mode) > GET_MODE_BITSIZE (GET_MODE (x)))
3740 {
3741 nonzero &= cached_nonzero_bits (x, GET_MODE (x),
3742 known_x, known_mode, known_ret);
3743 nonzero |= GET_MODE_MASK (mode) & ~GET_MODE_MASK (GET_MODE (x));
3744 return nonzero;
3745 }
3746#endif
3747
3748 code = GET_CODE (x);
3749 switch (code)
3750 {
3751 case REG:
3752#if defined(POINTERS_EXTEND_UNSIGNED) && !defined(HAVE_ptr_extend)
3753 /* If pointers extend unsigned and this is a pointer in Pmode, say that
3754 all the bits above ptr_mode are known to be zero. */
3755     /* As we do not know which address space the pointer is referring to,
3756 we can do this only if the target does not support different pointer
3757 or address modes depending on the address space. */
3758 if (target_default_pointer_address_modes_p ()
3759 && POINTERS_EXTEND_UNSIGNED && GET_MODE (x) == Pmode
3760 && REG_POINTER (x))
3761 nonzero &= GET_MODE_MASK (ptr_mode);
3762#endif
3763
3764 /* Include declared information about alignment of pointers. */
3765 /* ??? We don't properly preserve REG_POINTER changes across
3766 pointer-to-integer casts, so we can't trust it except for
3767 things that we know must be pointers. See execute/960116-1.c. */
3768 if ((x == stack_pointer_rtx
3769 || x == frame_pointer_rtx
3770 || x == arg_pointer_rtx)
3771 && REGNO_POINTER_ALIGN (REGNO (x)))
3772 {
3773 unsigned HOST_WIDE_INT alignment
3774 = REGNO_POINTER_ALIGN (REGNO (x)) / BITS_PER_UNIT;
3775
3776#ifdef PUSH_ROUNDING
3777 /* If PUSH_ROUNDING is defined, it is possible for the
3778 stack to be momentarily aligned only to that amount,
3779 so we pick the least alignment. */
3780 if (x == stack_pointer_rtx && PUSH_ARGS)
3781 alignment = MIN ((unsigned HOST_WIDE_INT) PUSH_ROUNDING (1),
3782 alignment);
3783#endif
3784
3785 nonzero &= ~(alignment - 1);
3786 }
3787
3788 {
3789 unsigned HOST_WIDE_INT nonzero_for_hook = nonzero;
3790     rtx new_rtx = rtl_hooks.reg_nonzero_bits (x, mode, known_x,
3791 known_mode, known_ret,
3792 &nonzero_for_hook);
3793
3794 if (new_rtx)
3795 nonzero_for_hook &= cached_nonzero_bits (new_rtx, mode, known_x,
3796 known_mode, known_ret);
3797
3798 return nonzero_for_hook;
3799 }
3800
3801 case CONST_INT:
3802#ifdef SHORT_IMMEDIATES_SIGN_EXTEND
3803 /* If X is negative in MODE, sign-extend the value. */
3804 if (INTVAL (x) > 0 && mode_width < BITS_PER_WORD
3805 && 0 != (INTVAL (x) & ((HOST_WIDE_INT) 1 << (mode_width - 1))))
3806 return (INTVAL (x) | ((HOST_WIDE_INT) (-1) << mode_width));
3807#endif
3808
3809 return INTVAL (x);
3810
3811 case MEM:
3812#ifdef LOAD_EXTEND_OP
3813 /* In many, if not most, RISC machines, reading a byte from memory
3814 zeros the rest of the register. Noticing that fact saves a lot
3815 of extra zero-extends. */
3816 if (LOAD_EXTEND_OP (GET_MODE (x)) == ZERO_EXTEND)
3817 nonzero &= GET_MODE_MASK (GET_MODE (x));
3818#endif
3819 break;
3820
3821 case EQ: case NE:
3822 case UNEQ: case LTGT:
3823 case GT: case GTU: case UNGT:
3824 case LT: case LTU: case UNLT:
3825 case GE: case GEU: case UNGE:
3826 case LE: case LEU: case UNLE:
3827 case UNORDERED: case ORDERED:
3828 /* If this produces an integer result, we know which bits are set.
3829 Code here used to clear bits outside the mode of X, but that is
3830 now done above. */
3831      /* Mind that MODE is the mode the caller wants to look at this
3832 	 operation in, and not the actual operation mode.  We can wind
3833 up with (subreg:DI (gt:V4HI x y)), and we don't have anything
3834 that describes the results of a vector compare. */
3835 if (GET_MODE_CLASS (GET_MODE (x)) == MODE_INT
3836 && mode_width <= HOST_BITS_PER_WIDE_INT)
3837 nonzero = STORE_FLAG_VALUE;
3838 break;
3839
3840 case NEG:
3841#if 0
3842 /* Disabled to avoid exponential mutual recursion between nonzero_bits
3843 and num_sign_bit_copies. */
3844 if (num_sign_bit_copies (XEXP (x, 0), GET_MODE (x))
3845 == GET_MODE_BITSIZE (GET_MODE (x)))
3846 nonzero = 1;
3847#endif
3848
3849 if (GET_MODE_SIZE (GET_MODE (x)) < mode_width)
3850 nonzero |= (GET_MODE_MASK (mode) & ~GET_MODE_MASK (GET_MODE (x)));
3851 break;
3852
3853 case ABS:
3854#if 0
3855 /* Disabled to avoid exponential mutual recursion between nonzero_bits
3856 and num_sign_bit_copies. */
3857 if (num_sign_bit_copies (XEXP (x, 0), GET_MODE (x))
3858 == GET_MODE_BITSIZE (GET_MODE (x)))
3859 nonzero = 1;
3860#endif
3861 break;
3862
3863 case TRUNCATE:
3864 nonzero &= (cached_nonzero_bits (XEXP (x, 0), mode,
3865 known_x, known_mode, known_ret)
3866 & GET_MODE_MASK (mode));
3867 break;
3868
3869 case ZERO_EXTEND:
3870 nonzero &= cached_nonzero_bits (XEXP (x, 0), mode,
3871 known_x, known_mode, known_ret);
3872 if (GET_MODE (XEXP (x, 0)) != VOIDmode)
3873 nonzero &= GET_MODE_MASK (GET_MODE (XEXP (x, 0)));
3874 break;
3875
3876 case SIGN_EXTEND:
3877 /* If the sign bit is known clear, this is the same as ZERO_EXTEND.
3878 Otherwise, show all the bits in the outer mode but not the inner
3879 may be nonzero. */
3880 inner_nz = cached_nonzero_bits (XEXP (x, 0), mode,
3881 known_x, known_mode, known_ret);
3882 if (GET_MODE (XEXP (x, 0)) != VOIDmode)
3883 {
3884 inner_nz &= GET_MODE_MASK (GET_MODE (XEXP (x, 0)));
3885 if (inner_nz
3886 & (((HOST_WIDE_INT) 1
3887 << (GET_MODE_BITSIZE (GET_MODE (XEXP (x, 0))) - 1))))
3888 inner_nz |= (GET_MODE_MASK (mode)
3889 & ~GET_MODE_MASK (GET_MODE (XEXP (x, 0))));
3890 }
3891
3892 nonzero &= inner_nz;
3893 break;
3894
3895 case AND:
3896 nonzero &= cached_nonzero_bits (XEXP (x, 0), mode,
3897 known_x, known_mode, known_ret)
3898 & cached_nonzero_bits (XEXP (x, 1), mode,
3899 known_x, known_mode, known_ret);
3900 break;
3901
3902 case XOR: case IOR:
3903 case UMIN: case UMAX: case SMIN: case SMAX:
3904 {
3905 unsigned HOST_WIDE_INT nonzero0 =
3906 cached_nonzero_bits (XEXP (x, 0), mode,
3907 known_x, known_mode, known_ret);
3908
3909 /* Don't call nonzero_bits for the second time if it cannot change
3910 anything. */
3911 if ((nonzero & nonzero0) != nonzero)
3912 nonzero &= nonzero0
3913 | cached_nonzero_bits (XEXP (x, 1), mode,
3914 known_x, known_mode, known_ret);
3915 }
3916 break;
3917
3918 case PLUS: case MINUS:
3919 case MULT:
3920 case DIV: case UDIV:
3921 case MOD: case UMOD:
3922 /* We can apply the rules of arithmetic to compute the number of
3923 high- and low-order zero bits of these operations. We start by
3924 computing the width (position of the highest-order nonzero bit)
3925 and the number of low-order zero bits for each value. */
3926 {
3927 unsigned HOST_WIDE_INT nz0 =
3928 cached_nonzero_bits (XEXP (x, 0), mode,
3929 known_x, known_mode, known_ret);
3930 unsigned HOST_WIDE_INT nz1 =
3931 cached_nonzero_bits (XEXP (x, 1), mode,
3932 known_x, known_mode, known_ret);
3933 int sign_index = GET_MODE_BITSIZE (GET_MODE (x)) - 1;
3934 int width0 = floor_log2 (nz0) + 1;
3935 int width1 = floor_log2 (nz1) + 1;
3936 int low0 = floor_log2 (nz0 & -nz0);
3937 int low1 = floor_log2 (nz1 & -nz1);
3938 HOST_WIDE_INT op0_maybe_minusp
3939 = (nz0 & ((HOST_WIDE_INT) 1 << sign_index));
3940 HOST_WIDE_INT op1_maybe_minusp
3941 = (nz1 & ((HOST_WIDE_INT) 1 << sign_index));
3942 unsigned int result_width = mode_width;
3943 int result_low = 0;
3944
3945 switch (code)
3946 {
3947 case PLUS:
3948 result_width = MAX (width0, width1) + 1;
3949 result_low = MIN (low0, low1);
3950 break;
3951 case MINUS:
3952 result_low = MIN (low0, low1);
3953 break;
3954 case MULT:
3955 result_width = width0 + width1;
3956 result_low = low0 + low1;
3957 break;
3958 case DIV:
3959 if (width1 == 0)
3960 break;
3961 if (! op0_maybe_minusp && ! op1_maybe_minusp)
3962 result_width = width0;
3963 break;
3964 case UDIV:
3965 if (width1 == 0)
3966 break;
3967 result_width = width0;
3968 break;
3969 case MOD:
3970 if (width1 == 0)
3971 break;
3972 if (! op0_maybe_minusp && ! op1_maybe_minusp)
3973 result_width = MIN (width0, width1);
3974 result_low = MIN (low0, low1);
3975 break;
3976 case UMOD:
3977 if (width1 == 0)
3978 break;
3979 result_width = MIN (width0, width1);
3980 result_low = MIN (low0, low1);
3981 break;
3982 default:
3983 	    gcc_unreachable ();
3984 }
3985
3986 if (result_width < mode_width)
3987 nonzero &= ((HOST_WIDE_INT) 1 << result_width) - 1;
3988
3989 if (result_low > 0)
3990 nonzero &= ~(((HOST_WIDE_INT) 1 << result_low) - 1);
3991
3992#ifdef POINTERS_EXTEND_UNSIGNED
3993 /* If pointers extend unsigned and this is an addition or subtraction
3994 to a pointer in Pmode, all the bits above ptr_mode are known to be
3995 zero. */
3996 	/* As we do not know which address space the pointer is referring to,
3997 we can do this only if the target does not support different pointer
3998 or address modes depending on the address space. */
3999 if (target_default_pointer_address_modes_p ()
4000 && POINTERS_EXTEND_UNSIGNED > 0 && GET_MODE (x) == Pmode
4001 	  && (code == PLUS || code == MINUS)
4002 	  && REG_P (XEXP (x, 0)) && REG_POINTER (XEXP (x, 0)))
4003 nonzero &= GET_MODE_MASK (ptr_mode);
4004#endif
4005 }
4006 break;
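      /* Worked example (illustrative, not part of the original source):
	 for (mult:SI x y) with nz0 == 0x0c (width0 4, low0 2) and
	 nz1 == 0x06 (width1 3, low1 1), result_width is 4 + 3 == 7 and
	 result_low is 2 + 1 == 3, so NONZERO is narrowed to bits 3..6,
	 i.e. 0x78.  */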
4007
4008 case ZERO_EXTRACT:
4009       if (CONST_INT_P (XEXP (x, 1))
4010 && INTVAL (XEXP (x, 1)) < HOST_BITS_PER_WIDE_INT)
4011 nonzero &= ((HOST_WIDE_INT) 1 << INTVAL (XEXP (x, 1))) - 1;
4012 break;
4013
4014 case SUBREG:
4015 /* If this is a SUBREG formed for a promoted variable that has
4016 been zero-extended, we know that at least the high-order bits
4017 are zero, though others might be too. */
4018
4019 if (SUBREG_PROMOTED_VAR_P (x) && SUBREG_PROMOTED_UNSIGNED_P (x) > 0)
4020 nonzero = GET_MODE_MASK (GET_MODE (x))
4021 & cached_nonzero_bits (SUBREG_REG (x), GET_MODE (x),
4022 known_x, known_mode, known_ret);
4023
4024 /* If the inner mode is a single word for both the host and target
4025 machines, we can compute this from which bits of the inner
4026 object might be nonzero. */
4027 if (GET_MODE_BITSIZE (GET_MODE (SUBREG_REG (x))) <= BITS_PER_WORD
4028 && (GET_MODE_BITSIZE (GET_MODE (SUBREG_REG (x)))
4029 <= HOST_BITS_PER_WIDE_INT))
4030 {
4031 nonzero &= cached_nonzero_bits (SUBREG_REG (x), mode,
4032 known_x, known_mode, known_ret);
4033
4034#if defined (WORD_REGISTER_OPERATIONS) && defined (LOAD_EXTEND_OP)
4035 /* If this is a typical RISC machine, we only have to worry
4036 about the way loads are extended. */
4037 if ((LOAD_EXTEND_OP (GET_MODE (SUBREG_REG (x))) == SIGN_EXTEND
4038 ? (((nonzero
4039 & (((unsigned HOST_WIDE_INT) 1
4040 << (GET_MODE_BITSIZE (GET_MODE (SUBREG_REG (x))) - 1))))
4041 != 0))
4042 : LOAD_EXTEND_OP (GET_MODE (SUBREG_REG (x))) != ZERO_EXTEND)
4043 	      || !MEM_P (SUBREG_REG (x)))
4044#endif
4045 {
4046 /* On many CISC machines, accessing an object in a wider mode
4047 causes the high-order bits to become undefined. So they are
4048 not known to be zero. */
4049 if (GET_MODE_SIZE (GET_MODE (x))
4050 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))))
4051 nonzero |= (GET_MODE_MASK (GET_MODE (x))
4052 & ~GET_MODE_MASK (GET_MODE (SUBREG_REG (x))));
4053 }
4054 }
4055 break;
4056
4057 case ASHIFTRT:
4058 case LSHIFTRT:
4059 case ASHIFT:
4060 case ROTATE:
4061 /* The nonzero bits are in two classes: any bits within MODE
4062 that aren't in GET_MODE (x) are always significant. The rest of the
4063 nonzero bits are those that are significant in the operand of
4064 the shift when shifted the appropriate number of bits. This
4065 shows that high-order bits are cleared by the right shift and
4066 low-order bits by left shifts. */
4067       if (CONST_INT_P (XEXP (x, 1))
4068 	  && INTVAL (XEXP (x, 1)) >= 0
4069 	  && INTVAL (XEXP (x, 1)) < HOST_BITS_PER_WIDE_INT
4070 	  && INTVAL (XEXP (x, 1)) < GET_MODE_BITSIZE (GET_MODE (x)))
4071 {
4072 enum machine_mode inner_mode = GET_MODE (x);
4073 unsigned int width = GET_MODE_BITSIZE (inner_mode);
4074 int count = INTVAL (XEXP (x, 1));
4075 unsigned HOST_WIDE_INT mode_mask = GET_MODE_MASK (inner_mode);
4076 unsigned HOST_WIDE_INT op_nonzero =
4077 cached_nonzero_bits (XEXP (x, 0), mode,
4078 known_x, known_mode, known_ret);
4079 unsigned HOST_WIDE_INT inner = op_nonzero & mode_mask;
4080 unsigned HOST_WIDE_INT outer = 0;
4081
4082 if (mode_width > width)
4083 outer = (op_nonzero & nonzero & ~mode_mask);
4084
4085 if (code == LSHIFTRT)
4086 inner >>= count;
4087 else if (code == ASHIFTRT)
4088 {
4089 inner >>= count;
4090
4091 /* If the sign bit may have been nonzero before the shift, we
4092 need to mark all the places it could have been copied to
4093 by the shift as possibly nonzero. */
4094 if (inner & ((HOST_WIDE_INT) 1 << (width - 1 - count)))
4095 inner |= (((HOST_WIDE_INT) 1 << count) - 1) << (width - count);
4096 }
4097 else if (code == ASHIFT)
4098 inner <<= count;
4099 else
4100 inner = ((inner << (count % width)
4101 | (inner >> (width - (count % width)))) & mode_mask);
4102
4103 nonzero &= (outer | inner);
4104 }
4105 break;
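      /* Worked example (illustrative, not part of the original source):
	 for (lshiftrt:SI x (const_int 4)) with nonzero_bits (x) ==
	 0xff00, INNER becomes 0xff00 >> 4 == 0x0ff0: the right shift
	 clears the four high-order bits and introduces no new ones.  */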
4106
4107 case FFS:
4108 case POPCOUNT:
4109 /* This is at most the number of bits in the mode. */
4110 nonzero = ((HOST_WIDE_INT) 2 << (floor_log2 (mode_width))) - 1;
4111 break;
4112
4113 case CLZ:
4114 /* If CLZ has a known value at zero, then the nonzero bits are
4115 that value, plus the number of bits in the mode minus one. */
4116 if (CLZ_DEFINED_VALUE_AT_ZERO (mode, nonzero))
4117 nonzero |= ((HOST_WIDE_INT) 1 << (floor_log2 (mode_width))) - 1;
4118 else
4119 nonzero = -1;
4120 break;
4121
4122 case CTZ:
4123 /* If CTZ has a known value at zero, then the nonzero bits are
4124 that value, plus the number of bits in the mode minus one. */
4125 if (CTZ_DEFINED_VALUE_AT_ZERO (mode, nonzero))
4126 nonzero |= ((HOST_WIDE_INT) 1 << (floor_log2 (mode_width))) - 1;
4127 else
4128 nonzero = -1;
4129 break;
4130
4131 case PARITY:
4132 nonzero = 1;
4133 break;
4134
4135 case IF_THEN_ELSE:
4136 {
4137 unsigned HOST_WIDE_INT nonzero_true =
4138 cached_nonzero_bits (XEXP (x, 1), mode,
4139 known_x, known_mode, known_ret);
4140
4141 /* Don't call nonzero_bits for the second time if it cannot change
4142 anything. */
4143 if ((nonzero & nonzero_true) != nonzero)
4144 nonzero &= nonzero_true
4145 | cached_nonzero_bits (XEXP (x, 2), mode,
4146 known_x, known_mode, known_ret);
4147 }
4148 break;
4149
4150 default:
4151 break;
4152 }
4153
4154 return nonzero;
4155}
4156
4157/* See the macro definition above. */
4158#undef cached_num_sign_bit_copies
4159
4160\f
4161/* The function cached_num_sign_bit_copies is a wrapper around
4162 num_sign_bit_copies1. It avoids exponential behavior in
4163 num_sign_bit_copies1 when X has identical subexpressions on the
4164 first or the second level. */
4165
4166static unsigned int
4167 cached_num_sign_bit_copies (const_rtx x, enum machine_mode mode, const_rtx known_x,
4168 enum machine_mode known_mode,
4169 unsigned int known_ret)
4170{
4171 if (x == known_x && mode == known_mode)
4172 return known_ret;
4173
4174 /* Try to find identical subexpressions. If found call
4175 num_sign_bit_copies1 on X with the subexpressions as KNOWN_X and
4176 the precomputed value for the subexpression as KNOWN_RET. */
4177
4178 if (ARITHMETIC_P (x))
4179 {
4180 rtx x0 = XEXP (x, 0);
4181 rtx x1 = XEXP (x, 1);
4182
4183 /* Check the first level. */
4184 if (x0 == x1)
4185 return
4186 num_sign_bit_copies1 (x, mode, x0, mode,
4187 cached_num_sign_bit_copies (x0, mode, known_x,
4188 known_mode,
4189 known_ret));
4190
4191 /* Check the second level. */
4192 if (ARITHMETIC_P (x0)
4193 && (x1 == XEXP (x0, 0) || x1 == XEXP (x0, 1)))
4194 return
4195 num_sign_bit_copies1 (x, mode, x1, mode,
4196 cached_num_sign_bit_copies (x1, mode, known_x,
4197 known_mode,
4198 known_ret));
4199
4200 if (ARITHMETIC_P (x1)
4201 && (x0 == XEXP (x1, 0) || x0 == XEXP (x1, 1)))
4202 return
4203 num_sign_bit_copies1 (x, mode, x0, mode,
4204 cached_num_sign_bit_copies (x0, mode, known_x,
4205 known_mode,
4206 known_ret));
4207 }
4208
4209 return num_sign_bit_copies1 (x, mode, known_x, known_mode, known_ret);
4210}
4211
4212/* Return the number of bits at the high-order end of X that are known to
4213 be equal to the sign bit. X will be used in mode MODE; if MODE is
4214 VOIDmode, X will be used in its own mode. The returned value will always
4215 be between 1 and the number of bits in MODE. */
4216
4217static unsigned int
4218 num_sign_bit_copies1 (const_rtx x, enum machine_mode mode, const_rtx known_x,
4219 enum machine_mode known_mode,
4220 unsigned int known_ret)
4221{
4222 enum rtx_code code = GET_CODE (x);
4223 unsigned int bitwidth = GET_MODE_BITSIZE (mode);
4224 int num0, num1, result;
4225 unsigned HOST_WIDE_INT nonzero;
4226
4227 /* If we weren't given a mode, use the mode of X. If the mode is still
4228 VOIDmode, we don't know anything. Likewise if one of the modes is
4229 floating-point. */
4230
4231 if (mode == VOIDmode)
4232 mode = GET_MODE (x);
4233
4234   if (mode == VOIDmode || FLOAT_MODE_P (mode) || FLOAT_MODE_P (GET_MODE (x))
4235       || VECTOR_MODE_P (GET_MODE (x)) || VECTOR_MODE_P (mode))
4236 return 1;
4237
4238 /* For a smaller object, just ignore the high bits. */
4239 if (bitwidth < GET_MODE_BITSIZE (GET_MODE (x)))
4240 {
4241 num0 = cached_num_sign_bit_copies (x, GET_MODE (x),
4242 known_x, known_mode, known_ret);
4243 return MAX (1,
4244 num0 - (int) (GET_MODE_BITSIZE (GET_MODE (x)) - bitwidth));
4245 }
4246
4247 if (GET_MODE (x) != VOIDmode && bitwidth > GET_MODE_BITSIZE (GET_MODE (x)))
4248 {
4249#ifndef WORD_REGISTER_OPERATIONS
4250 /* If this machine does not do all register operations on the entire
4251 register and MODE is wider than the mode of X, we can say nothing
4252 at all about the high-order bits. */
4253 return 1;
4254#else
4255 /* Likewise on machines that do, if the mode of the object is smaller
4256 than a word and loads of that size don't sign extend, we can say
4257 nothing about the high order bits. */
4258 if (GET_MODE_BITSIZE (GET_MODE (x)) < BITS_PER_WORD
4259#ifdef LOAD_EXTEND_OP
4260 && LOAD_EXTEND_OP (GET_MODE (x)) != SIGN_EXTEND
4261#endif
4262 )
4263 return 1;
4264#endif
4265 }
4266
4267 switch (code)
4268 {
4269 case REG:
4270
4271#if defined(POINTERS_EXTEND_UNSIGNED) && !defined(HAVE_ptr_extend)
4272 /* If pointers extend signed and this is a pointer in Pmode, say that
4273 all the bits above ptr_mode are known to be sign bit copies. */
4274     /* As we do not know which address space the pointer is referring to,
4275 we can do this only if the target does not support different pointer
4276 or address modes depending on the address space. */
4277 if (target_default_pointer_address_modes_p ()
4278 && ! POINTERS_EXTEND_UNSIGNED && GET_MODE (x) == Pmode
4279 && mode == Pmode && REG_POINTER (x))
4280 return GET_MODE_BITSIZE (Pmode) - GET_MODE_BITSIZE (ptr_mode) + 1;
4281#endif
4282
4283 {
4284 unsigned int copies_for_hook = 1, copies = 1;
4285 	rtx new_rtx = rtl_hooks.reg_num_sign_bit_copies (x, mode, known_x,
4286 known_mode, known_ret,
4287 &copies_for_hook);
4288
4289 if (new_rtx)
4290 copies = cached_num_sign_bit_copies (new_rtx, mode, known_x,
4291 known_mode, known_ret);
4292
4293 if (copies > 1 || copies_for_hook > 1)
4294 return MAX (copies, copies_for_hook);
4295
4296 /* Else, use nonzero_bits to guess num_sign_bit_copies (see below). */
4297 }
4298 break;
4299
4300 case MEM:
4301#ifdef LOAD_EXTEND_OP
4302 /* Some RISC machines sign-extend all loads of smaller than a word. */
4303 if (LOAD_EXTEND_OP (GET_MODE (x)) == SIGN_EXTEND)
4304 return MAX (1, ((int) bitwidth
4305 - (int) GET_MODE_BITSIZE (GET_MODE (x)) + 1));
4306#endif
4307 break;
4308
4309 case CONST_INT:
4310 /* If the constant is negative, take its 1's complement and remask.
4311 Then see how many zero bits we have. */
4312 nonzero = INTVAL (x) & GET_MODE_MASK (mode);
4313 if (bitwidth <= HOST_BITS_PER_WIDE_INT
4314 && (nonzero & ((HOST_WIDE_INT) 1 << (bitwidth - 1))) != 0)
4315 nonzero = (~nonzero) & GET_MODE_MASK (mode);
4316
4317 return (nonzero == 0 ? bitwidth : bitwidth - floor_log2 (nonzero) - 1);
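      /* Worked example (illustrative, not part of the original source):
	 for (const_int -4) in SImode, NONZERO is 0xfffffffc, whose sign
	 bit is set, so it is complemented to 3; the result is
	 32 - floor_log2 (3) - 1 == 30, matching the 30 leading one
	 bits of -4.  */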
4318
4319 case SUBREG:
4320 /* If this is a SUBREG for a promoted object that is sign-extended
4321 and we are looking at it in a wider mode, we know that at least the
4322 high-order bits are known to be sign bit copies. */
4323
4324 if (SUBREG_PROMOTED_VAR_P (x) && ! SUBREG_PROMOTED_UNSIGNED_P (x))
4325 {
4326 num0 = cached_num_sign_bit_copies (SUBREG_REG (x), mode,
4327 known_x, known_mode, known_ret);
4328 return MAX ((int) bitwidth
4329 - (int) GET_MODE_BITSIZE (GET_MODE (x)) + 1,
4330 num0);
4331 }
4332
4333 /* For a smaller object, just ignore the high bits. */
4334 if (bitwidth <= GET_MODE_BITSIZE (GET_MODE (SUBREG_REG (x))))
4335 {
4336 num0 = cached_num_sign_bit_copies (SUBREG_REG (x), VOIDmode,
4337 known_x, known_mode, known_ret);
4338 return MAX (1, (num0
4339 - (int) (GET_MODE_BITSIZE (GET_MODE (SUBREG_REG (x)))
4340 - bitwidth)));
4341 }
4342
4343#ifdef WORD_REGISTER_OPERATIONS
4344#ifdef LOAD_EXTEND_OP
4345 /* For paradoxical SUBREGs on machines where all register operations
4346 affect the entire register, just look inside. Note that we are
4347 passing MODE to the recursive call, so the number of sign bit copies
4348 will remain relative to that mode, not the inner mode. */
4349
4350 /* This works only if loads sign extend. Otherwise, if we get a
4351 reload for the inner part, it may be loaded from the stack, and
4352 then we lose all sign bit copies that existed before the store
4353 to the stack. */
4354
4355 if ((GET_MODE_SIZE (GET_MODE (x))
4356 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))))
4357 && LOAD_EXTEND_OP (GET_MODE (SUBREG_REG (x))) == SIGN_EXTEND
4358 	  && MEM_P (SUBREG_REG (x)))
4359 return cached_num_sign_bit_copies (SUBREG_REG (x), mode,
4360 known_x, known_mode, known_ret);
4361#endif
4362#endif
4363 break;
4364
4365 case SIGN_EXTRACT:
4366       if (CONST_INT_P (XEXP (x, 1)))
4367 return MAX (1, (int) bitwidth - INTVAL (XEXP (x, 1)));
4368 break;
4369
4370 case SIGN_EXTEND:
4371 return (bitwidth - GET_MODE_BITSIZE (GET_MODE (XEXP (x, 0)))
4372 + cached_num_sign_bit_copies (XEXP (x, 0), VOIDmode,
4373 known_x, known_mode, known_ret));
4374
4375 case TRUNCATE:
4376 /* For a smaller object, just ignore the high bits. */
4377 num0 = cached_num_sign_bit_copies (XEXP (x, 0), VOIDmode,
4378 known_x, known_mode, known_ret);
4379 return MAX (1, (num0 - (int) (GET_MODE_BITSIZE (GET_MODE (XEXP (x, 0)))
4380 - bitwidth)));
4381
4382 case NOT:
4383 return cached_num_sign_bit_copies (XEXP (x, 0), mode,
4384 known_x, known_mode, known_ret);
4385
4386 case ROTATE: case ROTATERT:
4387 /* If we are rotating left by a number of bits less than the number
4388 of sign bit copies, we can just subtract that amount from the
4389 number. */
4390       if (CONST_INT_P (XEXP (x, 1))
4391 && INTVAL (XEXP (x, 1)) >= 0
4392 && INTVAL (XEXP (x, 1)) < (int) bitwidth)
4393 {
4394 num0 = cached_num_sign_bit_copies (XEXP (x, 0), mode,
4395 known_x, known_mode, known_ret);
4396 return MAX (1, num0 - (code == ROTATE ? INTVAL (XEXP (x, 1))
4397 : (int) bitwidth - INTVAL (XEXP (x, 1))));
4398 }
4399 break;
4400
4401 case NEG:
4402 /* In general, this subtracts one sign bit copy. But if the value
4403 is known to be positive, the number of sign bit copies is the
4404 same as that of the input. Finally, if the input has just one bit
4405 that might be nonzero, all the bits are copies of the sign bit. */
4406 num0 = cached_num_sign_bit_copies (XEXP (x, 0), mode,
4407 known_x, known_mode, known_ret);
4408 if (bitwidth > HOST_BITS_PER_WIDE_INT)
4409 return num0 > 1 ? num0 - 1 : 1;
4410
4411 nonzero = nonzero_bits (XEXP (x, 0), mode);
4412 if (nonzero == 1)
4413 return bitwidth;
4414
4415 if (num0 > 1
4416 && (((HOST_WIDE_INT) 1 << (bitwidth - 1)) & nonzero))
4417 num0--;
4418
4419 return num0;
4420
4421 case IOR: case AND: case XOR:
4422 case SMIN: case SMAX: case UMIN: case UMAX:
4423 /* Logical operations will preserve the number of sign-bit copies.
4424 MIN and MAX operations always return one of the operands. */
4425 num0 = cached_num_sign_bit_copies (XEXP (x, 0), mode,
4426 known_x, known_mode, known_ret);
4427 num1 = cached_num_sign_bit_copies (XEXP (x, 1), mode,
4428 known_x, known_mode, known_ret);
4429
4430 /* If num1 is clearing some of the top bits then regardless of
4431 the other term, we are guaranteed to have at least that many
4432 high-order zero bits. */
4433 if (code == AND
4434 && num1 > 1
4435 && bitwidth <= HOST_BITS_PER_WIDE_INT
4436 	  && CONST_INT_P (XEXP (x, 1))
4437 && !(INTVAL (XEXP (x, 1)) & ((HOST_WIDE_INT) 1 << (bitwidth - 1))))
4438 return num1;
4439
4440 /* Similarly for IOR when setting high-order bits. */
4441 if (code == IOR
4442 && num1 > 1
4443 && bitwidth <= HOST_BITS_PER_WIDE_INT
4444 	  && CONST_INT_P (XEXP (x, 1))
4445 && (INTVAL (XEXP (x, 1)) & ((HOST_WIDE_INT) 1 << (bitwidth - 1))))
4446 return num1;
4447
4448 return MIN (num0, num1);
4449
4450 case PLUS: case MINUS:
4451 /* For addition and subtraction, we can have a 1-bit carry. However,
4452 if we are subtracting 1 from a positive number, there will not
4453 be such a carry. Furthermore, if the positive number is known to
4454 be 0 or 1, we know the result is either -1 or 0. */
4455
4456 if (code == PLUS && XEXP (x, 1) == constm1_rtx
4457 && bitwidth <= HOST_BITS_PER_WIDE_INT)
4458 {
4459 nonzero = nonzero_bits (XEXP (x, 0), mode);
4460 if ((((HOST_WIDE_INT) 1 << (bitwidth - 1)) & nonzero) == 0)
4461 return (nonzero == 1 || nonzero == 0 ? bitwidth
4462 : bitwidth - floor_log2 (nonzero) - 1);
4463 }
4464
4465 num0 = cached_num_sign_bit_copies (XEXP (x, 0), mode,
4466 known_x, known_mode, known_ret);
4467 num1 = cached_num_sign_bit_copies (XEXP (x, 1), mode,
4468 known_x, known_mode, known_ret);
4469 result = MAX (1, MIN (num0, num1) - 1);
4470
4471#ifdef POINTERS_EXTEND_UNSIGNED
4472 /* If pointers extend signed and this is an addition or subtraction
4473 to a pointer in Pmode, all the bits above ptr_mode are known to be
4474 sign bit copies. */
4475     /* As we do not know which address space the pointer is referring to,
4476 we can do this only if the target does not support different pointer
4477 or address modes depending on the address space. */
4478 if (target_default_pointer_address_modes_p ()
4479 && ! POINTERS_EXTEND_UNSIGNED && GET_MODE (x) == Pmode
4480 	  && (code == PLUS || code == MINUS)
4481 	  && REG_P (XEXP (x, 0)) && REG_POINTER (XEXP (x, 0)))
4482 result = MAX ((int) (GET_MODE_BITSIZE (Pmode)
4483 - GET_MODE_BITSIZE (ptr_mode) + 1),
4484 result);
4485#endif
4486 return result;
4487
4488 case MULT:
4489 /* The number of bits of the product is the sum of the number of
4490 	 bits of both terms.  However, unless one of the terms is known
4491 to be positive, we must allow for an additional bit since negating
4492 a negative number can remove one sign bit copy. */
4493
4494 num0 = cached_num_sign_bit_copies (XEXP (x, 0), mode,
4495 known_x, known_mode, known_ret);
4496 num1 = cached_num_sign_bit_copies (XEXP (x, 1), mode,
4497 known_x, known_mode, known_ret);
4498
4499 result = bitwidth - (bitwidth - num0) - (bitwidth - num1);
4500 if (result > 0
4501 && (bitwidth > HOST_BITS_PER_WIDE_INT
4502 || (((nonzero_bits (XEXP (x, 0), mode)
4503 & ((HOST_WIDE_INT) 1 << (bitwidth - 1))) != 0)
4504 && ((nonzero_bits (XEXP (x, 1), mode)
4505 & ((HOST_WIDE_INT) 1 << (bitwidth - 1))) != 0))))
4506 result--;
4507
4508 return MAX (1, result);
4509
4510 case UDIV:
4511 /* The result must be <= the first operand. If the first operand
4512 has the high bit set, we know nothing about the number of sign
4513 bit copies. */
4514 if (bitwidth > HOST_BITS_PER_WIDE_INT)
4515 return 1;
4516 else if ((nonzero_bits (XEXP (x, 0), mode)
4517 & ((HOST_WIDE_INT) 1 << (bitwidth - 1))) != 0)
4518 return 1;
4519 else
4520 return cached_num_sign_bit_copies (XEXP (x, 0), mode,
4521 known_x, known_mode, known_ret);
4522
4523 case UMOD:
4524 /* The result must be <= the second operand. If the second operand
4525 has (or just might have) the high bit set, we know nothing about
4526 the number of sign bit copies. */
4527 if (bitwidth > HOST_BITS_PER_WIDE_INT)
4528 return 1;
4529 else if ((nonzero_bits (XEXP (x, 1), mode)
4530 & ((HOST_WIDE_INT) 1 << (bitwidth - 1))) != 0)
4531 return 1;
4532 else
4533 return cached_num_sign_bit_copies (XEXP (x, 1), mode,
4534 known_x, known_mode, known_ret);
4535
4536 case DIV:
4537 /* Similar to unsigned division, except that we have to worry about
4538 the case where the divisor is negative, in which case we have
4539 to add 1. */
4540 result = cached_num_sign_bit_copies (XEXP (x, 0), mode,
4541 known_x, known_mode, known_ret);
4542 if (result > 1
4543 && (bitwidth > HOST_BITS_PER_WIDE_INT
4544 || (nonzero_bits (XEXP (x, 1), mode)
4545 & ((HOST_WIDE_INT) 1 << (bitwidth - 1))) != 0))
4546 result--;
4547
4548 return result;
4549
4550 case MOD:
4551 result = cached_num_sign_bit_copies (XEXP (x, 1), mode,
4552 known_x, known_mode, known_ret);
4553 if (result > 1
4554 && (bitwidth > HOST_BITS_PER_WIDE_INT
4555 || (nonzero_bits (XEXP (x, 1), mode)
4556 & ((HOST_WIDE_INT) 1 << (bitwidth - 1))) != 0))
4557 result--;
4558
4559 return result;
4560
4561 case ASHIFTRT:
4562 /* Shifts by a constant add to the number of bits equal to the
4563 sign bit. */
4564 num0 = cached_num_sign_bit_copies (XEXP (x, 0), mode,
4565 known_x, known_mode, known_ret);
4566       if (CONST_INT_P (XEXP (x, 1))
4567 && INTVAL (XEXP (x, 1)) > 0
4568 && INTVAL (XEXP (x, 1)) < GET_MODE_BITSIZE (GET_MODE (x)))
4569 num0 = MIN ((int) bitwidth, num0 + INTVAL (XEXP (x, 1)));
4570
4571 return num0;
4572
4573 case ASHIFT:
4574 /* Left shifts destroy copies. */
4575       if (!CONST_INT_P (XEXP (x, 1))
4576 	  || INTVAL (XEXP (x, 1)) < 0
4577 || INTVAL (XEXP (x, 1)) >= (int) bitwidth
4578 || INTVAL (XEXP (x, 1)) >= GET_MODE_BITSIZE (GET_MODE (x)))
4579 return 1;
4580
4581 num0 = cached_num_sign_bit_copies (XEXP (x, 0), mode,
4582 known_x, known_mode, known_ret);
4583 return MAX (1, num0 - INTVAL (XEXP (x, 1)));
4584
4585 case IF_THEN_ELSE:
4586 num0 = cached_num_sign_bit_copies (XEXP (x, 1), mode,
4587 known_x, known_mode, known_ret);
4588 num1 = cached_num_sign_bit_copies (XEXP (x, 2), mode,
4589 known_x, known_mode, known_ret);
4590 return MIN (num0, num1);
4591
4592 case EQ: case NE: case GE: case GT: case LE: case LT:
4593 case UNEQ: case LTGT: case UNGE: case UNGT: case UNLE: case UNLT:
4594 case GEU: case GTU: case LEU: case LTU:
4595 case UNORDERED: case ORDERED:
4596 /* If the constant is negative, take its 1's complement and remask.
4597 Then see how many zero bits we have. */
4598 nonzero = STORE_FLAG_VALUE;
4599 if (bitwidth <= HOST_BITS_PER_WIDE_INT
4600 && (nonzero & ((HOST_WIDE_INT) 1 << (bitwidth - 1))) != 0)
4601 nonzero = (~nonzero) & GET_MODE_MASK (mode);
4602
4603 return (nonzero == 0 ? bitwidth : bitwidth - floor_log2 (nonzero) - 1);
4604
4605 default:
4606 break;
4607 }
4608
4609 /* If we haven't been able to figure it out by one of the above rules,
4610 see if some of the high-order bits are known to be zero. If so,
4611 count those bits and return one less than that amount. If we can't
4612 safely compute the mask for this mode, always return BITWIDTH. */
4613
4614 bitwidth = GET_MODE_BITSIZE (mode);
4615 if (bitwidth > HOST_BITS_PER_WIDE_INT)
4616 return 1;
4617
4618 nonzero = nonzero_bits (x, mode);
4619 return nonzero & ((HOST_WIDE_INT) 1 << (bitwidth - 1))
4620 ? 1 : bitwidth - floor_log2 (nonzero) - 1;
4621}
4622
4623/* Calculate the rtx_cost of a single instruction. A return value of
4624 zero indicates an instruction pattern without a known cost. */
4625
4626int
4627 insn_rtx_cost (rtx pat, bool speed)
4628{
4629 int i, cost;
4630 rtx set;
4631
4632 /* Extract the single set rtx from the instruction pattern.
4633 We can't use single_set since we only have the pattern. */
4634 if (GET_CODE (pat) == SET)
4635 set = pat;
4636 else if (GET_CODE (pat) == PARALLEL)
4637 {
4638 set = NULL_RTX;
4639 for (i = 0; i < XVECLEN (pat, 0); i++)
4640 {
4641 rtx x = XVECEXP (pat, 0, i);
4642 if (GET_CODE (x) == SET)
4643 {
4644 if (set)
4645 return 0;
4646 set = x;
4647 }
4648 }
4649 if (!set)
4650 return 0;
4651 }
4652 else
4653 return 0;
4654
4655   cost = rtx_cost (SET_SRC (set), SET, speed);
4656 return cost > 0 ? cost : COSTS_N_INSNS (1);
4657}
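/* Usage sketch (editorial illustration, not part of the original file):
   testing whether an insn is provably cheap, as an if-conversion style
   pass might.  The helper name is hypothetical.  */
#if 0
static bool
cheap_insn_p (rtx insn, bool speed)
{
  int cost = insn_rtx_cost (PATTERN (insn), speed);
  /* Zero means the cost is unknown; treat that as not provably cheap.  */
  return cost != 0 && cost <= COSTS_N_INSNS (1);
}
#endif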
4658
4659/* Given an insn INSN and condition COND, return the condition in a
4660 canonical form to simplify testing by callers. Specifically:
4661
4662 (1) The code will always be a comparison operation (EQ, NE, GT, etc.).
4663 (2) Both operands will be machine operands; (cc0) will have been replaced.
4664 (3) If an operand is a constant, it will be the second operand.
4665 (4) (LE x const) will be replaced with (LT x <const+1>) and similarly
4666 for GE, GEU, and LEU.
4667
4668 If the condition cannot be understood, or is an inequality floating-point
4669 comparison which needs to be reversed, 0 will be returned.
4670
4671    If REVERSE is nonzero, then reverse the condition prior to canonicalizing it.
4672
4673 If EARLIEST is nonzero, it is a pointer to a place where the earliest
4674 insn used in locating the condition was found. If a replacement test
4675 of the condition is desired, it should be placed in front of that
4676 insn and we will be sure that the inputs are still valid.
4677
4678 If WANT_REG is nonzero, we wish the condition to be relative to that
4679 register, if possible. Therefore, do not canonicalize the condition
4680    further.  If ALLOW_CC_MODE is nonzero, allow the condition returned
4681 to be a compare to a CC mode register.
4682
4683    If VALID_AT_INSN_P is nonzero, the condition must be valid at both
4684    *EARLIEST and INSN.  */
4685
4686rtx
4687canonicalize_condition (rtx insn, rtx cond, int reverse, rtx *earliest,
4688 rtx want_reg, int allow_cc_mode, int valid_at_insn_p)
4689{
4690 enum rtx_code code;
4691 rtx prev = insn;
4692   const_rtx set;
4693 rtx tem;
4694 rtx op0, op1;
4695 int reverse_code = 0;
4696 enum machine_mode mode;
4697   basic_block bb = BLOCK_FOR_INSN (insn);
4698
4699 code = GET_CODE (cond);
4700 mode = GET_MODE (cond);
4701 op0 = XEXP (cond, 0);
4702 op1 = XEXP (cond, 1);
4703
4704 if (reverse)
4705 code = reversed_comparison_code (cond, insn);
4706 if (code == UNKNOWN)
4707 return 0;
4708
4709 if (earliest)
4710 *earliest = insn;
4711
4712 /* If we are comparing a register with zero, see if the register is set
4713 in the previous insn to a COMPARE or a comparison operation. Perform
4714 the same tests as a function of STORE_FLAG_VALUE as find_comparison_args
4715      in cse.c.  */
4716
4717 while ((GET_RTX_CLASS (code) == RTX_COMPARE
4718 || GET_RTX_CLASS (code) == RTX_COMM_COMPARE)
4719 && op1 == CONST0_RTX (GET_MODE (op0))
4720 && op0 != want_reg)
4721 {
4722 /* Set nonzero when we find something of interest. */
4723 rtx x = 0;
4724
4725#ifdef HAVE_cc0
4726 /* If comparison with cc0, import actual comparison from compare
4727 insn. */
4728 if (op0 == cc0_rtx)
4729 {
4730 if ((prev = prev_nonnote_insn (prev)) == 0
4731 || !NONJUMP_INSN_P (prev)
4732 || (set = single_set (prev)) == 0
4733 || SET_DEST (set) != cc0_rtx)
4734 return 0;
4735
4736 op0 = SET_SRC (set);
4737 op1 = CONST0_RTX (GET_MODE (op0));
4738 if (earliest)
4739 *earliest = prev;
4740 }
4741#endif
4742
4743 /* If this is a COMPARE, pick up the two things being compared. */
4744 if (GET_CODE (op0) == COMPARE)
4745 {
4746 op1 = XEXP (op0, 1);
4747 op0 = XEXP (op0, 0);
4748 continue;
4749 }
4750 else if (!REG_P (op0))
4751 break;
4752
4753 /* Go back to the previous insn. Stop if it is not an INSN. We also
4754 stop if it isn't a single set or if it has a REG_INC note because
4755 we don't want to bother dealing with it. */
4756
4757 do
4758 prev = prev_nonnote_insn (prev);
4759 while (prev && DEBUG_INSN_P (prev));
4760
4761 if (prev == 0
4762 	  || !NONJUMP_INSN_P (prev)
4763 || FIND_REG_INC_NOTE (prev, NULL_RTX)
4764 /* In cfglayout mode, there do not have to be labels at the
4765 beginning of a block, or jumps at the end, so the previous
4766 conditions would not stop us when we reach bb boundary. */
4767 || BLOCK_FOR_INSN (prev) != bb)
4768 break;
4769
4770 set = set_of (op0, prev);
4771
4772 if (set
4773 && (GET_CODE (set) != SET
4774 || !rtx_equal_p (SET_DEST (set), op0)))
4775 break;
4776
4777 /* If this is setting OP0, get what it sets it to if it looks
4778 relevant. */
4779 if (set)
4780 {
4781 enum machine_mode inner_mode = GET_MODE (SET_DEST (set));
4782#ifdef FLOAT_STORE_FLAG_VALUE
4783 REAL_VALUE_TYPE fsfv;
4784#endif
4785
4786 /* ??? We may not combine comparisons done in a CCmode with
4787 comparisons not done in a CCmode. This is to aid targets
4788 like Alpha that have an IEEE compliant EQ instruction, and
4789 a non-IEEE compliant BEQ instruction. The use of CCmode is
4790 actually artificial, simply to prevent the combination, but
4791 should not affect other platforms.
4792
4793 However, we must allow VOIDmode comparisons to match either
4794 CCmode or non-CCmode comparison, because some ports have
4795 modeless comparisons inside branch patterns.
4796
4797 ??? This mode check should perhaps look more like the mode check
4798 in simplify_comparison in combine. */
4799
4800 if ((GET_CODE (SET_SRC (set)) == COMPARE
4801 || (((code == NE
4802 || (code == LT
4803 && GET_MODE_CLASS (inner_mode) == MODE_INT
4804 && (GET_MODE_BITSIZE (inner_mode)
4805 <= HOST_BITS_PER_WIDE_INT)
4806 && (STORE_FLAG_VALUE
4807 & ((HOST_WIDE_INT) 1
4808 << (GET_MODE_BITSIZE (inner_mode) - 1))))
4809#ifdef FLOAT_STORE_FLAG_VALUE
4810 || (code == LT
4811 		     && SCALAR_FLOAT_MODE_P (inner_mode)
4812 && (fsfv = FLOAT_STORE_FLAG_VALUE (inner_mode),
4813 REAL_VALUE_NEGATIVE (fsfv)))
4814#endif
4815 ))
4816 && COMPARISON_P (SET_SRC (set))))
4817 && (((GET_MODE_CLASS (mode) == MODE_CC)
4818 == (GET_MODE_CLASS (inner_mode) == MODE_CC))
4819 || mode == VOIDmode || inner_mode == VOIDmode))
4820 x = SET_SRC (set);
4821 else if (((code == EQ
4822 || (code == GE
4823 && (GET_MODE_BITSIZE (inner_mode)
4824 <= HOST_BITS_PER_WIDE_INT)
4825 && GET_MODE_CLASS (inner_mode) == MODE_INT
4826 && (STORE_FLAG_VALUE
4827 & ((HOST_WIDE_INT) 1
4828 << (GET_MODE_BITSIZE (inner_mode) - 1))))
4829#ifdef FLOAT_STORE_FLAG_VALUE
4830 || (code == GE
4831 		     && SCALAR_FLOAT_MODE_P (inner_mode)
4832 && (fsfv = FLOAT_STORE_FLAG_VALUE (inner_mode),
4833 REAL_VALUE_NEGATIVE (fsfv)))
4834#endif
4835 ))
4836 && COMPARISON_P (SET_SRC (set))
4837 && (((GET_MODE_CLASS (mode) == MODE_CC)
4838 == (GET_MODE_CLASS (inner_mode) == MODE_CC))
4839 || mode == VOIDmode || inner_mode == VOIDmode))
4840
4841 {
4842 reverse_code = 1;
4843 x = SET_SRC (set);
4844 }
4845 else
4846 break;
4847 }
4848
4849 else if (reg_set_p (op0, prev))
4850 /* If this sets OP0, but not directly, we have to give up. */
4851 break;
4852
4853 if (x)
4854 {
4855 /* If the caller is expecting the condition to be valid at INSN,
4856 make sure X doesn't change before INSN. */
4857 if (valid_at_insn_p)
4858 if (modified_in_p (x, prev) || modified_between_p (x, prev, insn))
4859 break;
4860 if (COMPARISON_P (x))
4861 code = GET_CODE (x);
4862 if (reverse_code)
4863 {
4864 code = reversed_comparison_code (x, prev);
4865 if (code == UNKNOWN)
4866 return 0;
4867 reverse_code = 0;
4868 }
4869
4870 op0 = XEXP (x, 0), op1 = XEXP (x, 1);
4871 if (earliest)
4872 *earliest = prev;
4873 }
4874 }
4875
4876 /* If constant is first, put it last. */
4877 if (CONSTANT_P (op0))
4878 code = swap_condition (code), tem = op0, op0 = op1, op1 = tem;
4879
4880 /* If OP0 is the result of a comparison, we weren't able to find what
4881 was really being compared, so fail. */
4882 if (!allow_cc_mode
4883 && GET_MODE_CLASS (GET_MODE (op0)) == MODE_CC)
4884 return 0;
4885
4886 /* Canonicalize any ordered comparison with integers involving equality
4887 if we can do computations in the relevant mode and we do not
4888 overflow. */
4889
4890 if (GET_MODE_CLASS (GET_MODE (op0)) != MODE_CC
4891       && CONST_INT_P (op1)
4892 && GET_MODE (op0) != VOIDmode
4893 && GET_MODE_BITSIZE (GET_MODE (op0)) <= HOST_BITS_PER_WIDE_INT)
4894 {
4895 HOST_WIDE_INT const_val = INTVAL (op1);
4896 unsigned HOST_WIDE_INT uconst_val = const_val;
4897 unsigned HOST_WIDE_INT max_val
4898 = (unsigned HOST_WIDE_INT) GET_MODE_MASK (GET_MODE (op0));
4899
4900 switch (code)
4901 {
4902 case LE:
4903 if ((unsigned HOST_WIDE_INT) const_val != max_val >> 1)
4904 code = LT, op1 = gen_int_mode (const_val + 1, GET_MODE (op0));
4905 break;
4906
4907 /* When cross-compiling, const_val might be sign-extended from
4908 BITS_PER_WORD to HOST_BITS_PER_WIDE_INT */
4909 case GE:
4910 if ((HOST_WIDE_INT) (const_val & max_val)
4911 != (((HOST_WIDE_INT) 1
4912 << (GET_MODE_BITSIZE (GET_MODE (op0)) - 1))))
4913 code = GT, op1 = gen_int_mode (const_val - 1, GET_MODE (op0));
4914 break;
4915
4916 case LEU:
4917 if (uconst_val < max_val)
4918 code = LTU, op1 = gen_int_mode (uconst_val + 1, GET_MODE (op0));
4919 break;
4920
4921 case GEU:
4922 if (uconst_val != 0)
4923 code = GTU, op1 = gen_int_mode (uconst_val - 1, GET_MODE (op0));
4924 break;
4925
4926 default:
4927 break;
4928 }
4929 }
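  /* Worked example (illustrative, not part of the original file):
     (le:SI (reg) (const_int 4)) is canonicalized above to
     (lt:SI (reg) (const_int 5)), and (geu:SI (reg) (const_int 1))
     to (gtu:SI (reg) (const_int 0)).  */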
4930
4931 /* Never return CC0; return zero instead. */
4932 if (CC0_P (op0))
4933 return 0;
4934
4935 return gen_rtx_fmt_ee (code, VOIDmode, op0, op1);
4936}
4937
4938/* Given a jump insn JUMP, return the condition that will cause it to branch
4939 to its JUMP_LABEL. If the condition cannot be understood, or is an
4940 inequality floating-point comparison which needs to be reversed, 0 will
4941 be returned.
4942
4943 If EARLIEST is nonzero, it is a pointer to a place where the earliest
4944 insn used in locating the condition was found. If a replacement test
4945 of the condition is desired, it should be placed in front of that
4946 insn and we will be sure that the inputs are still valid. If EARLIEST
4947 is null, the returned condition will be valid at INSN.
4948
4949 If ALLOW_CC_MODE is nonzero, allow the condition returned to be a
4950 compare CC mode register.
4951
4952 VALID_AT_INSN_P is the same as for canonicalize_condition. */
4953
4954rtx
4955get_condition (rtx jump, rtx *earliest, int allow_cc_mode, int valid_at_insn_p)
4956{
4957 rtx cond;
4958 int reverse;
4959 rtx set;
4960
4961 /* If this is not a standard conditional jump, we can't parse it. */
4962 if (!JUMP_P (jump)
4963 || ! any_condjump_p (jump))
4964 return 0;
4965 set = pc_set (jump);
4966
4967 cond = XEXP (SET_SRC (set), 0);
4968
4969 /* If this branches to JUMP_LABEL when the condition is false, reverse
4970 the condition. */
4971 reverse
4972 = GET_CODE (XEXP (SET_SRC (set), 2)) == LABEL_REF
4973 && XEXP (XEXP (SET_SRC (set), 2), 0) == JUMP_LABEL (jump);
4974
4975 return canonicalize_condition (jump, cond, reverse, earliest, NULL_RTX,
4976 allow_cc_mode, valid_at_insn_p);
4977}
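/* Usage sketch (editorial illustration, not part of the original file):
   fetching a jump's branch condition in a form valid at the jump itself.
   The helper name is hypothetical.  */
#if 0
static rtx
branch_condition (rtx jump)
{
  /* A null EARLIEST requests a condition valid at JUMP itself.  */
  return get_condition (jump, NULL, /*allow_cc_mode=*/0,
			/*valid_at_insn_p=*/1);
}
#endif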
4978
4979/* Initialize the table NUM_SIGN_BIT_COPIES_IN_REP based on
4980 TARGET_MODE_REP_EXTENDED.
4981
4982 Note that we assume that the property of
4983 TARGET_MODE_REP_EXTENDED(B, C) is sticky to the integral modes
4984 narrower than mode B. I.e., if A is a mode narrower than B then in
4985 order to be able to operate on it in mode B, mode A needs to
4986 satisfy the requirements set by the representation of mode B. */
4987
4988static void
4989init_num_sign_bit_copies_in_rep (void)
4990{
4991 enum machine_mode mode, in_mode;
4992
4993 for (in_mode = GET_CLASS_NARROWEST_MODE (MODE_INT); in_mode != VOIDmode;
4994 in_mode = GET_MODE_WIDER_MODE (mode))
4995 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != in_mode;
4996 mode = GET_MODE_WIDER_MODE (mode))
4997 {
4998 enum machine_mode i;
4999
5000 /* Currently, it is assumed that TARGET_MODE_REP_EXTENDED
5001 extends to the next widest mode. */
5002 gcc_assert (targetm.mode_rep_extended (mode, in_mode) == UNKNOWN
5003 || GET_MODE_WIDER_MODE (mode) == in_mode);
5004
5005 /* We are in in_mode. Count how many bits outside of mode
5006 have to be copies of the sign-bit. */
5007 for (i = mode; i != in_mode; i = GET_MODE_WIDER_MODE (i))
5008 {
5009 enum machine_mode wider = GET_MODE_WIDER_MODE (i);
5010
5011 if (targetm.mode_rep_extended (i, wider) == SIGN_EXTEND
5012 /* We can only check sign-bit copies starting from the
5013 top-bit. In order to be able to check the bits we
5014 have already seen we pretend that subsequent bits
5015 have to be sign-bit copies too. */
5016 || num_sign_bit_copies_in_rep [in_mode][mode])
5017 num_sign_bit_copies_in_rep [in_mode][mode]
5018 += GET_MODE_BITSIZE (wider) - GET_MODE_BITSIZE (i);
5019 }
5020 }
5021}
5022
5023/* Suppose that truncation from the machine mode of X to MODE is not a
5024 no-op. See if there is anything special about X so that we can
5025 assume it already contains a truncated value of MODE. */
5026
5027bool
5028 truncated_to_mode (enum machine_mode mode, const_rtx x)
5029 {
5030 /* This register has already been used in MODE without explicit
5031 truncation. */
5032 if (REG_P (x) && rtl_hooks.reg_truncated_to_mode (mode, x))
5033 return true;
5034
5035 /* See if we already satisfy the requirements of MODE. If yes we
5036 can just switch to MODE. */
5037 if (num_sign_bit_copies_in_rep[GET_MODE (x)][mode]
5038 && (num_sign_bit_copies (x, GET_MODE (x))
5039 >= num_sign_bit_copies_in_rep[GET_MODE (x)][mode] + 1))
5040 return true;
5041
5042 return false;
5043}
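/* Worked example (illustrative, not part of the original file): on a
   target where TARGET_MODE_REP_EXTENDED (SImode, DImode) is SIGN_EXTEND
   and DImode is 64 bits wide, num_sign_bit_copies_in_rep[DImode][SImode]
   is 32, so a DImode value with at least 33 sign bit copies already
   satisfies the SImode representation and is reported as truncated.  */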
5044\f
5045/* Initialize non_rtx_starting_operands, which is used to speed up
5046 for_each_rtx. */
5047void
5048init_rtlanal (void)
5049{
5050 int i;
5051 for (i = 0; i < NUM_RTX_CODE; i++)
5052 {
5053 const char *format = GET_RTX_FORMAT (i);
5054 const char *first = strpbrk (format, "eEV");
5055 non_rtx_starting_operands[i] = first ? first - format : -1;
5056 }
5057
5058 init_num_sign_bit_copies_in_rep ();
5059 }
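/* Worked example (illustrative, not part of the original file): PLUS has
   rtx format "ee", so its first rtx operand is at position 0, while
   CONST_INT has format "w", which contains no 'e', 'E' or 'V', so its
   entry is -1.  */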
5060\f
5061/* Check whether this is a constant pool constant. */
5062bool
5063constant_pool_constant_p (rtx x)
5064{
5065 x = avoid_constant_pool_reference (x);
5066 return GET_CODE (x) == CONST_DOUBLE;
5067}
842e098c
AN
5068\f
5069/* If M is a bitmask that selects a field of low-order bits within an item but
5070 not the entire word, return the length of the field. Return -1 otherwise.
5071 M is used in machine mode MODE. */
5072
5073int
5074low_bitmask_len (enum machine_mode mode, unsigned HOST_WIDE_INT m)
5075{
5076 if (mode != VOIDmode)
5077 {
5078 if (GET_MODE_BITSIZE (mode) > HOST_BITS_PER_WIDE_INT)
5079 return -1;
5080 m &= GET_MODE_MASK (mode);
5081 }
5082
5083 return exact_log2 (m + 1);
5084}
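/* Worked example (illustrative, not part of the original file): in
   SImode, M == 0x3f selects the six low-order bits, and exact_log2
   (0x3f + 1) returns 6; M == 0x3e is not a low-order field, and
   exact_log2 (0x3e + 1) returns -1 because 0x3f is not a power of
   two.  */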