/* Analyze RTL for GNU compiler.
   Copyright (C) 1987-2015 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "diagnostic-core.h"
#include "hard-reg-set.h"
#include "rtl.h"
#include "insn-config.h"
#include "recog.h"
#include "target.h"
#include "output.h"
#include "tm_p.h"
#include "flags.h"
#include "regs.h"
#include "hashtab.h"
#include "hash-set.h"
#include "vec.h"
#include "machmode.h"
#include "input.h"
#include "function.h"
#include "predict.h"
#include "basic-block.h"
#include "df.h"
#include "symtab.h"
#include "wide-int.h"
#include "inchash.h"
#include "tree.h"
#include "emit-rtl.h"  /* FIXME: Can go away once crtl is moved to rtl.h.  */
#include "addresses.h"
#include "rtl-iter.h"

/* Forward declarations */
static void set_of_1 (rtx, const_rtx, void *);
static bool covers_regno_p (const_rtx, unsigned int);
static bool covers_regno_no_parallel_p (const_rtx, unsigned int);
static int computed_jump_p_1 (const_rtx);
static void parms_set (rtx, const_rtx, void *);

static unsigned HOST_WIDE_INT cached_nonzero_bits (const_rtx, machine_mode,
                                                   const_rtx, machine_mode,
                                                   unsigned HOST_WIDE_INT);
static unsigned HOST_WIDE_INT nonzero_bits1 (const_rtx, machine_mode,
                                             const_rtx, machine_mode,
                                             unsigned HOST_WIDE_INT);
static unsigned int cached_num_sign_bit_copies (const_rtx, machine_mode,
                                                const_rtx, machine_mode,
                                                unsigned int);
static unsigned int num_sign_bit_copies1 (const_rtx, machine_mode, const_rtx,
                                          machine_mode, unsigned int);

rtx_subrtx_bound_info rtx_all_subrtx_bounds[NUM_RTX_CODE];
rtx_subrtx_bound_info rtx_nonconst_subrtx_bounds[NUM_RTX_CODE];

/* Truncation narrows the mode from SOURCE mode to DESTINATION mode.
   If TARGET_MODE_REP_EXTENDED (DESTINATION, DESTINATION_REP) is
   SIGN_EXTEND then while narrowing we also have to enforce the
   representation and sign-extend the value to mode DESTINATION_REP.

   If the value is already sign-extended to DESTINATION_REP mode we
   can just switch to DESTINATION mode on it.  For each pair of
   integral modes SOURCE and DESTINATION, when truncating from SOURCE
   to DESTINATION, NUM_SIGN_BIT_COPIES_IN_REP[SOURCE][DESTINATION]
   contains the number of high-order bits in SOURCE that have to be
   copies of the sign-bit so that we can do this mode-switch to
   DESTINATION.  */

static unsigned int
num_sign_bit_copies_in_rep[MAX_MODE_INT + 1][MAX_MODE_INT + 1];

/* Store X into index I of ARRAY.  ARRAY is known to have at least I
   elements.  Return the new base of ARRAY.  */

template <typename T>
typename T::value_type *
generic_subrtx_iterator <T>::add_single_to_queue (array_type &array,
                                                  value_type *base,
                                                  size_t i, value_type x)
{
  if (base == array.stack)
    {
      if (i < LOCAL_ELEMS)
        {
          base[i] = x;
          return base;
        }
      gcc_checking_assert (i == LOCAL_ELEMS);
      vec_safe_grow (array.heap, i + 1);
      base = array.heap->address ();
      memcpy (base, array.stack, sizeof (array.stack));
      base[LOCAL_ELEMS] = x;
      return base;
    }
  unsigned int length = array.heap->length ();
  if (length > i)
    {
      gcc_checking_assert (base == array.heap->address ());
      base[i] = x;
      return base;
    }
  else
    {
      gcc_checking_assert (i == length);
      vec_safe_push (array.heap, x);
      return array.heap->address ();
    }
}

/* Add the subrtxes of X to worklist ARRAY, starting at END.  Return the
   number of elements added to the worklist.  */

template <typename T>
size_t
generic_subrtx_iterator <T>::add_subrtxes_to_queue (array_type &array,
                                                    value_type *base,
                                                    size_t end, rtx_type x)
{
  enum rtx_code code = GET_CODE (x);
  const char *format = GET_RTX_FORMAT (code);
  size_t orig_end = end;
  if (__builtin_expect (INSN_P (x), false))
    {
      /* Put the pattern at the top of the queue, since that's what
         we're likely to want most.  It also allows for the SEQUENCE
         code below.  */
      for (int i = GET_RTX_LENGTH (GET_CODE (x)) - 1; i >= 0; --i)
        if (format[i] == 'e')
          {
            value_type subx = T::get_value (x->u.fld[i].rt_rtx);
            if (__builtin_expect (end < LOCAL_ELEMS, true))
              base[end++] = subx;
            else
              base = add_single_to_queue (array, base, end++, subx);
          }
    }
  else
    for (int i = 0; format[i]; ++i)
      if (format[i] == 'e')
        {
          value_type subx = T::get_value (x->u.fld[i].rt_rtx);
          if (__builtin_expect (end < LOCAL_ELEMS, true))
            base[end++] = subx;
          else
            base = add_single_to_queue (array, base, end++, subx);
        }
      else if (format[i] == 'E')
        {
          unsigned int length = GET_NUM_ELEM (x->u.fld[i].rt_rtvec);
          rtx *vec = x->u.fld[i].rt_rtvec->elem;
          if (__builtin_expect (end + length <= LOCAL_ELEMS, true))
            for (unsigned int j = 0; j < length; j++)
              base[end++] = T::get_value (vec[j]);
          else
            for (unsigned int j = 0; j < length; j++)
              base = add_single_to_queue (array, base, end++,
                                          T::get_value (vec[j]));
          if (code == SEQUENCE && end == length)
            /* If the subrtxes of the sequence fill the entire array then
               we know that no other parts of a containing insn are queued.
               The caller is therefore iterating over the sequence as a
               PATTERN (...), so we also want the patterns of the
               subinstructions.  */
            for (unsigned int j = 0; j < length; j++)
              {
                typename T::rtx_type x = T::get_rtx (base[j]);
                if (INSN_P (x))
                  base[j] = T::get_value (PATTERN (x));
              }
        }
  return end - orig_end;
}

template <typename T>
void
generic_subrtx_iterator <T>::free_array (array_type &array)
{
  vec_free (array.heap);
}

template <typename T>
const size_t generic_subrtx_iterator <T>::LOCAL_ELEMS;

template class generic_subrtx_iterator <const_rtx_accessor>;
template class generic_subrtx_iterator <rtx_var_accessor>;
template class generic_subrtx_iterator <rtx_ptr_accessor>;

/* Return 1 if the value of X is unstable
   (would be different at a different point in the program).
   The frame pointer, arg pointer, etc. are considered stable
   (within one function) and so is anything marked `unchanging'.  */

int
rtx_unstable_p (const_rtx x)
{
  const RTX_CODE code = GET_CODE (x);
  int i;
  const char *fmt;

  switch (code)
    {
    case MEM:
      return !MEM_READONLY_P (x) || rtx_unstable_p (XEXP (x, 0));

    case CONST:
    CASE_CONST_ANY:
    case SYMBOL_REF:
    case LABEL_REF:
      return 0;

    case REG:
      /* As in rtx_varies_p, we have to use the actual rtx, not reg number.  */
      if (x == frame_pointer_rtx || x == hard_frame_pointer_rtx
          /* The arg pointer varies if it is not a fixed register.  */
          || (x == arg_pointer_rtx && fixed_regs[ARG_POINTER_REGNUM]))
        return 0;
      /* ??? When call-clobbered, the value is stable modulo the restore
         that must happen after a call.  This currently screws up local-alloc
         into believing that the restore is not needed.  */
      if (!PIC_OFFSET_TABLE_REG_CALL_CLOBBERED && x == pic_offset_table_rtx)
        return 0;
      return 1;

    case ASM_OPERANDS:
      if (MEM_VOLATILE_P (x))
        return 1;

      /* Fall through.  */

    default:
      break;
    }

  fmt = GET_RTX_FORMAT (code);
  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    if (fmt[i] == 'e')
      {
        if (rtx_unstable_p (XEXP (x, i)))
          return 1;
      }
    else if (fmt[i] == 'E')
      {
        int j;
        for (j = 0; j < XVECLEN (x, i); j++)
          if (rtx_unstable_p (XVECEXP (x, i, j)))
            return 1;
      }

  return 0;
}
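
/* Usage sketch (illustrative only, not from the original sources; MEM
   below is a placeholder for some (mem ...) rtx the caller already has):

     if (!rtx_unstable_p (XEXP (mem, 0)))
       {
         // The address is stable within the function, so it can be
         // hashed or compared against other stable addresses.
       }
 */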

/* Return 1 if X has a value that can vary even between two
   executions of the program.  0 means X can be compared reliably
   against certain constants or near-constants.
   FOR_ALIAS is nonzero if we are called from alias analysis; if it is
   zero, we are slightly more conservative.
   The frame pointer and the arg pointer are considered constant.  */

bool
rtx_varies_p (const_rtx x, bool for_alias)
{
  RTX_CODE code;
  int i;
  const char *fmt;

  if (!x)
    return 0;

  code = GET_CODE (x);
  switch (code)
    {
    case MEM:
      return !MEM_READONLY_P (x) || rtx_varies_p (XEXP (x, 0), for_alias);

    case CONST:
    CASE_CONST_ANY:
    case SYMBOL_REF:
    case LABEL_REF:
      return 0;

    case REG:
      /* Note that we have to test for the actual rtx used for the frame
         and arg pointers and not just the register number in case we have
         eliminated the frame and/or arg pointer and are using it
         for pseudos.  */
      if (x == frame_pointer_rtx || x == hard_frame_pointer_rtx
          /* The arg pointer varies if it is not a fixed register.  */
          || (x == arg_pointer_rtx && fixed_regs[ARG_POINTER_REGNUM]))
        return 0;
      if (x == pic_offset_table_rtx
          /* ??? When call-clobbered, the value is stable modulo the restore
             that must happen after a call.  This currently screws up
             local-alloc into believing that the restore is not needed, so we
             must return 0 only if we are called from alias analysis.  */
          && (!PIC_OFFSET_TABLE_REG_CALL_CLOBBERED || for_alias))
        return 0;
      return 1;

    case LO_SUM:
      /* The operand 0 of a LO_SUM is considered constant
         (in fact it is related specifically to operand 1)
         during alias analysis.  */
      return (! for_alias && rtx_varies_p (XEXP (x, 0), for_alias))
             || rtx_varies_p (XEXP (x, 1), for_alias);

    case ASM_OPERANDS:
      if (MEM_VOLATILE_P (x))
        return 1;

      /* Fall through.  */

    default:
      break;
    }

  fmt = GET_RTX_FORMAT (code);
  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    if (fmt[i] == 'e')
      {
        if (rtx_varies_p (XEXP (x, i), for_alias))
          return 1;
      }
    else if (fmt[i] == 'E')
      {
        int j;
        for (j = 0; j < XVECLEN (x, i); j++)
          if (rtx_varies_p (XVECEXP (x, i, j), for_alias))
            return 1;
      }

  return 0;
}

/* Return nonzero if the use of X+OFFSET as an address in a MEM with SIZE
   bytes can cause a trap.  MODE is the mode of the MEM (not that of X) and
   UNALIGNED_MEMS controls whether nonzero is returned for unaligned memory
   references on strict alignment machines.  */

static int
rtx_addr_can_trap_p_1 (const_rtx x, HOST_WIDE_INT offset, HOST_WIDE_INT size,
                       machine_mode mode, bool unaligned_mems)
{
  enum rtx_code code = GET_CODE (x);

  /* The offset must be a multiple of the mode size if we are considering
     unaligned memory references on strict alignment machines.  */
  if (STRICT_ALIGNMENT && unaligned_mems && GET_MODE_SIZE (mode) != 0)
    {
      HOST_WIDE_INT actual_offset = offset;

#ifdef SPARC_STACK_BOUNDARY_HACK
      /* ??? The SPARC port may claim a STACK_BOUNDARY higher than
         the real alignment of %sp.  However, when it does this, the
         alignment of %sp+STACK_POINTER_OFFSET is STACK_BOUNDARY.  */
      if (SPARC_STACK_BOUNDARY_HACK
          && (x == stack_pointer_rtx || x == hard_frame_pointer_rtx))
        actual_offset -= STACK_POINTER_OFFSET;
#endif

      if (actual_offset % GET_MODE_SIZE (mode) != 0)
        return 1;
    }

  switch (code)
    {
    case SYMBOL_REF:
      if (SYMBOL_REF_WEAK (x))
        return 1;
      if (!CONSTANT_POOL_ADDRESS_P (x))
        {
          tree decl;
          HOST_WIDE_INT decl_size;

          if (offset < 0)
            return 1;
          if (size == 0)
            size = GET_MODE_SIZE (mode);
          if (size == 0)
            return offset != 0;

          /* If the size of the access or of the symbol is unknown,
             assume the worst.  */
          decl = SYMBOL_REF_DECL (x);

          /* Else check that the access is in bounds.  TODO: restructure
             expr_size/tree_expr_size/int_expr_size and just use the latter.  */
          if (!decl)
            decl_size = -1;
          else if (DECL_P (decl) && DECL_SIZE_UNIT (decl))
            decl_size = (tree_fits_shwi_p (DECL_SIZE_UNIT (decl))
                         ? tree_to_shwi (DECL_SIZE_UNIT (decl))
                         : -1);
          else if (TREE_CODE (decl) == STRING_CST)
            decl_size = TREE_STRING_LENGTH (decl);
          else if (TYPE_SIZE_UNIT (TREE_TYPE (decl)))
            decl_size = int_size_in_bytes (TREE_TYPE (decl));
          else
            decl_size = -1;

          return (decl_size <= 0 ? offset != 0 : offset + size > decl_size);
        }

      return 0;

    case LABEL_REF:
      return 0;

    case REG:
      /* Stack references are assumed not to trap, but we need to deal with
         nonsensical offsets.  */
      if (x == frame_pointer_rtx)
        {
          HOST_WIDE_INT adj_offset = offset - STARTING_FRAME_OFFSET;
          if (size == 0)
            size = GET_MODE_SIZE (mode);
          if (FRAME_GROWS_DOWNWARD)
            {
              if (adj_offset < frame_offset || adj_offset + size - 1 >= 0)
                return 1;
            }
          else
            {
              if (adj_offset < 0 || adj_offset + size - 1 >= frame_offset)
                return 1;
            }
          return 0;
        }
      /* ??? Need to add a similar guard for nonsensical offsets.  */
      if (x == hard_frame_pointer_rtx
          || x == stack_pointer_rtx
          /* The arg pointer varies if it is not a fixed register.  */
          || (x == arg_pointer_rtx && fixed_regs[ARG_POINTER_REGNUM]))
        return 0;
      /* All of the virtual frame registers are stack references.  */
      if (REGNO (x) >= FIRST_VIRTUAL_REGISTER
          && REGNO (x) <= LAST_VIRTUAL_REGISTER)
        return 0;
      return 1;

    case CONST:
      return rtx_addr_can_trap_p_1 (XEXP (x, 0), offset, size,
                                    mode, unaligned_mems);

    case PLUS:
      /* An address is assumed not to trap if:
         - it is the pic register plus a constant.  */
      if (XEXP (x, 0) == pic_offset_table_rtx && CONSTANT_P (XEXP (x, 1)))
        return 0;

      /* - or it is an address that can't trap plus a constant integer.  */
      if (CONST_INT_P (XEXP (x, 1))
          && !rtx_addr_can_trap_p_1 (XEXP (x, 0), offset + INTVAL (XEXP (x, 1)),
                                     size, mode, unaligned_mems))
        return 0;

      return 1;

    case LO_SUM:
    case PRE_MODIFY:
      return rtx_addr_can_trap_p_1 (XEXP (x, 1), offset, size,
                                    mode, unaligned_mems);

    case PRE_DEC:
    case PRE_INC:
    case POST_DEC:
    case POST_INC:
    case POST_MODIFY:
      return rtx_addr_can_trap_p_1 (XEXP (x, 0), offset, size,
                                    mode, unaligned_mems);

    default:
      break;
    }

  /* If it isn't one of the cases above, it can cause a trap.  */
  return 1;
}

/* Return nonzero if the use of X as an address in a MEM can cause a trap.  */

int
rtx_addr_can_trap_p (const_rtx x)
{
  return rtx_addr_can_trap_p_1 (x, 0, 0, VOIDmode, false);
}
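
/* Usage sketch (illustrative only; MEM is a placeholder):

     if (!rtx_addr_can_trap_p (XEXP (mem, 0)))
       {
         // Dereferencing the address cannot fault, so a load from it
         // may be executed speculatively, e.g. hoisted past a branch.
       }
 */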

/* Return true if X is an address that is known to not be zero.  */

bool
nonzero_address_p (const_rtx x)
{
  const enum rtx_code code = GET_CODE (x);

  switch (code)
    {
    case SYMBOL_REF:
      return !SYMBOL_REF_WEAK (x);

    case LABEL_REF:
      return true;

    case REG:
      /* As in rtx_varies_p, we have to use the actual rtx, not reg number.  */
      if (x == frame_pointer_rtx || x == hard_frame_pointer_rtx
          || x == stack_pointer_rtx
          || (x == arg_pointer_rtx && fixed_regs[ARG_POINTER_REGNUM]))
        return true;
      /* All of the virtual frame registers are stack references.  */
      if (REGNO (x) >= FIRST_VIRTUAL_REGISTER
          && REGNO (x) <= LAST_VIRTUAL_REGISTER)
        return true;
      return false;

    case CONST:
      return nonzero_address_p (XEXP (x, 0));

    case PLUS:
      /* Handle PIC references.  */
      if (XEXP (x, 0) == pic_offset_table_rtx
          && CONSTANT_P (XEXP (x, 1)))
        return true;
      return false;

    case PRE_MODIFY:
      /* Similar to the above; allow positive offsets.  Further, since
         auto-inc is only allowed in memories, the register must be a
         pointer.  */
      if (CONST_INT_P (XEXP (x, 1))
          && INTVAL (XEXP (x, 1)) > 0)
        return true;
      return nonzero_address_p (XEXP (x, 0));

    case PRE_INC:
      /* Similarly.  Further, the offset is always positive.  */
      return true;

    case PRE_DEC:
    case POST_DEC:
    case POST_INC:
    case POST_MODIFY:
      return nonzero_address_p (XEXP (x, 0));

    case LO_SUM:
      return nonzero_address_p (XEXP (x, 1));

    default:
      break;
    }

  /* If it isn't one of the cases above, it might be zero.  */
  return false;
}
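
/* Usage sketch (illustrative only): a comparison of such an address
   against zero can be folded away.

     if (nonzero_address_p (addr))
       // (eq addr (const_int 0)) is known to be false.
       ...
 */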

/* Return 1 if X refers to a memory location whose address
   cannot be compared reliably with constant addresses,
   or if X refers to a BLKmode memory object.
   FOR_ALIAS is nonzero if we are called from alias analysis; if it is
   zero, we are slightly more conservative.  */

bool
rtx_addr_varies_p (const_rtx x, bool for_alias)
{
  enum rtx_code code;
  int i;
  const char *fmt;

  if (x == 0)
    return 0;

  code = GET_CODE (x);
  if (code == MEM)
    return GET_MODE (x) == BLKmode || rtx_varies_p (XEXP (x, 0), for_alias);

  fmt = GET_RTX_FORMAT (code);
  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    if (fmt[i] == 'e')
      {
        if (rtx_addr_varies_p (XEXP (x, i), for_alias))
          return 1;
      }
    else if (fmt[i] == 'E')
      {
        int j;
        for (j = 0; j < XVECLEN (x, i); j++)
          if (rtx_addr_varies_p (XVECEXP (x, i, j), for_alias))
            return 1;
      }
  return 0;
}

/* Return the CALL in X if there is one.  */

rtx
get_call_rtx_from (rtx x)
{
  if (INSN_P (x))
    x = PATTERN (x);
  if (GET_CODE (x) == PARALLEL)
    x = XVECEXP (x, 0, 0);
  if (GET_CODE (x) == SET)
    x = SET_SRC (x);
  if (GET_CODE (x) == CALL && MEM_P (XEXP (x, 0)))
    return x;
  return NULL_RTX;
}
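
/* Usage sketch (illustrative only; INSN is a placeholder call insn):

     rtx call = get_call_rtx_from (insn);
     if (call)
       {
         rtx callee_mem = XEXP (call, 0);  // the MEM holding the address
         // XEXP (callee_mem, 0) is often a SYMBOL_REF for direct calls.
       }
 */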

/* Return the value of the integer term in X, if one is apparent;
   otherwise return 0.
   Only obvious integer terms are detected.
   This is used in cse.c with the `related_value' field.  */

HOST_WIDE_INT
get_integer_term (const_rtx x)
{
  if (GET_CODE (x) == CONST)
    x = XEXP (x, 0);

  if (GET_CODE (x) == MINUS
      && CONST_INT_P (XEXP (x, 1)))
    return - INTVAL (XEXP (x, 1));
  if (GET_CODE (x) == PLUS
      && CONST_INT_P (XEXP (x, 1)))
    return INTVAL (XEXP (x, 1));
  return 0;
}

/* If X is a constant, return the value sans apparent integer term;
   otherwise return 0.
   Only obvious integer terms are detected.  */

rtx
get_related_value (const_rtx x)
{
  if (GET_CODE (x) != CONST)
    return 0;
  x = XEXP (x, 0);
  if (GET_CODE (x) == PLUS
      && CONST_INT_P (XEXP (x, 1)))
    return XEXP (x, 0);
  else if (GET_CODE (x) == MINUS
           && CONST_INT_P (XEXP (x, 1)))
    return XEXP (x, 0);
  return 0;
}

/* Return true if SYMBOL is a SYMBOL_REF and OFFSET + SYMBOL points
   to somewhere in the same object or object_block as SYMBOL.  */

bool
offset_within_block_p (const_rtx symbol, HOST_WIDE_INT offset)
{
  tree decl;

  if (GET_CODE (symbol) != SYMBOL_REF)
    return false;

  if (offset == 0)
    return true;

  if (offset > 0)
    {
      if (CONSTANT_POOL_ADDRESS_P (symbol)
          && offset < (int) GET_MODE_SIZE (get_pool_mode (symbol)))
        return true;

      decl = SYMBOL_REF_DECL (symbol);
      if (decl && offset < int_size_in_bytes (TREE_TYPE (decl)))
        return true;
    }

  if (SYMBOL_REF_HAS_BLOCK_INFO_P (symbol)
      && SYMBOL_REF_BLOCK (symbol)
      && SYMBOL_REF_BLOCK_OFFSET (symbol) >= 0
      && ((unsigned HOST_WIDE_INT) offset + SYMBOL_REF_BLOCK_OFFSET (symbol)
          < (unsigned HOST_WIDE_INT) SYMBOL_REF_BLOCK (symbol)->size))
    return true;

  return false;
}

/* Split X into a base and a constant offset, storing them in *BASE_OUT
   and *OFFSET_OUT respectively.  */

void
split_const (rtx x, rtx *base_out, rtx *offset_out)
{
  if (GET_CODE (x) == CONST)
    {
      x = XEXP (x, 0);
      if (GET_CODE (x) == PLUS && CONST_INT_P (XEXP (x, 1)))
        {
          *base_out = XEXP (x, 0);
          *offset_out = XEXP (x, 1);
          return;
        }
    }
  *base_out = x;
  *offset_out = const0_rtx;
}
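
/* Usage sketch (illustrative only): for
   X == (const (plus (symbol_ref "s") (const_int 4))) this yields the
   symbol and the offset separately:

     rtx base, offset;
     split_const (x, &base, &offset);
     // base == (symbol_ref "s"), offset == (const_int 4)
 */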

/* Return the number of places FIND appears within X.  If COUNT_DEST is
   zero, we do not count occurrences inside the destination of a SET.  */

int
count_occurrences (const_rtx x, const_rtx find, int count_dest)
{
  int i, j;
  enum rtx_code code;
  const char *format_ptr;
  int count;

  if (x == find)
    return 1;

  code = GET_CODE (x);

  switch (code)
    {
    case REG:
    CASE_CONST_ANY:
    case SYMBOL_REF:
    case CODE_LABEL:
    case PC:
    case CC0:
      return 0;

    case EXPR_LIST:
      count = count_occurrences (XEXP (x, 0), find, count_dest);
      if (XEXP (x, 1))
        count += count_occurrences (XEXP (x, 1), find, count_dest);
      return count;

    case MEM:
      if (MEM_P (find) && rtx_equal_p (x, find))
        return 1;
      break;

    case SET:
      if (SET_DEST (x) == find && ! count_dest)
        return count_occurrences (SET_SRC (x), find, count_dest);
      break;

    default:
      break;
    }

  format_ptr = GET_RTX_FORMAT (code);
  count = 0;

  for (i = 0; i < GET_RTX_LENGTH (code); i++)
    {
      switch (*format_ptr++)
        {
        case 'e':
          count += count_occurrences (XEXP (x, i), find, count_dest);
          break;

        case 'E':
          for (j = 0; j < XVECLEN (x, i); j++)
            count += count_occurrences (XVECEXP (x, i, j), find, count_dest);
          break;
        }
    }
  return count;
}

/* Return TRUE if OP is a register or subreg of a register that
   holds an unsigned quantity.  Otherwise, return FALSE.  */

bool
unsigned_reg_p (rtx op)
{
  if (REG_P (op)
      && REG_EXPR (op)
      && TYPE_UNSIGNED (TREE_TYPE (REG_EXPR (op))))
    return true;

  if (GET_CODE (op) == SUBREG
      && SUBREG_PROMOTED_SIGN (op))
    return true;

  return false;
}

/* Nonzero if register REG appears somewhere within IN.
   Also works if REG is not a register; in this case it checks
   for a subexpression of IN that is Lisp "equal" to REG.  */

int
reg_mentioned_p (const_rtx reg, const_rtx in)
{
  const char *fmt;
  int i;
  enum rtx_code code;

  if (in == 0)
    return 0;

  if (reg == in)
    return 1;

  if (GET_CODE (in) == LABEL_REF)
    return reg == LABEL_REF_LABEL (in);

  code = GET_CODE (in);

  switch (code)
    {
      /* Compare registers by number.  */
    case REG:
      return REG_P (reg) && REGNO (in) == REGNO (reg);

      /* These codes have no constituent expressions
         and are unique.  */
    case SCRATCH:
    case CC0:
    case PC:
      return 0;

    CASE_CONST_ANY:
      /* These are kept unique for a given value.  */
      return 0;

    default:
      break;
    }

  if (GET_CODE (reg) == code && rtx_equal_p (reg, in))
    return 1;

  fmt = GET_RTX_FORMAT (code);

  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    {
      if (fmt[i] == 'E')
        {
          int j;
          for (j = XVECLEN (in, i) - 1; j >= 0; j--)
            if (reg_mentioned_p (reg, XVECEXP (in, i, j)))
              return 1;
        }
      else if (fmt[i] == 'e'
               && reg_mentioned_p (reg, XEXP (in, i)))
        return 1;
    }
  return 0;
}
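
/* Usage sketch (illustrative only; INSN is a placeholder):

     if (reg_mentioned_p (reg, PATTERN (insn)))
       // REG (or an rtx equal to it) occurs somewhere in the pattern,
       // including inside SET_DESTs; see reg_referenced_p below for a
       // test that distinguishes uses from pure stores.
       ...
 */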

/* Return 1 if in between BEG and END, exclusive of BEG and END, there is
   no CODE_LABEL insn.  */

int
no_labels_between_p (const rtx_insn *beg, const rtx_insn *end)
{
  rtx_insn *p;
  if (beg == end)
    return 0;
  for (p = NEXT_INSN (beg); p != end; p = NEXT_INSN (p))
    if (LABEL_P (p))
      return 0;
  return 1;
}

/* Nonzero if register REG is used in an insn between
   FROM_INSN and TO_INSN (exclusive of those two).  */

int
reg_used_between_p (const_rtx reg, const rtx_insn *from_insn,
                    const rtx_insn *to_insn)
{
  rtx_insn *insn;

  if (from_insn == to_insn)
    return 0;

  for (insn = NEXT_INSN (from_insn); insn != to_insn; insn = NEXT_INSN (insn))
    if (NONDEBUG_INSN_P (insn)
        && (reg_overlap_mentioned_p (reg, PATTERN (insn))
            || (CALL_P (insn) && find_reg_fusage (insn, USE, reg))))
      return 1;
  return 0;
}
907
908int
f7d504c2 909reg_referenced_p (const_rtx x, const_rtx body)
2c88418c
RS
910{
911 int i;
912
913 switch (GET_CODE (body))
914 {
915 case SET:
916 if (reg_overlap_mentioned_p (x, SET_SRC (body)))
917 return 1;
918
919 /* If the destination is anything other than CC0, PC, a REG or a SUBREG
920 of a REG that occupies all of the REG, the insn references X if
921 it is mentioned in the destination. */
922 if (GET_CODE (SET_DEST (body)) != CC0
923 && GET_CODE (SET_DEST (body)) != PC
f8cfc6aa 924 && !REG_P (SET_DEST (body))
2c88418c 925 && ! (GET_CODE (SET_DEST (body)) == SUBREG
f8cfc6aa 926 && REG_P (SUBREG_REG (SET_DEST (body)))
2c88418c
RS
927 && (((GET_MODE_SIZE (GET_MODE (SUBREG_REG (SET_DEST (body))))
928 + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD)
929 == ((GET_MODE_SIZE (GET_MODE (SET_DEST (body)))
930 + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD)))
931 && reg_overlap_mentioned_p (x, SET_DEST (body)))
932 return 1;
e9a25f70 933 return 0;
2c88418c
RS
934
935 case ASM_OPERANDS:
936 for (i = ASM_OPERANDS_INPUT_LENGTH (body) - 1; i >= 0; i--)
937 if (reg_overlap_mentioned_p (x, ASM_OPERANDS_INPUT (body, i)))
938 return 1;
e9a25f70 939 return 0;
2c88418c
RS
940
941 case CALL:
942 case USE:
14a774a9 943 case IF_THEN_ELSE:
2c88418c
RS
944 return reg_overlap_mentioned_p (x, body);
945
946 case TRAP_IF:
947 return reg_overlap_mentioned_p (x, TRAP_CONDITION (body));
948
21b8482a
JJ
949 case PREFETCH:
950 return reg_overlap_mentioned_p (x, XEXP (body, 0));
951
2ac4fed0
RK
952 case UNSPEC:
953 case UNSPEC_VOLATILE:
2f9fb4c2
R
954 for (i = XVECLEN (body, 0) - 1; i >= 0; i--)
955 if (reg_overlap_mentioned_p (x, XVECEXP (body, 0, i)))
956 return 1;
957 return 0;
958
2c88418c
RS
959 case PARALLEL:
960 for (i = XVECLEN (body, 0) - 1; i >= 0; i--)
961 if (reg_referenced_p (x, XVECEXP (body, 0, i)))
962 return 1;
e9a25f70 963 return 0;
a6a2274a 964
0d3ffb5a 965 case CLOBBER:
3c0cb5de 966 if (MEM_P (XEXP (body, 0)))
0d3ffb5a
GK
967 if (reg_overlap_mentioned_p (x, XEXP (XEXP (body, 0), 0)))
968 return 1;
969 return 0;
970
0c99ec5c
RH
971 case COND_EXEC:
972 if (reg_overlap_mentioned_p (x, COND_EXEC_TEST (body)))
973 return 1;
974 return reg_referenced_p (x, COND_EXEC_CODE (body));
975
e9a25f70
JL
976 default:
977 return 0;
2c88418c 978 }
2c88418c 979}

/* Nonzero if register REG is set or clobbered in an insn between
   FROM_INSN and TO_INSN (exclusive of those two).  */

int
reg_set_between_p (const_rtx reg, const rtx_insn *from_insn,
                   const rtx_insn *to_insn)
{
  const rtx_insn *insn;

  if (from_insn == to_insn)
    return 0;

  for (insn = NEXT_INSN (from_insn); insn != to_insn; insn = NEXT_INSN (insn))
    if (INSN_P (insn) && reg_set_p (reg, insn))
      return 1;
  return 0;
}

/* Internals of reg_set_between_p.  */
int
reg_set_p (const_rtx reg, const_rtx insn)
{
  /* We can be passed an insn or part of one.  If we are passed an insn,
     check if a side-effect of the insn clobbers REG.  */
  if (INSN_P (insn)
      && (FIND_REG_INC_NOTE (insn, reg)
          || (CALL_P (insn)
              && ((REG_P (reg)
                   && REGNO (reg) < FIRST_PSEUDO_REGISTER
                   && overlaps_hard_reg_set_p (regs_invalidated_by_call,
                                               GET_MODE (reg), REGNO (reg)))
                  || MEM_P (reg)
                  || find_reg_fusage (insn, CLOBBER, reg)))))
    return 1;

  return set_of (reg, insn) != NULL_RTX;
}
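
/* Usage sketch (illustrative only; INSN and TAIL are placeholders):

     if (!reg_set_p (reg, insn)
         && !reg_set_between_p (reg, insn, tail))
       // REG keeps the same value from INSN through TAIL, so a cached
       // copy of it remains valid over that range.
       ...
 */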

/* Similar to reg_set_between_p, but check all registers in X.  Return 0
   only if none of them are modified between START and END.  Return 1 if
   X contains a MEM; this routine does use memory aliasing.  */

int
modified_between_p (const_rtx x, const rtx_insn *start, const rtx_insn *end)
{
  const enum rtx_code code = GET_CODE (x);
  const char *fmt;
  int i, j;
  rtx_insn *insn;

  if (start == end)
    return 0;

  switch (code)
    {
    CASE_CONST_ANY:
    case CONST:
    case SYMBOL_REF:
    case LABEL_REF:
      return 0;

    case PC:
    case CC0:
      return 1;

    case MEM:
      if (modified_between_p (XEXP (x, 0), start, end))
        return 1;
      if (MEM_READONLY_P (x))
        return 0;
      for (insn = NEXT_INSN (start); insn != end; insn = NEXT_INSN (insn))
        if (memory_modified_in_insn_p (x, insn))
          return 1;
      return 0;
      break;

    case REG:
      return reg_set_between_p (x, start, end);

    default:
      break;
    }

  fmt = GET_RTX_FORMAT (code);
  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    {
      if (fmt[i] == 'e' && modified_between_p (XEXP (x, i), start, end))
        return 1;

      else if (fmt[i] == 'E')
        for (j = XVECLEN (x, i) - 1; j >= 0; j--)
          if (modified_between_p (XVECEXP (x, i, j), start, end))
            return 1;
    }

  return 0;
}

/* Similar to reg_set_p, but check all registers in X.  Return 0 only if none
   of them are modified in INSN.  Return 1 if X contains a MEM; this routine
   does use memory aliasing.  */

int
modified_in_p (const_rtx x, const_rtx insn)
{
  const enum rtx_code code = GET_CODE (x);
  const char *fmt;
  int i, j;

  switch (code)
    {
    CASE_CONST_ANY:
    case CONST:
    case SYMBOL_REF:
    case LABEL_REF:
      return 0;

    case PC:
    case CC0:
      return 1;

    case MEM:
      if (modified_in_p (XEXP (x, 0), insn))
        return 1;
      if (MEM_READONLY_P (x))
        return 0;
      if (memory_modified_in_insn_p (x, insn))
        return 1;
      return 0;
      break;

    case REG:
      return reg_set_p (x, insn);

    default:
      break;
    }

  fmt = GET_RTX_FORMAT (code);
  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    {
      if (fmt[i] == 'e' && modified_in_p (XEXP (x, i), insn))
        return 1;

      else if (fmt[i] == 'E')
        for (j = XVECLEN (x, i) - 1; j >= 0; j--)
          if (modified_in_p (XVECEXP (x, i, j), insn))
            return 1;
    }

  return 0;
}

/* Helper function for set_of.  */
struct set_of_data
  {
    const_rtx found;
    const_rtx pat;
  };

static void
set_of_1 (rtx x, const_rtx pat, void *data1)
{
  struct set_of_data *const data = (struct set_of_data *) (data1);
  if (rtx_equal_p (x, data->pat)
      || (!MEM_P (x) && reg_overlap_mentioned_p (data->pat, x)))
    data->found = pat;
}

/* Given an INSN, return a SET or CLOBBER expression that does modify PAT
   (either directly or via STRICT_LOW_PART and similar modifiers).  */
const_rtx
set_of (const_rtx pat, const_rtx insn)
{
  struct set_of_data data;
  data.found = NULL_RTX;
  data.pat = pat;
  note_stores (INSN_P (insn) ? PATTERN (insn) : insn, set_of_1, &data);
  return data.found;
}
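
/* Usage sketch (illustrative only):

     const_rtx clobberer = set_of (reg, insn);
     if (clobberer)
       // CLOBBERER is the SET or CLOBBER inside INSN that overwrites
       // all or part of REG.
       ...
 */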

/* Add all hard registers in X to *PSET.  */
void
find_all_hard_regs (const_rtx x, HARD_REG_SET *pset)
{
  subrtx_iterator::array_type array;
  FOR_EACH_SUBRTX (iter, array, x, NONCONST)
    {
      const_rtx x = *iter;
      if (REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER)
        add_to_hard_reg_set (pset, GET_MODE (x), REGNO (x));
    }
}

/* This function, called through note_stores, collects sets and
   clobbers of hard registers in a HARD_REG_SET, which is pointed to
   by DATA.  */
void
record_hard_reg_sets (rtx x, const_rtx pat ATTRIBUTE_UNUSED, void *data)
{
  HARD_REG_SET *pset = (HARD_REG_SET *)data;
  if (REG_P (x) && HARD_REGISTER_P (x))
    add_to_hard_reg_set (pset, GET_MODE (x), REGNO (x));
}

/* Examine INSN, and compute the set of hard registers written by it.
   Store it in *PSET.  Should only be called after reload.  */
void
find_all_hard_reg_sets (const_rtx insn, HARD_REG_SET *pset, bool implicit)
{
  rtx link;

  CLEAR_HARD_REG_SET (*pset);
  note_stores (PATTERN (insn), record_hard_reg_sets, pset);
  if (CALL_P (insn))
    {
      if (implicit)
        IOR_HARD_REG_SET (*pset, call_used_reg_set);

      for (link = CALL_INSN_FUNCTION_USAGE (insn); link; link = XEXP (link, 1))
        record_hard_reg_sets (XEXP (link, 0), NULL, pset);
    }
  for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
    if (REG_NOTE_KIND (link) == REG_INC)
      record_hard_reg_sets (XEXP (link, 0), NULL, pset);
}

/* Like record_hard_reg_sets, but called through note_uses.  */
void
record_hard_reg_uses (rtx *px, void *data)
{
  find_all_hard_regs (*px, (HARD_REG_SET *) data);
}

/* Given an INSN, return a SET expression if this insn has only a single SET.
   It may also have CLOBBERs, USEs, or SETs whose output
   will not be used, which we ignore.  */

rtx
single_set_2 (const rtx_insn *insn, const_rtx pat)
{
  rtx set = NULL;
  int set_verified = 1;
  int i;

  if (GET_CODE (pat) == PARALLEL)
    {
      for (i = 0; i < XVECLEN (pat, 0); i++)
        {
          rtx sub = XVECEXP (pat, 0, i);
          switch (GET_CODE (sub))
            {
            case USE:
            case CLOBBER:
              break;

            case SET:
              /* We can consider insns having multiple sets, where all
                 but one are dead, as single set insns.  In the common case
                 only a single set is present in the pattern, so we want
                 to avoid checking for REG_UNUSED notes unless necessary.

                 When we reach a set the first time, we just expect it to be
                 the single set we are looking for; only when more
                 sets are found in the insn do we check them.  */
              if (!set_verified)
                {
                  if (find_reg_note (insn, REG_UNUSED, SET_DEST (set))
                      && !side_effects_p (set))
                    set = NULL;
                  else
                    set_verified = 1;
                }
              if (!set)
                set = sub, set_verified = 0;
              else if (!find_reg_note (insn, REG_UNUSED, SET_DEST (sub))
                       || side_effects_p (sub))
                return NULL_RTX;
              break;

            default:
              return NULL_RTX;
            }
        }
    }
  return set;
}
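
/* Usage sketch (illustrative only; callers normally go through the
   single_set wrapper in rtl.h rather than calling this directly):

     rtx set = single_set (insn);
     if (set)
       // INSN is effectively the one assignment
       //   SET_DEST (set) := SET_SRC (set).
       ...
 */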

/* Given an INSN, return nonzero if it has more than one SET, else return
   zero.  */

int
multiple_sets (const_rtx insn)
{
  int found;
  int i;

  /* INSN must be an insn.  */
  if (! INSN_P (insn))
    return 0;

  /* Only a PARALLEL can have multiple SETs.  */
  if (GET_CODE (PATTERN (insn)) == PARALLEL)
    {
      for (i = 0, found = 0; i < XVECLEN (PATTERN (insn), 0); i++)
        if (GET_CODE (XVECEXP (PATTERN (insn), 0, i)) == SET)
          {
            /* If we have already found a SET, then return now.  */
            if (found)
              return 1;
            else
              found = 1;
          }
    }

  /* Either zero or one SET.  */
  return 0;
}

/* Return nonzero if the destination of SET equals the source
   and there are no side effects.  */

int
set_noop_p (const_rtx set)
{
  rtx src = SET_SRC (set);
  rtx dst = SET_DEST (set);

  if (dst == pc_rtx && src == pc_rtx)
    return 1;

  if (MEM_P (dst) && MEM_P (src))
    return rtx_equal_p (dst, src) && !side_effects_p (dst);

  if (GET_CODE (dst) == ZERO_EXTRACT)
    return rtx_equal_p (XEXP (dst, 0), src)
           && ! BYTES_BIG_ENDIAN && XEXP (dst, 2) == const0_rtx
           && !side_effects_p (src);

  if (GET_CODE (dst) == STRICT_LOW_PART)
    dst = XEXP (dst, 0);

  if (GET_CODE (src) == SUBREG && GET_CODE (dst) == SUBREG)
    {
      if (SUBREG_BYTE (src) != SUBREG_BYTE (dst))
        return 0;
      src = SUBREG_REG (src);
      dst = SUBREG_REG (dst);
    }

  /* It is a NOOP if destination overlaps with selected src vector
     elements.  */
  if (GET_CODE (src) == VEC_SELECT
      && REG_P (XEXP (src, 0)) && REG_P (dst)
      && HARD_REGISTER_P (XEXP (src, 0))
      && HARD_REGISTER_P (dst))
    {
      int i;
      rtx par = XEXP (src, 1);
      rtx src0 = XEXP (src, 0);
      int c0 = INTVAL (XVECEXP (par, 0, 0));
      HOST_WIDE_INT offset = GET_MODE_UNIT_SIZE (GET_MODE (src0)) * c0;

      for (i = 1; i < XVECLEN (par, 0); i++)
        if (INTVAL (XVECEXP (par, 0, i)) != c0 + i)
          return 0;
      return
        simplify_subreg_regno (REGNO (src0), GET_MODE (src0),
                               offset, GET_MODE (dst)) == (int) REGNO (dst);
    }

  return (REG_P (src) && REG_P (dst)
          && REGNO (src) == REGNO (dst));
}
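
/* Usage sketch (illustrative only): dead-code removal of no-op moves.

     rtx set = single_set (insn);
     if (set && set_noop_p (set))
       // INSN copies a value onto itself and can be deleted;
       // noop_move_p below is the whole-insn form of this test.
       ...
 */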

/* Return nonzero if an insn consists only of SETs, each of which only sets a
   value to itself.  */

int
noop_move_p (const_rtx insn)
{
  rtx pat = PATTERN (insn);

  if (INSN_CODE (insn) == NOOP_MOVE_INSN_CODE)
    return 1;

  /* Insns carrying these notes are useful later on.  */
  if (find_reg_note (insn, REG_EQUAL, NULL_RTX))
    return 0;

  /* Check the code to be executed for COND_EXEC.  */
  if (GET_CODE (pat) == COND_EXEC)
    pat = COND_EXEC_CODE (pat);

  if (GET_CODE (pat) == SET && set_noop_p (pat))
    return 1;

  if (GET_CODE (pat) == PARALLEL)
    {
      int i;
      /* If nothing but SETs of registers to themselves,
         this insn can also be deleted.  */
      for (i = 0; i < XVECLEN (pat, 0); i++)
        {
          rtx tem = XVECEXP (pat, 0, i);

          if (GET_CODE (tem) == USE
              || GET_CODE (tem) == CLOBBER)
            continue;

          if (GET_CODE (tem) != SET || ! set_noop_p (tem))
            return 0;
        }

      return 1;
    }
  return 0;
}

/* Return nonzero if register in range [REGNO, ENDREGNO)
   appears either explicitly or implicitly in X
   other than being stored into.

   References contained within the substructure at LOC do not count.
   LOC may be zero, meaning don't ignore anything.  */

bool
refers_to_regno_p (unsigned int regno, unsigned int endregno, const_rtx x,
                   rtx *loc)
{
  int i;
  unsigned int x_regno;
  RTX_CODE code;
  const char *fmt;

 repeat:
  /* The contents of a REG_NONNEG note is always zero, so we must come here
     upon repeat in case the last REG_NOTE is a REG_NONNEG note.  */
  if (x == 0)
    return false;

  code = GET_CODE (x);

  switch (code)
    {
    case REG:
      x_regno = REGNO (x);

      /* If we are modifying the stack, frame, or argument pointer, it will
         clobber a virtual register.  In fact, we could be more precise,
         but it isn't worth it.  */
      if ((x_regno == STACK_POINTER_REGNUM
#if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
           || x_regno == ARG_POINTER_REGNUM
#endif
           || x_regno == FRAME_POINTER_REGNUM)
          && regno >= FIRST_VIRTUAL_REGISTER && regno <= LAST_VIRTUAL_REGISTER)
        return true;

      return endregno > x_regno && regno < END_REGNO (x);

    case SUBREG:
      /* If this is a SUBREG of a hard reg, we can see exactly which
         registers are being modified.  Otherwise, handle normally.  */
      if (REG_P (SUBREG_REG (x))
          && REGNO (SUBREG_REG (x)) < FIRST_PSEUDO_REGISTER)
        {
          unsigned int inner_regno = subreg_regno (x);
          unsigned int inner_endregno
            = inner_regno + (inner_regno < FIRST_PSEUDO_REGISTER
                             ? subreg_nregs (x) : 1);

          return endregno > inner_regno && regno < inner_endregno;
        }
      break;

    case CLOBBER:
    case SET:
      if (&SET_DEST (x) != loc
          /* Note setting a SUBREG counts as referring to the REG it is in for
             a pseudo but not for hard registers since we can
             treat each word individually.  */
          && ((GET_CODE (SET_DEST (x)) == SUBREG
               && loc != &SUBREG_REG (SET_DEST (x))
               && REG_P (SUBREG_REG (SET_DEST (x)))
               && REGNO (SUBREG_REG (SET_DEST (x))) >= FIRST_PSEUDO_REGISTER
               && refers_to_regno_p (regno, endregno,
                                     SUBREG_REG (SET_DEST (x)), loc))
              || (!REG_P (SET_DEST (x))
                  && refers_to_regno_p (regno, endregno, SET_DEST (x), loc))))
        return true;

      if (code == CLOBBER || loc == &SET_SRC (x))
        return false;
      x = SET_SRC (x);
      goto repeat;

    default:
      break;
    }

  /* X does not match, so try its subexpressions.  */

  fmt = GET_RTX_FORMAT (code);
  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    {
      if (fmt[i] == 'e' && loc != &XEXP (x, i))
        {
          if (i == 0)
            {
              x = XEXP (x, 0);
              goto repeat;
            }
          else
            if (refers_to_regno_p (regno, endregno, XEXP (x, i), loc))
              return true;
        }
      else if (fmt[i] == 'E')
        {
          int j;
          for (j = XVECLEN (x, i) - 1; j >= 0; j--)
            if (loc != &XVECEXP (x, i, j)
                && refers_to_regno_p (regno, endregno, XVECEXP (x, i, j), loc))
              return true;
        }
    }
  return false;
}

/* Nonzero if modifying X will affect IN.  If X is a register or a SUBREG,
   we check if any register number in X conflicts with the relevant register
   numbers.  If X is a constant, return 0.  If X is a MEM, return 1 iff IN
   contains a MEM (we don't bother checking for memory addresses that can't
   conflict because we expect this to be a rare case).  */

int
reg_overlap_mentioned_p (const_rtx x, const_rtx in)
{
  unsigned int regno, endregno;

  /* If either argument is a constant, then modifying X can not
     affect IN.  Here we look at IN, we can profitably combine
     CONSTANT_P (x) with the switch statement below.  */
  if (CONSTANT_P (in))
    return 0;

 recurse:
  switch (GET_CODE (x))
    {
    case STRICT_LOW_PART:
    case ZERO_EXTRACT:
    case SIGN_EXTRACT:
      /* Overly conservative.  */
      x = XEXP (x, 0);
      goto recurse;

    case SUBREG:
      regno = REGNO (SUBREG_REG (x));
      if (regno < FIRST_PSEUDO_REGISTER)
        regno = subreg_regno (x);
      endregno = regno + (regno < FIRST_PSEUDO_REGISTER
                          ? subreg_nregs (x) : 1);
      goto do_reg;

    case REG:
      regno = REGNO (x);
      endregno = END_REGNO (x);
    do_reg:
      return refers_to_regno_p (regno, endregno, in, (rtx*) 0);

    case MEM:
      {
        const char *fmt;
        int i;

        if (MEM_P (in))
          return 1;

        fmt = GET_RTX_FORMAT (GET_CODE (in));
        for (i = GET_RTX_LENGTH (GET_CODE (in)) - 1; i >= 0; i--)
          if (fmt[i] == 'e')
            {
              if (reg_overlap_mentioned_p (x, XEXP (in, i)))
                return 1;
            }
          else if (fmt[i] == 'E')
            {
              int j;
              for (j = XVECLEN (in, i) - 1; j >= 0; --j)
                if (reg_overlap_mentioned_p (x, XVECEXP (in, i, j)))
                  return 1;
            }

        return 0;
      }

    case SCRATCH:
    case PC:
    case CC0:
      return reg_mentioned_p (x, in);

    case PARALLEL:
      {
        int i;

        /* If any register in here refers to it we return true.  */
        for (i = XVECLEN (x, 0) - 1; i >= 0; i--)
          if (XEXP (XVECEXP (x, 0, i), 0) != 0
              && reg_overlap_mentioned_p (XEXP (XVECEXP (x, 0, i), 0), in))
            return 1;
        return 0;
      }

    default:
      gcc_assert (CONSTANT_P (x));
      return 0;
    }
}
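
/* Usage sketch (illustrative only): checking whether two operands
   interfere before reordering them.

     if (reg_overlap_mentioned_p (dest, src))
       // Writing DEST would change the value of SRC (they share at
       // least one register, or both may touch memory).
       ...
 */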

/* Call FUN on each register or MEM that is stored into or clobbered by X.
   (X would be the pattern of an insn).  DATA is an arbitrary pointer,
   ignored by note_stores, but passed to FUN.

   FUN receives three arguments:
   1. the REG, MEM, CC0 or PC being stored in or clobbered,
   2. the SET or CLOBBER rtx that does the store,
   3. the pointer DATA provided to note_stores.

   If the item being stored in or clobbered is a SUBREG of a hard register,
   the SUBREG will be passed.  */

void
note_stores (const_rtx x, void (*fun) (rtx, const_rtx, void *), void *data)
{
  int i;

  if (GET_CODE (x) == COND_EXEC)
    x = COND_EXEC_CODE (x);

  if (GET_CODE (x) == SET || GET_CODE (x) == CLOBBER)
    {
      rtx dest = SET_DEST (x);

      while ((GET_CODE (dest) == SUBREG
              && (!REG_P (SUBREG_REG (dest))
                  || REGNO (SUBREG_REG (dest)) >= FIRST_PSEUDO_REGISTER))
             || GET_CODE (dest) == ZERO_EXTRACT
             || GET_CODE (dest) == STRICT_LOW_PART)
        dest = XEXP (dest, 0);

      /* If we have a PARALLEL, SET_DEST is a list of EXPR_LIST expressions,
         each of whose first operand is a register.  */
      if (GET_CODE (dest) == PARALLEL)
        {
          for (i = XVECLEN (dest, 0) - 1; i >= 0; i--)
            if (XEXP (XVECEXP (dest, 0, i), 0) != 0)
              (*fun) (XEXP (XVECEXP (dest, 0, i), 0), x, data);
        }
      else
        (*fun) (dest, x, data);
    }

  else if (GET_CODE (x) == PARALLEL)
    for (i = XVECLEN (x, 0) - 1; i >= 0; i--)
      note_stores (XVECEXP (x, 0, i), fun, data);
}
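
/* Usage sketch (illustrative only): collecting everything an insn
   writes, using record_hard_reg_sets above as the callback.

     HARD_REG_SET written;
     CLEAR_HARD_REG_SET (written);
     note_stores (PATTERN (insn), record_hard_reg_sets, &written);
 */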

/* Like note_stores, but call FUN for each expression that is being
   referenced in PBODY, a pointer to the PATTERN of an insn.  We only call
   FUN for each expression, not any interior subexpressions.  FUN receives a
   pointer to the expression and the DATA passed to this function.

   Note that this is not quite the same test as that done in reg_referenced_p
   since that considers something as being referenced if it is being
   partially set, while we do not.  */

void
note_uses (rtx *pbody, void (*fun) (rtx *, void *), void *data)
{
  rtx body = *pbody;
  int i;

  switch (GET_CODE (body))
    {
    case COND_EXEC:
      (*fun) (&COND_EXEC_TEST (body), data);
      note_uses (&COND_EXEC_CODE (body), fun, data);
      return;

    case PARALLEL:
      for (i = XVECLEN (body, 0) - 1; i >= 0; i--)
        note_uses (&XVECEXP (body, 0, i), fun, data);
      return;

    case SEQUENCE:
      for (i = XVECLEN (body, 0) - 1; i >= 0; i--)
        note_uses (&PATTERN (XVECEXP (body, 0, i)), fun, data);
      return;

    case USE:
      (*fun) (&XEXP (body, 0), data);
      return;

    case ASM_OPERANDS:
      for (i = ASM_OPERANDS_INPUT_LENGTH (body) - 1; i >= 0; i--)
        (*fun) (&ASM_OPERANDS_INPUT (body, i), data);
      return;

    case TRAP_IF:
      (*fun) (&TRAP_CONDITION (body), data);
      return;

    case PREFETCH:
      (*fun) (&XEXP (body, 0), data);
      return;

    case UNSPEC:
    case UNSPEC_VOLATILE:
      for (i = XVECLEN (body, 0) - 1; i >= 0; i--)
        (*fun) (&XVECEXP (body, 0, i), data);
      return;

    case CLOBBER:
      if (MEM_P (XEXP (body, 0)))
        (*fun) (&XEXP (XEXP (body, 0), 0), data);
      return;

    case SET:
      {
        rtx dest = SET_DEST (body);

        /* For sets we process everything in the source, plus registers in
           the memory expression of the store, and the operands of a
           ZERO_EXTRACT.  */
        (*fun) (&SET_SRC (body), data);

        if (GET_CODE (dest) == ZERO_EXTRACT)
          {
            (*fun) (&XEXP (dest, 1), data);
            (*fun) (&XEXP (dest, 2), data);
          }

        while (GET_CODE (dest) == SUBREG || GET_CODE (dest) == STRICT_LOW_PART)
          dest = XEXP (dest, 0);

        if (MEM_P (dest))
          (*fun) (&XEXP (dest, 0), data);
      }
      return;

    default:
      /* All the other possibilities never store.  */
      (*fun) (pbody, data);
      return;
    }
}

/* Return nonzero if X's old contents don't survive after INSN.
   This will be true if X is (cc0) or if X is a register and
   X dies in INSN or because INSN entirely sets X.

   "Entirely set" means set directly and not through a SUBREG or
   ZERO_EXTRACT, so no trace of the old contents remains.
   Likewise, REG_INC does not count.

   REG may be a hard or pseudo reg.  Renumbering is not taken into account,
   but for this use that makes no difference, since regs don't overlap
   during their lifetimes.  Therefore, this function may be used
   at any time after deaths have been computed.

   If REG is a hard reg that occupies multiple machine registers, this
   function will only return 1 if each of those registers will be replaced
   by INSN.  */

int
dead_or_set_p (const_rtx insn, const_rtx x)
{
  unsigned int regno, end_regno;
  unsigned int i;

  /* Can't use cc0_rtx below since this file is used by genattrtab.c.  */
  if (GET_CODE (x) == CC0)
    return 1;

  gcc_assert (REG_P (x));

  regno = REGNO (x);
  end_regno = END_REGNO (x);
  for (i = regno; i < end_regno; i++)
    if (! dead_or_set_regno_p (insn, i))
      return 0;

  return 1;
}

/* Return TRUE iff DEST is a register or subreg of a register and
   doesn't change the number of words of the inner register, and any
   part of the register is TEST_REGNO.  */

static bool
covers_regno_no_parallel_p (const_rtx dest, unsigned int test_regno)
{
  unsigned int regno, endregno;

  if (GET_CODE (dest) == SUBREG
      && (((GET_MODE_SIZE (GET_MODE (dest))
            + UNITS_PER_WORD - 1) / UNITS_PER_WORD)
          == ((GET_MODE_SIZE (GET_MODE (SUBREG_REG (dest)))
               + UNITS_PER_WORD - 1) / UNITS_PER_WORD)))
    dest = SUBREG_REG (dest);

  if (!REG_P (dest))
    return false;

  regno = REGNO (dest);
  endregno = END_REGNO (dest);
  return (test_regno >= regno && test_regno < endregno);
}

/* Like covers_regno_no_parallel_p, but also handles PARALLELs where
   any member matches the covers_regno_no_parallel_p criteria.  */

static bool
covers_regno_p (const_rtx dest, unsigned int test_regno)
{
  if (GET_CODE (dest) == PARALLEL)
    {
      /* Some targets place small structures in registers for return
         values of functions, and those registers are wrapped in
         PARALLELs that we may see as the destination of a SET.  */
      int i;

      for (i = XVECLEN (dest, 0) - 1; i >= 0; i--)
        {
          rtx inner = XEXP (XVECEXP (dest, 0, i), 0);
          if (inner != NULL_RTX
              && covers_regno_no_parallel_p (inner, test_regno))
            return true;
        }

      return false;
    }
  else
    return covers_regno_no_parallel_p (dest, test_regno);
}

/* Utility function for dead_or_set_p to check an individual register.  */

int
dead_or_set_regno_p (const_rtx insn, unsigned int test_regno)
{
  const_rtx pattern;

  /* See if there is a death note for something that includes TEST_REGNO.  */
  if (find_regno_note (insn, REG_DEAD, test_regno))
    return 1;

  if (CALL_P (insn)
      && find_regno_fusage (insn, CLOBBER, test_regno))
    return 1;

  pattern = PATTERN (insn);

  /* If a COND_EXEC is not executed, the value survives.  */
  if (GET_CODE (pattern) == COND_EXEC)
    return 0;

  if (GET_CODE (pattern) == SET)
    return covers_regno_p (SET_DEST (pattern), test_regno);
  else if (GET_CODE (pattern) == PARALLEL)
    {
      int i;

      for (i = XVECLEN (pattern, 0) - 1; i >= 0; i--)
        {
          rtx body = XVECEXP (pattern, 0, i);

          if (GET_CODE (body) == COND_EXEC)
            body = COND_EXEC_CODE (body);

          if ((GET_CODE (body) == SET || GET_CODE (body) == CLOBBER)
              && covers_regno_p (SET_DEST (body), test_regno))
            return 1;
        }
    }

  return 0;
}
1869
1870/* Return the reg-note of kind KIND in insn INSN, if there is one.
1871 If DATUM is nonzero, look for one whose datum is DATUM. */
1872
1873rtx
f7d504c2 1874find_reg_note (const_rtx insn, enum reg_note kind, const_rtx datum)
2c88418c 1875{
b3694847 1876 rtx link;
2c88418c 1877
7a40b8b1 1878 gcc_checking_assert (insn);
af082de3 1879
ae78d276 1880 /* Ignore anything that is not an INSN, JUMP_INSN or CALL_INSN. */
2c3c49de 1881 if (! INSN_P (insn))
ae78d276 1882 return 0;
cd798543
AP
1883 if (datum == 0)
1884 {
1885 for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
1886 if (REG_NOTE_KIND (link) == kind)
1887 return link;
1888 return 0;
1889 }
ae78d276 1890
2c88418c 1891 for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
cd798543 1892 if (REG_NOTE_KIND (link) == kind && datum == XEXP (link, 0))
2c88418c
RS
1893 return link;
1894 return 0;
1895}
1896
1897/* Return the reg-note of kind KIND in insn INSN which applies to register
99309f3b
RK
1898 number REGNO, if any. Return 0 if there is no such reg-note. Note that
1899 the REGNO of this NOTE need not be REGNO if REGNO is a hard register;
1900 it might be the case that the note overlaps REGNO. */
2c88418c
RS
1901
1902rtx
f7d504c2 1903find_regno_note (const_rtx insn, enum reg_note kind, unsigned int regno)
2c88418c 1904{
b3694847 1905 rtx link;
2c88418c 1906
ae78d276 1907 /* Ignore anything that is not an INSN, JUMP_INSN or CALL_INSN. */
2c3c49de 1908 if (! INSN_P (insn))
ae78d276
MM
1909 return 0;
1910
2c88418c
RS
1911 for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
1912 if (REG_NOTE_KIND (link) == kind
1913 /* Verify that it is a register, so that scratch and MEM won't cause a
1914 problem here. */
f8cfc6aa 1915 && REG_P (XEXP (link, 0))
99309f3b 1916 && REGNO (XEXP (link, 0)) <= regno
09e18274 1917 && END_REGNO (XEXP (link, 0)) > regno)
2c88418c
RS
1918 return link;
1919 return 0;
1920}
8f3e7a26 1921
d9c695ff
RK
1922/* Return a REG_EQUIV or REG_EQUAL note if insn has only a single set and
1923 has such a note. */
1924
1925rtx
f7d504c2 1926find_reg_equal_equiv_note (const_rtx insn)
d9c695ff 1927{
cd648cec 1928 rtx link;
d9c695ff 1929
cd648cec 1930 if (!INSN_P (insn))
d9c695ff 1931 return 0;
ea8f106d 1932
cd648cec
JH
1933 for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
1934 if (REG_NOTE_KIND (link) == REG_EQUAL
1935 || REG_NOTE_KIND (link) == REG_EQUIV)
1936 {
ea8f106d
SB
1937 /* FIXME: We should never have REG_EQUAL/REG_EQUIV notes on
1938 insns that have multiple sets. Checking single_set to
1939 make sure of this is not the proper check, as explained
1940 in the comment in set_unique_reg_note.
1941
1942 This should be changed into an assert. */
1943 if (GET_CODE (PATTERN (insn)) == PARALLEL && multiple_sets (insn))
cd648cec
JH
1944 return 0;
1945 return link;
1946 }
1947 return NULL;
d9c695ff
RK
1948}
1949
2a450639
RS
1950/* Check whether INSN is a single_set whose source is known to be
1951 equivalent to a constant. Return that constant if so, otherwise
1952 return null. */
1953
1954rtx
68a1a6c0 1955find_constant_src (const rtx_insn *insn)
2a450639
RS
1956{
1957 rtx note, set, x;
1958
1959 set = single_set (insn);
1960 if (set)
1961 {
1962 x = avoid_constant_pool_reference (SET_SRC (set));
1963 if (CONSTANT_P (x))
1964 return x;
1965 }
1966
1967 note = find_reg_equal_equiv_note (insn);
1968 if (note && CONSTANT_P (XEXP (note, 0)))
1969 return XEXP (note, 0);
1970
1971 return NULL_RTX;
1972}
1973
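/* Hypothetical sketch of a typical caller: substitute a constant for a
   register use when the defining insn is known to compute one.  */

static rtx
constant_for_def (const rtx_insn *def_insn, rtx fallback)
{
  rtx cst = find_constant_src (def_insn);
  return cst ? cst : fallback;
}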
8f3e7a26
RK
1974/* Return true if DATUM, or any overlap of DATUM, of kind CODE is found
1975 in the CALL_INSN_FUNCTION_USAGE information of INSN. */
1976
1977int
f7d504c2 1978find_reg_fusage (const_rtx insn, enum rtx_code code, const_rtx datum)
8f3e7a26
RK
1979{
1980 /* If it's not a CALL_INSN, it can't possibly have a
1981 CALL_INSN_FUNCTION_USAGE field, so don't bother checking. */
4b4bf941 1982 if (!CALL_P (insn))
8f3e7a26
RK
1983 return 0;
1984
41374e13 1985 gcc_assert (datum);
8f3e7a26 1986
f8cfc6aa 1987 if (!REG_P (datum))
8f3e7a26 1988 {
b3694847 1989 rtx link;
8f3e7a26
RK
1990
1991 for (link = CALL_INSN_FUNCTION_USAGE (insn);
a6a2274a 1992 link;
8f3e7a26 1993 link = XEXP (link, 1))
a6a2274a 1994 if (GET_CODE (XEXP (link, 0)) == code
cc863bea 1995 && rtx_equal_p (datum, XEXP (XEXP (link, 0), 0)))
a6a2274a 1996 return 1;
8f3e7a26
RK
1997 }
1998 else
1999 {
770ae6cc 2000 unsigned int regno = REGNO (datum);
8f3e7a26
RK
2001
2002 /* CALL_INSN_FUNCTION_USAGE information cannot contain references
2003 to pseudo registers, so don't bother checking. */
2004
2005 if (regno < FIRST_PSEUDO_REGISTER)
a6a2274a 2006 {
09e18274 2007 unsigned int end_regno = END_HARD_REGNO (datum);
770ae6cc 2008 unsigned int i;
8f3e7a26
RK
2009
2010 for (i = regno; i < end_regno; i++)
2011 if (find_regno_fusage (insn, code, i))
2012 return 1;
a6a2274a 2013 }
8f3e7a26
RK
2014 }
2015
2016 return 0;
2017}
2018
2019/* Return true if REGNO, or any overlap of REGNO, of kind CODE is found
2020 in the CALL_INSN_FUNCTION_USAGE information of INSN. */
2021
2022int
f7d504c2 2023find_regno_fusage (const_rtx insn, enum rtx_code code, unsigned int regno)
8f3e7a26 2024{
b3694847 2025 rtx link;
8f3e7a26
RK
2026
2027 /* CALL_INSN_FUNCTION_USAGE information cannot contain references
2028 to pseudo registers, so don't bother checking. */
2029
2030 if (regno >= FIRST_PSEUDO_REGISTER
4b4bf941 2031 || !CALL_P (insn) )
8f3e7a26
RK
2032 return 0;
2033
2034 for (link = CALL_INSN_FUNCTION_USAGE (insn); link; link = XEXP (link, 1))
83ab3839 2035 {
770ae6cc 2036 rtx op, reg;
83ab3839
RH
2037
2038 if (GET_CODE (op = XEXP (link, 0)) == code
f8cfc6aa 2039 && REG_P (reg = XEXP (op, 0))
09e18274
RS
2040 && REGNO (reg) <= regno
2041 && END_HARD_REGNO (reg) > regno)
83ab3839
RH
2042 return 1;
2043 }
8f3e7a26
RK
2044
2045 return 0;
2046}
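/* Hypothetical sketch: test whether a call explicitly clobbers hard
   register REGNO through its CALL_INSN_FUNCTION_USAGE list.  */

static bool
call_fusage_clobbers_p (const_rtx call_insn, unsigned int regno)
{
  return find_regno_fusage (call_insn, CLOBBER, regno) != 0;
}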
a6a063b8 2047
2c88418c 2048\f
e5af9ddd
RS
2049/* Return true if KIND is an integer REG_NOTE. */
2050
2051static bool
2052int_reg_note_p (enum reg_note kind)
2053{
2054 return kind == REG_BR_PROB;
2055}
2056
efc0b2bd
ILT
2057/* Allocate a register note with kind KIND and datum DATUM. LIST is
2058 stored as the pointer to the next register note. */
65c5f2a6 2059
efc0b2bd
ILT
2060rtx
2061alloc_reg_note (enum reg_note kind, rtx datum, rtx list)
65c5f2a6
ILT
2062{
2063 rtx note;
2064
e5af9ddd 2065 gcc_checking_assert (!int_reg_note_p (kind));
65c5f2a6
ILT
2066 switch (kind)
2067 {
2068 case REG_CC_SETTER:
2069 case REG_CC_USER:
2070 case REG_LABEL_TARGET:
2071 case REG_LABEL_OPERAND:
0a35513e 2072 case REG_TM:
65c5f2a6
ILT
2073 /* These types of register notes use an INSN_LIST rather than an
2074 EXPR_LIST, so that copying is done right and dumps look
2075 better. */
efc0b2bd 2076 note = alloc_INSN_LIST (datum, list);
65c5f2a6
ILT
2077 PUT_REG_NOTE_KIND (note, kind);
2078 break;
2079
2080 default:
efc0b2bd 2081 note = alloc_EXPR_LIST (kind, datum, list);
65c5f2a6
ILT
2082 break;
2083 }
2084
efc0b2bd
ILT
2085 return note;
2086}
2087
2088/* Add register note with kind KIND and datum DATUM to INSN. */
2089
2090void
2091add_reg_note (rtx insn, enum reg_note kind, rtx datum)
2092{
2093 REG_NOTES (insn) = alloc_reg_note (kind, datum, REG_NOTES (insn));
65c5f2a6
ILT
2094}
2095
e5af9ddd
RS
2096/* Add an integer register note with kind KIND and datum DATUM to INSN. */
2097
2098void
2099add_int_reg_note (rtx insn, enum reg_note kind, int datum)
2100{
2101 gcc_checking_assert (int_reg_note_p (kind));
ef4bddc2 2102 REG_NOTES (insn) = gen_rtx_INT_LIST ((machine_mode) kind,
e5af9ddd
RS
2103 datum, REG_NOTES (insn));
2104}
2105
2106/* Add a register note like NOTE to INSN. */
2107
2108void
2109add_shallow_copy_of_reg_note (rtx insn, rtx note)
2110{
2111 if (GET_CODE (note) == INT_LIST)
2112 add_int_reg_note (insn, REG_NOTE_KIND (note), XINT (note, 0));
2113 else
2114 add_reg_note (insn, REG_NOTE_KIND (note), XEXP (note, 0));
2115}
2116
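/* Hypothetical usage sketch: record that INSN computes VALUE.  REG_EQUAL
   notes use an EXPR_LIST, so alloc_reg_note (via add_reg_note) picks the
   right list representation automatically.  */

static void
note_insn_value (rtx insn, rtx value)
{
  add_reg_note (insn, REG_EQUAL, copy_rtx (value));
}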
2c88418c
RS
2117/* Remove register note NOTE from the REG_NOTES of INSN. */
2118
2119void
f7d504c2 2120remove_note (rtx insn, const_rtx note)
2c88418c 2121{
b3694847 2122 rtx link;
2c88418c 2123
49c3bb12
RH
2124 if (note == NULL_RTX)
2125 return;
2126
2c88418c 2127 if (REG_NOTES (insn) == note)
6fb5fa3c
DB
2128 REG_NOTES (insn) = XEXP (note, 1);
2129 else
2130 for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
2131 if (XEXP (link, 1) == note)
2132 {
2133 XEXP (link, 1) = XEXP (note, 1);
2134 break;
2135 }
2136
2137 switch (REG_NOTE_KIND (note))
2c88418c 2138 {
6fb5fa3c
DB
2139 case REG_EQUAL:
2140 case REG_EQUIV:
b2908ba6 2141 df_notes_rescan (as_a <rtx_insn *> (insn));
6fb5fa3c
DB
2142 break;
2143 default:
2144 break;
2c88418c 2145 }
2c88418c 2146}
55a98783 2147
7cd689bc
SB
2148/* Remove REG_EQUAL and/or REG_EQUIV notes if INSN has such notes. */
2149
2150void
2151remove_reg_equal_equiv_notes (rtx insn)
2152{
2153 rtx *loc;
2154
2155 loc = &REG_NOTES (insn);
2156 while (*loc)
2157 {
2158 enum reg_note kind = REG_NOTE_KIND (*loc);
2159 if (kind == REG_EQUAL || kind == REG_EQUIV)
2160 *loc = XEXP (*loc, 1);
2161 else
2162 loc = &XEXP (*loc, 1);
2163 }
2164}
885c9b5d
EB
2165
2166/* Remove all REG_EQUAL and REG_EQUIV notes referring to REGNO. */
2167
2168void
2169remove_reg_equal_equiv_notes_for_regno (unsigned int regno)
2170{
2171 df_ref eq_use;
2172
2173 if (!df)
2174 return;
2175
2176 /* This loop is a little tricky. We cannot just go down the chain because
2177 it is being modified by some actions in the loop. So we just iterate
2178 over the head. We plan to drain the list anyway. */
2179 while ((eq_use = DF_REG_EQ_USE_CHAIN (regno)) != NULL)
2180 {
1bbbc4a3 2181 rtx_insn *insn = DF_REF_INSN (eq_use);
885c9b5d
EB
2182 rtx note = find_reg_equal_equiv_note (insn);
2183
2184 /* This assert is generally triggered when someone deletes a REG_EQUAL
2185 or REG_EQUIV note by hacking the list manually rather than calling
2186 remove_note. */
2187 gcc_assert (note);
2188
2189 remove_note (insn, note);
2190 }
2191}
7cd689bc 2192
5f0d2358
RK
2193/* Search LISTP (an EXPR_LIST) for an entry whose first operand is NODE and
2194 return 1 if it is found. A simple equality test is used to determine if
2195 NODE matches. */
2196
2197int
f7d504c2 2198in_expr_list_p (const_rtx listp, const_rtx node)
5f0d2358 2199{
f7d504c2 2200 const_rtx x;
5f0d2358
RK
2201
2202 for (x = listp; x; x = XEXP (x, 1))
2203 if (node == XEXP (x, 0))
2204 return 1;
2205
2206 return 0;
2207}
2208
dd248abd
RK
2209/* Search LISTP (an EXPR_LIST) for an entry whose first operand is NODE and
2210 remove that entry from the list if it is found.
55a98783 2211
dd248abd 2212 A simple equality test is used to determine if NODE matches. */
55a98783
JL
2213
2214void
2382940b 2215remove_node_from_expr_list (const_rtx node, rtx_expr_list **listp)
55a98783 2216{
2382940b 2217 rtx_expr_list *temp = *listp;
55a98783
JL
2218 rtx prev = NULL_RTX;
2219
2220 while (temp)
2221 {
2382940b 2222 if (node == temp->element ())
55a98783
JL
2223 {
2224 /* Splice the node out of the list. */
2225 if (prev)
2382940b 2226 XEXP (prev, 1) = temp->next ();
55a98783 2227 else
2382940b 2228 *listp = temp->next ();
55a98783
JL
2229
2230 return;
2231 }
dd248abd
RK
2232
2233 prev = temp;
2382940b 2234 temp = temp->next ();
55a98783
JL
2235 }
2236}
b5241a5a
DM
2237
2238/* Search LISTP (an INSN_LIST) for an entry whose first operand is NODE and
2239 remove that entry from the list if it is found.
2240
2241 A simple equality test is used to determine if NODE matches. */
2242
2243void
2244remove_node_from_insn_list (const rtx_insn *node, rtx_insn_list **listp)
2245{
2246 rtx_insn_list *temp = *listp;
2247 rtx prev = NULL;
2248
2249 while (temp)
2250 {
2251 if (node == temp->insn ())
2252 {
2253 /* Splice the node out of the list. */
2254 if (prev)
2255 XEXP (prev, 1) = temp->next ();
2256 else
2257 *listp = temp->next ();
2258
2259 return;
2260 }
2261
2262 prev = temp;
2263 temp = temp->next ();
2264 }
2265}
2c88418c 2266\f
2b067faf
RS
2267/* Nonzero if X contains any volatile instructions. These are instructions
 2268 which may cause unpredictable machine state, and thus no
adddc347
HPN
2269 instructions or register uses should be moved or combined across them.
2270 This includes only volatile asms and UNSPEC_VOLATILE instructions. */
2b067faf
RS
2271
2272int
f7d504c2 2273volatile_insn_p (const_rtx x)
2b067faf 2274{
f7d504c2 2275 const RTX_CODE code = GET_CODE (x);
2b067faf
RS
2276 switch (code)
2277 {
2278 case LABEL_REF:
2279 case SYMBOL_REF:
2b067faf 2280 case CONST:
d8116890 2281 CASE_CONST_ANY:
2b067faf
RS
2282 case CC0:
2283 case PC:
2284 case REG:
2285 case SCRATCH:
2286 case CLOBBER:
2b067faf
RS
2287 case ADDR_VEC:
2288 case ADDR_DIFF_VEC:
2289 case CALL:
2290 case MEM:
2291 return 0;
2292
2293 case UNSPEC_VOLATILE:
2b067faf
RS
2294 return 1;
2295
4c46ea23 2296 case ASM_INPUT:
2b067faf
RS
2297 case ASM_OPERANDS:
2298 if (MEM_VOLATILE_P (x))
2299 return 1;
e9a25f70
JL
2300
2301 default:
2302 break;
2b067faf
RS
2303 }
2304
2305 /* Recursively scan the operands of this expression. */
2306
2307 {
f7d504c2 2308 const char *const fmt = GET_RTX_FORMAT (code);
b3694847 2309 int i;
a6a2274a 2310
2b067faf
RS
2311 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
2312 {
2313 if (fmt[i] == 'e')
2314 {
31001f72 2315 if (volatile_insn_p (XEXP (x, i)))
2b067faf
RS
2316 return 1;
2317 }
d4757e6a 2318 else if (fmt[i] == 'E')
2b067faf 2319 {
b3694847 2320 int j;
2b067faf 2321 for (j = 0; j < XVECLEN (x, i); j++)
31001f72 2322 if (volatile_insn_p (XVECEXP (x, i, j)))
2b067faf
RS
2323 return 1;
2324 }
2325 }
2326 }
2327 return 0;
2328}
2329
2c88418c 2330/* Nonzero if X contains any volatile memory references,
2ac4fed0 2331 UNSPEC_VOLATILE operations or volatile ASM_OPERANDS expressions. */
2c88418c
RS
2332
2333int
f7d504c2 2334volatile_refs_p (const_rtx x)
2c88418c 2335{
f7d504c2 2336 const RTX_CODE code = GET_CODE (x);
2c88418c
RS
2337 switch (code)
2338 {
2339 case LABEL_REF:
2340 case SYMBOL_REF:
2c88418c 2341 case CONST:
d8116890 2342 CASE_CONST_ANY:
2c88418c
RS
2343 case CC0:
2344 case PC:
2345 case REG:
2346 case SCRATCH:
2347 case CLOBBER:
2c88418c
RS
2348 case ADDR_VEC:
2349 case ADDR_DIFF_VEC:
2350 return 0;
2351
2ac4fed0 2352 case UNSPEC_VOLATILE:
2c88418c
RS
2353 return 1;
2354
2355 case MEM:
4c46ea23 2356 case ASM_INPUT:
2c88418c
RS
2357 case ASM_OPERANDS:
2358 if (MEM_VOLATILE_P (x))
2359 return 1;
e9a25f70
JL
2360
2361 default:
2362 break;
2c88418c
RS
2363 }
2364
2365 /* Recursively scan the operands of this expression. */
2366
2367 {
f7d504c2 2368 const char *const fmt = GET_RTX_FORMAT (code);
b3694847 2369 int i;
a6a2274a 2370
2c88418c
RS
2371 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
2372 {
2373 if (fmt[i] == 'e')
2374 {
2375 if (volatile_refs_p (XEXP (x, i)))
2376 return 1;
2377 }
d4757e6a 2378 else if (fmt[i] == 'E')
2c88418c 2379 {
b3694847 2380 int j;
2c88418c
RS
2381 for (j = 0; j < XVECLEN (x, i); j++)
2382 if (volatile_refs_p (XVECEXP (x, i, j)))
2383 return 1;
2384 }
2385 }
2386 }
2387 return 0;
2388}
2389
2390/* Similar to above, except that it also rejects register pre- and post-
2391 incrementing. */
2392
2393int
f7d504c2 2394side_effects_p (const_rtx x)
2c88418c 2395{
f7d504c2 2396 const RTX_CODE code = GET_CODE (x);
2c88418c
RS
2397 switch (code)
2398 {
2399 case LABEL_REF:
2400 case SYMBOL_REF:
2c88418c 2401 case CONST:
d8116890 2402 CASE_CONST_ANY:
2c88418c
RS
2403 case CC0:
2404 case PC:
2405 case REG:
2406 case SCRATCH:
2c88418c
RS
2407 case ADDR_VEC:
2408 case ADDR_DIFF_VEC:
b5b8b0ac 2409 case VAR_LOCATION:
2c88418c
RS
2410 return 0;
2411
2412 case CLOBBER:
2413 /* Reject CLOBBER with a non-VOID mode. These are made by combine.c
2414 when some combination can't be done. If we see one, don't think
2415 that we can simplify the expression. */
2416 return (GET_MODE (x) != VOIDmode);
2417
2418 case PRE_INC:
2419 case PRE_DEC:
2420 case POST_INC:
2421 case POST_DEC:
1fb9c5cd
MH
2422 case PRE_MODIFY:
2423 case POST_MODIFY:
2c88418c 2424 case CALL:
2ac4fed0 2425 case UNSPEC_VOLATILE:
2c88418c
RS
2426 return 1;
2427
2428 case MEM:
4c46ea23 2429 case ASM_INPUT:
2c88418c
RS
2430 case ASM_OPERANDS:
2431 if (MEM_VOLATILE_P (x))
2432 return 1;
e9a25f70
JL
2433
2434 default:
2435 break;
2c88418c
RS
2436 }
2437
2438 /* Recursively scan the operands of this expression. */
2439
2440 {
b3694847
SS
2441 const char *fmt = GET_RTX_FORMAT (code);
2442 int i;
a6a2274a 2443
2c88418c
RS
2444 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
2445 {
2446 if (fmt[i] == 'e')
2447 {
2448 if (side_effects_p (XEXP (x, i)))
2449 return 1;
2450 }
d4757e6a 2451 else if (fmt[i] == 'E')
2c88418c 2452 {
b3694847 2453 int j;
2c88418c
RS
2454 for (j = 0; j < XVECLEN (x, i); j++)
2455 if (side_effects_p (XVECEXP (x, i, j)))
2456 return 1;
2457 }
2458 }
2459 }
2460 return 0;
2461}
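/* The three predicates above form a hierarchy: volatile_insn_p implies
   volatile_refs_p, which in turn implies side_effects_p.  A hypothetical
   sketch of the kind of query a scheduler might make:  */

static bool
exprs_safe_to_swap_p (const_rtx a, const_rtx b)
{
  /* Conservatively refuse to reorder two expressions when either one
     has side effects (volatile access, autoincrement, calls, ...).  */
  return ! side_effects_p (a) && ! side_effects_p (b);
}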
2462\f
e755fcf5 2463/* Return nonzero if evaluating rtx X might cause a trap.
48e8382e
PB
 2464 FLAGS controls how to consider MEMs. A nonzero value means the context
2465 of the access may have changed from the original, such that the
2466 address may have become invalid. */
2c88418c 2467
215b063c 2468int
f7d504c2 2469may_trap_p_1 (const_rtx x, unsigned flags)
2c88418c
RS
2470{
2471 int i;
2472 enum rtx_code code;
6f7d635c 2473 const char *fmt;
48e8382e
PB
2474
2475 /* We make no distinction currently, but this function is part of
2476 the internal target-hooks ABI so we keep the parameter as
2477 "unsigned flags". */
2478 bool code_changed = flags != 0;
2c88418c
RS
2479
2480 if (x == 0)
2481 return 0;
2482 code = GET_CODE (x);
2483 switch (code)
2484 {
2485 /* Handle these cases quickly. */
d8116890 2486 CASE_CONST_ANY:
2c88418c
RS
2487 case SYMBOL_REF:
2488 case LABEL_REF:
2489 case CONST:
2490 case PC:
2491 case CC0:
2492 case REG:
2493 case SCRATCH:
2494 return 0;
2495
215b063c 2496 case UNSPEC:
215b063c
PB
2497 return targetm.unspec_may_trap_p (x, flags);
2498
c84a808e 2499 case UNSPEC_VOLATILE:
215b063c 2500 case ASM_INPUT:
2c88418c
RS
2501 case TRAP_IF:
2502 return 1;
2503
22aa60a1
RH
2504 case ASM_OPERANDS:
2505 return MEM_VOLATILE_P (x);
2506
2c88418c
RS
2507 /* Memory ref can trap unless it's a static var or a stack slot. */
2508 case MEM:
d809253a
EB
2509 /* Recognize specific pattern of stack checking probes. */
2510 if (flag_stack_check
2511 && MEM_VOLATILE_P (x)
2512 && XEXP (x, 0) == stack_pointer_rtx)
2513 return 1;
e755fcf5 2514 if (/* MEM_NOTRAP_P only relates to the actual position of the memory
48e8382e
PB
2515 reference; moving it out of context such as when moving code
2516 when optimizing, might cause its address to become invalid. */
2517 code_changed
2518 || !MEM_NOTRAP_P (x))
2519 {
f5541398 2520 HOST_WIDE_INT size = MEM_SIZE_KNOWN_P (x) ? MEM_SIZE (x) : 0;
48e8382e
PB
2521 return rtx_addr_can_trap_p_1 (XEXP (x, 0), 0, size,
2522 GET_MODE (x), code_changed);
2523 }
2524
2525 return 0;
2c88418c
RS
2526
2527 /* Division by a non-constant might trap. */
2528 case DIV:
2529 case MOD:
2530 case UDIV:
2531 case UMOD:
3d3dbadd 2532 if (HONOR_SNANS (x))
52bfebf0 2533 return 1;
3d8bf70f 2534 if (SCALAR_FLOAT_MODE_P (GET_MODE (x)))
f9013075
DE
2535 return flag_trapping_math;
2536 if (!CONSTANT_P (XEXP (x, 1)) || (XEXP (x, 1) == const0_rtx))
2c88418c 2537 return 1;
e9a25f70
JL
2538 break;
2539
b278301b
RK
2540 case EXPR_LIST:
2541 /* An EXPR_LIST is used to represent a function call. This
2542 certainly may trap. */
2543 return 1;
e9a25f70 2544
734508ea
JW
2545 case GE:
2546 case GT:
2547 case LE:
2548 case LT:
19aec195 2549 case LTGT:
55143861 2550 case COMPARE:
734508ea 2551 /* Some floating point comparisons may trap. */
f5eb5fd0
JH
2552 if (!flag_trapping_math)
2553 break;
734508ea
JW
2554 /* ??? There is no machine independent way to check for tests that trap
2555 when COMPARE is used, though many targets do make this distinction.
2556 For instance, sparc uses CCFPE for compares which generate exceptions
2557 and CCFP for compares which do not generate exceptions. */
1b457aa4 2558 if (HONOR_NANS (x))
55143861
JJ
2559 return 1;
2560 /* But often the compare has some CC mode, so check operand
2561 modes as well. */
1b457aa4
MG
2562 if (HONOR_NANS (XEXP (x, 0))
2563 || HONOR_NANS (XEXP (x, 1)))
52bfebf0
RS
2564 return 1;
2565 break;
2566
2567 case EQ:
2568 case NE:
3d3dbadd 2569 if (HONOR_SNANS (x))
52bfebf0
RS
2570 return 1;
2571 /* Often comparison is CC mode, so check operand modes. */
3d3dbadd
MG
2572 if (HONOR_SNANS (XEXP (x, 0))
2573 || HONOR_SNANS (XEXP (x, 1)))
55143861
JJ
2574 return 1;
2575 break;
2576
22fd5743
FH
2577 case FIX:
2578 /* Conversion of floating point might trap. */
1b457aa4 2579 if (flag_trapping_math && HONOR_NANS (XEXP (x, 0)))
22fd5743
FH
2580 return 1;
2581 break;
2582
05cc23e8
RH
2583 case NEG:
2584 case ABS:
e3947b34 2585 case SUBREG:
05cc23e8
RH
2586 /* These operations don't trap even with floating point. */
2587 break;
2588
2c88418c
RS
2589 default:
2590 /* Any floating arithmetic may trap. */
c84a808e 2591 if (SCALAR_FLOAT_MODE_P (GET_MODE (x)) && flag_trapping_math)
2c88418c
RS
2592 return 1;
2593 }
2594
2595 fmt = GET_RTX_FORMAT (code);
2596 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
2597 {
2598 if (fmt[i] == 'e')
2599 {
e755fcf5 2600 if (may_trap_p_1 (XEXP (x, i), flags))
2c88418c
RS
2601 return 1;
2602 }
2603 else if (fmt[i] == 'E')
2604 {
b3694847 2605 int j;
2c88418c 2606 for (j = 0; j < XVECLEN (x, i); j++)
e755fcf5 2607 if (may_trap_p_1 (XVECEXP (x, i, j), flags))
2c88418c
RS
2608 return 1;
2609 }
2610 }
2611 return 0;
2612}
2358ff91
EB
2613
2614/* Return nonzero if evaluating rtx X might cause a trap. */
2615
2616int
f7d504c2 2617may_trap_p (const_rtx x)
2358ff91 2618{
e755fcf5
ZD
2619 return may_trap_p_1 (x, 0);
2620}
2621
c0220ea4 2622/* Same as above, but additionally return nonzero if evaluating rtx X might
2358ff91
EB
 2623 cause a fault. We define a fault for the purpose of this function as an
2624 erroneous execution condition that cannot be encountered during the normal
2625 execution of a valid program; the typical example is an unaligned memory
2626 access on a strict alignment machine. The compiler guarantees that it
2627 doesn't generate code that will fault from a valid program, but this
2628 guarantee doesn't mean anything for individual instructions. Consider
2629 the following example:
2630
2631 struct S { int d; union { char *cp; int *ip; }; };
2632
2633 int foo(struct S *s)
2634 {
2635 if (s->d == 1)
2636 return *s->ip;
2637 else
2638 return *s->cp;
2639 }
2640
2641 on a strict alignment machine. In a valid program, foo will never be
2642 invoked on a structure for which d is equal to 1 and the underlying
2643 unique field of the union not aligned on a 4-byte boundary, but the
2644 expression *s->ip might cause a fault if considered individually.
2645
2646 At the RTL level, potentially problematic expressions will almost always
 2647 satisfy may_trap_p; for example, the above dereference can be emitted as
 2648 (mem:SI (reg:P)) and this expression satisfies may_trap_p for a generic register.
2649 However, suppose that foo is inlined in a caller that causes s->cp to
2650 point to a local character variable and guarantees that s->d is not set
2651 to 1; foo may have been effectively translated into pseudo-RTL as:
2652
2653 if ((reg:SI) == 1)
2654 (set (reg:SI) (mem:SI (%fp - 7)))
2655 else
2656 (set (reg:QI) (mem:QI (%fp - 7)))
2657
2658 Now (mem:SI (%fp - 7)) is considered as not may_trap_p since it is a
2659 memory reference to a stack slot, but it will certainly cause a fault
2660 on a strict alignment machine. */
2661
2662int
f7d504c2 2663may_trap_or_fault_p (const_rtx x)
2358ff91 2664{
48e8382e 2665 return may_trap_p_1 (x, 1);
2358ff91 2666}
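/* Hypothetical sketch: code hoisting must use the stricter test, since
   a MEM that cannot trap in its original position may still fault once
   moved out of its guarding context (see the foo example above).  */

static bool
safe_to_hoist_p (const_rtx x)
{
  return ! may_trap_or_fault_p (x) && ! side_effects_p (x);
}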
2c88418c
RS
2667\f
2668/* Return nonzero if X contains a comparison that is not either EQ or NE,
2669 i.e., an inequality. */
2670
2671int
f7d504c2 2672inequality_comparisons_p (const_rtx x)
2c88418c 2673{
b3694847
SS
2674 const char *fmt;
2675 int len, i;
f7d504c2 2676 const enum rtx_code code = GET_CODE (x);
2c88418c
RS
2677
2678 switch (code)
2679 {
2680 case REG:
2681 case SCRATCH:
2682 case PC:
2683 case CC0:
d8116890 2684 CASE_CONST_ANY:
2c88418c
RS
2685 case CONST:
2686 case LABEL_REF:
2687 case SYMBOL_REF:
2688 return 0;
2689
2690 case LT:
2691 case LTU:
2692 case GT:
2693 case GTU:
2694 case LE:
2695 case LEU:
2696 case GE:
2697 case GEU:
2698 return 1;
a6a2274a 2699
e9a25f70
JL
2700 default:
2701 break;
2c88418c
RS
2702 }
2703
2704 len = GET_RTX_LENGTH (code);
2705 fmt = GET_RTX_FORMAT (code);
2706
2707 for (i = 0; i < len; i++)
2708 {
2709 if (fmt[i] == 'e')
2710 {
2711 if (inequality_comparisons_p (XEXP (x, i)))
2712 return 1;
2713 }
2714 else if (fmt[i] == 'E')
2715 {
b3694847 2716 int j;
2c88418c
RS
2717 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
2718 if (inequality_comparisons_p (XVECEXP (x, i, j)))
2719 return 1;
2720 }
2721 }
a6a2274a 2722
2c88418c
RS
2723 return 0;
2724}
2725\f
1ed0205e
VM
2726/* Replace any occurrence of FROM in X with TO. The function does
 2727 not recurse into CONST_DOUBLE when doing the replacement.
2c88418c
RS
2728
2729 Note that copying is not done so X must not be shared unless all copies
2730 are to be modified. */
2731
2732rtx
0c20a65f 2733replace_rtx (rtx x, rtx from, rtx to)
2c88418c 2734{
b3694847
SS
2735 int i, j;
2736 const char *fmt;
2c88418c
RS
2737
2738 if (x == from)
2739 return to;
2740
2741 /* Allow this function to make replacements in EXPR_LISTs. */
2742 if (x == 0)
2743 return 0;
2744
9dd791c8
AO
2745 if (GET_CODE (x) == SUBREG)
2746 {
55d796da 2747 rtx new_rtx = replace_rtx (SUBREG_REG (x), from, to);
9dd791c8 2748
481683e1 2749 if (CONST_INT_P (new_rtx))
9dd791c8 2750 {
55d796da 2751 x = simplify_subreg (GET_MODE (x), new_rtx,
9dd791c8
AO
2752 GET_MODE (SUBREG_REG (x)),
2753 SUBREG_BYTE (x));
41374e13 2754 gcc_assert (x);
9dd791c8
AO
2755 }
2756 else
55d796da 2757 SUBREG_REG (x) = new_rtx;
9dd791c8
AO
2758
2759 return x;
2760 }
2761 else if (GET_CODE (x) == ZERO_EXTEND)
2762 {
55d796da 2763 rtx new_rtx = replace_rtx (XEXP (x, 0), from, to);
9dd791c8 2764
481683e1 2765 if (CONST_INT_P (new_rtx))
9dd791c8
AO
2766 {
2767 x = simplify_unary_operation (ZERO_EXTEND, GET_MODE (x),
55d796da 2768 new_rtx, GET_MODE (XEXP (x, 0)));
41374e13 2769 gcc_assert (x);
9dd791c8
AO
2770 }
2771 else
55d796da 2772 XEXP (x, 0) = new_rtx;
9dd791c8
AO
2773
2774 return x;
2775 }
2776
2c88418c
RS
2777 fmt = GET_RTX_FORMAT (GET_CODE (x));
2778 for (i = GET_RTX_LENGTH (GET_CODE (x)) - 1; i >= 0; i--)
2779 {
2780 if (fmt[i] == 'e')
2781 XEXP (x, i) = replace_rtx (XEXP (x, i), from, to);
2782 else if (fmt[i] == 'E')
2783 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
2784 XVECEXP (x, i, j) = replace_rtx (XVECEXP (x, i, j), from, to);
2785 }
2786
2787 return x;
a6a2274a 2788}
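/* Hypothetical usage sketch: rewrite every use of OLD_RTX in the
   pattern of INSN to NEW_RTX in place.  The pattern must not be shared,
   since replace_rtx modifies X destructively.  */

static void
rewrite_insn_uses (rtx_insn *insn, rtx old_rtx, rtx new_rtx)
{
  PATTERN (insn) = replace_rtx (PATTERN (insn), old_rtx, new_rtx);
}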
2c88418c 2789\f
a2b7026c
RS
2790/* Replace occurrences of the OLD_LABEL in *LOC with NEW_LABEL. Also track
2791 the change in LABEL_NUSES if UPDATE_LABEL_NUSES. */
39811184 2792
a2b7026c
RS
2793void
2794replace_label (rtx *loc, rtx old_label, rtx new_label, bool update_label_nuses)
39811184 2795{
a2b7026c
RS
2796 /* Handle jump tables specially, since ADDR_{DIFF_,}VECs can be long. */
2797 rtx x = *loc;
2798 if (JUMP_TABLE_DATA_P (x))
4af16369 2799 {
a2b7026c
RS
2800 x = PATTERN (x);
2801 rtvec vec = XVEC (x, GET_CODE (x) == ADDR_DIFF_VEC);
2802 int len = GET_NUM_ELEM (vec);
2803 for (int i = 0; i < len; ++i)
4af16369 2804 {
a2b7026c
RS
2805 rtx ref = RTVEC_ELT (vec, i);
2806 if (XEXP (ref, 0) == old_label)
2807 {
2808 XEXP (ref, 0) = new_label;
2809 if (update_label_nuses)
2810 {
2811 ++LABEL_NUSES (new_label);
2812 --LABEL_NUSES (old_label);
2813 }
2814 }
4af16369 2815 }
a2b7026c 2816 return;
4af16369
JZ
2817 }
2818
39811184 2819 /* If this is a JUMP_INSN, then we also need to fix the JUMP_LABEL
a2b7026c 2820 field. This is not handled by the iterator because it doesn't
39811184 2821 handle unprinted ('0') fields. */
a2b7026c
RS
2822 if (JUMP_P (x) && JUMP_LABEL (x) == old_label)
2823 JUMP_LABEL (x) = new_label;
39811184 2824
a2b7026c
RS
2825 subrtx_ptr_iterator::array_type array;
2826 FOR_EACH_SUBRTX_PTR (iter, array, loc, ALL)
4af16369 2827 {
a2b7026c
RS
2828 rtx *loc = *iter;
2829 if (rtx x = *loc)
4af16369 2830 {
a2b7026c
RS
2831 if (GET_CODE (x) == SYMBOL_REF
2832 && CONSTANT_POOL_ADDRESS_P (x))
2833 {
2834 rtx c = get_pool_constant (x);
2835 if (rtx_referenced_p (old_label, c))
2836 {
2837 /* Create a copy of constant C; replace the label inside
2838 but do not update LABEL_NUSES because uses in constant pool
2839 are not counted. */
2840 rtx new_c = copy_rtx (c);
2841 replace_label (&new_c, old_label, new_label, false);
2842
2843 /* Add the new constant NEW_C to constant pool and replace
2844 the old reference to constant by new reference. */
2845 rtx new_mem = force_const_mem (get_pool_mode (x), new_c);
2846 *loc = replace_rtx (x, x, XEXP (new_mem, 0));
2847 }
2848 }
2849
2850 if ((GET_CODE (x) == LABEL_REF
2851 || GET_CODE (x) == INSN_LIST)
2852 && XEXP (x, 0) == old_label)
2853 {
2854 XEXP (x, 0) = new_label;
2855 if (update_label_nuses)
2856 {
2857 ++LABEL_NUSES (new_label);
2858 --LABEL_NUSES (old_label);
2859 }
2860 }
4af16369 2861 }
4af16369 2862 }
a2b7026c 2863}
39811184 2864
a2b7026c
RS
2865void
2866replace_label_in_insn (rtx_insn *insn, rtx old_label, rtx new_label,
2867 bool update_label_nuses)
2868{
2869 rtx insn_as_rtx = insn;
2870 replace_label (&insn_as_rtx, old_label, new_label, update_label_nuses);
2871 gcc_checking_assert (insn_as_rtx == insn);
39811184
JZ
2872}
2873
e08cf836 2874/* Return true if X is referenced in BODY. */
39811184 2875
e08cf836
RS
2876bool
2877rtx_referenced_p (const_rtx x, const_rtx body)
39811184 2878{
e08cf836
RS
2879 subrtx_iterator::array_type array;
2880 FOR_EACH_SUBRTX (iter, array, body, ALL)
2881 if (const_rtx y = *iter)
2882 {
2883 /* Check if a label_ref Y refers to label X. */
a827d9b1
DM
2884 if (GET_CODE (y) == LABEL_REF
2885 && LABEL_P (x)
2886 && LABEL_REF_LABEL (y) == x)
e08cf836 2887 return true;
39811184 2888
e08cf836
RS
2889 if (rtx_equal_p (x, y))
2890 return true;
39811184 2891
e08cf836
RS
2892 /* If Y is a reference to pool constant traverse the constant. */
2893 if (GET_CODE (y) == SYMBOL_REF
2894 && CONSTANT_POOL_ADDRESS_P (y))
2895 iter.substitute (get_pool_constant (y));
2896 }
2897 return false;
39811184
JZ
2898}
2899
ee735eef
JZ
 2900/* If INSN is a tablejump, return true and store the label (before the jump
 2901 table) to *LABELP and the jump table to *TABLEP. LABELP and TABLEP may be NULL. */
39811184
JZ
2902
2903bool
c5241a21 2904tablejump_p (const rtx_insn *insn, rtx *labelp, rtx_jump_table_data **tablep)
39811184 2905{
ee735eef
JZ
2906 rtx label, table;
2907
dc0ff1c8
BS
2908 if (!JUMP_P (insn))
2909 return false;
2910
2911 label = JUMP_LABEL (insn);
2912 if (label != NULL_RTX && !ANY_RETURN_P (label)
b32d5189 2913 && (table = NEXT_INSN (as_a <rtx_insn *> (label))) != NULL_RTX
481683e1 2914 && JUMP_TABLE_DATA_P (table))
39811184 2915 {
ee735eef
JZ
2916 if (labelp)
2917 *labelp = label;
2918 if (tablep)
8942ee0f 2919 *tablep = as_a <rtx_jump_table_data *> (table);
39811184
JZ
2920 return true;
2921 }
2922 return false;
2923}
2924
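/* Hypothetical sketch: visit every branch target of a tablejump, using
   the same ADDR_VEC/ADDR_DIFF_VEC layout as replace_label above.  */

static void
visit_tablejump_targets (const rtx_insn *insn, void (*visit) (rtx))
{
  rtx_jump_table_data *table;
  if (tablejump_p (insn, NULL, &table))
    {
      rtx body = PATTERN (table);
      int elt = GET_CODE (body) == ADDR_DIFF_VEC;
      for (int i = 0; i < XVECLEN (body, elt); i++)
        visit (XEXP (XVECEXP (body, elt, i), 0));
    }
}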
fce7e199
RH
 2925/* A subroutine of computed_jump_p. Return 1 if X contains a REG or MEM or
2926 constant that is not in the constant pool and not in the condition
2927 of an IF_THEN_ELSE. */
2a1777af
JL
2928
2929static int
f7d504c2 2930computed_jump_p_1 (const_rtx x)
2a1777af 2931{
f7d504c2 2932 const enum rtx_code code = GET_CODE (x);
2a1777af 2933 int i, j;
6f7d635c 2934 const char *fmt;
2a1777af
JL
2935
2936 switch (code)
2937 {
2a1777af
JL
2938 case LABEL_REF:
2939 case PC:
2940 return 0;
2941
fce7e199 2942 case CONST:
d8116890 2943 CASE_CONST_ANY:
fce7e199 2944 case SYMBOL_REF:
2a1777af
JL
2945 case REG:
2946 return 1;
2947
2948 case MEM:
2949 return ! (GET_CODE (XEXP (x, 0)) == SYMBOL_REF
2950 && CONSTANT_POOL_ADDRESS_P (XEXP (x, 0)));
2951
2952 case IF_THEN_ELSE:
fce7e199
RH
2953 return (computed_jump_p_1 (XEXP (x, 1))
2954 || computed_jump_p_1 (XEXP (x, 2)));
1d300e19
KG
2955
2956 default:
2957 break;
2a1777af
JL
2958 }
2959
2960 fmt = GET_RTX_FORMAT (code);
2961 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
2962 {
2963 if (fmt[i] == 'e'
fce7e199 2964 && computed_jump_p_1 (XEXP (x, i)))
2a1777af
JL
2965 return 1;
2966
d4757e6a 2967 else if (fmt[i] == 'E')
2a1777af 2968 for (j = 0; j < XVECLEN (x, i); j++)
fce7e199 2969 if (computed_jump_p_1 (XVECEXP (x, i, j)))
2a1777af
JL
2970 return 1;
2971 }
2972
2973 return 0;
2974}
2975
2976/* Return nonzero if INSN is an indirect jump (aka computed jump).
2977
2978 Tablejumps and casesi insns are not considered indirect jumps;
4eb00163 2979 we can recognize them by a (use (label_ref)). */
2a1777af
JL
2980
2981int
f7d504c2 2982computed_jump_p (const_rtx insn)
2a1777af
JL
2983{
2984 int i;
4b4bf941 2985 if (JUMP_P (insn))
2a1777af
JL
2986 {
2987 rtx pat = PATTERN (insn);
2a1777af 2988
cf7c4aa6
HPN
2989 /* If we have a JUMP_LABEL set, we're not a computed jump. */
2990 if (JUMP_LABEL (insn) != NULL)
f759eb8b 2991 return 0;
cf7c4aa6
HPN
2992
2993 if (GET_CODE (pat) == PARALLEL)
2a1777af
JL
2994 {
2995 int len = XVECLEN (pat, 0);
2996 int has_use_labelref = 0;
2997
2998 for (i = len - 1; i >= 0; i--)
2999 if (GET_CODE (XVECEXP (pat, 0, i)) == USE
3000 && (GET_CODE (XEXP (XVECEXP (pat, 0, i), 0))
3001 == LABEL_REF))
c7b3b99f
PCC
3002 {
3003 has_use_labelref = 1;
3004 break;
3005 }
2a1777af
JL
3006
3007 if (! has_use_labelref)
3008 for (i = len - 1; i >= 0; i--)
3009 if (GET_CODE (XVECEXP (pat, 0, i)) == SET
3010 && SET_DEST (XVECEXP (pat, 0, i)) == pc_rtx
fce7e199 3011 && computed_jump_p_1 (SET_SRC (XVECEXP (pat, 0, i))))
2a1777af
JL
3012 return 1;
3013 }
3014 else if (GET_CODE (pat) == SET
3015 && SET_DEST (pat) == pc_rtx
fce7e199 3016 && computed_jump_p_1 (SET_SRC (pat)))
2a1777af
JL
3017 return 1;
3018 }
3019 return 0;
3020}
ccc2d6d0 3021
4deef538
AO
3022\f
3023
8d8e205b
RS
3024/* MEM has a PRE/POST-INC/DEC/MODIFY address X. Extract the operands of
3025 the equivalent add insn and pass the result to FN, using DATA as the
3026 final argument. */
4deef538
AO
3027
3028static int
8d8e205b 3029for_each_inc_dec_find_inc_dec (rtx mem, for_each_inc_dec_fn fn, void *data)
4deef538 3030{
8d8e205b 3031 rtx x = XEXP (mem, 0);
4deef538
AO
3032 switch (GET_CODE (x))
3033 {
3034 case PRE_INC:
3035 case POST_INC:
3036 {
8d8e205b 3037 int size = GET_MODE_SIZE (GET_MODE (mem));
4deef538
AO
3038 rtx r1 = XEXP (x, 0);
3039 rtx c = gen_int_mode (size, GET_MODE (r1));
8d8e205b 3040 return fn (mem, x, r1, r1, c, data);
4deef538
AO
3041 }
3042
3043 case PRE_DEC:
3044 case POST_DEC:
3045 {
8d8e205b 3046 int size = GET_MODE_SIZE (GET_MODE (mem));
4deef538
AO
3047 rtx r1 = XEXP (x, 0);
3048 rtx c = gen_int_mode (-size, GET_MODE (r1));
8d8e205b 3049 return fn (mem, x, r1, r1, c, data);
4deef538
AO
3050 }
3051
3052 case PRE_MODIFY:
3053 case POST_MODIFY:
3054 {
3055 rtx r1 = XEXP (x, 0);
3056 rtx add = XEXP (x, 1);
8d8e205b 3057 return fn (mem, x, r1, add, NULL, data);
4deef538
AO
3058 }
3059
3060 default:
8d8e205b 3061 gcc_unreachable ();
4deef538
AO
3062 }
3063}
3064
8d8e205b
RS
3065/* Traverse *LOC looking for MEMs that have autoinc addresses.
3066 For each such autoinc operation found, call FN, passing it
4deef538
AO
3067 the innermost enclosing MEM, the operation itself, the RTX modified
3068 by the operation, two RTXs (the second may be NULL) that, once
3069 added, represent the value to be held by the modified RTX
8d8e205b
RS
3070 afterwards, and DATA. FN is to return 0 to continue the
3071 traversal or any other value to have it returned to the caller of
4deef538
AO
3072 for_each_inc_dec. */
3073
3074int
8d8e205b 3075for_each_inc_dec (rtx x,
4deef538 3076 for_each_inc_dec_fn fn,
8d8e205b 3077 void *data)
4deef538 3078{
8d8e205b
RS
3079 subrtx_var_iterator::array_type array;
3080 FOR_EACH_SUBRTX_VAR (iter, array, x, NONCONST)
3081 {
3082 rtx mem = *iter;
3083 if (mem
3084 && MEM_P (mem)
3085 && GET_RTX_CLASS (GET_CODE (XEXP (mem, 0))) == RTX_AUTOINC)
3086 {
3087 int res = for_each_inc_dec_find_inc_dec (mem, fn, data);
3088 if (res != 0)
3089 return res;
3090 iter.skip_subrtxes ();
3091 }
3092 }
3093 return 0;
4deef538
AO
3094}
3095
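/* Hypothetical sketch of a for_each_inc_dec_fn callback: count the
   autoincrement addresses contained in a pattern, e.g.
   int n = 0; for_each_inc_dec (PATTERN (insn), count_autoinc, &n);  */

static int
count_autoinc (rtx mem ATTRIBUTE_UNUSED, rtx op ATTRIBUTE_UNUSED,
               rtx dest ATTRIBUTE_UNUSED, rtx src ATTRIBUTE_UNUSED,
               rtx srcoff ATTRIBUTE_UNUSED, void *data)
{
  ++*(int *) data;
  return 0;   /* Zero continues the traversal.  */
}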
3096\f
777b1b71
RH
3097/* Searches X for any reference to REGNO, returning the rtx of the
3098 reference found if any. Otherwise, returns NULL_RTX. */
3099
3100rtx
0c20a65f 3101regno_use_in (unsigned int regno, rtx x)
777b1b71 3102{
b3694847 3103 const char *fmt;
777b1b71
RH
3104 int i, j;
3105 rtx tem;
3106
f8cfc6aa 3107 if (REG_P (x) && REGNO (x) == regno)
777b1b71
RH
3108 return x;
3109
3110 fmt = GET_RTX_FORMAT (GET_CODE (x));
3111 for (i = GET_RTX_LENGTH (GET_CODE (x)) - 1; i >= 0; i--)
3112 {
3113 if (fmt[i] == 'e')
3114 {
3115 if ((tem = regno_use_in (regno, XEXP (x, i))))
3116 return tem;
3117 }
3118 else if (fmt[i] == 'E')
3119 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
3120 if ((tem = regno_use_in (regno , XVECEXP (x, i, j))))
3121 return tem;
3122 }
3123
3124 return NULL_RTX;
3125}
2dfa9a87 3126
e5c56fd9
JH
3127/* Return a value indicating whether OP, an operand of a commutative
3128 operation, is preferred as the first or second operand. The higher
3129 the value, the stronger the preference for being the first operand.
 3130 We use negative values to indicate a preference for the second operand
 3131 and positive values for the first operand. */
3132
9b3bd424 3133int
0c20a65f 3134commutative_operand_precedence (rtx op)
e5c56fd9 3135{
e3d6e740 3136 enum rtx_code code = GET_CODE (op);
b8698a0f 3137
e5c56fd9 3138 /* Constants always become the second operand. Prefer "nice" constants. */
e3d6e740 3139 if (code == CONST_INT)
7e0b4eae 3140 return -8;
807e902e
KZ
3141 if (code == CONST_WIDE_INT)
3142 return -8;
e3d6e740 3143 if (code == CONST_DOUBLE)
7e0b4eae 3144 return -7;
091a3ac7
CF
3145 if (code == CONST_FIXED)
3146 return -7;
9ce79a7a 3147 op = avoid_constant_pool_reference (op);
79b82df3 3148 code = GET_CODE (op);
ec8e098d
PB
3149
3150 switch (GET_RTX_CLASS (code))
3151 {
3152 case RTX_CONST_OBJ:
3153 if (code == CONST_INT)
7e0b4eae 3154 return -6;
807e902e
KZ
3155 if (code == CONST_WIDE_INT)
3156 return -6;
ec8e098d 3157 if (code == CONST_DOUBLE)
7e0b4eae 3158 return -5;
091a3ac7
CF
3159 if (code == CONST_FIXED)
3160 return -5;
7e0b4eae 3161 return -4;
ec8e098d
PB
3162
3163 case RTX_EXTRA:
3164 /* SUBREGs of objects should come second. */
3165 if (code == SUBREG && OBJECT_P (SUBREG_REG (op)))
7e0b4eae 3166 return -3;
6fb5fa3c 3167 return 0;
ec8e098d
PB
3168
3169 case RTX_OBJ:
 3170 /* Complex expressions should come first, so decrease the priority
7e0b4eae
PB
 3171 of objects. Prefer pointer objects over non-pointer objects. */
3172 if ((REG_P (op) && REG_POINTER (op))
3173 || (MEM_P (op) && MEM_POINTER (op)))
3174 return -1;
3175 return -2;
ec8e098d
PB
3176
3177 case RTX_COMM_ARITH:
3178 /* Prefer operands that are themselves commutative to be first.
3179 This helps to make things linear. In particular,
3180 (and (and (reg) (reg)) (not (reg))) is canonical. */
3181 return 4;
3182
3183 case RTX_BIN_ARITH:
3184 /* If only one operand is a binary expression, it will be the first
3185 operand. In particular, (plus (minus (reg) (reg)) (neg (reg)))
3186 is canonical, although it will usually be further simplified. */
3187 return 2;
b8698a0f 3188
ec8e098d
PB
3189 case RTX_UNARY:
3190 /* Then prefer NEG and NOT. */
3191 if (code == NEG || code == NOT)
3192 return 1;
e5c56fd9 3193
ec8e098d
PB
3194 default:
3195 return 0;
3196 }
e5c56fd9
JH
3197}
3198
f63d1bf7 3199/* Return 1 iff it is necessary to swap the operands of a commutative
e5c56fd9
JH
 3200 operation in order to canonicalize the expression. */
3201
7e0b4eae 3202bool
0c20a65f 3203swap_commutative_operands_p (rtx x, rtx y)
e5c56fd9 3204{
9b3bd424
RH
3205 return (commutative_operand_precedence (x)
3206 < commutative_operand_precedence (y));
e5c56fd9 3207}
2dfa9a87
MH
3208
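/* Hypothetical canonicalization sketch: order the operands of a
   commutative operation before building it.  */

static rtx
build_canonical_plus (machine_mode mode, rtx a, rtx b)
{
  if (swap_commutative_operands_p (a, b))
    {
      rtx tmp = a;
      a = b, b = tmp;
    }
  return gen_rtx_PLUS (mode, a, b);
}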
3209/* Return 1 if X is an autoincrement side effect and the register is
3210 not the stack pointer. */
3211int
f7d504c2 3212auto_inc_p (const_rtx x)
2dfa9a87
MH
3213{
3214 switch (GET_CODE (x))
3215 {
3216 case PRE_INC:
3217 case POST_INC:
3218 case PRE_DEC:
3219 case POST_DEC:
3220 case PRE_MODIFY:
3221 case POST_MODIFY:
3222 /* There are no REG_INC notes for SP. */
3223 if (XEXP (x, 0) != stack_pointer_rtx)
3224 return 1;
3225 default:
3226 break;
3227 }
3228 return 0;
3229}
3b10cf4b 3230
f9da5064 3231/* Return nonzero if IN contains a piece of rtl that has the address LOC. */
db7ba742 3232int
f7d504c2 3233loc_mentioned_in_p (rtx *loc, const_rtx in)
db7ba742 3234{
a52b023a
PB
3235 enum rtx_code code;
3236 const char *fmt;
db7ba742
R
3237 int i, j;
3238
a52b023a
PB
3239 if (!in)
3240 return 0;
3241
3242 code = GET_CODE (in);
3243 fmt = GET_RTX_FORMAT (code);
db7ba742
R
3244 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
3245 {
db7ba742
R
3246 if (fmt[i] == 'e')
3247 {
e0651058 3248 if (loc == &XEXP (in, i) || loc_mentioned_in_p (loc, XEXP (in, i)))
db7ba742
R
3249 return 1;
3250 }
3251 else if (fmt[i] == 'E')
3252 for (j = XVECLEN (in, i) - 1; j >= 0; j--)
e0651058
AO
3253 if (loc == &XVECEXP (in, i, j)
3254 || loc_mentioned_in_p (loc, XVECEXP (in, i, j)))
db7ba742
R
3255 return 1;
3256 }
3257 return 0;
3258}
ddef6bc7 3259
bb51e270
RS
3260/* Helper function for subreg_lsb. Given a subreg's OUTER_MODE, INNER_MODE,
3261 and SUBREG_BYTE, return the bit offset where the subreg begins
3262 (counting from the least significant bit of the operand). */
33aceff2
JW
3263
3264unsigned int
ef4bddc2
RS
3265subreg_lsb_1 (machine_mode outer_mode,
3266 machine_mode inner_mode,
bb51e270 3267 unsigned int subreg_byte)
33aceff2 3268{
33aceff2
JW
3269 unsigned int bitpos;
3270 unsigned int byte;
3271 unsigned int word;
3272
3273 /* A paradoxical subreg begins at bit position 0. */
5511bc5a 3274 if (GET_MODE_PRECISION (outer_mode) > GET_MODE_PRECISION (inner_mode))
33aceff2
JW
3275 return 0;
3276
3277 if (WORDS_BIG_ENDIAN != BYTES_BIG_ENDIAN)
3278 /* If the subreg crosses a word boundary ensure that
3279 it also begins and ends on a word boundary. */
41374e13
NS
3280 gcc_assert (!((subreg_byte % UNITS_PER_WORD
3281 + GET_MODE_SIZE (outer_mode)) > UNITS_PER_WORD
3282 && (subreg_byte % UNITS_PER_WORD
3283 || GET_MODE_SIZE (outer_mode) % UNITS_PER_WORD)));
33aceff2
JW
3284
3285 if (WORDS_BIG_ENDIAN)
3286 word = (GET_MODE_SIZE (inner_mode)
bb51e270 3287 - (subreg_byte + GET_MODE_SIZE (outer_mode))) / UNITS_PER_WORD;
33aceff2 3288 else
bb51e270 3289 word = subreg_byte / UNITS_PER_WORD;
33aceff2
JW
3290 bitpos = word * BITS_PER_WORD;
3291
3292 if (BYTES_BIG_ENDIAN)
3293 byte = (GET_MODE_SIZE (inner_mode)
bb51e270 3294 - (subreg_byte + GET_MODE_SIZE (outer_mode))) % UNITS_PER_WORD;
33aceff2 3295 else
bb51e270 3296 byte = subreg_byte % UNITS_PER_WORD;
33aceff2
JW
3297 bitpos += byte * BITS_PER_UNIT;
3298
3299 return bitpos;
3300}
3301
bb51e270
RS
3302/* Given a subreg X, return the bit offset where the subreg begins
3303 (counting from the least significant bit of the reg). */
3304
3305unsigned int
f7d504c2 3306subreg_lsb (const_rtx x)
bb51e270
RS
3307{
3308 return subreg_lsb_1 (GET_MODE (x), GET_MODE (SUBREG_REG (x)),
3309 SUBREG_BYTE (x));
3310}
3311
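/* Worked example (assuming 32-bit words and 8-bit units): for
   (subreg:SI (reg:DI R) 4) on a little-endian target the subreg covers
   bytes 4-7 of R, so subreg_lsb returns 32; on a big-endian target the
   same byte offset selects the low-order word and subreg_lsb returns 0.  */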
f1f4e530 3312/* Fill in information about a subreg of a hard register.
ddef6bc7
JJ
3313 xregno - A regno of an inner hard subreg_reg (or what will become one).
3314 xmode - The mode of xregno.
3315 offset - The byte offset.
3316 ymode - The mode of a top level SUBREG (or what may become one).
0cb07998
RS
3317 info - Pointer to structure to fill in.
3318
3319 Rather than considering one particular inner register (and thus one
3320 particular "outer" register) in isolation, this function really uses
3321 XREGNO as a model for a sequence of isomorphic hard registers. Thus the
3322 function does not check whether adding INFO->offset to XREGNO gives
3323 a valid hard register; even if INFO->offset + XREGNO is out of range,
3324 there might be another register of the same type that is in range.
3325 Likewise it doesn't check whether HARD_REGNO_MODE_OK accepts the new
3326 register, since that can depend on things like whether the final
3327 register number is even or odd. Callers that want to check whether
3328 this particular subreg can be replaced by a simple (reg ...) should
3329 use simplify_subreg_regno. */
3330
c619e982 3331void
ef4bddc2
RS
3332subreg_get_info (unsigned int xregno, machine_mode xmode,
3333 unsigned int offset, machine_mode ymode,
f1f4e530 3334 struct subreg_info *info)
04c5580f 3335{
8521c414 3336 int nregs_xmode, nregs_ymode;
04c5580f 3337 int mode_multiple, nregs_multiple;
f1f4e530 3338 int offset_adj, y_offset, y_offset_adj;
8521c414 3339 int regsize_xmode, regsize_ymode;
f1f4e530 3340 bool rknown;
04c5580f 3341
41374e13 3342 gcc_assert (xregno < FIRST_PSEUDO_REGISTER);
04c5580f 3343
f1f4e530
JM
3344 rknown = false;
3345
dd79bb7e
GK
3346 /* If there are holes in a non-scalar mode in registers, we expect
3347 that it is made up of its units concatenated together. */
8521c414 3348 if (HARD_REGNO_NREGS_HAS_PADDING (xregno, xmode))
dd79bb7e 3349 {
ef4bddc2 3350 machine_mode xmode_unit;
8521c414
JM
3351
3352 nregs_xmode = HARD_REGNO_NREGS_WITH_PADDING (xregno, xmode);
3353 if (GET_MODE_INNER (xmode) == VOIDmode)
3354 xmode_unit = xmode;
3355 else
3356 xmode_unit = GET_MODE_INNER (xmode);
3357 gcc_assert (HARD_REGNO_NREGS_HAS_PADDING (xregno, xmode_unit));
3358 gcc_assert (nregs_xmode
3359 == (GET_MODE_NUNITS (xmode)
3360 * HARD_REGNO_NREGS_WITH_PADDING (xregno, xmode_unit)));
3361 gcc_assert (hard_regno_nregs[xregno][xmode]
3362 == (hard_regno_nregs[xregno][xmode_unit]
3363 * GET_MODE_NUNITS (xmode)));
dd79bb7e
GK
3364
3365 /* You can only ask for a SUBREG of a value with holes in the middle
3366 if you don't cross the holes. (Such a SUBREG should be done by
3367 picking a different register class, or doing it in memory if
3368 necessary.) An example of a value with holes is XCmode on 32-bit
3369 x86 with -m128bit-long-double; it's represented in 6 32-bit registers,
b8698a0f 3370 3 for each part, but in memory it's two 128-bit parts.
dd79bb7e
GK
3371 Padding is assumed to be at the end (not necessarily the 'high part')
3372 of each unit. */
b8698a0f 3373 if ((offset / GET_MODE_SIZE (xmode_unit) + 1
8521c414
JM
3374 < GET_MODE_NUNITS (xmode))
3375 && (offset / GET_MODE_SIZE (xmode_unit)
dd79bb7e 3376 != ((offset + GET_MODE_SIZE (ymode) - 1)
8521c414 3377 / GET_MODE_SIZE (xmode_unit))))
f1f4e530
JM
3378 {
3379 info->representable_p = false;
3380 rknown = true;
3381 }
dd79bb7e
GK
3382 }
3383 else
3384 nregs_xmode = hard_regno_nregs[xregno][xmode];
b8698a0f 3385
66fd46b6 3386 nregs_ymode = hard_regno_nregs[xregno][ymode];
04c5580f 3387
dd79bb7e 3388 /* Paradoxical subregs are otherwise valid. */
f1f4e530
JM
3389 if (!rknown
3390 && offset == 0
5511bc5a 3391 && GET_MODE_PRECISION (ymode) > GET_MODE_PRECISION (xmode))
f1f4e530
JM
3392 {
3393 info->representable_p = true;
3394 /* If this is a big endian paradoxical subreg, which uses more
3395 actual hard registers than the original register, we must
3396 return a negative offset so that we find the proper highpart
3397 of the register. */
3398 if (GET_MODE_SIZE (ymode) > UNITS_PER_WORD
c0a6a1ef 3399 ? REG_WORDS_BIG_ENDIAN : BYTES_BIG_ENDIAN)
f1f4e530
JM
3400 info->offset = nregs_xmode - nregs_ymode;
3401 else
3402 info->offset = 0;
3403 info->nregs = nregs_ymode;
3404 return;
3405 }
04c5580f 3406
8521c414
JM
3407 /* If registers store different numbers of bits in the different
3408 modes, we cannot generally form this subreg. */
f1f4e530 3409 if (!HARD_REGNO_NREGS_HAS_PADDING (xregno, xmode)
5f7fc2b8
JM
3410 && !HARD_REGNO_NREGS_HAS_PADDING (xregno, ymode)
3411 && (GET_MODE_SIZE (xmode) % nregs_xmode) == 0
3412 && (GET_MODE_SIZE (ymode) % nregs_ymode) == 0)
f1f4e530
JM
3413 {
3414 regsize_xmode = GET_MODE_SIZE (xmode) / nregs_xmode;
f1f4e530 3415 regsize_ymode = GET_MODE_SIZE (ymode) / nregs_ymode;
f1f4e530
JM
3416 if (!rknown && regsize_xmode > regsize_ymode && nregs_ymode > 1)
3417 {
3418 info->representable_p = false;
3419 info->nregs
3420 = (GET_MODE_SIZE (ymode) + regsize_xmode - 1) / regsize_xmode;
3421 info->offset = offset / regsize_xmode;
3422 return;
3423 }
3424 if (!rknown && regsize_ymode > regsize_xmode && nregs_xmode > 1)
3425 {
3426 info->representable_p = false;
3427 info->nregs
3428 = (GET_MODE_SIZE (ymode) + regsize_xmode - 1) / regsize_xmode;
3429 info->offset = offset / regsize_xmode;
3430 return;
3431 }
3432 }
8521c414 3433
dd79bb7e 3434 /* Lowpart subregs are otherwise valid. */
f1f4e530
JM
3435 if (!rknown && offset == subreg_lowpart_offset (ymode, xmode))
3436 {
3437 info->representable_p = true;
3438 rknown = true;
a446b4e8
JM
3439
3440 if (offset == 0 || nregs_xmode == nregs_ymode)
3441 {
3442 info->offset = 0;
3443 info->nregs = nregs_ymode;
3444 return;
3445 }
f1f4e530 3446 }
04c5580f 3447
dd79bb7e
GK
3448 /* This should always pass, otherwise we don't know how to verify
3449 the constraint. These conditions may be relaxed but
3450 subreg_regno_offset would need to be redesigned. */
41374e13 3451 gcc_assert ((GET_MODE_SIZE (xmode) % GET_MODE_SIZE (ymode)) == 0);
41374e13 3452 gcc_assert ((nregs_xmode % nregs_ymode) == 0);
04c5580f 3453
c0a6a1ef
BS
3454 if (WORDS_BIG_ENDIAN != REG_WORDS_BIG_ENDIAN
3455 && GET_MODE_SIZE (xmode) > UNITS_PER_WORD)
3456 {
3457 HOST_WIDE_INT xsize = GET_MODE_SIZE (xmode);
3458 HOST_WIDE_INT ysize = GET_MODE_SIZE (ymode);
3459 HOST_WIDE_INT off_low = offset & (ysize - 1);
3460 HOST_WIDE_INT off_high = offset & ~(ysize - 1);
3461 offset = (xsize - ysize - off_high) | off_low;
3462 }
b20b352b 3463 /* The XMODE value can be seen as a vector of NREGS_XMODE
dcc24678 3464 values. The subreg must represent the lowpart of a given field.
04c5580f 3465 Compute what field it is. */
f1f4e530
JM
3466 offset_adj = offset;
3467 offset_adj -= subreg_lowpart_offset (ymode,
3468 mode_for_size (GET_MODE_BITSIZE (xmode)
3469 / nregs_xmode,
3470 MODE_INT, 0));
04c5580f 3471
dd79bb7e 3472 /* Size of ymode must not be greater than the size of xmode. */
04c5580f 3473 mode_multiple = GET_MODE_SIZE (xmode) / GET_MODE_SIZE (ymode);
41374e13 3474 gcc_assert (mode_multiple != 0);
04c5580f
JH
3475
3476 y_offset = offset / GET_MODE_SIZE (ymode);
f1f4e530
JM
3477 y_offset_adj = offset_adj / GET_MODE_SIZE (ymode);
3478 nregs_multiple = nregs_xmode / nregs_ymode;
41374e13 3479
f1f4e530 3480 gcc_assert ((offset_adj % GET_MODE_SIZE (ymode)) == 0);
41374e13
NS
3481 gcc_assert ((mode_multiple % nregs_multiple) == 0);
3482
f1f4e530
JM
3483 if (!rknown)
3484 {
3485 info->representable_p = (!(y_offset_adj % (mode_multiple / nregs_multiple)));
3486 rknown = true;
3487 }
3488 info->offset = (y_offset / (mode_multiple / nregs_multiple)) * nregs_ymode;
3489 info->nregs = nregs_ymode;
3490}
3491
3492/* This function returns the regno offset of a subreg expression.
3493 xregno - A regno of an inner hard subreg_reg (or what will become one).
3494 xmode - The mode of xregno.
3495 offset - The byte offset.
3496 ymode - The mode of a top level SUBREG (or what may become one).
3497 RETURN - The regno offset which would be used. */
3498unsigned int
ef4bddc2
RS
3499subreg_regno_offset (unsigned int xregno, machine_mode xmode,
3500 unsigned int offset, machine_mode ymode)
f1f4e530
JM
3501{
3502 struct subreg_info info;
3503 subreg_get_info (xregno, xmode, offset, ymode, &info);
3504 return info.offset;
3505}
3506
3507/* This function returns true when the offset is representable via
3508 subreg_offset in the given regno.
3509 xregno - A regno of an inner hard subreg_reg (or what will become one).
3510 xmode - The mode of xregno.
3511 offset - The byte offset.
3512 ymode - The mode of a top level SUBREG (or what may become one).
3513 RETURN - Whether the offset is representable. */
3514bool
ef4bddc2
RS
3515subreg_offset_representable_p (unsigned int xregno, machine_mode xmode,
3516 unsigned int offset, machine_mode ymode)
f1f4e530
JM
3517{
3518 struct subreg_info info;
3519 subreg_get_info (xregno, xmode, offset, ymode, &info);
05cee290 3520 return info.representable_p;
04c5580f
JH
3521}
3522
eef302d2
RS
3523/* Return the number of a YMODE register to which
3524
3525 (subreg:YMODE (reg:XMODE XREGNO) OFFSET)
3526
3527 can be simplified. Return -1 if the subreg can't be simplified.
3528
3529 XREGNO is a hard register number. */
3530
3531int
ef4bddc2
RS
3532simplify_subreg_regno (unsigned int xregno, machine_mode xmode,
3533 unsigned int offset, machine_mode ymode)
eef302d2
RS
3534{
3535 struct subreg_info info;
3536 unsigned int yregno;
3537
3538#ifdef CANNOT_CHANGE_MODE_CLASS
3539 /* Give the backend a chance to disallow the mode change. */
3540 if (GET_MODE_CLASS (xmode) != MODE_COMPLEX_INT
3541 && GET_MODE_CLASS (xmode) != MODE_COMPLEX_FLOAT
55a2c322
VM
3542 && REG_CANNOT_CHANGE_MODE_P (xregno, xmode, ymode)
3543 /* We can use mode change in LRA for some transformations. */
3544 && ! lra_in_progress)
eef302d2
RS
3545 return -1;
3546#endif
3547
3548 /* We shouldn't simplify stack-related registers. */
3549 if ((!reload_completed || frame_pointer_needed)
d4e0d036 3550 && xregno == FRAME_POINTER_REGNUM)
eef302d2
RS
3551 return -1;
3552
3553 if (FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
98072ee5 3554 && xregno == ARG_POINTER_REGNUM)
eef302d2
RS
3555 return -1;
3556
55a2c322
VM
3557 if (xregno == STACK_POINTER_REGNUM
3558 /* We should convert hard stack register in LRA if it is
3559 possible. */
3560 && ! lra_in_progress)
eef302d2
RS
3561 return -1;
3562
3563 /* Try to get the register offset. */
3564 subreg_get_info (xregno, xmode, offset, ymode, &info);
3565 if (!info.representable_p)
3566 return -1;
3567
3568 /* Make sure that the offsetted register value is in range. */
3569 yregno = xregno + info.offset;
3570 if (!HARD_REGISTER_NUM_P (yregno))
3571 return -1;
3572
3573 /* See whether (reg:YMODE YREGNO) is valid.
3574
3575 ??? We allow invalid registers if (reg:XMODE XREGNO) is also invalid.
eb93b31f
EB
3576 This is a kludge to work around how complex FP arguments are passed
3577 on IA-64 and should be fixed. See PR target/49226. */
eef302d2
RS
3578 if (!HARD_REGNO_MODE_OK (yregno, ymode)
3579 && HARD_REGNO_MODE_OK (xregno, xmode))
3580 return -1;
3581
3582 return (int) yregno;
3583}
3584
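/* Hypothetical usage sketch: fold a hard-register SUBREG to a plain REG
   when simplify_subreg_regno allows it.  */

static rtx
try_subreg_as_reg (machine_mode ymode, rtx reg, unsigned int offset)
{
  int yregno = simplify_subreg_regno (REGNO (reg), GET_MODE (reg),
                                      offset, ymode);
  return yregno < 0 ? NULL_RTX : gen_rtx_REG (ymode, (unsigned int) yregno);
}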
dc297297 3585/* Return the final regno that a subreg expression refers to. */
a6a2274a 3586unsigned int
f7d504c2 3587subreg_regno (const_rtx x)
ddef6bc7
JJ
3588{
3589 unsigned int ret;
3590 rtx subreg = SUBREG_REG (x);
3591 int regno = REGNO (subreg);
3592
a6a2274a
KH
3593 ret = regno + subreg_regno_offset (regno,
3594 GET_MODE (subreg),
ddef6bc7
JJ
3595 SUBREG_BYTE (x),
3596 GET_MODE (x));
3597 return ret;
3598
3599}
f1f4e530
JM
3600
3601/* Return the number of registers that a subreg expression refers
3602 to. */
3603unsigned int
f7d504c2 3604subreg_nregs (const_rtx x)
ba49cb7b
KZ
3605{
3606 return subreg_nregs_with_regno (REGNO (SUBREG_REG (x)), x);
3607}
3608
3609/* Return the number of registers that a subreg REG with REGNO
3610 expression refers to. This is a copy of the rtlanal.c:subreg_nregs
3611 changed so that the regno can be passed in. */
3612
3613unsigned int
3614subreg_nregs_with_regno (unsigned int regno, const_rtx x)
f1f4e530
JM
3615{
3616 struct subreg_info info;
3617 rtx subreg = SUBREG_REG (x);
f1f4e530
JM
3618
3619 subreg_get_info (regno, GET_MODE (subreg), SUBREG_BYTE (x), GET_MODE (x),
3620 &info);
3621 return info.nregs;
3622}
3623
ba49cb7b 3624
833366d6
JH
3625struct parms_set_data
3626{
3627 int nregs;
3628 HARD_REG_SET regs;
3629};
3630
3631/* Helper function for noticing stores to parameter registers. */
3632static void
7bc980e1 3633parms_set (rtx x, const_rtx pat ATTRIBUTE_UNUSED, void *data)
833366d6 3634{
1634b18f 3635 struct parms_set_data *const d = (struct parms_set_data *) data;
833366d6
JH
3636 if (REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER
3637 && TEST_HARD_REG_BIT (d->regs, REGNO (x)))
3638 {
3639 CLEAR_HARD_REG_BIT (d->regs, REGNO (x));
3640 d->nregs--;
3641 }
3642}
3643
a6a2274a 3644/* Look backward for the first parameter to be loaded.
b2df20b4
DJ
3645 Note that loads of all parameters will not necessarily be
3646 found if CSE has eliminated some of them (e.g., an argument
3647 to the outer function is passed down as a parameter).
833366d6 3648 Do not skip BOUNDARY. */
62fc98cc 3649rtx_insn *
9321cf00 3650find_first_parameter_load (rtx_insn *call_insn, rtx_insn *boundary)
3651{
3652 struct parms_set_data parm;
3653 rtx p;
3654 rtx_insn *before, *first_set;
3655
3656 /* Since different machines initialize their parameter registers
3657 in different orders, assume nothing. Collect the set of all
3658 parameter registers. */
3659 CLEAR_HARD_REG_SET (parm.regs);
3660 parm.nregs = 0;
3661 for (p = CALL_INSN_FUNCTION_USAGE (call_insn); p; p = XEXP (p, 1))
3662 if (GET_CODE (XEXP (p, 0)) == USE
f8cfc6aa 3663 && REG_P (XEXP (XEXP (p, 0), 0)))
833366d6 3664 {
41374e13 3665 gcc_assert (REGNO (XEXP (XEXP (p, 0), 0)) < FIRST_PSEUDO_REGISTER);
3666
3667 /* We only care about registers which can hold function
3668 arguments. */
3669 if (!FUNCTION_ARG_REGNO_P (REGNO (XEXP (XEXP (p, 0), 0))))
3670 continue;
3671
3672 SET_HARD_REG_BIT (parm.regs, REGNO (XEXP (XEXP (p, 0), 0)));
3673 parm.nregs++;
3674 }
3675 before = call_insn;
b2df20b4 3676 first_set = call_insn;
3677
3678 /* Search backward for the first set of a register in this set. */
3679 while (parm.nregs && before != boundary)
3680 {
3681 before = PREV_INSN (before);
3682
3683 /* It is possible that some loads got CSEed from one call to
3684 another. Stop in that case. */
4b4bf941 3685 if (CALL_P (before))
3686 break;
3687
3688      /* Our caller must either ensure that we will find all sets
3689	 (in case the code has not been optimized yet), or guard
3690	 against possible labels by setting BOUNDARY to the
3691	 preceding CODE_LABEL.  */
4b4bf941 3692 if (LABEL_P (before))
dbc1a163 3693 {
41374e13 3694 gcc_assert (before == boundary);
3695 break;
3696 }
833366d6 3697
0d025d43 3698 if (INSN_P (before))
3699 {
3700 int nregs_old = parm.nregs;
3701 note_stores (PATTERN (before), parms_set, &parm);
3702 /* If we found something that did not set a parameter reg,
3703 we're done. Do not keep going, as that might result
3704 in hoisting an insn before the setting of a pseudo
3705 that is used by the hoisted insn. */
3706 if (nregs_old != parm.nregs)
3707 first_set = before;
3708 else
3709 break;
3710 }
833366d6 3711 }
9321cf00 3712 return first_set;
833366d6 3713}
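/* Usage sketch (hedged; the surrounding pass is illustrative): a
   caller that wants to hoist or schedule code around a call does

     rtx_insn *first = find_first_parameter_load (call_insn, boundary);

   and then avoids inserting anything between FIRST and CALL_INSN, so
   that parameter registers are not clobbered between their setup and
   the call.  BOUNDARY must be an insn the caller knows is safe to
   stop at, e.g. the preceding CODE_LABEL.  */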
3dec4024 3714
3715/* Return true if we should avoid inserting code between INSN and the
3716   preceding call instruction.  */
3717
3718bool
e4685bc8 3719keep_with_call_p (const rtx_insn *insn)
3720{
3721 rtx set;
3722
3723 if (INSN_P (insn) && (set = single_set (insn)) != NULL)
3724 {
f8cfc6aa 3725 if (REG_P (SET_DEST (set))
5df533b3 3726 && REGNO (SET_DEST (set)) < FIRST_PSEUDO_REGISTER
3727 && fixed_regs[REGNO (SET_DEST (set))]
3728 && general_operand (SET_SRC (set), VOIDmode))
3729 return true;
f8cfc6aa 3730 if (REG_P (SET_SRC (set))
82f81f18 3731 && targetm.calls.function_value_regno_p (REGNO (SET_SRC (set)))
f8cfc6aa 3732 && REG_P (SET_DEST (set))
3733 && REGNO (SET_DEST (set)) >= FIRST_PSEUDO_REGISTER)
3734 return true;
3735 /* There may be a stack pop just after the call and before the store
3736 of the return register. Search for the actual store when deciding
3737 if we can break or not. */
3738 if (SET_DEST (set) == stack_pointer_rtx)
3739 {
75547801 3740 /* This CONST_CAST is okay because next_nonnote_insn just
4e9b57fa 3741 returns its argument and we assign it to a const_rtx
75547801 3742 variable. */
3743 const rtx_insn *i2
3744 = next_nonnote_insn (const_cast<rtx_insn *> (insn));
bc204393 3745 if (i2 && keep_with_call_p (i2))
3746 return true;
3747 }
3748 }
3749 return false;
3750}
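/* For example, on a target that returns values in a fixed hard
   register, the copy

     (set (reg:SI <pseudo>) (reg:SI <return-value-hardreg>))

   emitted just after a call matches the second test above, so no
   code may be inserted between the call and that copy.  */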
71d2c5bd 3751
3752/* Return true if LABEL is a target of JUMP_INSN. This applies only
3753 to non-complex jumps. That is, direct unconditional, conditional,
3754 and tablejumps, but not computed jumps or returns. It also does
3755 not apply to the fallthru case of a conditional jump. */
3756
3757bool
c5241a21 3758label_is_jump_target_p (const_rtx label, const rtx_insn *jump_insn)
3759{
3760 rtx tmp = JUMP_LABEL (jump_insn);
8942ee0f 3761 rtx_jump_table_data *table;
3762
3763 if (label == tmp)
3764 return true;
3765
8942ee0f 3766 if (tablejump_p (jump_insn, NULL, &table))
432f982f 3767 {
95c43227 3768 rtvec vec = table->get_labels ();
3769 int i, veclen = GET_NUM_ELEM (vec);
3770
3771 for (i = 0; i < veclen; ++i)
3772 if (XEXP (RTVEC_ELT (vec, i), 0) == label)
3773 return true;
3774 }
3775
3776 if (find_reg_note (jump_insn, REG_LABEL_TARGET, label))
3777 return true;
3778
3779 return false;
3780}
3781
3782\f
3783/* Return an estimate of the cost of computing rtx X.
3784 One use is in cse, to decide which expression to keep in the hash table.
3785 Another is in rtl generation, to pick the cheapest way to multiply.
b8698a0f 3786 Other uses like the latter are expected in the future.
f40751dd 3787
3788 X appears as operand OPNO in an expression with code OUTER_CODE.
3789 SPEED specifies whether costs optimized for speed or size should
f40751dd 3790 be returned. */
3791
3792int
68f932c4 3793rtx_cost (rtx x, enum rtx_code outer_code, int opno, bool speed)
3794{
3795 int i, j;
3796 enum rtx_code code;
3797 const char *fmt;
3798 int total;
e098c169 3799 int factor;
3800
3801 if (x == 0)
3802 return 0;
3803
3804 /* A size N times larger than UNITS_PER_WORD likely needs N times as
3805 many insns, taking N times as long. */
3806 factor = GET_MODE_SIZE (GET_MODE (x)) / UNITS_PER_WORD;
3807 if (factor == 0)
3808 factor = 1;
3809
3810 /* Compute the default costs of certain things.
3811 Note that targetm.rtx_costs can override the defaults. */
3812
3813 code = GET_CODE (x);
3814 switch (code)
3815 {
3816 case MULT:
3817 /* Multiplication has time-complexity O(N*N), where N is the
3818 number of units (translated from digits) when using
3819 schoolbook long multiplication. */
3820 total = factor * factor * COSTS_N_INSNS (5);
3821 break;
3822 case DIV:
3823 case UDIV:
3824 case MOD:
3825 case UMOD:
3826 /* Similarly, complexity for schoolbook long division. */
3827 total = factor * factor * COSTS_N_INSNS (7);
3828 break;
3829 case USE:
db3edc20 3830 /* Used in combine.c as a marker. */
3831 total = 0;
3832 break;
3833 case SET:
3834 /* A SET doesn't have a mode, so let's look at the SET_DEST to get
3835 the mode for the factor. */
3836 factor = GET_MODE_SIZE (GET_MODE (SET_DEST (x))) / UNITS_PER_WORD;
3837 if (factor == 0)
3838 factor = 1;
3839      /* Fall through.  */
f894b69b 3840 default:
e098c169 3841 total = factor * COSTS_N_INSNS (1);
3842 }
3843
3844 switch (code)
3845 {
3846 case REG:
3847 return 0;
3848
3849 case SUBREG:
edb81165 3850 total = 0;
3851 /* If we can't tie these modes, make this expensive. The larger
3852 the mode, the more expensive it is. */
3853 if (! MODES_TIEABLE_P (GET_MODE (x), GET_MODE (SUBREG_REG (x))))
e098c169 3854 return COSTS_N_INSNS (2 + factor);
3855 break;
3856
3857 default:
68f932c4 3858 if (targetm.rtx_costs (x, code, outer_code, opno, &total, speed))
3859 return total;
3860 break;
3861 }
3862
3863 /* Sum the costs of the sub-rtx's, plus cost of this operation,
3864 which is already in total. */
3865
3866 fmt = GET_RTX_FORMAT (code);
3867 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
3868 if (fmt[i] == 'e')
68f932c4 3869 total += rtx_cost (XEXP (x, i), code, i, speed);
3870 else if (fmt[i] == 'E')
3871 for (j = 0; j < XVECLEN (x, i); j++)
68f932c4 3872 total += rtx_cost (XVECEXP (x, i, j), code, i, speed);
3873
3874 return total;
3875}
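/* Illustrative sketch (not part of the original file): comparing the
   default cost of a multiplication against a shift, much as
   expansion code might:

     rtx reg = gen_reg_rtx (SImode);
     rtx mul = gen_rtx_MULT (SImode, reg, GEN_INT (5));
     rtx shl = gen_rtx_ASHIFT (SImode, reg, GEN_INT (2));
     int mul_cost = rtx_cost (mul, SET, 1, true);
     int shl_cost = rtx_cost (shl, SET, 1, true);

   With the defaults above, the MULT code itself contributes
   COSTS_N_INSNS (5) and the ASHIFT COSTS_N_INSNS (1) for a mode no
   wider than a word; operand costs are added recursively, and most
   targets override these numbers in their rtx_costs hook.  */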
3876
3877/* Fill in the structure C with information about both speed and size rtx
68f932c4 3878 costs for X, which is operand OPNO in an expression with code OUTER. */
3879
3880void
3881get_full_rtx_cost (rtx x, enum rtx_code outer, int opno,
3882 struct full_rtx_costs *c)
22939744 3883{
3884 c->speed = rtx_cost (x, outer, opno, true);
3885 c->size = rtx_cost (x, outer, opno, false);
3886}
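/* Usage sketch: a pass that must weigh both metrics at once can fill
   two full_rtx_costs structures and compare them afterwards,
   e.g. with costs_lt_p:

     struct full_rtx_costs a, b;
     get_full_rtx_cost (x, SET, 1, &a);
     get_full_rtx_cost (y, SET, 1, &b);

   X and Y here stand for whatever candidate expressions the caller
   is choosing between.  */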
3887
3888\f
3889/* Return cost of address expression X.
b8698a0f 3890 Expect that X is properly formed address reference.
3891
3892 SPEED parameter specify whether costs optimized for speed or size should
3893 be returned. */
3894
3895int
ef4bddc2 3896address_cost (rtx x, machine_mode mode, addr_space_t as, bool speed)
f894b69b 3897{
3898  /* We may be asked for the cost of various unusual addresses, such as
3899     operands of push instructions.  It is not worthwhile to complicate
3900     the target hook for such cases.  */
3901
09e881c9 3902 if (!memory_address_addr_space_p (mode, x, as))
3903 return 1000;
3904
b413068c 3905 return targetm.address_cost (x, mode, as, speed);
3906}
3907
3908/* If the target doesn't override, compute the cost as with arithmetic. */
3909
3910int
ef4bddc2 3911default_address_cost (rtx x, machine_mode, addr_space_t, bool speed)
f894b69b 3912{
68f932c4 3913 return rtx_cost (x, MEM, 0, speed);
f894b69b 3914}
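/* Illustrative example (assuming reg+4 is a valid address on the
   target): for a base-plus-displacement address such as

     rtx addr = plus_constant (Pmode, gen_reg_rtx (Pmode), 4);
     int cost = address_cost (addr, SImode, ADDR_SPACE_GENERIC, true);

   the fallback above prices the PLUS as ordinary arithmetic; targets
   with displaced addressing modes normally provide an address_cost
   hook that makes such forms cheap or free.  */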
3915\f
3916
3917unsigned HOST_WIDE_INT
ef4bddc2 3918nonzero_bits (const_rtx x, machine_mode mode)
3919{
3920 return cached_nonzero_bits (x, mode, NULL_RTX, VOIDmode, 0);
3921}
3922
3923unsigned int
ef4bddc2 3924num_sign_bit_copies (const_rtx x, machine_mode mode)
3925{
3926 return cached_num_sign_bit_copies (x, mode, NULL_RTX, VOIDmode, 0);
3927}
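/* Two illustrative facts (sketches, not from the original sources),
   for SImode rtxes X and Y:

     nonzero_bits (gen_rtx_AND (SImode, x, GEN_INT (0xff)), SImode)

   is at most 0xff, because the AND case below intersects the nonzero
   bits of both operands; and, when DImode is 64 bits wide,

     num_sign_bit_copies (gen_rtx_SIGN_EXTEND (DImode, y), DImode)

   is at least 33, since the extension replicates the sign bit
   through the upper 32 bits.  Combine and simplify-rtx rely on such
   facts to delete redundant maskings and extensions.  */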
3928
3929/* The function cached_nonzero_bits is a wrapper around nonzero_bits1.
3930 It avoids exponential behavior in nonzero_bits1 when X has
3931 identical subexpressions on the first or the second level. */
3932
3933static unsigned HOST_WIDE_INT
3934cached_nonzero_bits (const_rtx x, machine_mode mode, const_rtx known_x,
3935 machine_mode known_mode,
3936 unsigned HOST_WIDE_INT known_ret)
3937{
3938 if (x == known_x && mode == known_mode)
3939 return known_ret;
3940
3941 /* Try to find identical subexpressions. If found call
3942 nonzero_bits1 on X with the subexpressions as KNOWN_X and the
3943 precomputed value for the subexpression as KNOWN_RET. */
3944
3945 if (ARITHMETIC_P (x))
3946 {
3947 rtx x0 = XEXP (x, 0);
3948 rtx x1 = XEXP (x, 1);
3949
3950 /* Check the first level. */
3951 if (x0 == x1)
3952 return nonzero_bits1 (x, mode, x0, mode,
3953 cached_nonzero_bits (x0, mode, known_x,
3954 known_mode, known_ret));
3955
3956 /* Check the second level. */
3957 if (ARITHMETIC_P (x0)
3958 && (x1 == XEXP (x0, 0) || x1 == XEXP (x0, 1)))
3959 return nonzero_bits1 (x, mode, x1, mode,
3960 cached_nonzero_bits (x1, mode, known_x,
3961 known_mode, known_ret));
3962
3963 if (ARITHMETIC_P (x1)
3964 && (x0 == XEXP (x1, 0) || x0 == XEXP (x1, 1)))
3965 return nonzero_bits1 (x, mode, x0, mode,
3966 cached_nonzero_bits (x0, mode, known_x,
3967 known_mode, known_ret));
3968 }
3969
3970 return nonzero_bits1 (x, mode, known_x, known_mode, known_ret);
3971}
3972
3973/* We let num_sign_bit_copies recur into nonzero_bits as that is useful.
3974 We don't let nonzero_bits recur into num_sign_bit_copies, because that
3975 is less useful. We can't allow both, because that results in exponential
3976 run time recursion. There is a nullstone testcase that triggered
3977 this. This macro avoids accidental uses of num_sign_bit_copies. */
3978#define cached_num_sign_bit_copies sorry_i_am_preventing_exponential_behavior
3979
3980/* Given an expression, X, compute which bits in X can be nonzero.
3981 We don't care about bits outside of those defined in MODE.
3982
3983   For most X this is simply GET_MODE_MASK (MODE), but if X is
3984 an arithmetic operation, we can do better. */
3985
3986static unsigned HOST_WIDE_INT
3987nonzero_bits1 (const_rtx x, machine_mode mode, const_rtx known_x,
3988 machine_mode known_mode,
3989 unsigned HOST_WIDE_INT known_ret)
3990{
3991 unsigned HOST_WIDE_INT nonzero = GET_MODE_MASK (mode);
3992 unsigned HOST_WIDE_INT inner_nz;
3993 enum rtx_code code;
ef4bddc2 3994 machine_mode inner_mode;
5511bc5a 3995 unsigned int mode_width = GET_MODE_PRECISION (mode);
2f93eea8 3996
3997 /* For floating-point and vector values, assume all bits are needed. */
3998 if (FLOAT_MODE_P (GET_MODE (x)) || FLOAT_MODE_P (mode)
3999 || VECTOR_MODE_P (GET_MODE (x)) || VECTOR_MODE_P (mode))
4000 return nonzero;
4001
4002 /* If X is wider than MODE, use its mode instead. */
5511bc5a 4003 if (GET_MODE_PRECISION (GET_MODE (x)) > mode_width)
4004 {
4005 mode = GET_MODE (x);
4006 nonzero = GET_MODE_MASK (mode);
5511bc5a 4007 mode_width = GET_MODE_PRECISION (mode);
4008 }
4009
4010 if (mode_width > HOST_BITS_PER_WIDE_INT)
4011 /* Our only callers in this case look for single bit values. So
4012 just return the mode mask. Those tests will then be false. */
4013 return nonzero;
4014
4015#ifndef WORD_REGISTER_OPERATIONS
4016 /* If MODE is wider than X, but both are a single word for both the host
4017 and target machines, we can compute this from which bits of the
4018 object might be nonzero in its own mode, taking into account the fact
4019 that on many CISC machines, accessing an object in a wider mode
4020 causes the high-order bits to become undefined. So they are
4021 not known to be zero. */
4022
4023 if (GET_MODE (x) != VOIDmode && GET_MODE (x) != mode
4024 && GET_MODE_PRECISION (GET_MODE (x)) <= BITS_PER_WORD
4025 && GET_MODE_PRECISION (GET_MODE (x)) <= HOST_BITS_PER_WIDE_INT
4026 && GET_MODE_PRECISION (mode) > GET_MODE_PRECISION (GET_MODE (x)))
4027 {
4028 nonzero &= cached_nonzero_bits (x, GET_MODE (x),
4029 known_x, known_mode, known_ret);
4030 nonzero |= GET_MODE_MASK (mode) & ~GET_MODE_MASK (GET_MODE (x));
4031 return nonzero;
4032 }
4033#endif
4034
4035 code = GET_CODE (x);
4036 switch (code)
4037 {
4038 case REG:
4039#if defined(POINTERS_EXTEND_UNSIGNED) && !defined(HAVE_ptr_extend)
4040 /* If pointers extend unsigned and this is a pointer in Pmode, say that
4041 all the bits above ptr_mode are known to be zero. */
5932a4d4 4042 /* As we do not know which address space the pointer is referring to,
4043 we can do this only if the target does not support different pointer
4044 or address modes depending on the address space. */
4045 if (target_default_pointer_address_modes_p ()
4046 && POINTERS_EXTEND_UNSIGNED && GET_MODE (x) == Pmode
4047 && REG_POINTER (x))
4048 nonzero &= GET_MODE_MASK (ptr_mode);
4049#endif
4050
4051 /* Include declared information about alignment of pointers. */
4052 /* ??? We don't properly preserve REG_POINTER changes across
4053 pointer-to-integer casts, so we can't trust it except for
4054 things that we know must be pointers. See execute/960116-1.c. */
4055 if ((x == stack_pointer_rtx
4056 || x == frame_pointer_rtx
4057 || x == arg_pointer_rtx)
4058 && REGNO_POINTER_ALIGN (REGNO (x)))
4059 {
4060 unsigned HOST_WIDE_INT alignment
4061 = REGNO_POINTER_ALIGN (REGNO (x)) / BITS_PER_UNIT;
4062
4063#ifdef PUSH_ROUNDING
4064 /* If PUSH_ROUNDING is defined, it is possible for the
4065 stack to be momentarily aligned only to that amount,
4066 so we pick the least alignment. */
4067 if (x == stack_pointer_rtx && PUSH_ARGS)
4068 alignment = MIN ((unsigned HOST_WIDE_INT) PUSH_ROUNDING (1),
4069 alignment);
4070#endif
4071
4072 nonzero &= ~(alignment - 1);
4073 }
4074
4075 {
4076 unsigned HOST_WIDE_INT nonzero_for_hook = nonzero;
55d796da 4077 rtx new_rtx = rtl_hooks.reg_nonzero_bits (x, mode, known_x,
4078 known_mode, known_ret,
4079 &nonzero_for_hook);
4080
4081 if (new_rtx)
4082 nonzero_for_hook &= cached_nonzero_bits (new_rtx, mode, known_x,
4083 known_mode, known_ret);
4084
4085 return nonzero_for_hook;
4086 }
4087
4088 case CONST_INT:
4089#ifdef SHORT_IMMEDIATES_SIGN_EXTEND
4090 /* If X is negative in MODE, sign-extend the value. */
4091 if (INTVAL (x) > 0
4092 && mode_width < BITS_PER_WORD
4093 && (UINTVAL (x) & ((unsigned HOST_WIDE_INT) 1 << (mode_width - 1)))
4094 != 0)
0cadbfaa 4095 return UINTVAL (x) | (HOST_WIDE_INT_M1U << mode_width);
4096#endif
4097
c04fc4f0 4098 return UINTVAL (x);
4099
4100 case MEM:
4101#ifdef LOAD_EXTEND_OP
4102 /* In many, if not most, RISC machines, reading a byte from memory
4103 zeros the rest of the register. Noticing that fact saves a lot
4104 of extra zero-extends. */
4105 if (LOAD_EXTEND_OP (GET_MODE (x)) == ZERO_EXTEND)
4106 nonzero &= GET_MODE_MASK (GET_MODE (x));
4107#endif
4108 break;
4109
4110 case EQ: case NE:
4111 case UNEQ: case LTGT:
4112 case GT: case GTU: case UNGT:
4113 case LT: case LTU: case UNLT:
4114 case GE: case GEU: case UNGE:
4115 case LE: case LEU: case UNLE:
4116 case UNORDERED: case ORDERED:
4117 /* If this produces an integer result, we know which bits are set.
4118 Code here used to clear bits outside the mode of X, but that is
4119 now done above. */
4120 /* Mind that MODE is the mode the caller wants to look at this
4121 operation in, and not the actual operation mode. We can wind
4122 up with (subreg:DI (gt:V4HI x y)), and we don't have anything
4123 that describes the results of a vector compare. */
4124 if (GET_MODE_CLASS (GET_MODE (x)) == MODE_INT
4125 && mode_width <= HOST_BITS_PER_WIDE_INT)
4126 nonzero = STORE_FLAG_VALUE;
4127 break;
4128
4129 case NEG:
4130#if 0
4131 /* Disabled to avoid exponential mutual recursion between nonzero_bits
4132 and num_sign_bit_copies. */
4133 if (num_sign_bit_copies (XEXP (x, 0), GET_MODE (x))
5511bc5a 4134 == GET_MODE_PRECISION (GET_MODE (x)))
4135 nonzero = 1;
4136#endif
4137
86cdf393 4138 if (GET_MODE_PRECISION (GET_MODE (x)) < mode_width)
4139 nonzero |= (GET_MODE_MASK (mode) & ~GET_MODE_MASK (GET_MODE (x)));
4140 break;
4141
4142 case ABS:
4143#if 0
4144 /* Disabled to avoid exponential mutual recursion between nonzero_bits
4145 and num_sign_bit_copies. */
4146 if (num_sign_bit_copies (XEXP (x, 0), GET_MODE (x))
5511bc5a 4147 == GET_MODE_PRECISION (GET_MODE (x)))
4148 nonzero = 1;
4149#endif
4150 break;
4151
4152 case TRUNCATE:
4153 nonzero &= (cached_nonzero_bits (XEXP (x, 0), mode,
4154 known_x, known_mode, known_ret)
4155 & GET_MODE_MASK (mode));
4156 break;
4157
4158 case ZERO_EXTEND:
4159 nonzero &= cached_nonzero_bits (XEXP (x, 0), mode,
4160 known_x, known_mode, known_ret);
4161 if (GET_MODE (XEXP (x, 0)) != VOIDmode)
4162 nonzero &= GET_MODE_MASK (GET_MODE (XEXP (x, 0)));
4163 break;
4164
4165 case SIGN_EXTEND:
4166 /* If the sign bit is known clear, this is the same as ZERO_EXTEND.
4167 Otherwise, show all the bits in the outer mode but not the inner
4168 may be nonzero. */
4169 inner_nz = cached_nonzero_bits (XEXP (x, 0), mode,
4170 known_x, known_mode, known_ret);
4171 if (GET_MODE (XEXP (x, 0)) != VOIDmode)
4172 {
4173 inner_nz &= GET_MODE_MASK (GET_MODE (XEXP (x, 0)));
2d0c270f 4174 if (val_signbit_known_set_p (GET_MODE (XEXP (x, 0)), inner_nz))
4175 inner_nz |= (GET_MODE_MASK (mode)
4176 & ~GET_MODE_MASK (GET_MODE (XEXP (x, 0))));
4177 }
4178
4179 nonzero &= inner_nz;
4180 break;
4181
4182 case AND:
4183 nonzero &= cached_nonzero_bits (XEXP (x, 0), mode,
4184 known_x, known_mode, known_ret)
4185 & cached_nonzero_bits (XEXP (x, 1), mode,
4186 known_x, known_mode, known_ret);
4187 break;
4188
4189 case XOR: case IOR:
4190 case UMIN: case UMAX: case SMIN: case SMAX:
4191 {
c04fc4f0
EB
4192 unsigned HOST_WIDE_INT nonzero0
4193 = cached_nonzero_bits (XEXP (x, 0), mode,
4194 known_x, known_mode, known_ret);
4195
4196 /* Don't call nonzero_bits for the second time if it cannot change
4197 anything. */
4198 if ((nonzero & nonzero0) != nonzero)
4199 nonzero &= nonzero0
4200 | cached_nonzero_bits (XEXP (x, 1), mode,
4201 known_x, known_mode, known_ret);
4202 }
4203 break;
4204
4205 case PLUS: case MINUS:
4206 case MULT:
4207 case DIV: case UDIV:
4208 case MOD: case UMOD:
4209 /* We can apply the rules of arithmetic to compute the number of
4210 high- and low-order zero bits of these operations. We start by
4211 computing the width (position of the highest-order nonzero bit)
4212 and the number of low-order zero bits for each value. */
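	 /* Worked example (illustrative): if nz0 == 0x0c and
	    nz1 == 0x06, then width0 == 4, width1 == 3, low0 == 2 and
	    low1 == 1.  For PLUS this gives result_width == 5 and
	    result_low == 1, so the sum is known to lie within the
	    mask 0x1e.  */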
4213 {
4214 unsigned HOST_WIDE_INT nz0
4215 = cached_nonzero_bits (XEXP (x, 0), mode,
4216 known_x, known_mode, known_ret);
4217 unsigned HOST_WIDE_INT nz1
4218 = cached_nonzero_bits (XEXP (x, 1), mode,
4219 known_x, known_mode, known_ret);
5511bc5a 4220 int sign_index = GET_MODE_PRECISION (GET_MODE (x)) - 1;
4221 int width0 = floor_log2 (nz0) + 1;
4222 int width1 = floor_log2 (nz1) + 1;
4223 int low0 = floor_log2 (nz0 & -nz0);
4224 int low1 = floor_log2 (nz1 & -nz1);
4225 unsigned HOST_WIDE_INT op0_maybe_minusp
4226 = nz0 & ((unsigned HOST_WIDE_INT) 1 << sign_index);
4227 unsigned HOST_WIDE_INT op1_maybe_minusp
4228 = nz1 & ((unsigned HOST_WIDE_INT) 1 << sign_index);
4229 unsigned int result_width = mode_width;
4230 int result_low = 0;
4231
4232 switch (code)
4233 {
4234 case PLUS:
4235 result_width = MAX (width0, width1) + 1;
4236 result_low = MIN (low0, low1);
4237 break;
4238 case MINUS:
4239 result_low = MIN (low0, low1);
4240 break;
4241 case MULT:
4242 result_width = width0 + width1;
4243 result_low = low0 + low1;
4244 break;
4245 case DIV:
4246 if (width1 == 0)
4247 break;
c04fc4f0 4248 if (!op0_maybe_minusp && !op1_maybe_minusp)
4249 result_width = width0;
4250 break;
4251 case UDIV:
4252 if (width1 == 0)
4253 break;
4254 result_width = width0;
4255 break;
4256 case MOD:
4257 if (width1 == 0)
4258 break;
c04fc4f0 4259 if (!op0_maybe_minusp && !op1_maybe_minusp)
4260 result_width = MIN (width0, width1);
4261 result_low = MIN (low0, low1);
4262 break;
4263 case UMOD:
4264 if (width1 == 0)
4265 break;
4266 result_width = MIN (width0, width1);
4267 result_low = MIN (low0, low1);
4268 break;
4269 default:
41374e13 4270 gcc_unreachable ();
4271 }
4272
4273 if (result_width < mode_width)
c04fc4f0 4274 nonzero &= ((unsigned HOST_WIDE_INT) 1 << result_width) - 1;
4275
4276 if (result_low > 0)
c04fc4f0 4277 nonzero &= ~(((unsigned HOST_WIDE_INT) 1 << result_low) - 1);
4278 }
4279 break;
4280
4281 case ZERO_EXTRACT:
481683e1 4282 if (CONST_INT_P (XEXP (x, 1))
2f93eea8 4283 && INTVAL (XEXP (x, 1)) < HOST_BITS_PER_WIDE_INT)
c04fc4f0 4284 nonzero &= ((unsigned HOST_WIDE_INT) 1 << INTVAL (XEXP (x, 1))) - 1;
4285 break;
4286
4287 case SUBREG:
4288 /* If this is a SUBREG formed for a promoted variable that has
4289 been zero-extended, we know that at least the high-order bits
4290 are zero, though others might be too. */
4291
362d42dc 4292 if (SUBREG_PROMOTED_VAR_P (x) && SUBREG_PROMOTED_UNSIGNED_P (x))
4293 nonzero = GET_MODE_MASK (GET_MODE (x))
4294 & cached_nonzero_bits (SUBREG_REG (x), GET_MODE (x),
4295 known_x, known_mode, known_ret);
4296
2d0c270f 4297 inner_mode = GET_MODE (SUBREG_REG (x));
4298 /* If the inner mode is a single word for both the host and target
4299 machines, we can compute this from which bits of the inner
4300 object might be nonzero. */
4301 if (GET_MODE_PRECISION (inner_mode) <= BITS_PER_WORD
4302 && (GET_MODE_PRECISION (inner_mode) <= HOST_BITS_PER_WIDE_INT))
4303 {
4304 nonzero &= cached_nonzero_bits (SUBREG_REG (x), mode,
4305 known_x, known_mode, known_ret);
4306
4307#if defined (WORD_REGISTER_OPERATIONS) && defined (LOAD_EXTEND_OP)
4308 /* If this is a typical RISC machine, we only have to worry
4309 about the way loads are extended. */
4310 if ((LOAD_EXTEND_OP (inner_mode) == SIGN_EXTEND
4311 ? val_signbit_known_set_p (inner_mode, nonzero)
4312 : LOAD_EXTEND_OP (inner_mode) != ZERO_EXTEND)
3c0cb5de 4313 || !MEM_P (SUBREG_REG (x)))
4314#endif
4315 {
4316 /* On many CISC machines, accessing an object in a wider mode
4317 causes the high-order bits to become undefined. So they are
4318 not known to be zero. */
4319 if (GET_MODE_PRECISION (GET_MODE (x))
4320 > GET_MODE_PRECISION (inner_mode))
2f93eea8 4321 nonzero |= (GET_MODE_MASK (GET_MODE (x))
2d0c270f 4322 & ~GET_MODE_MASK (inner_mode));
4323 }
4324 }
4325 break;
4326
4327 case ASHIFTRT:
4328 case LSHIFTRT:
4329 case ASHIFT:
4330 case ROTATE:
4331 /* The nonzero bits are in two classes: any bits within MODE
4332 that aren't in GET_MODE (x) are always significant. The rest of the
4333 nonzero bits are those that are significant in the operand of
4334 the shift when shifted the appropriate number of bits. This
4335 shows that high-order bits are cleared by the right shift and
4336 low-order bits by left shifts. */
481683e1 4337 if (CONST_INT_P (XEXP (x, 1))
2f93eea8 4338 && INTVAL (XEXP (x, 1)) >= 0
39b2ac74 4339 && INTVAL (XEXP (x, 1)) < HOST_BITS_PER_WIDE_INT
5511bc5a 4340 && INTVAL (XEXP (x, 1)) < GET_MODE_PRECISION (GET_MODE (x)))
2f93eea8 4341 {
ef4bddc2 4342 machine_mode inner_mode = GET_MODE (x);
5511bc5a 4343 unsigned int width = GET_MODE_PRECISION (inner_mode);
4344 int count = INTVAL (XEXP (x, 1));
4345 unsigned HOST_WIDE_INT mode_mask = GET_MODE_MASK (inner_mode);
4346 unsigned HOST_WIDE_INT op_nonzero
4347 = cached_nonzero_bits (XEXP (x, 0), mode,
4348 known_x, known_mode, known_ret);
4349 unsigned HOST_WIDE_INT inner = op_nonzero & mode_mask;
4350 unsigned HOST_WIDE_INT outer = 0;
4351
4352 if (mode_width > width)
4353 outer = (op_nonzero & nonzero & ~mode_mask);
4354
4355 if (code == LSHIFTRT)
4356 inner >>= count;
4357 else if (code == ASHIFTRT)
4358 {
4359 inner >>= count;
4360
4361 /* If the sign bit may have been nonzero before the shift, we
4362 need to mark all the places it could have been copied to
4363 by the shift as possibly nonzero. */
4364 if (inner & ((unsigned HOST_WIDE_INT) 1 << (width - 1 - count)))
4365 inner |= (((unsigned HOST_WIDE_INT) 1 << count) - 1)
4366 << (width - count);
4367 }
4368 else if (code == ASHIFT)
4369 inner <<= count;
4370 else
4371 inner = ((inner << (count % width)
4372 | (inner >> (width - (count % width)))) & mode_mask);
4373
4374 nonzero &= (outer | inner);
4375 }
4376 break;
4377
4378 case FFS:
4379 case POPCOUNT:
4380 /* This is at most the number of bits in the mode. */
c04fc4f0 4381 nonzero = ((unsigned HOST_WIDE_INT) 2 << (floor_log2 (mode_width))) - 1;
4382 break;
4383
4384 case CLZ:
4385 /* If CLZ has a known value at zero, then the nonzero bits are
4386 that value, plus the number of bits in the mode minus one. */
4387 if (CLZ_DEFINED_VALUE_AT_ZERO (mode, nonzero))
4388 nonzero
4389 |= ((unsigned HOST_WIDE_INT) 1 << (floor_log2 (mode_width))) - 1;
4390 else
4391 nonzero = -1;
4392 break;
4393
4394 case CTZ:
4395 /* If CTZ has a known value at zero, then the nonzero bits are
4396 that value, plus the number of bits in the mode minus one. */
4397 if (CTZ_DEFINED_VALUE_AT_ZERO (mode, nonzero))
c04fc4f0
EB
4398 nonzero
4399 |= ((unsigned HOST_WIDE_INT) 1 << (floor_log2 (mode_width))) - 1;
4400 else
4401 nonzero = -1;
4402 break;
4403
4404 case CLRSB:
4405 /* This is at most the number of bits in the mode minus 1. */
4406 nonzero = ((unsigned HOST_WIDE_INT) 1 << (floor_log2 (mode_width))) - 1;
4407 break;
4408
4409 case PARITY:
4410 nonzero = 1;
4411 break;
4412
4413 case IF_THEN_ELSE:
4414 {
4415 unsigned HOST_WIDE_INT nonzero_true
4416 = cached_nonzero_bits (XEXP (x, 1), mode,
4417 known_x, known_mode, known_ret);
4418
4419 /* Don't call nonzero_bits for the second time if it cannot change
4420 anything. */
4421 if ((nonzero & nonzero_true) != nonzero)
4422 nonzero &= nonzero_true
4423 | cached_nonzero_bits (XEXP (x, 2), mode,
4424 known_x, known_mode, known_ret);
4425 }
4426 break;
4427
4428 default:
4429 break;
4430 }
4431
4432 return nonzero;
4433}
4434
4435/* See the macro definition above. */
4436#undef cached_num_sign_bit_copies
4437
4438\f
4439/* The function cached_num_sign_bit_copies is a wrapper around
4440 num_sign_bit_copies1. It avoids exponential behavior in
4441 num_sign_bit_copies1 when X has identical subexpressions on the
4442 first or the second level. */
4443
4444static unsigned int
4445cached_num_sign_bit_copies (const_rtx x, machine_mode mode, const_rtx known_x,
4446 machine_mode known_mode,
4447 unsigned int known_ret)
4448{
4449 if (x == known_x && mode == known_mode)
4450 return known_ret;
4451
4452 /* Try to find identical subexpressions. If found call
4453 num_sign_bit_copies1 on X with the subexpressions as KNOWN_X and
4454 the precomputed value for the subexpression as KNOWN_RET. */
4455
4456 if (ARITHMETIC_P (x))
4457 {
4458 rtx x0 = XEXP (x, 0);
4459 rtx x1 = XEXP (x, 1);
4460
4461 /* Check the first level. */
4462 if (x0 == x1)
4463 return
4464 num_sign_bit_copies1 (x, mode, x0, mode,
4465 cached_num_sign_bit_copies (x0, mode, known_x,
4466 known_mode,
4467 known_ret));
4468
4469 /* Check the second level. */
4470 if (ARITHMETIC_P (x0)
4471 && (x1 == XEXP (x0, 0) || x1 == XEXP (x0, 1)))
4472 return
4473 num_sign_bit_copies1 (x, mode, x1, mode,
4474 cached_num_sign_bit_copies (x1, mode, known_x,
4475 known_mode,
4476 known_ret));
4477
4478 if (ARITHMETIC_P (x1)
4479 && (x0 == XEXP (x1, 0) || x0 == XEXP (x1, 1)))
4480 return
4481 num_sign_bit_copies1 (x, mode, x0, mode,
4482 cached_num_sign_bit_copies (x0, mode, known_x,
4483 known_mode,
4484 known_ret));
4485 }
4486
4487 return num_sign_bit_copies1 (x, mode, known_x, known_mode, known_ret);
4488}
4489
4490/* Return the number of bits at the high-order end of X that are known to
4491 be equal to the sign bit. X will be used in mode MODE; if MODE is
4492 VOIDmode, X will be used in its own mode. The returned value will always
4493 be between 1 and the number of bits in MODE. */
4494
4495static unsigned int
4496num_sign_bit_copies1 (const_rtx x, machine_mode mode, const_rtx known_x,
4497 machine_mode known_mode,
4498 unsigned int known_ret)
4499{
4500 enum rtx_code code = GET_CODE (x);
5511bc5a 4501 unsigned int bitwidth = GET_MODE_PRECISION (mode);
2f93eea8
PB
4502 int num0, num1, result;
4503 unsigned HOST_WIDE_INT nonzero;
4504
4505 /* If we weren't given a mode, use the mode of X. If the mode is still
4506 VOIDmode, we don't know anything. Likewise if one of the modes is
4507 floating-point. */
4508
4509 if (mode == VOIDmode)
4510 mode = GET_MODE (x);
4511
4512 if (mode == VOIDmode || FLOAT_MODE_P (mode) || FLOAT_MODE_P (GET_MODE (x))
4513 || VECTOR_MODE_P (GET_MODE (x)) || VECTOR_MODE_P (mode))
4514 return 1;
4515
4516 /* For a smaller object, just ignore the high bits. */
5511bc5a 4517 if (bitwidth < GET_MODE_PRECISION (GET_MODE (x)))
4518 {
4519 num0 = cached_num_sign_bit_copies (x, GET_MODE (x),
4520 known_x, known_mode, known_ret);
4521 return MAX (1,
5511bc5a 4522 num0 - (int) (GET_MODE_PRECISION (GET_MODE (x)) - bitwidth));
4523 }
4524
5511bc5a 4525 if (GET_MODE (x) != VOIDmode && bitwidth > GET_MODE_PRECISION (GET_MODE (x)))
4526 {
4527#ifndef WORD_REGISTER_OPERATIONS
4528 /* If this machine does not do all register operations on the entire
4529 register and MODE is wider than the mode of X, we can say nothing
4530 at all about the high-order bits. */
2f93eea8
PB
4531 return 1;
4532#else
4533 /* Likewise on machines that do, if the mode of the object is smaller
4534 than a word and loads of that size don't sign extend, we can say
4535 nothing about the high order bits. */
5511bc5a 4536 if (GET_MODE_PRECISION (GET_MODE (x)) < BITS_PER_WORD
2f93eea8
PB
4537#ifdef LOAD_EXTEND_OP
4538 && LOAD_EXTEND_OP (GET_MODE (x)) != SIGN_EXTEND
4539#endif
4540 )
4541 return 1;
4542#endif
4543 }
4544
4545 switch (code)
4546 {
4547 case REG:
4548
4549#if defined(POINTERS_EXTEND_UNSIGNED) && !defined(HAVE_ptr_extend)
4550 /* If pointers extend signed and this is a pointer in Pmode, say that
4551 all the bits above ptr_mode are known to be sign bit copies. */
5932a4d4 4552 /* As we do not know which address space the pointer is referring to,
4553 we can do this only if the target does not support different pointer
4554 or address modes depending on the address space. */
4555 if (target_default_pointer_address_modes_p ()
4556 && ! POINTERS_EXTEND_UNSIGNED && GET_MODE (x) == Pmode
4557 && mode == Pmode && REG_POINTER (x))
5511bc5a 4558 return GET_MODE_PRECISION (Pmode) - GET_MODE_PRECISION (ptr_mode) + 1;
4559#endif
4560
4561 {
4562 unsigned int copies_for_hook = 1, copies = 1;
55d796da 4563 rtx new_rtx = rtl_hooks.reg_num_sign_bit_copies (x, mode, known_x,
4564 known_mode, known_ret,
4565 &copies_for_hook);
4566
4567 if (new_rtx)
4568 copies = cached_num_sign_bit_copies (new_rtx, mode, known_x,
4569 known_mode, known_ret);
4570
4571 if (copies > 1 || copies_for_hook > 1)
4572 return MAX (copies, copies_for_hook);
4573
4574 /* Else, use nonzero_bits to guess num_sign_bit_copies (see below). */
4575 }
4576 break;
4577
4578 case MEM:
4579#ifdef LOAD_EXTEND_OP
4580 /* Some RISC machines sign-extend all loads of smaller than a word. */
4581 if (LOAD_EXTEND_OP (GET_MODE (x)) == SIGN_EXTEND)
4582 return MAX (1, ((int) bitwidth
5511bc5a 4583 - (int) GET_MODE_PRECISION (GET_MODE (x)) + 1));
4584#endif
4585 break;
4586
4587 case CONST_INT:
4588 /* If the constant is negative, take its 1's complement and remask.
4589 Then see how many zero bits we have. */
c04fc4f0 4590 nonzero = UINTVAL (x) & GET_MODE_MASK (mode);
2f93eea8 4591 if (bitwidth <= HOST_BITS_PER_WIDE_INT
c04fc4f0 4592 && (nonzero & ((unsigned HOST_WIDE_INT) 1 << (bitwidth - 1))) != 0)
4593 nonzero = (~nonzero) & GET_MODE_MASK (mode);
4594
4595 return (nonzero == 0 ? bitwidth : bitwidth - floor_log2 (nonzero) - 1);
4596
4597 case SUBREG:
4598 /* If this is a SUBREG for a promoted object that is sign-extended
4599 and we are looking at it in a wider mode, we know that at least the
4600 high-order bits are known to be sign bit copies. */
4601
362d42dc 4602 if (SUBREG_PROMOTED_VAR_P (x) && SUBREG_PROMOTED_SIGNED_P (x))
4603 {
4604 num0 = cached_num_sign_bit_copies (SUBREG_REG (x), mode,
4605 known_x, known_mode, known_ret);
4606 return MAX ((int) bitwidth
5511bc5a 4607 - (int) GET_MODE_PRECISION (GET_MODE (x)) + 1,
4608 num0);
4609 }
4610
4611 /* For a smaller object, just ignore the high bits. */
5511bc5a 4612 if (bitwidth <= GET_MODE_PRECISION (GET_MODE (SUBREG_REG (x))))
4613 {
4614 num0 = cached_num_sign_bit_copies (SUBREG_REG (x), VOIDmode,
4615 known_x, known_mode, known_ret);
4616 return MAX (1, (num0
5511bc5a 4617 - (int) (GET_MODE_PRECISION (GET_MODE (SUBREG_REG (x)))
2f93eea8
PB
4618 - bitwidth)));
4619 }
4620
4621#ifdef WORD_REGISTER_OPERATIONS
4622#ifdef LOAD_EXTEND_OP
4623 /* For paradoxical SUBREGs on machines where all register operations
4624 affect the entire register, just look inside. Note that we are
4625 passing MODE to the recursive call, so the number of sign bit copies
4626 will remain relative to that mode, not the inner mode. */
4627
4628 /* This works only if loads sign extend. Otherwise, if we get a
4629 reload for the inner part, it may be loaded from the stack, and
4630 then we lose all sign bit copies that existed before the store
4631 to the stack. */
4632
6a4bdc79 4633 if (paradoxical_subreg_p (x)
2f93eea8 4634 && LOAD_EXTEND_OP (GET_MODE (SUBREG_REG (x))) == SIGN_EXTEND
3c0cb5de 4635 && MEM_P (SUBREG_REG (x)))
4636 return cached_num_sign_bit_copies (SUBREG_REG (x), mode,
4637 known_x, known_mode, known_ret);
4638#endif
4639#endif
4640 break;
4641
4642 case SIGN_EXTRACT:
481683e1 4643 if (CONST_INT_P (XEXP (x, 1)))
4644 return MAX (1, (int) bitwidth - INTVAL (XEXP (x, 1)));
4645 break;
4646
4647 case SIGN_EXTEND:
5511bc5a 4648 return (bitwidth - GET_MODE_PRECISION (GET_MODE (XEXP (x, 0)))
4649 + cached_num_sign_bit_copies (XEXP (x, 0), VOIDmode,
4650 known_x, known_mode, known_ret));
4651
4652 case TRUNCATE:
4653 /* For a smaller object, just ignore the high bits. */
4654 num0 = cached_num_sign_bit_copies (XEXP (x, 0), VOIDmode,
4655 known_x, known_mode, known_ret);
5511bc5a 4656 return MAX (1, (num0 - (int) (GET_MODE_PRECISION (GET_MODE (XEXP (x, 0)))
4657 - bitwidth)));
4658
4659 case NOT:
4660 return cached_num_sign_bit_copies (XEXP (x, 0), mode,
4661 known_x, known_mode, known_ret);
4662
4663 case ROTATE: case ROTATERT:
4664 /* If we are rotating left by a number of bits less than the number
4665 of sign bit copies, we can just subtract that amount from the
4666 number. */
481683e1 4667 if (CONST_INT_P (XEXP (x, 1))
4668 && INTVAL (XEXP (x, 1)) >= 0
4669 && INTVAL (XEXP (x, 1)) < (int) bitwidth)
4670 {
4671 num0 = cached_num_sign_bit_copies (XEXP (x, 0), mode,
4672 known_x, known_mode, known_ret);
4673 return MAX (1, num0 - (code == ROTATE ? INTVAL (XEXP (x, 1))
4674 : (int) bitwidth - INTVAL (XEXP (x, 1))));
4675 }
4676 break;
4677
4678 case NEG:
4679 /* In general, this subtracts one sign bit copy. But if the value
4680 is known to be positive, the number of sign bit copies is the
4681 same as that of the input. Finally, if the input has just one bit
4682 that might be nonzero, all the bits are copies of the sign bit. */
4683 num0 = cached_num_sign_bit_copies (XEXP (x, 0), mode,
4684 known_x, known_mode, known_ret);
4685 if (bitwidth > HOST_BITS_PER_WIDE_INT)
4686 return num0 > 1 ? num0 - 1 : 1;
4687
4688 nonzero = nonzero_bits (XEXP (x, 0), mode);
4689 if (nonzero == 1)
4690 return bitwidth;
4691
4692 if (num0 > 1
c04fc4f0 4693 && (((unsigned HOST_WIDE_INT) 1 << (bitwidth - 1)) & nonzero))
4694 num0--;
4695
4696 return num0;
4697
4698 case IOR: case AND: case XOR:
4699 case SMIN: case SMAX: case UMIN: case UMAX:
4700 /* Logical operations will preserve the number of sign-bit copies.
4701 MIN and MAX operations always return one of the operands. */
4702 num0 = cached_num_sign_bit_copies (XEXP (x, 0), mode,
4703 known_x, known_mode, known_ret);
4704 num1 = cached_num_sign_bit_copies (XEXP (x, 1), mode,
4705 known_x, known_mode, known_ret);
4706
4707 /* If num1 is clearing some of the top bits then regardless of
4708 the other term, we are guaranteed to have at least that many
4709 high-order zero bits. */
4710 if (code == AND
4711 && num1 > 1
4712 && bitwidth <= HOST_BITS_PER_WIDE_INT
481683e1 4713 && CONST_INT_P (XEXP (x, 1))
4714 && (UINTVAL (XEXP (x, 1))
4715 & ((unsigned HOST_WIDE_INT) 1 << (bitwidth - 1))) == 0)
4716 return num1;
4717
4718 /* Similarly for IOR when setting high-order bits. */
4719 if (code == IOR
4720 && num1 > 1
4721 && bitwidth <= HOST_BITS_PER_WIDE_INT
481683e1 4722 && CONST_INT_P (XEXP (x, 1))
4723 && (UINTVAL (XEXP (x, 1))
4724 & ((unsigned HOST_WIDE_INT) 1 << (bitwidth - 1))) != 0)
4725 return num1;
4726
4727 return MIN (num0, num1);
4728
4729 case PLUS: case MINUS:
4730 /* For addition and subtraction, we can have a 1-bit carry. However,
4731 if we are subtracting 1 from a positive number, there will not
4732 be such a carry. Furthermore, if the positive number is known to
4733 be 0 or 1, we know the result is either -1 or 0. */
4734
4735 if (code == PLUS && XEXP (x, 1) == constm1_rtx
4736 && bitwidth <= HOST_BITS_PER_WIDE_INT)
4737 {
4738 nonzero = nonzero_bits (XEXP (x, 0), mode);
c04fc4f0 4739 if ((((unsigned HOST_WIDE_INT) 1 << (bitwidth - 1)) & nonzero) == 0)
4740 return (nonzero == 1 || nonzero == 0 ? bitwidth
4741 : bitwidth - floor_log2 (nonzero) - 1);
4742 }
4743
4744 num0 = cached_num_sign_bit_copies (XEXP (x, 0), mode,
4745 known_x, known_mode, known_ret);
4746 num1 = cached_num_sign_bit_copies (XEXP (x, 1), mode,
4747 known_x, known_mode, known_ret);
4748 result = MAX (1, MIN (num0, num1) - 1);
4749
2f93eea8
PB
4750 return result;
4751
4752 case MULT:
4753 /* The number of bits of the product is the sum of the number of
4754       bits of both terms.  However, unless one of the terms is known
4755 to be positive, we must allow for an additional bit since negating
4756 a negative number can remove one sign bit copy. */
4757
4758 num0 = cached_num_sign_bit_copies (XEXP (x, 0), mode,
4759 known_x, known_mode, known_ret);
4760 num1 = cached_num_sign_bit_copies (XEXP (x, 1), mode,
4761 known_x, known_mode, known_ret);
4762
4763 result = bitwidth - (bitwidth - num0) - (bitwidth - num1);
4764 if (result > 0
4765 && (bitwidth > HOST_BITS_PER_WIDE_INT
4766 || (((nonzero_bits (XEXP (x, 0), mode)
c04fc4f0 4767 & ((unsigned HOST_WIDE_INT) 1 << (bitwidth - 1))) != 0)
2f93eea8 4768 && ((nonzero_bits (XEXP (x, 1), mode)
4769 & ((unsigned HOST_WIDE_INT) 1 << (bitwidth - 1)))
4770 != 0))))
4771 result--;
4772
4773 return MAX (1, result);
4774
4775 case UDIV:
4776 /* The result must be <= the first operand. If the first operand
4777 has the high bit set, we know nothing about the number of sign
4778 bit copies. */
4779 if (bitwidth > HOST_BITS_PER_WIDE_INT)
4780 return 1;
4781 else if ((nonzero_bits (XEXP (x, 0), mode)
c04fc4f0 4782 & ((unsigned HOST_WIDE_INT) 1 << (bitwidth - 1))) != 0)
4783 return 1;
4784 else
4785 return cached_num_sign_bit_copies (XEXP (x, 0), mode,
4786 known_x, known_mode, known_ret);
4787
4788 case UMOD:
4789 /* The result must be <= the second operand. If the second operand
4790 has (or just might have) the high bit set, we know nothing about
4791 the number of sign bit copies. */
4792 if (bitwidth > HOST_BITS_PER_WIDE_INT)
4793 return 1;
4794 else if ((nonzero_bits (XEXP (x, 1), mode)
c04fc4f0 4795 & ((unsigned HOST_WIDE_INT) 1 << (bitwidth - 1))) != 0)
4796 return 1;
4797 else
4798 return cached_num_sign_bit_copies (XEXP (x, 1), mode,
4799 known_x, known_mode, known_ret);
4800
4801 case DIV:
4802 /* Similar to unsigned division, except that we have to worry about
4803 the case where the divisor is negative, in which case we have
4804 to add 1. */
4805 result = cached_num_sign_bit_copies (XEXP (x, 0), mode,
4806 known_x, known_mode, known_ret);
4807 if (result > 1
4808 && (bitwidth > HOST_BITS_PER_WIDE_INT
4809 || (nonzero_bits (XEXP (x, 1), mode)
c04fc4f0 4810 & ((unsigned HOST_WIDE_INT) 1 << (bitwidth - 1))) != 0))
4811 result--;
4812
4813 return result;
4814
4815 case MOD:
4816 result = cached_num_sign_bit_copies (XEXP (x, 1), mode,
4817 known_x, known_mode, known_ret);
4818 if (result > 1
4819 && (bitwidth > HOST_BITS_PER_WIDE_INT
4820 || (nonzero_bits (XEXP (x, 1), mode)
c04fc4f0 4821 & ((unsigned HOST_WIDE_INT) 1 << (bitwidth - 1))) != 0))
4822 result--;
4823
4824 return result;
4825
4826 case ASHIFTRT:
4827 /* Shifts by a constant add to the number of bits equal to the
4828 sign bit. */
4829 num0 = cached_num_sign_bit_copies (XEXP (x, 0), mode,
4830 known_x, known_mode, known_ret);
481683e1 4831 if (CONST_INT_P (XEXP (x, 1))
39b2ac74 4832 && INTVAL (XEXP (x, 1)) > 0
5511bc5a 4833 && INTVAL (XEXP (x, 1)) < GET_MODE_PRECISION (GET_MODE (x)))
2f93eea8
PB
4834 num0 = MIN ((int) bitwidth, num0 + INTVAL (XEXP (x, 1)));
4835
4836 return num0;
4837
4838 case ASHIFT:
4839 /* Left shifts destroy copies. */
481683e1 4840 if (!CONST_INT_P (XEXP (x, 1))
2f93eea8 4841 || INTVAL (XEXP (x, 1)) < 0
39b2ac74 4842 || INTVAL (XEXP (x, 1)) >= (int) bitwidth
5511bc5a 4843 || INTVAL (XEXP (x, 1)) >= GET_MODE_PRECISION (GET_MODE (x)))
4844 return 1;
4845
4846 num0 = cached_num_sign_bit_copies (XEXP (x, 0), mode,
4847 known_x, known_mode, known_ret);
4848 return MAX (1, num0 - INTVAL (XEXP (x, 1)));
4849
4850 case IF_THEN_ELSE:
4851 num0 = cached_num_sign_bit_copies (XEXP (x, 1), mode,
4852 known_x, known_mode, known_ret);
4853 num1 = cached_num_sign_bit_copies (XEXP (x, 2), mode,
4854 known_x, known_mode, known_ret);
4855 return MIN (num0, num1);
4856
4857 case EQ: case NE: case GE: case GT: case LE: case LT:
4858 case UNEQ: case LTGT: case UNGE: case UNGT: case UNLE: case UNLT:
4859 case GEU: case GTU: case LEU: case LTU:
4860 case UNORDERED: case ORDERED:
4861 /* If the constant is negative, take its 1's complement and remask.
4862 Then see how many zero bits we have. */
4863 nonzero = STORE_FLAG_VALUE;
4864 if (bitwidth <= HOST_BITS_PER_WIDE_INT
c04fc4f0 4865 && (nonzero & ((unsigned HOST_WIDE_INT) 1 << (bitwidth - 1))) != 0)
4866 nonzero = (~nonzero) & GET_MODE_MASK (mode);
4867
4868 return (nonzero == 0 ? bitwidth : bitwidth - floor_log2 (nonzero) - 1);
4869
4870 default:
4871 break;
4872 }
4873
4874 /* If we haven't been able to figure it out by one of the above rules,
4875 see if some of the high-order bits are known to be zero. If so,
4876 count those bits and return one less than that amount. If we can't
4877 safely compute the mask for this mode, always return BITWIDTH. */
4878
5511bc5a 4879 bitwidth = GET_MODE_PRECISION (mode);
4880 if (bitwidth > HOST_BITS_PER_WIDE_INT)
4881 return 1;
4882
4883 nonzero = nonzero_bits (x, mode);
c04fc4f0 4884 return nonzero & ((unsigned HOST_WIDE_INT) 1 << (bitwidth - 1))
4885 ? 1 : bitwidth - floor_log2 (nonzero) - 1;
4886}
4887
4888/* Calculate the rtx_cost of a single instruction. A return value of
4889 zero indicates an instruction pattern without a known cost. */
4890
4891int
f40751dd 4892insn_rtx_cost (rtx pat, bool speed)
4893{
4894 int i, cost;
4895 rtx set;
4896
4897 /* Extract the single set rtx from the instruction pattern.
4898 We can't use single_set since we only have the pattern. */
4899 if (GET_CODE (pat) == SET)
4900 set = pat;
4901 else if (GET_CODE (pat) == PARALLEL)
4902 {
4903 set = NULL_RTX;
4904 for (i = 0; i < XVECLEN (pat, 0); i++)
4905 {
4906 rtx x = XVECEXP (pat, 0, i);
4907 if (GET_CODE (x) == SET)
4908 {
4909 if (set)
4910 return 0;
4911 set = x;
4912 }
4913 }
4914 if (!set)
4915 return 0;
4916 }
4917 else
4918 return 0;
4919
5e8f01f4 4920 cost = set_src_cost (SET_SRC (set), speed);
4921 return cost > 0 ? cost : COSTS_N_INSNS (1);
4922}
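/* Usage sketch (the surrounding logic is illustrative): if-conversion
   style passes compare this estimate against a branch cost when
   deciding whether speculating an insn is profitable:

     int cost = insn_rtx_cost (PATTERN (insn), speed_p);
     if (cost == 0 || cost > max_cost)
       ...bail out...

   A zero return means the cost is unknown, which callers must treat
   conservatively.  */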
75473b02 4923
4924/* Return an estimate of the cost of computing SEQ.  */
4925
4926unsigned
4927seq_cost (const rtx_insn *seq, bool speed)
4928{
4929 unsigned cost = 0;
4930 rtx set;
4931
4932 for (; seq; seq = NEXT_INSN (seq))
4933 {
4934 set = single_set (seq);
4935 if (set)
4936 cost += set_rtx_cost (set, speed);
4937 else
4938 cost++;
4939 }
4940
4941 return cost;
4942}
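/* Usage sketch: pricing a candidate expansion before committing to
   it; the emitter and the limit are placeholders for whatever the
   caller is evaluating:

     start_sequence ();
     ...emit the candidate insns...
     rtx_insn *seq = get_insns ();
     end_sequence ();
     bool too_costly = seq_cost (seq, true) > COSTS_N_INSNS (limit);

   If TOO_COSTLY, the sequence is simply discarded in favor of a
   cheaper strategy.  */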
4943
4944/* Given an insn INSN and condition COND, return the condition in a
4945 canonical form to simplify testing by callers. Specifically:
4946
4947 (1) The code will always be a comparison operation (EQ, NE, GT, etc.).
4948 (2) Both operands will be machine operands; (cc0) will have been replaced.
4949 (3) If an operand is a constant, it will be the second operand.
4950 (4) (LE x const) will be replaced with (LT x <const+1>) and similarly
4951 for GE, GEU, and LEU.
4952
4953 If the condition cannot be understood, or is an inequality floating-point
4954 comparison which needs to be reversed, 0 will be returned.
4955
4956   If REVERSE is nonzero, then reverse the condition prior to canonicalizing it.
4957
4958 If EARLIEST is nonzero, it is a pointer to a place where the earliest
4959 insn used in locating the condition was found. If a replacement test
4960 of the condition is desired, it should be placed in front of that
4961 insn and we will be sure that the inputs are still valid.
4962
4963 If WANT_REG is nonzero, we wish the condition to be relative to that
4964 register, if possible. Therefore, do not canonicalize the condition
b8698a0f 4965 further. If ALLOW_CC_MODE is nonzero, allow the condition returned
4966 to be a compare to a CC mode register.
4967
4968 If VALID_AT_INSN_P, the condition must be valid at both *EARLIEST
4969 and at INSN. */
4970
4971rtx
4972canonicalize_condition (rtx_insn *insn, rtx cond, int reverse,
4973 rtx_insn **earliest,
4974 rtx want_reg, int allow_cc_mode, int valid_at_insn_p)
4975{
4976 enum rtx_code code;
61aa0978 4977 rtx_insn *prev = insn;
f7d504c2 4978 const_rtx set;
4979 rtx tem;
4980 rtx op0, op1;
4981 int reverse_code = 0;
ef4bddc2 4982 machine_mode mode;
569f8d98 4983 basic_block bb = BLOCK_FOR_INSN (insn);
4984
4985 code = GET_CODE (cond);
4986 mode = GET_MODE (cond);
4987 op0 = XEXP (cond, 0);
4988 op1 = XEXP (cond, 1);
4989
4990 if (reverse)
4991 code = reversed_comparison_code (cond, insn);
4992 if (code == UNKNOWN)
4993 return 0;
4994
4995 if (earliest)
4996 *earliest = insn;
4997
4998 /* If we are comparing a register with zero, see if the register is set
4999 in the previous insn to a COMPARE or a comparison operation. Perform
5000 the same tests as a function of STORE_FLAG_VALUE as find_comparison_args
5001 in cse.c */
5002
5003 while ((GET_RTX_CLASS (code) == RTX_COMPARE
5004 || GET_RTX_CLASS (code) == RTX_COMM_COMPARE)
5005 && op1 == CONST0_RTX (GET_MODE (op0))
5006 && op0 != want_reg)
5007 {
5008 /* Set nonzero when we find something of interest. */
5009 rtx x = 0;
5010
5011#ifdef HAVE_cc0
5012 /* If comparison with cc0, import actual comparison from compare
5013 insn. */
5014 if (op0 == cc0_rtx)
5015 {
5016 if ((prev = prev_nonnote_insn (prev)) == 0
5017 || !NONJUMP_INSN_P (prev)
5018 || (set = single_set (prev)) == 0
5019 || SET_DEST (set) != cc0_rtx)
5020 return 0;
5021
5022 op0 = SET_SRC (set);
5023 op1 = CONST0_RTX (GET_MODE (op0));
5024 if (earliest)
5025 *earliest = prev;
5026 }
5027#endif
5028
5029 /* If this is a COMPARE, pick up the two things being compared. */
5030 if (GET_CODE (op0) == COMPARE)
5031 {
5032 op1 = XEXP (op0, 1);
5033 op0 = XEXP (op0, 0);
5034 continue;
5035 }
5036 else if (!REG_P (op0))
5037 break;
5038
5039 /* Go back to the previous insn. Stop if it is not an INSN. We also
5040 stop if it isn't a single set or if it has a REG_INC note because
5041 we don't want to bother dealing with it. */
5042
f0fc0803 5043 prev = prev_nonnote_nondebug_insn (prev);
5044
5045 if (prev == 0
75473b02 5046 || !NONJUMP_INSN_P (prev)
5047 || FIND_REG_INC_NOTE (prev, NULL_RTX)
5048 /* In cfglayout mode, there do not have to be labels at the
5049 beginning of a block, or jumps at the end, so the previous
5050 conditions would not stop us when we reach bb boundary. */
5051 || BLOCK_FOR_INSN (prev) != bb)
5052 break;
5053
5054 set = set_of (op0, prev);
5055
5056 if (set
5057 && (GET_CODE (set) != SET
5058 || !rtx_equal_p (SET_DEST (set), op0)))
5059 break;
5060
5061 /* If this is setting OP0, get what it sets it to if it looks
5062 relevant. */
5063 if (set)
5064 {
ef4bddc2 5065 machine_mode inner_mode = GET_MODE (SET_DEST (set));
5066#ifdef FLOAT_STORE_FLAG_VALUE
5067 REAL_VALUE_TYPE fsfv;
5068#endif
5069
5070 /* ??? We may not combine comparisons done in a CCmode with
5071 comparisons not done in a CCmode. This is to aid targets
5072 like Alpha that have an IEEE compliant EQ instruction, and
5073 a non-IEEE compliant BEQ instruction. The use of CCmode is
5074 actually artificial, simply to prevent the combination, but
5075 should not affect other platforms.
5076
5077 However, we must allow VOIDmode comparisons to match either
5078 CCmode or non-CCmode comparison, because some ports have
5079 modeless comparisons inside branch patterns.
5080
5081 ??? This mode check should perhaps look more like the mode check
5082 in simplify_comparison in combine. */
5083 if (((GET_MODE_CLASS (mode) == MODE_CC)
5084 != (GET_MODE_CLASS (inner_mode) == MODE_CC))
5085 && mode != VOIDmode
5086 && inner_mode != VOIDmode)
5087 break;
5088 if (GET_CODE (SET_SRC (set)) == COMPARE
5089 || (((code == NE
5090 || (code == LT
5091 && val_signbit_known_set_p (inner_mode,
5092 STORE_FLAG_VALUE))
75473b02 5093#ifdef FLOAT_STORE_FLAG_VALUE
5094 || (code == LT
5095 && SCALAR_FLOAT_MODE_P (inner_mode)
5096 && (fsfv = FLOAT_STORE_FLAG_VALUE (inner_mode),
5097 REAL_VALUE_NEGATIVE (fsfv)))
75473b02 5098#endif
5099 ))
5100 && COMPARISON_P (SET_SRC (set))))
5101 x = SET_SRC (set);
5102 else if (((code == EQ
5103 || (code == GE
5104 && val_signbit_known_set_p (inner_mode,
5105 STORE_FLAG_VALUE))
5106#ifdef FLOAT_STORE_FLAG_VALUE
5107 || (code == GE
3d8bf70f 5108 && SCALAR_FLOAT_MODE_P (inner_mode)
5109 && (fsfv = FLOAT_STORE_FLAG_VALUE (inner_mode),
5110 REAL_VALUE_NEGATIVE (fsfv)))
5111#endif
5112 ))
2c8798a2 5113 && COMPARISON_P (SET_SRC (set)))
75473b02
SB
5114 {
5115 reverse_code = 1;
5116 x = SET_SRC (set);
5117 }
5118 else if ((code == EQ || code == NE)
5119 && GET_CODE (SET_SRC (set)) == XOR)
5120 /* Handle sequences like:
5121
5122 (set op0 (xor X Y))
5123 ...(eq|ne op0 (const_int 0))...
5124
5125 in which case:
5126
5127 (eq op0 (const_int 0)) reduces to (eq X Y)
5128 (ne op0 (const_int 0)) reduces to (ne X Y)
5129
5130 This is the form used by MIPS16, for example. */
5131 x = SET_SRC (set);
5132 else
5133 break;
5134 }
5135
5136 else if (reg_set_p (op0, prev))
5137 /* If this sets OP0, but not directly, we have to give up. */
5138 break;
5139
5140 if (x)
5141 {
5142 /* If the caller is expecting the condition to be valid at INSN,
5143 make sure X doesn't change before INSN. */
5144 if (valid_at_insn_p)
5145 if (modified_in_p (x, prev) || modified_between_p (x, prev, insn))
5146 break;
5147 if (COMPARISON_P (x))
5148 code = GET_CODE (x);
5149 if (reverse_code)
5150 {
5151 code = reversed_comparison_code (x, prev);
5152 if (code == UNKNOWN)
5153 return 0;
5154 reverse_code = 0;
5155 }
5156
5157 op0 = XEXP (x, 0), op1 = XEXP (x, 1);
5158 if (earliest)
5159 *earliest = prev;
5160 }
5161 }
5162
5163 /* If constant is first, put it last. */
5164 if (CONSTANT_P (op0))
5165 code = swap_condition (code), tem = op0, op0 = op1, op1 = tem;
5166
5167 /* If OP0 is the result of a comparison, we weren't able to find what
5168 was really being compared, so fail. */
5169 if (!allow_cc_mode
5170 && GET_MODE_CLASS (GET_MODE (op0)) == MODE_CC)
5171 return 0;
5172
5173 /* Canonicalize any ordered comparison with integers involving equality
5174 if we can do computations in the relevant mode and we do not
5175 overflow. */
5176
5177 if (GET_MODE_CLASS (GET_MODE (op0)) != MODE_CC
481683e1 5178 && CONST_INT_P (op1)
75473b02 5179 && GET_MODE (op0) != VOIDmode
5511bc5a 5180 && GET_MODE_PRECISION (GET_MODE (op0)) <= HOST_BITS_PER_WIDE_INT)
5181 {
5182 HOST_WIDE_INT const_val = INTVAL (op1);
5183 unsigned HOST_WIDE_INT uconst_val = const_val;
5184 unsigned HOST_WIDE_INT max_val
5185 = (unsigned HOST_WIDE_INT) GET_MODE_MASK (GET_MODE (op0));
5186
5187 switch (code)
5188 {
5189 case LE:
5190 if ((unsigned HOST_WIDE_INT) const_val != max_val >> 1)
5191 code = LT, op1 = gen_int_mode (const_val + 1, GET_MODE (op0));
5192 break;
5193
5194 /* When cross-compiling, const_val might be sign-extended from
5195	 BITS_PER_WORD to HOST_BITS_PER_WIDE_INT.  */
5196 case GE:
5197 if ((const_val & max_val)
5198 != ((unsigned HOST_WIDE_INT) 1
5511bc5a 5199 << (GET_MODE_PRECISION (GET_MODE (op0)) - 1)))
5200 code = GT, op1 = gen_int_mode (const_val - 1, GET_MODE (op0));
5201 break;
5202
5203 case LEU:
5204 if (uconst_val < max_val)
5205 code = LTU, op1 = gen_int_mode (uconst_val + 1, GET_MODE (op0));
5206 break;
5207
5208 case GEU:
5209 if (uconst_val != 0)
5210 code = GTU, op1 = gen_int_mode (uconst_val - 1, GET_MODE (op0));
5211 break;
5212
5213 default:
5214 break;
5215 }
5216 }
5217
5218 /* Never return CC0; return zero instead. */
5219 if (CC0_P (op0))
5220 return 0;
5221
5222 return gen_rtx_fmt_ee (code, VOIDmode, op0, op1);
5223}
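/* Illustrative example: given the schematic sequence

     (set (reg:CC flags) (compare:CC (reg:SI x) (const_int 4)))
     (set (pc) (if_then_else (le (reg:CC flags) (const_int 0)) ...))

   this function follows the flags register back to the COMPARE and,
   by rule (4) above, returns (lt (reg:SI x) (const_int 5)).  */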
5224
5225/* Given a jump insn JUMP, return the condition that will cause it to branch
5226 to its JUMP_LABEL. If the condition cannot be understood, or is an
5227 inequality floating-point comparison which needs to be reversed, 0 will
5228 be returned.
5229
5230 If EARLIEST is nonzero, it is a pointer to a place where the earliest
5231 insn used in locating the condition was found. If a replacement test
5232 of the condition is desired, it should be placed in front of that
5233 insn and we will be sure that the inputs are still valid. If EARLIEST
5234 is null, the returned condition will be valid at INSN.
5235
5236 If ALLOW_CC_MODE is nonzero, allow the condition returned to be a
5237 compare CC mode register.
5238
5239 VALID_AT_INSN_P is the same as for canonicalize_condition. */
5240
5241rtx
5242get_condition (rtx_insn *jump, rtx_insn **earliest, int allow_cc_mode,
5243 int valid_at_insn_p)
5244{
5245 rtx cond;
5246 int reverse;
5247 rtx set;
5248
5249 /* If this is not a standard conditional jump, we can't parse it. */
5250 if (!JUMP_P (jump)
5251 || ! any_condjump_p (jump))
5252 return 0;
5253 set = pc_set (jump);
5254
5255 cond = XEXP (SET_SRC (set), 0);
5256
5257 /* If this branches to JUMP_LABEL when the condition is false, reverse
5258 the condition. */
5259 reverse
5260 = GET_CODE (XEXP (SET_SRC (set), 2)) == LABEL_REF
a827d9b1 5261 && LABEL_REF_LABEL (XEXP (SET_SRC (set), 2)) == JUMP_LABEL (jump);
5262
5263 return canonicalize_condition (jump, cond, reverse, earliest, NULL_RTX,
5264 allow_cc_mode, valid_at_insn_p);
5265}
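
/* A minimal usage sketch of get_condition; the helper below is
   hypothetical and not part of this file.  It asks whether a
   conditional jump tests equality, requiring the condition to be
   valid at the jump itself.  */

static bool
branch_tests_equality_p (rtx_insn *jump)
{
  rtx_insn *earliest;
  rtx cond = get_condition (jump, &earliest, /*allow_cc_mode=*/0,
			    /*valid_at_insn_p=*/1);
  return cond != NULL_RTX && GET_CODE (cond) == EQ;
}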
5266
5267/* Initialize the table NUM_SIGN_BIT_COPIES_IN_REP based on
5268 TARGET_MODE_REP_EXTENDED.
5269
5270 Note that we assume that the property of
5271 TARGET_MODE_REP_EXTENDED (B, C) carries over to every integral mode
5272 narrower than mode B. I.e., if A is a mode narrower than B, then in
5273 order to operate on A in mode B, A must satisfy the requirements
5274 set by the representation of mode B. */
5275
5276static void
5277init_num_sign_bit_copies_in_rep (void)
5278{
ef4bddc2 5279 machine_mode mode, in_mode;
5280
5281 for (in_mode = GET_CLASS_NARROWEST_MODE (MODE_INT); in_mode != VOIDmode;
5282 in_mode = GET_MODE_WIDER_MODE (mode))
5283 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != in_mode;
5284 mode = GET_MODE_WIDER_MODE (mode))
5285 {
ef4bddc2 5286 machine_mode i;
5287
5288 /* Currently, it is assumed that TARGET_MODE_REP_EXTENDED
5289 extends to the next widest mode. */
5290 gcc_assert (targetm.mode_rep_extended (mode, in_mode) == UNKNOWN
5291 || GET_MODE_WIDER_MODE (mode) == in_mode);
5292
5293 /* We are in in_mode. Count how many bits outside of mode
5294 have to be copies of the sign-bit. */
5295 for (i = mode; i != in_mode; i = GET_MODE_WIDER_MODE (i))
5296 {
ef4bddc2 5297 machine_mode wider = GET_MODE_WIDER_MODE (i);
5298
5299 if (targetm.mode_rep_extended (i, wider) == SIGN_EXTEND
5300 /* We can only check sign-bit copies starting from the
5301 top-bit. In order to be able to check the bits we
5302 have already seen we pretend that subsequent bits
5303 have to be sign-bit copies too. */
5304 || num_sign_bit_copies_in_rep [in_mode][mode])
5305 num_sign_bit_copies_in_rep [in_mode][mode]
5511bc5a 5306 += GET_MODE_PRECISION (wider) - GET_MODE_PRECISION (i);
5307 }
5308 }
5309}
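
/* A worked example, assuming a target where TARGET_MODE_REP_EXTENDED
   (SImode, DImode) is SIGN_EXTEND (MIPS64 is such a target): the loop
   above records num_sign_bit_copies_in_rep[DImode][SImode]
   = GET_MODE_PRECISION (DImode) - GET_MODE_PRECISION (SImode) = 32,
   i.e. the upper 32 bits of a DImode value holding an SImode quantity
   must be copies of bit 31.  */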
5310
5311/* Suppose that truncation from the machine mode of X to MODE is not a
5312 no-op. See if there is anything special about X so that we can
5313 assume it already contains a truncated value of MODE. */
5314
5315bool
ef4bddc2 5316truncated_to_mode (machine_mode mode, const_rtx x)
d3b72690 5317{
5318 /* This register has already been used in MODE without explicit
5319 truncation. */
5320 if (REG_P (x) && rtl_hooks.reg_truncated_to_mode (mode, x))
5321 return true;
5322
5323 /* See if we already satisfy the requirements of MODE. If yes we
5324 can just switch to MODE. */
5325 if (num_sign_bit_copies_in_rep[GET_MODE (x)][mode]
5326 && (num_sign_bit_copies (x, GET_MODE (x))
5327 >= num_sign_bit_copies_in_rep[GET_MODE (x)][mode] + 1))
5328 return true;
d3b72690 5329
5330 return false;
5331}
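
/* A minimal sketch of how a caller might use truncated_to_mode; the
   helper is hypothetical, and the VOIDmode guard is an assumption to
   keep the precision comparison well-defined.  */

static bool
truncation_is_noop_p (machine_mode mode, const_rtx x)
{
  if (GET_MODE (x) == VOIDmode)
    return false;
  /* Truncating to the same or a wider precision changes nothing.  */
  if (GET_MODE_PRECISION (mode) >= GET_MODE_PRECISION (GET_MODE (x)))
    return true;
  return truncated_to_mode (mode, x);
}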
cf94b0fc 5332\f
5333/* Return true if RTX code CODE has a single sequence of zero or more
5334 "e" operands and no rtvec operands. Initialize its rtx_all_subrtx_bounds
5335 entry in that case. */
5336
5337static bool
5338setup_reg_subrtx_bounds (unsigned int code)
5339{
5340 const char *format = GET_RTX_FORMAT ((enum rtx_code) code);
5341 unsigned int i = 0;
5342 for (; format[i] != 'e'; ++i)
5343 {
5344 if (!format[i])
5345 /* No subrtxes. Leave start and count as 0. */
5346 return true;
5347 if (format[i] == 'E' || format[i] == 'V')
5348 return false;
5349 }
5350
5351 /* Record the sequence of 'e's. */
5352 rtx_all_subrtx_bounds[code].start = i;
5353 do
5354 ++i;
5355 while (format[i] == 'e');
5356 rtx_all_subrtx_bounds[code].count = i - rtx_all_subrtx_bounds[code].start;
5357 /* rtl-iter.h relies on this. */
5358 gcc_checking_assert (rtx_all_subrtx_bounds[code].count <= 3);
5359
5360 for (; format[i]; ++i)
5361 if (format[i] == 'E' || format[i] == 'V' || format[i] == 'e')
5362 return false;
5363
5364 return true;
5365}
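
/* A worked example: PLUS has format string "ee", so the function above
   records start = 0, count = 2; SIGN_EXTEND ("e") gets start = 0,
   count = 1.  A code with an 'E' (rtvec) operand, such as PARALLEL,
   fails the check and is given count = UCHAR_MAX by init_rtlanal
   below.  */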
5366
e02101ff 5367/* Initialize rtx_all_subrtx_bounds. */
5368void
5369init_rtlanal (void)
5370{
5371 int i;
5372 for (i = 0; i < NUM_RTX_CODE; i++)
5373 {
5374 if (!setup_reg_subrtx_bounds (i))
5375 rtx_all_subrtx_bounds[i].count = UCHAR_MAX;
5376 if (GET_RTX_CLASS (i) != RTX_CONST_OBJ)
5377 rtx_nonconst_subrtx_bounds[i] = rtx_all_subrtx_bounds[i];
cf94b0fc 5378 }
5379
5380 init_num_sign_bit_copies_in_rep ();
cf94b0fc 5381}
5382\f
5383/* Check whether this is a constant pool constant. */
5384bool
5385constant_pool_constant_p (rtx x)
5386{
5387 x = avoid_constant_pool_reference (x);
48175537 5388 return CONST_DOUBLE_P (x);
3d8504ac 5389}
5390\f
5391/* If M is a bitmask that selects a field of low-order bits within an item but
5392 not the entire word, return the length of the field. Return -1 otherwise.
5393 M is used in machine mode MODE. */
5394
5395int
ef4bddc2 5396low_bitmask_len (machine_mode mode, unsigned HOST_WIDE_INT m)
5397{
5398 if (mode != VOIDmode)
5399 {
5511bc5a 5400 if (GET_MODE_PRECISION (mode) > HOST_BITS_PER_WIDE_INT)
5401 return -1;
5402 m &= GET_MODE_MASK (mode);
5403 }
5404
5405 return exact_log2 (m + 1);
5406}
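
/* A worked example: low_bitmask_len (SImode, 0x7f) is 7, since
   0x7f + 1 == 0x80 == 1 << 7, while low_bitmask_len (SImode, 0x70)
   is -1 because 0x71 is not a power of two (the field does not start
   at bit 0).  */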
5407
5408/* Return the mode of MEM's address. */
5409
ef4bddc2 5410machine_mode
5411get_address_mode (rtx mem)
5412{
ef4bddc2 5413 machine_mode mode;
5414
5415 gcc_assert (MEM_P (mem));
5416 mode = GET_MODE (XEXP (mem, 0));
5417 if (mode != VOIDmode)
5418 return mode;
5419 return targetm.addr_space.address_mode (MEM_ADDR_SPACE (mem));
5420}
5421\f
5422/* Split up a CONST_DOUBLE or integer constant rtx
5423 into two rtx's for single words,
5424 storing in *FIRST the word that comes first in memory in the target
5425 and in *SECOND the other.
5426
5427 TODO: This function needs to be rewritten to work on any size
5428 integer. */
5429
5430void
5431split_double (rtx value, rtx *first, rtx *second)
5432{
5433 if (CONST_INT_P (value))
5434 {
5435 if (HOST_BITS_PER_WIDE_INT >= (2 * BITS_PER_WORD))
5436 {
5437 /* In this case the CONST_INT holds both target words.
5438 Extract the bits from it into two word-sized pieces.
5439 Sign extend each half to HOST_WIDE_INT. */
5440 unsigned HOST_WIDE_INT low, high;
5441 unsigned HOST_WIDE_INT mask, sign_bit, sign_extend;
5442 unsigned bits_per_word = BITS_PER_WORD;
5443
5444 /* Set sign_bit to the most significant bit of a word. */
5445 sign_bit = 1;
5446 sign_bit <<= bits_per_word - 1;
5447
5448 /* Set mask so that all bits of the word are set. We could
5449 have used 1 << BITS_PER_WORD instead of basing the
5450 calculation on sign_bit. However, on machines where
5451 HOST_BITS_PER_WIDE_INT == BITS_PER_WORD, it could cause a
5452 compiler warning, even though the code would never be
5453 executed. */
5454 mask = sign_bit << 1;
5455 mask--;
5456
5457 /* Set sign_extend as any remaining bits. */
5458 sign_extend = ~mask;
5459
5460 /* Pick the lower word and sign-extend it. */
5461 low = INTVAL (value);
5462 low &= mask;
5463 if (low & sign_bit)
5464 low |= sign_extend;
5465
5466 /* Pick the higher word, shifted to the least significant
5467 bits, and sign-extend it. */
5468 high = INTVAL (value);
5469 high >>= bits_per_word - 1;
5470 high >>= 1;
5471 high &= mask;
5472 if (high & sign_bit)
5473 high |= sign_extend;
5474
5475 /* Store the words in the target machine order. */
5476 if (WORDS_BIG_ENDIAN)
5477 {
5478 *first = GEN_INT (high);
5479 *second = GEN_INT (low);
5480 }
5481 else
5482 {
5483 *first = GEN_INT (low);
5484 *second = GEN_INT (high);
5485 }
5486 }
5487 else
5488 {
5489 /* The rule for using CONST_INT for a wider mode
5490 is that we regard the value as signed.
5491 So sign-extend it. */
5492 rtx high = (INTVAL (value) < 0 ? constm1_rtx : const0_rtx);
5493 if (WORDS_BIG_ENDIAN)
5494 {
5495 *first = high;
5496 *second = value;
5497 }
5498 else
5499 {
5500 *first = value;
5501 *second = high;
5502 }
5503 }
5504 }
5505 else if (GET_CODE (value) == CONST_WIDE_INT)
5506 {
5507 /* All of this is scary code and needs to be converted to
5508 properly work with any size integer. */
5509 gcc_assert (CONST_WIDE_INT_NUNITS (value) == 2);
5510 if (WORDS_BIG_ENDIAN)
5511 {
5512 *first = GEN_INT (CONST_WIDE_INT_ELT (value, 1));
5513 *second = GEN_INT (CONST_WIDE_INT_ELT (value, 0));
5514 }
5515 else
5516 {
5517 *first = GEN_INT (CONST_WIDE_INT_ELT (value, 0));
5518 *second = GEN_INT (CONST_WIDE_INT_ELT (value, 1));
5519 }
5520 }
48175537 5521 else if (!CONST_DOUBLE_P (value))
5522 {
5523 if (WORDS_BIG_ENDIAN)
5524 {
5525 *first = const0_rtx;
5526 *second = value;
5527 }
5528 else
5529 {
5530 *first = value;
5531 *second = const0_rtx;
5532 }
5533 }
5534 else if (GET_MODE (value) == VOIDmode
5535 /* This is the old way we did CONST_DOUBLE integers. */
5536 || GET_MODE_CLASS (GET_MODE (value)) == MODE_INT)
5537 {
5538 /* In an integer, the words are defined as most and least significant.
5539 So order them by the target's convention. */
5540 if (WORDS_BIG_ENDIAN)
5541 {
5542 *first = GEN_INT (CONST_DOUBLE_HIGH (value));
5543 *second = GEN_INT (CONST_DOUBLE_LOW (value));
5544 }
5545 else
5546 {
5547 *first = GEN_INT (CONST_DOUBLE_LOW (value));
5548 *second = GEN_INT (CONST_DOUBLE_HIGH (value));
5549 }
5550 }
5551 else
5552 {
5553 REAL_VALUE_TYPE r;
5554 long l[2];
5555 REAL_VALUE_FROM_CONST_DOUBLE (r, value);
5556
5557 /* Note, this converts the REAL_VALUE_TYPE to the target's
5558 format, splits up the floating point double and outputs
5559 exactly 32 bits of it into each of l[0] and l[1] --
5560 not necessarily BITS_PER_WORD bits. */
5561 REAL_VALUE_TO_TARGET_DOUBLE (r, l);
5562
5563 /* If 32 bits is an entire word for the target, but not for the host,
5564 then sign-extend on the host so that the number will look the same
5565 way on the host that it would on the target. See for instance
5566 simplify_unary_operation. The #if is needed to avoid compiler
5567 warnings. */
5568
5569#if HOST_BITS_PER_LONG > 32
5570 if (BITS_PER_WORD < HOST_BITS_PER_LONG && BITS_PER_WORD == 32)
5571 {
5572 if (l[0] & ((long) 1 << 31))
5573 l[0] |= ((long) (-1) << 32);
5574 if (l[1] & ((long) 1 << 31))
5575 l[1] |= ((long) (-1) << 32);
5576 }
5577#endif
5578
5579 *first = GEN_INT (l[0]);
5580 *second = GEN_INT (l[1]);
5581 }
5582}
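
/* A minimal usage sketch of split_double; the helper below is
   hypothetical.  It tests whether the high-order word of a double-word
   constant is zero, using the target memory order described above.  */

static bool
high_word_zero_p (rtx value)
{
  rtx first, second;
  split_double (value, &first, &second);
  /* The high-order word is FIRST on WORDS_BIG_ENDIAN targets.  */
  rtx high = WORDS_BIG_ENDIAN ? first : second;
  return high == const0_rtx;
}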
5583
5584/* Return true if X is a sign_extract or zero_extract from the least
5585 significant bit. */
5586
5587static bool
5588lsb_bitfield_op_p (rtx x)
5589{
5590 if (GET_RTX_CLASS (GET_CODE (x)) == RTX_BITFIELD_OPS)
5591 {
ef4bddc2 5592 machine_mode mode = GET_MODE (XEXP (x, 0));
a9195970 5593 HOST_WIDE_INT len = INTVAL (XEXP (x, 1));
5594 HOST_WIDE_INT pos = INTVAL (XEXP (x, 2));
5595
5596 return (pos == (BITS_BIG_ENDIAN ? GET_MODE_PRECISION (mode) - len : 0));
5597 }
5598 return false;
5599}
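
/* A worked example: on a !BITS_BIG_ENDIAN target,
   (zero_extract:SI (reg:SI x) (const_int 8) (const_int 0)) extracts
   from the least significant bit, so lsb_bitfield_op_p returns true;
   with a nonzero bit position it would return false.  */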
5600
5601/* Strip outer address "mutations" from LOC and return a pointer to the
5602 inner value. If OUTER_CODE is nonnull, store the code of the innermost
5603 stripped expression there.
5604
5605 "Mutations" either convert between modes or apply some kind of
3936bafc 5606 extension, truncation or alignment. */
5607
5608rtx *
5609strip_address_mutations (rtx *loc, enum rtx_code *outer_code)
5610{
5611 for (;;)
5612 {
5613 enum rtx_code code = GET_CODE (*loc);
5614 if (GET_RTX_CLASS (code) == RTX_UNARY)
5615 /* Things like SIGN_EXTEND, ZERO_EXTEND and TRUNCATE can be
5616 used to convert between pointer sizes. */
5617 loc = &XEXP (*loc, 0);
5618 else if (lsb_bitfield_op_p (*loc))
5619 /* A [SIGN|ZERO]_EXTRACT from the least significant bit effectively
5620 acts as a combined truncation and extension. */
5621 loc = &XEXP (*loc, 0);
5622 else if (code == AND && CONST_INT_P (XEXP (*loc, 1)))
5623 /* (and ... (const_int -X)) is used to align to X bytes. */
5624 loc = &XEXP (*loc, 0);
5625 else if (code == SUBREG
5626 && !OBJECT_P (SUBREG_REG (*loc))
5627 && subreg_lowpart_p (*loc))
5628 /* A lowpart (subreg (operator ...) ...) inside an address is used
5629 for mode conversion too. */
99a0106f 5630 loc = &SUBREG_REG (*loc);
5631 else
5632 return loc;
5633 if (outer_code)
5634 *outer_code = code;
5635 }
5636}
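
/* A worked example: given an address such as
   (and:SI (plus:SI (reg a) (reg b)) (const_int -4)), the loop above
   steps through the alignment AND and returns a pointer to the inner
   (plus ...); if OUTER_CODE is nonnull it is set to AND.  */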
5637
5638/* Return true if CODE applies some kind of scale. The scaled value
5639 is the first operand and the scale is the second. */
5640
5641static bool
ec5a3504 5642binary_scale_code_p (enum rtx_code code)
277f65de 5643{
5644 return (code == MULT
5645 || code == ASHIFT
5646 /* Needed by ARM targets. */
5647 || code == ASHIFTRT
5648 || code == LSHIFTRT
5649 || code == ROTATE
5650 || code == ROTATERT);
5651}
5652
5653/* If *INNER can be interpreted as a base, return a pointer to the inner term
5654 (see address_info). Return null otherwise. */
277f65de 5655
5656static rtx *
5657get_base_term (rtx *inner)
277f65de 5658{
5659 if (GET_CODE (*inner) == LO_SUM)
5660 inner = strip_address_mutations (&XEXP (*inner, 0));
5661 if (REG_P (*inner)
5662 || MEM_P (*inner)
5663 || GET_CODE (*inner) == SUBREG
5664 || GET_CODE (*inner) == SCRATCH)
5665 return inner;
5666 return 0;
5667}
5668
5669/* If *INNER can be interpreted as an index, return a pointer to the inner term
5670 (see address_info). Return null otherwise. */
5671
5672static rtx *
5673get_index_term (rtx *inner)
5674{
5675 /* At present, only constant scales are allowed. */
5676 if (binary_scale_code_p (GET_CODE (*inner)) && CONSTANT_P (XEXP (*inner, 1)))
5677 inner = strip_address_mutations (&XEXP (*inner, 0));
5678 if (REG_P (*inner)
5679 || MEM_P (*inner)
5680 || GET_CODE (*inner) == SUBREG)
5681 return inner;
5682 return 0;
5683}
5684
5685/* Set the segment part of address INFO to LOC, given that INNER is the
5686 unmutated value. */
5687
5688static void
5689set_address_segment (struct address_info *info, rtx *loc, rtx *inner)
5690{
5691 gcc_assert (!info->segment);
5692 info->segment = loc;
5693 info->segment_term = inner;
5694}
5695
5696/* Set the base part of address INFO to LOC, given that INNER is the
5697 unmutated value. */
5698
5699static void
5700set_address_base (struct address_info *info, rtx *loc, rtx *inner)
5701{
5702 gcc_assert (!info->base);
5703 info->base = loc;
5704 info->base_term = inner;
5705}
5706
5707/* Set the index part of address INFO to LOC, given that INNER is the
5708 unmutated value. */
5709
5710static void
5711set_address_index (struct address_info *info, rtx *loc, rtx *inner)
5712{
5713 gcc_assert (!info->index);
5714 info->index = loc;
5715 info->index_term = inner;
5716}
5717
5718/* Set the displacement part of address INFO to LOC, given that INNER
5719 is the constant term. */
5720
5721static void
5722set_address_disp (struct address_info *info, rtx *loc, rtx *inner)
5723{
5724 gcc_assert (!info->disp);
5725 info->disp = loc;
5726 info->disp_term = inner;
5727}
5728
5729/* INFO->INNER describes a {PRE,POST}_{INC,DEC} address. Set up the
5730 rest of INFO accordingly. */
5731
5732static void
5733decompose_incdec_address (struct address_info *info)
5734{
5735 info->autoinc_p = true;
5736
5737 rtx *base = &XEXP (*info->inner, 0);
5738 set_address_base (info, base, base);
5739 gcc_checking_assert (info->base == info->base_term);
5740
5741 /* These addresses are only valid when the size of the addressed
5742 value is known. */
5743 gcc_checking_assert (info->mode != VOIDmode);
5744}
5745
5746/* INFO->INNER describes a {PRE,POST}_MODIFY address. Set up the rest
5747 of INFO accordingly. */
5748
5749static void
5750decompose_automod_address (struct address_info *info)
5751{
5752 info->autoinc_p = true;
5753
5754 rtx *base = &XEXP (*info->inner, 0);
5755 set_address_base (info, base, base);
5756 gcc_checking_assert (info->base == info->base_term);
5757
5758 rtx plus = XEXP (*info->inner, 1);
5759 gcc_assert (GET_CODE (plus) == PLUS);
5760
5761 info->base_term2 = &XEXP (plus, 0);
5762 gcc_checking_assert (rtx_equal_p (*info->base_term, *info->base_term2));
5763
5764 rtx *step = &XEXP (plus, 1);
5765 rtx *inner_step = strip_address_mutations (step);
5766 if (CONSTANT_P (*inner_step))
5767 set_address_disp (info, step, inner_step);
5768 else
5769 set_address_index (info, step, inner_step);
5770}
5771
5772/* Treat *LOC as a tree of PLUS operands and store pointers to the summed
5773 values in [PTR, END). Return a pointer to the end of the used array. */
5774
5775static rtx **
5776extract_plus_operands (rtx *loc, rtx **ptr, rtx **end)
5777{
5778 rtx x = *loc;
5779 if (GET_CODE (x) == PLUS)
5780 {
5781 ptr = extract_plus_operands (&XEXP (x, 0), ptr, end);
5782 ptr = extract_plus_operands (&XEXP (x, 1), ptr, end);
5783 }
5784 else
5785 {
5786 gcc_assert (ptr != end);
5787 *ptr++ = loc;
5788 }
5789 return ptr;
5790}
5791
5792/* Evaluate the likelihood of X being a base or index value, returning
5793 positive if it is likely to be a base, negative if it is likely to be
5794 an index, and 0 if we can't tell. Make the magnitude of the return
5795 value reflect the amount of confidence we have in the answer.
5796
5797 MODE, AS, OUTER_CODE and INDEX_CODE are as for ok_for_base_p_1. */
5798
5799static int
ef4bddc2 5800baseness (rtx x, machine_mode mode, addr_space_t as,
5801 enum rtx_code outer_code, enum rtx_code index_code)
5802{
5803 /* Believe *_POINTER unless the address shape requires otherwise. */
5804 if (REG_P (x) && REG_POINTER (x))
5805 return 2;
5806 if (MEM_P (x) && MEM_POINTER (x))
5807 return 2;
5808
5809 if (REG_P (x) && HARD_REGISTER_P (x))
5810 {
5811 /* X is a hard register. If it only fits one of the base
5812 or index classes, choose that interpretation. */
5813 int regno = REGNO (x);
5814 bool base_p = ok_for_base_p_1 (regno, mode, as, outer_code, index_code);
5815 bool index_p = REGNO_OK_FOR_INDEX_P (regno);
5816 if (base_p != index_p)
5817 return base_p ? 1 : -1;
5818 }
5819 return 0;
5820}
5821
5822/* INFO->INNER describes a normal, non-automodified address.
5823 Fill in the rest of INFO accordingly. */
5824
5825static void
5826decompose_normal_address (struct address_info *info)
5827{
5828 /* Treat the address as the sum of up to four values. */
5829 rtx *ops[4];
5830 size_t n_ops = extract_plus_operands (info->inner, ops,
5831 ops + ARRAY_SIZE (ops)) - ops;
5832
5833 /* If there is more than one component, any base component is in a PLUS. */
5834 if (n_ops > 1)
5835 info->base_outer_code = PLUS;
5836
5837 /* Try to classify each sum operand now. Leave those that could be
5838 either a base or an index in OPS. */
5839 rtx *inner_ops[4];
5840 size_t out = 0;
5841 for (size_t in = 0; in < n_ops; ++in)
5842 {
5843 rtx *loc = ops[in];
5844 rtx *inner = strip_address_mutations (loc);
5845 if (CONSTANT_P (*inner))
5846 set_address_disp (info, loc, inner);
5847 else if (GET_CODE (*inner) == UNSPEC)
5848 set_address_segment (info, loc, inner);
5849 else
5850 {
5851 /* The only other possibilities are a base or an index. */
5852 rtx *base_term = get_base_term (inner);
5853 rtx *index_term = get_index_term (inner);
5854 gcc_assert (base_term || index_term);
5855 if (!base_term)
5856 set_address_index (info, loc, index_term);
5857 else if (!index_term)
5858 set_address_base (info, loc, base_term);
5859 else
5860 {
5861 gcc_assert (base_term == index_term);
5862 ops[out] = loc;
5863 inner_ops[out] = base_term;
5864 ++out;
5865 }
5866 }
5867 }
5868
5869 /* Classify the remaining OPS members as bases and indexes. */
5870 if (out == 1)
5871 {
5872 /* If we haven't seen a base or an index yet, assume that this is
5873 the base. If we were confident that another term was the base
5874 or index, treat the remaining operand as the other kind. */
5875 if (!info->base)
5876 set_address_base (info, ops[0], inner_ops[0]);
5877 else
5878 set_address_index (info, ops[0], inner_ops[0]);
5879 }
5880 else if (out == 2)
5881 {
5882 /* In the event of a tie, assume the base comes first. */
5883 if (baseness (*inner_ops[0], info->mode, info->as, PLUS,
5884 GET_CODE (*ops[1]))
5885 >= baseness (*inner_ops[1], info->mode, info->as, PLUS,
5886 GET_CODE (*ops[0])))
5887 {
5888 set_address_base (info, ops[0], inner_ops[0]);
5889 set_address_index (info, ops[1], inner_ops[1]);
5890 }
5891 else
5892 {
5893 set_address_base (info, ops[1], inner_ops[1]);
5894 set_address_index (info, ops[0], inner_ops[0]);
5895 }
5896 }
5897 else
5898 gcc_assert (out == 0);
5899}
5900
5901/* Describe address *LOC in *INFO. MODE is the mode of the addressed value,
5902 or VOIDmode if not known. AS is the address space associated with LOC.
5903 OUTER_CODE is MEM if *LOC is a MEM address and ADDRESS otherwise. */
5904
5905void
ef4bddc2 5906decompose_address (struct address_info *info, rtx *loc, machine_mode mode,
5907 addr_space_t as, enum rtx_code outer_code)
5908{
5909 memset (info, 0, sizeof (*info));
5910 info->mode = mode;
5911 info->as = as;
5912 info->addr_outer_code = outer_code;
5913 info->outer = loc;
5914 info->inner = strip_address_mutations (loc, &outer_code);
5915 info->base_outer_code = outer_code;
5916 switch (GET_CODE (*info->inner))
5917 {
5918 case PRE_DEC:
5919 case PRE_INC:
5920 case POST_DEC:
5921 case POST_INC:
5922 decompose_incdec_address (info);
5923 break;
5924
5925 case PRE_MODIFY:
5926 case POST_MODIFY:
5927 decompose_automod_address (info);
5928 break;
5929
5930 default:
5931 decompose_normal_address (info);
5932 break;
5933 }
5934}
5935
5936/* Describe address operand LOC in INFO. */
5937
5938void
5939decompose_lea_address (struct address_info *info, rtx *loc)
5940{
5941 decompose_address (info, loc, VOIDmode, ADDR_SPACE_GENERIC, ADDRESS);
5942}
5943
5944/* Describe the address of MEM X in INFO. */
5945
5946void
5947decompose_mem_address (struct address_info *info, rtx x)
5948{
5949 gcc_assert (MEM_P (x));
5950 decompose_address (info, &XEXP (x, 0), GET_MODE (x),
5951 MEM_ADDR_SPACE (x), MEM);
5952}
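
/* A minimal usage sketch of the decomposition routines; the helper
   below is hypothetical.  It extracts the base register of a MEM, if
   the address has one.  */

static rtx
mem_base_reg (rtx mem)
{
  struct address_info info;
  decompose_mem_address (&info, mem);
  if (info.base_term && REG_P (*info.base_term))
    return *info.base_term;
  return NULL_RTX;
}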
5953
5954/* Update INFO after a change to the address it describes. */
5955
5956void
5957update_address (struct address_info *info)
5958{
5959 decompose_address (info, info->outer, info->mode, info->as,
5960 info->addr_outer_code);
5961}
5962
5963/* Return the scale applied to *INFO->INDEX_TERM, or 0 if the index is
5964 more complicated than that. */
5965
5966HOST_WIDE_INT
5967get_index_scale (const struct address_info *info)
5968{
5969 rtx index = *info->index;
5970 if (GET_CODE (index) == MULT
5971 && CONST_INT_P (XEXP (index, 1))
5972 && info->index_term == &XEXP (index, 0))
5973 return INTVAL (XEXP (index, 1));
5974
5975 if (GET_CODE (index) == ASHIFT
5976 && CONST_INT_P (XEXP (index, 1))
5977 && info->index_term == &XEXP (index, 0))
5978 return (HOST_WIDE_INT) 1 << INTVAL (XEXP (index, 1));
5979
5980 if (info->index == info->index_term)
5981 return 1;
5982
5983 return 0;
5984}
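
/* A worked example: for an index of the form (mult (reg x) (const_int 4))
   get_index_scale returns 4; for (ashift (reg x) (const_int 2)) it
   returns 1 << 2 == 4; for a bare (reg x) it returns 1.  */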
5985
5986/* Return the "index code" of INFO, in the form required by
5987 ok_for_base_p_1. */
5988
5989enum rtx_code
5990get_index_code (const struct address_info *info)
5991{
5992 if (info->index)
5993 return GET_CODE (*info->index);
5994
5995 if (info->disp)
5996 return GET_CODE (*info->disp);
5997
5998 return SCRATCH;
5999}
093a6c99 6000
6001/* Return true if X contains a thread-local symbol. */
6002
6003bool
6180e3d8 6004tls_referenced_p (const_rtx x)
6005{
6006 if (!targetm.have_tls)
6007 return false;
6008
6180e3d8 6009 subrtx_iterator::array_type array;
ebd3cb12 6010 FOR_EACH_SUBRTX (iter, array, x, ALL)
6011 if (GET_CODE (*iter) == SYMBOL_REF && SYMBOL_REF_TLS_MODEL (*iter) != 0)
6012 return true;
6013 return false;
093a6c99 6014}
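
/* A minimal sketch of the same FOR_EACH_SUBRTX idiom used by
   tls_referenced_p above; the helper is hypothetical.  It counts the
   MEM subexpressions of X.  */

static int
count_mems (const_rtx x)
{
  int n = 0;
  subrtx_iterator::array_type array;
  FOR_EACH_SUBRTX (iter, array, x, ALL)
    if (MEM_P (*iter))
      ++n;
  return n;
}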