/* Analyze RTL for GNU compiler.
   Copyright (C) 1987-2015 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */


#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "diagnostic-core.h"
#include "hard-reg-set.h"
#include "rtl.h"
#include "insn-config.h"
#include "recog.h"
#include "target.h"
#include "output.h"
#include "tm_p.h"
#include "flags.h"
#include "regs.h"
#include "function.h"
#include "predict.h"
#include "basic-block.h"
#include "df.h"
#include "symtab.h"
#include "tree.h"
#include "emit-rtl.h"  /* FIXME: Can go away once crtl is moved to rtl.h.  */
#include "addresses.h"
#include "rtl-iter.h"

/* Forward declarations */
static void set_of_1 (rtx, const_rtx, void *);
static bool covers_regno_p (const_rtx, unsigned int);
static bool covers_regno_no_parallel_p (const_rtx, unsigned int);
static int computed_jump_p_1 (const_rtx);
static void parms_set (rtx, const_rtx, void *);

static unsigned HOST_WIDE_INT cached_nonzero_bits (const_rtx, machine_mode,
                                                   const_rtx, machine_mode,
                                                   unsigned HOST_WIDE_INT);
static unsigned HOST_WIDE_INT nonzero_bits1 (const_rtx, machine_mode,
                                             const_rtx, machine_mode,
                                             unsigned HOST_WIDE_INT);
static unsigned int cached_num_sign_bit_copies (const_rtx, machine_mode, const_rtx,
                                                machine_mode,
                                                unsigned int);
static unsigned int num_sign_bit_copies1 (const_rtx, machine_mode, const_rtx,
                                          machine_mode, unsigned int);

rtx_subrtx_bound_info rtx_all_subrtx_bounds[NUM_RTX_CODE];
rtx_subrtx_bound_info rtx_nonconst_subrtx_bounds[NUM_RTX_CODE];

/* Truncation narrows the mode from SOURCE mode to DESTINATION mode.
   If TARGET_MODE_REP_EXTENDED (DESTINATION, DESTINATION_REP) is
   SIGN_EXTEND then while narrowing we also have to enforce the
   representation and sign-extend the value to mode DESTINATION_REP.

   If the value is already sign-extended to DESTINATION_REP mode we
   can just switch to DESTINATION mode on it.  For each pair of
   integral modes SOURCE and DESTINATION, when truncating from SOURCE
   to DESTINATION, NUM_SIGN_BIT_COPIES_IN_REP[SOURCE][DESTINATION]
   contains the number of high-order bits in SOURCE that have to be
   copies of the sign-bit so that we can do this mode-switch to
   DESTINATION.  */

static unsigned int
num_sign_bit_copies_in_rep[MAX_MODE_INT + 1][MAX_MODE_INT + 1];
\f
/* Store X into index I of ARRAY.  ARRAY is known to have at least I
   elements.  Return the new base of ARRAY.  */

template <typename T>
typename T::value_type *
generic_subrtx_iterator <T>::add_single_to_queue (array_type &array,
                                                  value_type *base,
                                                  size_t i, value_type x)
{
  if (base == array.stack)
    {
      if (i < LOCAL_ELEMS)
        {
          base[i] = x;
          return base;
        }
      gcc_checking_assert (i == LOCAL_ELEMS);
      /* A previous iteration might also have moved from the stack to the
         heap, in which case the heap array will already be big enough.  */
      if (vec_safe_length (array.heap) <= i)
        vec_safe_grow (array.heap, i + 1);
      base = array.heap->address ();
      memcpy (base, array.stack, sizeof (array.stack));
      base[LOCAL_ELEMS] = x;
      return base;
    }
  unsigned int length = array.heap->length ();
  if (length > i)
    {
      gcc_checking_assert (base == array.heap->address ());
      base[i] = x;
      return base;
    }
  else
    {
      gcc_checking_assert (i == length);
      vec_safe_push (array.heap, x);
      return array.heap->address ();
    }
}

/* Add the subrtxes of X to worklist ARRAY, starting at END.  Return the
   number of elements added to the worklist.  */

template <typename T>
size_t
generic_subrtx_iterator <T>::add_subrtxes_to_queue (array_type &array,
                                                    value_type *base,
                                                    size_t end, rtx_type x)
{
  enum rtx_code code = GET_CODE (x);
  const char *format = GET_RTX_FORMAT (code);
  size_t orig_end = end;
  if (__builtin_expect (INSN_P (x), false))
    {
      /* Put the pattern at the top of the queue, since that's what
         we're likely to want most.  It also allows for the SEQUENCE
         code below.  */
      for (int i = GET_RTX_LENGTH (GET_CODE (x)) - 1; i >= 0; --i)
        if (format[i] == 'e')
          {
            value_type subx = T::get_value (x->u.fld[i].rt_rtx);
            if (__builtin_expect (end < LOCAL_ELEMS, true))
              base[end++] = subx;
            else
              base = add_single_to_queue (array, base, end++, subx);
          }
    }
  else
    for (int i = 0; format[i]; ++i)
      if (format[i] == 'e')
        {
          value_type subx = T::get_value (x->u.fld[i].rt_rtx);
          if (__builtin_expect (end < LOCAL_ELEMS, true))
            base[end++] = subx;
          else
            base = add_single_to_queue (array, base, end++, subx);
        }
      else if (format[i] == 'E')
        {
          unsigned int length = GET_NUM_ELEM (x->u.fld[i].rt_rtvec);
          rtx *vec = x->u.fld[i].rt_rtvec->elem;
          if (__builtin_expect (end + length <= LOCAL_ELEMS, true))
            for (unsigned int j = 0; j < length; j++)
              base[end++] = T::get_value (vec[j]);
          else
            for (unsigned int j = 0; j < length; j++)
              base = add_single_to_queue (array, base, end++,
                                          T::get_value (vec[j]));
          if (code == SEQUENCE && end == length)
            /* If the subrtxes of the sequence fill the entire array then
               we know that no other parts of a containing insn are queued.
               The caller is therefore iterating over the sequence as a
               PATTERN (...), so we also want the patterns of the
               subinstructions.  */
            for (unsigned int j = 0; j < length; j++)
              {
                typename T::rtx_type x = T::get_rtx (base[j]);
                if (INSN_P (x))
                  base[j] = T::get_value (PATTERN (x));
              }
        }
  return end - orig_end;
}

template <typename T>
void
generic_subrtx_iterator <T>::free_array (array_type &array)
{
  vec_free (array.heap);
}

template <typename T>
const size_t generic_subrtx_iterator <T>::LOCAL_ELEMS;

template class generic_subrtx_iterator <const_rtx_accessor>;
template class generic_subrtx_iterator <rtx_var_accessor>;
template class generic_subrtx_iterator <rtx_ptr_accessor>;

/* Return 1 if the value of X is unstable
   (would be different at a different point in the program).
   The frame pointer, arg pointer, etc. are considered stable
   (within one function) and so is anything marked `unchanging'.  */

int
rtx_unstable_p (const_rtx x)
{
  const RTX_CODE code = GET_CODE (x);
  int i;
  const char *fmt;

  switch (code)
    {
    case MEM:
      return !MEM_READONLY_P (x) || rtx_unstable_p (XEXP (x, 0));

    case CONST:
    CASE_CONST_ANY:
    case SYMBOL_REF:
    case LABEL_REF:
      return 0;

    case REG:
      /* As in rtx_varies_p, we have to use the actual rtx, not reg number.  */
      if (x == frame_pointer_rtx || x == hard_frame_pointer_rtx
          /* The arg pointer varies if it is not a fixed register.  */
          || (x == arg_pointer_rtx && fixed_regs[ARG_POINTER_REGNUM]))
        return 0;
      /* ??? When call-clobbered, the value is stable modulo the restore
         that must happen after a call.  This currently screws up local-alloc
         into believing that the restore is not needed.  */
      if (!PIC_OFFSET_TABLE_REG_CALL_CLOBBERED && x == pic_offset_table_rtx)
        return 0;
      return 1;

    case ASM_OPERANDS:
      if (MEM_VOLATILE_P (x))
        return 1;

      /* Fall through.  */

    default:
      break;
    }

  fmt = GET_RTX_FORMAT (code);
  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    if (fmt[i] == 'e')
      {
        if (rtx_unstable_p (XEXP (x, i)))
          return 1;
      }
    else if (fmt[i] == 'E')
      {
        int j;
        for (j = 0; j < XVECLEN (x, i); j++)
          if (rtx_unstable_p (XVECEXP (x, i, j)))
            return 1;
      }

  return 0;
}
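
/* For example, (symbol_ref "x") and (const_int 4) are stable, and a
   MEM_READONLY_P (mem (symbol_ref "x")) is too, while a MEM that can
   be stored into is unstable.  (An illustrative note; the names used
   here are arbitrary.)  */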

/* Return 1 if X has a value that can vary even between two
   executions of the program.  0 means X can be compared reliably
   against certain constants or near-constants.
   FOR_ALIAS is nonzero if we are called from alias analysis; if it is
   zero, we are slightly more conservative.
   The frame pointer and the arg pointer are considered constant.  */

bool
rtx_varies_p (const_rtx x, bool for_alias)
{
  RTX_CODE code;
  int i;
  const char *fmt;

  if (!x)
    return 0;

  code = GET_CODE (x);
  switch (code)
    {
    case MEM:
      return !MEM_READONLY_P (x) || rtx_varies_p (XEXP (x, 0), for_alias);

    case CONST:
    CASE_CONST_ANY:
    case SYMBOL_REF:
    case LABEL_REF:
      return 0;

    case REG:
      /* Note that we have to test for the actual rtx used for the frame
         and arg pointers and not just the register number in case we have
         eliminated the frame and/or arg pointer and are using it
         for pseudos.  */
      if (x == frame_pointer_rtx || x == hard_frame_pointer_rtx
          /* The arg pointer varies if it is not a fixed register.  */
          || (x == arg_pointer_rtx && fixed_regs[ARG_POINTER_REGNUM]))
        return 0;
      if (x == pic_offset_table_rtx
          /* ??? When call-clobbered, the value is stable modulo the restore
             that must happen after a call.  This currently screws up
             local-alloc into believing that the restore is not needed, so we
             must return 0 only if we are called from alias analysis.  */
          && (!PIC_OFFSET_TABLE_REG_CALL_CLOBBERED || for_alias))
        return 0;
      return 1;

    case LO_SUM:
      /* The operand 0 of a LO_SUM is considered constant
         (in fact it is related specifically to operand 1)
         during alias analysis.  */
      return (! for_alias && rtx_varies_p (XEXP (x, 0), for_alias))
             || rtx_varies_p (XEXP (x, 1), for_alias);

    case ASM_OPERANDS:
      if (MEM_VOLATILE_P (x))
        return 1;

      /* Fall through.  */

    default:
      break;
    }

  fmt = GET_RTX_FORMAT (code);
  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    if (fmt[i] == 'e')
      {
        if (rtx_varies_p (XEXP (x, i), for_alias))
          return 1;
      }
    else if (fmt[i] == 'E')
      {
        int j;
        for (j = 0; j < XVECLEN (x, i); j++)
          if (rtx_varies_p (XVECEXP (x, i, j), for_alias))
            return 1;
      }

  return 0;
}

/* Return nonzero if the use of X+OFFSET as an address in a MEM with SIZE
   bytes can cause a trap.  MODE is the mode of the MEM (not that of X) and
   UNALIGNED_MEMS controls whether nonzero is returned for unaligned memory
   references on strict alignment machines.  */

static int
rtx_addr_can_trap_p_1 (const_rtx x, HOST_WIDE_INT offset, HOST_WIDE_INT size,
                       machine_mode mode, bool unaligned_mems)
{
  enum rtx_code code = GET_CODE (x);

  /* The offset must be a multiple of the mode size if we are considering
     unaligned memory references on strict alignment machines.  */
  if (STRICT_ALIGNMENT && unaligned_mems && GET_MODE_SIZE (mode) != 0)
    {
      HOST_WIDE_INT actual_offset = offset;

#ifdef SPARC_STACK_BOUNDARY_HACK
      /* ??? The SPARC port may claim a STACK_BOUNDARY higher than
         the real alignment of %sp.  However, when it does this, the
         alignment of %sp+STACK_POINTER_OFFSET is STACK_BOUNDARY.  */
      if (SPARC_STACK_BOUNDARY_HACK
          && (x == stack_pointer_rtx || x == hard_frame_pointer_rtx))
        actual_offset -= STACK_POINTER_OFFSET;
#endif

      if (actual_offset % GET_MODE_SIZE (mode) != 0)
        return 1;
    }

  switch (code)
    {
    case SYMBOL_REF:
      if (SYMBOL_REF_WEAK (x))
        return 1;
      if (!CONSTANT_POOL_ADDRESS_P (x))
        {
          tree decl;
          HOST_WIDE_INT decl_size;

          if (offset < 0)
            return 1;
          if (size == 0)
            size = GET_MODE_SIZE (mode);
          if (size == 0)
            return offset != 0;

          /* If the size of the access or of the symbol is unknown,
             assume the worst.  */
          decl = SYMBOL_REF_DECL (x);

          /* Else check that the access is in bounds.  TODO: restructure
             expr_size/tree_expr_size/int_expr_size and just use the latter.  */
          if (!decl)
            decl_size = -1;
          else if (DECL_P (decl) && DECL_SIZE_UNIT (decl))
            decl_size = (tree_fits_shwi_p (DECL_SIZE_UNIT (decl))
                         ? tree_to_shwi (DECL_SIZE_UNIT (decl))
                         : -1);
          else if (TREE_CODE (decl) == STRING_CST)
            decl_size = TREE_STRING_LENGTH (decl);
          else if (TYPE_SIZE_UNIT (TREE_TYPE (decl)))
            decl_size = int_size_in_bytes (TREE_TYPE (decl));
          else
            decl_size = -1;

          return (decl_size <= 0 ? offset != 0 : offset + size > decl_size);
        }

      return 0;

    case LABEL_REF:
      return 0;

    case REG:
      /* Stack references are assumed not to trap, but we need to deal with
         nonsensical offsets.  */
      if (x == frame_pointer_rtx)
        {
          HOST_WIDE_INT adj_offset = offset - STARTING_FRAME_OFFSET;
          if (size == 0)
            size = GET_MODE_SIZE (mode);
          if (FRAME_GROWS_DOWNWARD)
            {
              if (adj_offset < frame_offset || adj_offset + size - 1 >= 0)
                return 1;
            }
          else
            {
              if (adj_offset < 0 || adj_offset + size - 1 >= frame_offset)
                return 1;
            }
          return 0;
        }
      /* ??? Need to add a similar guard for nonsensical offsets.  */
      if (x == hard_frame_pointer_rtx
          || x == stack_pointer_rtx
          /* The arg pointer varies if it is not a fixed register.  */
          || (x == arg_pointer_rtx && fixed_regs[ARG_POINTER_REGNUM]))
        return 0;
      /* All of the virtual frame registers are stack references.  */
      if (REGNO (x) >= FIRST_VIRTUAL_REGISTER
          && REGNO (x) <= LAST_VIRTUAL_REGISTER)
        return 0;
      return 1;

    case CONST:
      return rtx_addr_can_trap_p_1 (XEXP (x, 0), offset, size,
                                    mode, unaligned_mems);

    case PLUS:
      /* An address is assumed not to trap if:
         - it is the pic register plus a constant.  */
      if (XEXP (x, 0) == pic_offset_table_rtx && CONSTANT_P (XEXP (x, 1)))
        return 0;

      /* - or it is an address that can't trap plus a constant integer.  */
      if (CONST_INT_P (XEXP (x, 1))
          && !rtx_addr_can_trap_p_1 (XEXP (x, 0), offset + INTVAL (XEXP (x, 1)),
                                     size, mode, unaligned_mems))
        return 0;

      return 1;

    case LO_SUM:
    case PRE_MODIFY:
      return rtx_addr_can_trap_p_1 (XEXP (x, 1), offset, size,
                                    mode, unaligned_mems);

    case PRE_DEC:
    case PRE_INC:
    case POST_DEC:
    case POST_INC:
    case POST_MODIFY:
      return rtx_addr_can_trap_p_1 (XEXP (x, 0), offset, size,
                                    mode, unaligned_mems);

    default:
      break;
    }

  /* If it isn't one of the cases above, it can cause a trap.  */
  return 1;
}

/* Return nonzero if the use of X as an address in a MEM can cause a trap.  */

int
rtx_addr_can_trap_p (const_rtx x)
{
  return rtx_addr_can_trap_p_1 (x, 0, 0, VOIDmode, false);
}
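
/* For example, an address of the form stack pointer plus a constant
   integer cannot trap, while a SYMBOL_REF of a weak symbol can, since
   a weak symbol may resolve to address zero.  */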

/* Return true if X is an address that is known to not be zero.  */

bool
nonzero_address_p (const_rtx x)
{
  const enum rtx_code code = GET_CODE (x);

  switch (code)
    {
    case SYMBOL_REF:
      return !SYMBOL_REF_WEAK (x);

    case LABEL_REF:
      return true;

    case REG:
      /* As in rtx_varies_p, we have to use the actual rtx, not reg number.  */
      if (x == frame_pointer_rtx || x == hard_frame_pointer_rtx
          || x == stack_pointer_rtx
          || (x == arg_pointer_rtx && fixed_regs[ARG_POINTER_REGNUM]))
        return true;
      /* All of the virtual frame registers are stack references.  */
      if (REGNO (x) >= FIRST_VIRTUAL_REGISTER
          && REGNO (x) <= LAST_VIRTUAL_REGISTER)
        return true;
      return false;

    case CONST:
      return nonzero_address_p (XEXP (x, 0));

    case PLUS:
      /* Handle PIC references.  */
      if (XEXP (x, 0) == pic_offset_table_rtx
          && CONSTANT_P (XEXP (x, 1)))
        return true;
      return false;

    case PRE_MODIFY:
      /* Similar to the above; allow positive offsets.  Further, since
         auto-inc is only allowed in memories, the register must be a
         pointer.  */
      if (CONST_INT_P (XEXP (x, 1))
          && INTVAL (XEXP (x, 1)) > 0)
        return true;
      return nonzero_address_p (XEXP (x, 0));

    case PRE_INC:
      /* Similarly.  Further, the offset is always positive.  */
      return true;

    case PRE_DEC:
    case POST_DEC:
    case POST_INC:
    case POST_MODIFY:
      return nonzero_address_p (XEXP (x, 0));

    case LO_SUM:
      return nonzero_address_p (XEXP (x, 1));

    default:
      break;
    }

  /* If it isn't one of the cases above, it might be zero.  */
  return false;
}
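
/* For example, a non-weak (symbol_ref "x"), a LABEL_REF and the frame
   pointer are known to be nonzero, while nothing is known about the
   value of an arbitrary pseudo register.  */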

/* Return 1 if X refers to a memory location whose address
   cannot be compared reliably with constant addresses,
   or if X refers to a BLKmode memory object.
   FOR_ALIAS is nonzero if we are called from alias analysis; if it is
   zero, we are slightly more conservative.  */

bool
rtx_addr_varies_p (const_rtx x, bool for_alias)
{
  enum rtx_code code;
  int i;
  const char *fmt;

  if (x == 0)
    return 0;

  code = GET_CODE (x);
  if (code == MEM)
    return GET_MODE (x) == BLKmode || rtx_varies_p (XEXP (x, 0), for_alias);

  fmt = GET_RTX_FORMAT (code);
  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    if (fmt[i] == 'e')
      {
        if (rtx_addr_varies_p (XEXP (x, i), for_alias))
          return 1;
      }
    else if (fmt[i] == 'E')
      {
        int j;
        for (j = 0; j < XVECLEN (x, i); j++)
          if (rtx_addr_varies_p (XVECEXP (x, i, j), for_alias))
            return 1;
      }
  return 0;
}
\f
/* Return the CALL in X if there is one.  */

rtx
get_call_rtx_from (rtx x)
{
  if (INSN_P (x))
    x = PATTERN (x);
  if (GET_CODE (x) == PARALLEL)
    x = XVECEXP (x, 0, 0);
  if (GET_CODE (x) == SET)
    x = SET_SRC (x);
  if (GET_CODE (x) == CALL && MEM_P (XEXP (x, 0)))
    return x;
  return NULL_RTX;
}
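
/* For example, for a typical call insn whose pattern is
   (set (reg 0) (call (mem (symbol_ref "foo")) (const_int 0))),
   get_call_rtx_from returns the inner (call ...) rtx.  (Illustrative;
   register numbers and modes vary by target.)  */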
\f
/* Return the value of the integer term in X, if one is apparent;
   otherwise return 0.
   Only obvious integer terms are detected.
   This is used in cse.c with the `related_value' field.  */

HOST_WIDE_INT
get_integer_term (const_rtx x)
{
  if (GET_CODE (x) == CONST)
    x = XEXP (x, 0);

  if (GET_CODE (x) == MINUS
      && CONST_INT_P (XEXP (x, 1)))
    return - INTVAL (XEXP (x, 1));
  if (GET_CODE (x) == PLUS
      && CONST_INT_P (XEXP (x, 1)))
    return INTVAL (XEXP (x, 1));
  return 0;
}

/* If X is a constant, return the value sans apparent integer term;
   otherwise return 0.
   Only obvious integer terms are detected.  */

rtx
get_related_value (const_rtx x)
{
  if (GET_CODE (x) != CONST)
    return 0;
  x = XEXP (x, 0);
  if (GET_CODE (x) == PLUS
      && CONST_INT_P (XEXP (x, 1)))
    return XEXP (x, 0);
  else if (GET_CODE (x) == MINUS
           && CONST_INT_P (XEXP (x, 1)))
    return XEXP (x, 0);
  return 0;
}
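
/* For example, given (const (plus (symbol_ref "x") (const_int 8))),
   get_integer_term returns 8 and get_related_value returns
   (symbol_ref "x"); with MINUS instead of PLUS the term is -8.  */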
\f
/* Return true if SYMBOL is a SYMBOL_REF and OFFSET + SYMBOL points
   to somewhere in the same object or object_block as SYMBOL.  */

bool
offset_within_block_p (const_rtx symbol, HOST_WIDE_INT offset)
{
  tree decl;

  if (GET_CODE (symbol) != SYMBOL_REF)
    return false;

  if (offset == 0)
    return true;

  if (offset > 0)
    {
      if (CONSTANT_POOL_ADDRESS_P (symbol)
          && offset < (int) GET_MODE_SIZE (get_pool_mode (symbol)))
        return true;

      decl = SYMBOL_REF_DECL (symbol);
      if (decl && offset < int_size_in_bytes (TREE_TYPE (decl)))
        return true;
    }

  if (SYMBOL_REF_HAS_BLOCK_INFO_P (symbol)
      && SYMBOL_REF_BLOCK (symbol)
      && SYMBOL_REF_BLOCK_OFFSET (symbol) >= 0
      && ((unsigned HOST_WIDE_INT) offset + SYMBOL_REF_BLOCK_OFFSET (symbol)
          < (unsigned HOST_WIDE_INT) SYMBOL_REF_BLOCK (symbol)->size))
    return true;

  return false;
}

/* Split X into a base and a constant offset, storing them in *BASE_OUT
   and *OFFSET_OUT respectively.  */

void
split_const (rtx x, rtx *base_out, rtx *offset_out)
{
  if (GET_CODE (x) == CONST)
    {
      x = XEXP (x, 0);
      if (GET_CODE (x) == PLUS && CONST_INT_P (XEXP (x, 1)))
        {
          *base_out = XEXP (x, 0);
          *offset_out = XEXP (x, 1);
          return;
        }
    }
  *base_out = x;
  *offset_out = const0_rtx;
}
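
/* For example, split_const applied to
   (const (plus (symbol_ref "x") (const_int 12))) stores
   (symbol_ref "x") in *BASE_OUT and (const_int 12) in *OFFSET_OUT;
   for any other rtx it stores the rtx itself and const0_rtx.  */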
\f
/* Return the number of places FIND appears within X.  If COUNT_DEST is
   zero, we do not count occurrences inside the destination of a SET.  */

int
count_occurrences (const_rtx x, const_rtx find, int count_dest)
{
  int i, j;
  enum rtx_code code;
  const char *format_ptr;
  int count;

  if (x == find)
    return 1;

  code = GET_CODE (x);

  switch (code)
    {
    case REG:
    CASE_CONST_ANY:
    case SYMBOL_REF:
    case CODE_LABEL:
    case PC:
    case CC0:
      return 0;

    case EXPR_LIST:
      count = count_occurrences (XEXP (x, 0), find, count_dest);
      if (XEXP (x, 1))
        count += count_occurrences (XEXP (x, 1), find, count_dest);
      return count;

    case MEM:
      if (MEM_P (find) && rtx_equal_p (x, find))
        return 1;
      break;

    case SET:
      if (SET_DEST (x) == find && ! count_dest)
        return count_occurrences (SET_SRC (x), find, count_dest);
      break;

    default:
      break;
    }

  format_ptr = GET_RTX_FORMAT (code);
  count = 0;

  for (i = 0; i < GET_RTX_LENGTH (code); i++)
    {
      switch (*format_ptr++)
        {
        case 'e':
          count += count_occurrences (XEXP (x, i), find, count_dest);
          break;

        case 'E':
          for (j = 0; j < XVECLEN (x, i); j++)
            count += count_occurrences (XVECEXP (x, i, j), find, count_dest);
          break;
        }
    }
  return count;
}
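
/* For example, counting (reg 1) in (set (reg 1) (plus (reg 1) (reg 2)))
   yields 1 when COUNT_DEST is zero (the destination occurrence is
   skipped) and 2 when COUNT_DEST is nonzero.  */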

\f
/* Return TRUE if OP is a register or subreg of a register that
   holds an unsigned quantity.  Otherwise, return FALSE.  */

bool
unsigned_reg_p (rtx op)
{
  if (REG_P (op)
      && REG_EXPR (op)
      && TYPE_UNSIGNED (TREE_TYPE (REG_EXPR (op))))
    return true;

  if (GET_CODE (op) == SUBREG
      && SUBREG_PROMOTED_SIGN (op))
    return true;

  return false;
}
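
/* For example, unsigned_reg_p holds for a REG whose REG_EXPR is a
   variable declared "unsigned int", and for a SUBREG marked as
   promoted from an unsigned value.  */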

\f
/* Nonzero if register REG appears somewhere within IN.
   Also works if REG is not a register; in this case it checks
   for a subexpression of IN that is Lisp "equal" to REG.  */

int
reg_mentioned_p (const_rtx reg, const_rtx in)
{
  const char *fmt;
  int i;
  enum rtx_code code;

  if (in == 0)
    return 0;

  if (reg == in)
    return 1;

  if (GET_CODE (in) == LABEL_REF)
    return reg == LABEL_REF_LABEL (in);

  code = GET_CODE (in);

  switch (code)
    {
      /* Compare registers by number.  */
    case REG:
      return REG_P (reg) && REGNO (in) == REGNO (reg);

      /* These codes have no constituent expressions
         and are unique.  */
    case SCRATCH:
    case CC0:
    case PC:
      return 0;

    CASE_CONST_ANY:
      /* These are kept unique for a given value.  */
      return 0;

    default:
      break;
    }

  if (GET_CODE (reg) == code && rtx_equal_p (reg, in))
    return 1;

  fmt = GET_RTX_FORMAT (code);

  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    {
      if (fmt[i] == 'E')
        {
          int j;
          for (j = XVECLEN (in, i) - 1; j >= 0; j--)
            if (reg_mentioned_p (reg, XVECEXP (in, i, j)))
              return 1;
        }
      else if (fmt[i] == 'e'
               && reg_mentioned_p (reg, XEXP (in, i)))
        return 1;
    }
  return 0;
}
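
/* For example, reg_mentioned_p ((reg 1), (plus (reg 1) (const_int 4)))
   is nonzero, while reg_mentioned_p ((reg 1), (plus (reg 2) (const_int 4)))
   is zero.  */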
\f
/* Return 1 if in between BEG and END, exclusive of BEG and END, there is
   no CODE_LABEL insn.  */

int
no_labels_between_p (const rtx_insn *beg, const rtx_insn *end)
{
  rtx_insn *p;
  if (beg == end)
    return 0;
  for (p = NEXT_INSN (beg); p != end; p = NEXT_INSN (p))
    if (LABEL_P (p))
      return 0;
  return 1;
}

/* Nonzero if register REG is used in an insn between
   FROM_INSN and TO_INSN (exclusive of those two).  */

int
reg_used_between_p (const_rtx reg, const rtx_insn *from_insn,
                    const rtx_insn *to_insn)
{
  rtx_insn *insn;

  if (from_insn == to_insn)
    return 0;

  for (insn = NEXT_INSN (from_insn); insn != to_insn; insn = NEXT_INSN (insn))
    if (NONDEBUG_INSN_P (insn)
        && (reg_overlap_mentioned_p (reg, PATTERN (insn))
            || (CALL_P (insn) && find_reg_fusage (insn, USE, reg))))
      return 1;
  return 0;
}
\f
/* Nonzero if the old value of X, a register, is referenced in BODY.  If X
   is entirely replaced by a new value and the only use is as a SET_DEST,
   we do not consider it a reference.  */

int
reg_referenced_p (const_rtx x, const_rtx body)
{
  int i;

  switch (GET_CODE (body))
    {
    case SET:
      if (reg_overlap_mentioned_p (x, SET_SRC (body)))
        return 1;

      /* If the destination is anything other than CC0, PC, a REG or a SUBREG
         of a REG that occupies all of the REG, the insn references X if
         it is mentioned in the destination.  */
      if (GET_CODE (SET_DEST (body)) != CC0
          && GET_CODE (SET_DEST (body)) != PC
          && !REG_P (SET_DEST (body))
          && ! (GET_CODE (SET_DEST (body)) == SUBREG
                && REG_P (SUBREG_REG (SET_DEST (body)))
                && (((GET_MODE_SIZE (GET_MODE (SUBREG_REG (SET_DEST (body))))
                      + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD)
                    == ((GET_MODE_SIZE (GET_MODE (SET_DEST (body)))
                         + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD)))
          && reg_overlap_mentioned_p (x, SET_DEST (body)))
        return 1;
      return 0;

    case ASM_OPERANDS:
      for (i = ASM_OPERANDS_INPUT_LENGTH (body) - 1; i >= 0; i--)
        if (reg_overlap_mentioned_p (x, ASM_OPERANDS_INPUT (body, i)))
          return 1;
      return 0;

    case CALL:
    case USE:
    case IF_THEN_ELSE:
      return reg_overlap_mentioned_p (x, body);

    case TRAP_IF:
      return reg_overlap_mentioned_p (x, TRAP_CONDITION (body));

    case PREFETCH:
      return reg_overlap_mentioned_p (x, XEXP (body, 0));

    case UNSPEC:
    case UNSPEC_VOLATILE:
      for (i = XVECLEN (body, 0) - 1; i >= 0; i--)
        if (reg_overlap_mentioned_p (x, XVECEXP (body, 0, i)))
          return 1;
      return 0;

    case PARALLEL:
      for (i = XVECLEN (body, 0) - 1; i >= 0; i--)
        if (reg_referenced_p (x, XVECEXP (body, 0, i)))
          return 1;
      return 0;

    case CLOBBER:
      if (MEM_P (XEXP (body, 0)))
        if (reg_overlap_mentioned_p (x, XEXP (XEXP (body, 0), 0)))
          return 1;
      return 0;

    case COND_EXEC:
      if (reg_overlap_mentioned_p (x, COND_EXEC_TEST (body)))
        return 1;
      return reg_referenced_p (x, COND_EXEC_CODE (body));

    default:
      return 0;
    }
}
\f
/* Nonzero if register REG is set or clobbered in an insn between
   FROM_INSN and TO_INSN (exclusive of those two).  */

int
reg_set_between_p (const_rtx reg, const rtx_insn *from_insn,
                   const rtx_insn *to_insn)
{
  const rtx_insn *insn;

  if (from_insn == to_insn)
    return 0;

  for (insn = NEXT_INSN (from_insn); insn != to_insn; insn = NEXT_INSN (insn))
    if (INSN_P (insn) && reg_set_p (reg, insn))
      return 1;
  return 0;
}

/* Internals of reg_set_between_p.  */
int
reg_set_p (const_rtx reg, const_rtx insn)
{
  /* After delay slot handling, call and branch insns might be in a
     sequence.  Check all the elements there.  */
  if (INSN_P (insn) && GET_CODE (PATTERN (insn)) == SEQUENCE)
    {
      for (int i = 0; i < XVECLEN (PATTERN (insn), 0); ++i)
        if (reg_set_p (reg, XVECEXP (PATTERN (insn), 0, i)))
          return true;

      return false;
    }

  /* We can be passed an insn or part of one.  If we are passed an insn,
     check if a side-effect of the insn clobbers REG.  */
  if (INSN_P (insn)
      && (FIND_REG_INC_NOTE (insn, reg)
          || (CALL_P (insn)
              && ((REG_P (reg)
                   && REGNO (reg) < FIRST_PSEUDO_REGISTER
                   && overlaps_hard_reg_set_p (regs_invalidated_by_call,
                                               GET_MODE (reg), REGNO (reg)))
                  || MEM_P (reg)
                  || find_reg_fusage (insn, CLOBBER, reg)))))
    return true;

  return set_of (reg, insn) != NULL_RTX;
}
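
/* For example, reg_set_p is nonzero for (reg 1) in
   (set (reg 1) (const_int 0)), and also for any call-clobbered hard
   register when INSN is a CALL_INSN.  */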

/* Similar to reg_set_between_p, but check all registers in X.  Return 0
   only if none of them are modified between START and END.  Return 1 if
   X contains a MEM; this routine does use memory aliasing.  */

int
modified_between_p (const_rtx x, const rtx_insn *start, const rtx_insn *end)
{
  const enum rtx_code code = GET_CODE (x);
  const char *fmt;
  int i, j;
  rtx_insn *insn;

  if (start == end)
    return 0;

  switch (code)
    {
    CASE_CONST_ANY:
    case CONST:
    case SYMBOL_REF:
    case LABEL_REF:
      return 0;

    case PC:
    case CC0:
      return 1;

    case MEM:
      if (modified_between_p (XEXP (x, 0), start, end))
        return 1;
      if (MEM_READONLY_P (x))
        return 0;
      for (insn = NEXT_INSN (start); insn != end; insn = NEXT_INSN (insn))
        if (memory_modified_in_insn_p (x, insn))
          return 1;
      return 0;
      break;

    case REG:
      return reg_set_between_p (x, start, end);

    default:
      break;
    }

  fmt = GET_RTX_FORMAT (code);
  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    {
      if (fmt[i] == 'e' && modified_between_p (XEXP (x, i), start, end))
        return 1;

      else if (fmt[i] == 'E')
        for (j = XVECLEN (x, i) - 1; j >= 0; j--)
          if (modified_between_p (XVECEXP (x, i, j), start, end))
            return 1;
    }

  return 0;
}

/* Similar to reg_set_p, but check all registers in X.  Return 0 only if none
   of them are modified in INSN.  Return 1 if X contains a MEM; this routine
   does use memory aliasing.  */

int
modified_in_p (const_rtx x, const_rtx insn)
{
  const enum rtx_code code = GET_CODE (x);
  const char *fmt;
  int i, j;

  switch (code)
    {
    CASE_CONST_ANY:
    case CONST:
    case SYMBOL_REF:
    case LABEL_REF:
      return 0;

    case PC:
    case CC0:
      return 1;

    case MEM:
      if (modified_in_p (XEXP (x, 0), insn))
        return 1;
      if (MEM_READONLY_P (x))
        return 0;
      if (memory_modified_in_insn_p (x, insn))
        return 1;
      return 0;
      break;

    case REG:
      return reg_set_p (x, insn);

    default:
      break;
    }

  fmt = GET_RTX_FORMAT (code);
  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    {
      if (fmt[i] == 'e' && modified_in_p (XEXP (x, i), insn))
        return 1;

      else if (fmt[i] == 'E')
        for (j = XVECLEN (x, i) - 1; j >= 0; j--)
          if (modified_in_p (XVECEXP (x, i, j), insn))
            return 1;
    }

  return 0;
}
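
/* For example, (plus (reg 1) (mem (reg 2))) is modified in any insn
   that sets reg 1 or reg 2 or that may write the memory location the
   MEM refers to; constants are never modified.  */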
\f
/* Helper function for set_of.  */
struct set_of_data
  {
    const_rtx found;
    const_rtx pat;
  };

static void
set_of_1 (rtx x, const_rtx pat, void *data1)
{
  struct set_of_data *const data = (struct set_of_data *) (data1);
  if (rtx_equal_p (x, data->pat)
      || (!MEM_P (x) && reg_overlap_mentioned_p (data->pat, x)))
    data->found = pat;
}

/* Given an INSN, return a SET or CLOBBER expression that does modify PAT
   (either directly or via STRICT_LOW_PART and similar modifiers).  */
const_rtx
set_of (const_rtx pat, const_rtx insn)
{
  struct set_of_data data;
  data.found = NULL_RTX;
  data.pat = pat;
  note_stores (INSN_P (insn) ? PATTERN (insn) : insn, set_of_1, &data);
  return data.found;
}

/* Add all hard registers in X to *PSET.  */
void
find_all_hard_regs (const_rtx x, HARD_REG_SET *pset)
{
  subrtx_iterator::array_type array;
  FOR_EACH_SUBRTX (iter, array, x, NONCONST)
    {
      const_rtx x = *iter;
      if (REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER)
        add_to_hard_reg_set (pset, GET_MODE (x), REGNO (x));
    }
}

/* This function, called through note_stores, collects sets and
   clobbers of hard registers in a HARD_REG_SET, which is pointed to
   by DATA.  */
void
record_hard_reg_sets (rtx x, const_rtx pat ATTRIBUTE_UNUSED, void *data)
{
  HARD_REG_SET *pset = (HARD_REG_SET *)data;
  if (REG_P (x) && HARD_REGISTER_P (x))
    add_to_hard_reg_set (pset, GET_MODE (x), REGNO (x));
}

/* Examine INSN, and compute the set of hard registers written by it.
   Store it in *PSET.  Should only be called after reload.  */
void
find_all_hard_reg_sets (const rtx_insn *insn, HARD_REG_SET *pset, bool implicit)
{
  rtx link;

  CLEAR_HARD_REG_SET (*pset);
  note_stores (PATTERN (insn), record_hard_reg_sets, pset);
  if (CALL_P (insn))
    {
      if (implicit)
        IOR_HARD_REG_SET (*pset, call_used_reg_set);

      for (link = CALL_INSN_FUNCTION_USAGE (insn); link; link = XEXP (link, 1))
        record_hard_reg_sets (XEXP (link, 0), NULL, pset);
    }
  for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
    if (REG_NOTE_KIND (link) == REG_INC)
      record_hard_reg_sets (XEXP (link, 0), NULL, pset);
}

/* Like record_hard_reg_sets, but called through note_uses.  */
void
record_hard_reg_uses (rtx *px, void *data)
{
  find_all_hard_regs (*px, (HARD_REG_SET *) data);
}
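
/* For example, for an insn whose pattern is
   (parallel [(set (reg 1) (const_int 0)) (clobber (reg 2))]),
   set_of ((reg 2), insn) returns the CLOBBER expression and
   set_of ((reg 3), insn) returns NULL_RTX.  */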
\f
/* Given an INSN, return a SET expression if this insn has only a single SET.
   It may also have CLOBBERs, USEs, or SETs whose output
   will not be used, which we ignore.  */

rtx
single_set_2 (const rtx_insn *insn, const_rtx pat)
{
  rtx set = NULL;
  int set_verified = 1;
  int i;

  if (GET_CODE (pat) == PARALLEL)
    {
      for (i = 0; i < XVECLEN (pat, 0); i++)
        {
          rtx sub = XVECEXP (pat, 0, i);
          switch (GET_CODE (sub))
            {
            case USE:
            case CLOBBER:
              break;

            case SET:
              /* We can consider insns having multiple sets, where all
                 but one are dead as single set insns.  In common case
                 only single set is present in the pattern so we want
                 to avoid checking for REG_UNUSED notes unless necessary.

                 When we reach set first time, we just expect this is
                 the single set we are looking for and only when more
                 sets are found in the insn, we check them.  */
              if (!set_verified)
                {
                  if (find_reg_note (insn, REG_UNUSED, SET_DEST (set))
                      && !side_effects_p (set))
                    set = NULL;
                  else
                    set_verified = 1;
                }
              if (!set)
                set = sub, set_verified = 0;
              else if (!find_reg_note (insn, REG_UNUSED, SET_DEST (sub))
                       || side_effects_p (sub))
                return NULL_RTX;
              break;

            default:
              return NULL_RTX;
            }
        }
    }
  return set;
}
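
/* For example, (parallel [(set (reg 1) (const_int 0))
   (clobber (scratch))]) counts as a single set, since CLOBBERs and
   USEs are ignored; a PARALLEL with two SETs whose results are both
   used makes single_set_2 return NULL_RTX.  */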

/* Given an INSN, return nonzero if it has more than one SET, else return
   zero.  */

int
multiple_sets (const_rtx insn)
{
  int found;
  int i;

  /* INSN must be an insn.  */
  if (! INSN_P (insn))
    return 0;

  /* Only a PARALLEL can have multiple SETs.  */
  if (GET_CODE (PATTERN (insn)) == PARALLEL)
    {
      for (i = 0, found = 0; i < XVECLEN (PATTERN (insn), 0); i++)
        if (GET_CODE (XVECEXP (PATTERN (insn), 0, i)) == SET)
          {
            /* If we have already found a SET, then return now.  */
            if (found)
              return 1;
            else
              found = 1;
          }
    }

  /* Either zero or one SET.  */
  return 0;
}
\f
/* Return nonzero if the destination of SET equals the source
   and there are no side effects.  */

int
set_noop_p (const_rtx set)
{
  rtx src = SET_SRC (set);
  rtx dst = SET_DEST (set);

  if (dst == pc_rtx && src == pc_rtx)
    return 1;

  if (MEM_P (dst) && MEM_P (src))
    return rtx_equal_p (dst, src) && !side_effects_p (dst);

  if (GET_CODE (dst) == ZERO_EXTRACT)
    return rtx_equal_p (XEXP (dst, 0), src)
           && ! BYTES_BIG_ENDIAN && XEXP (dst, 2) == const0_rtx
           && !side_effects_p (src);

  if (GET_CODE (dst) == STRICT_LOW_PART)
    dst = XEXP (dst, 0);

  if (GET_CODE (src) == SUBREG && GET_CODE (dst) == SUBREG)
    {
      if (SUBREG_BYTE (src) != SUBREG_BYTE (dst))
        return 0;
      src = SUBREG_REG (src);
      dst = SUBREG_REG (dst);
    }

  /* It is a NOOP if destination overlaps with selected src vector
     elements.  */
  if (GET_CODE (src) == VEC_SELECT
      && REG_P (XEXP (src, 0)) && REG_P (dst)
      && HARD_REGISTER_P (XEXP (src, 0))
      && HARD_REGISTER_P (dst))
    {
      int i;
      rtx par = XEXP (src, 1);
      rtx src0 = XEXP (src, 0);
      int c0 = INTVAL (XVECEXP (par, 0, 0));
      HOST_WIDE_INT offset = GET_MODE_UNIT_SIZE (GET_MODE (src0)) * c0;

      for (i = 1; i < XVECLEN (par, 0); i++)
        if (INTVAL (XVECEXP (par, 0, i)) != c0 + i)
          return 0;
      return
        simplify_subreg_regno (REGNO (src0), GET_MODE (src0),
                               offset, GET_MODE (dst)) == (int) REGNO (dst);
    }

  return (REG_P (src) && REG_P (dst)
          && REGNO (src) == REGNO (dst));
}
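
/* For example, (set (reg 1) (reg 1)) is a no-op, as is a copy of a
   MEM onto itself when it has no side effects, while
   (set (reg 1) (reg 2)) is not.  */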
\f
/* Return nonzero if an insn consists only of SETs, each of which only sets a
   value to itself.  */

int
noop_move_p (const rtx_insn *insn)
{
  rtx pat = PATTERN (insn);

  if (INSN_CODE (insn) == NOOP_MOVE_INSN_CODE)
    return 1;

  /* Insns carrying these notes are useful later on.  */
  if (find_reg_note (insn, REG_EQUAL, NULL_RTX))
    return 0;

  /* Check the code to be executed for COND_EXEC.  */
  if (GET_CODE (pat) == COND_EXEC)
    pat = COND_EXEC_CODE (pat);

  if (GET_CODE (pat) == SET && set_noop_p (pat))
    return 1;

  if (GET_CODE (pat) == PARALLEL)
    {
      int i;
      /* If nothing but SETs of registers to themselves,
         this insn can also be deleted.  */
      for (i = 0; i < XVECLEN (pat, 0); i++)
        {
          rtx tem = XVECEXP (pat, 0, i);

          if (GET_CODE (tem) == USE
              || GET_CODE (tem) == CLOBBER)
            continue;

          if (GET_CODE (tem) != SET || ! set_noop_p (tem))
            return 0;
        }

      return 1;
    }
  return 0;
}
\f

/* Return nonzero if register in range [REGNO, ENDREGNO)
   appears either explicitly or implicitly in X
   other than being stored into.

   References contained within the substructure at LOC do not count.
   LOC may be zero, meaning don't ignore anything.  */

bool
refers_to_regno_p (unsigned int regno, unsigned int endregno, const_rtx x,
                   rtx *loc)
{
  int i;
  unsigned int x_regno;
  RTX_CODE code;
  const char *fmt;

 repeat:
  /* The contents of a REG_NONNEG note are always zero, so we must come here
     upon repeat in case the last REG_NOTE is a REG_NONNEG note.  */
  if (x == 0)
    return false;

  code = GET_CODE (x);

  switch (code)
    {
    case REG:
      x_regno = REGNO (x);

      /* If we are modifying the stack, frame, or argument pointer, it will
         clobber a virtual register.  In fact, we could be more precise,
         but it isn't worth it.  */
      if ((x_regno == STACK_POINTER_REGNUM
           || (FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
               && x_regno == ARG_POINTER_REGNUM)
           || x_regno == FRAME_POINTER_REGNUM)
          && regno >= FIRST_VIRTUAL_REGISTER && regno <= LAST_VIRTUAL_REGISTER)
        return true;

      return endregno > x_regno && regno < END_REGNO (x);

    case SUBREG:
      /* If this is a SUBREG of a hard reg, we can see exactly which
         registers are being modified.  Otherwise, handle normally.  */
      if (REG_P (SUBREG_REG (x))
          && REGNO (SUBREG_REG (x)) < FIRST_PSEUDO_REGISTER)
        {
          unsigned int inner_regno = subreg_regno (x);
          unsigned int inner_endregno
            = inner_regno + (inner_regno < FIRST_PSEUDO_REGISTER
                             ? subreg_nregs (x) : 1);

          return endregno > inner_regno && regno < inner_endregno;
        }
      break;

    case CLOBBER:
    case SET:
      if (&SET_DEST (x) != loc
          /* Note setting a SUBREG counts as referring to the REG it is in for
             a pseudo but not for hard registers since we can
             treat each word individually.  */
          && ((GET_CODE (SET_DEST (x)) == SUBREG
               && loc != &SUBREG_REG (SET_DEST (x))
               && REG_P (SUBREG_REG (SET_DEST (x)))
               && REGNO (SUBREG_REG (SET_DEST (x))) >= FIRST_PSEUDO_REGISTER
               && refers_to_regno_p (regno, endregno,
                                     SUBREG_REG (SET_DEST (x)), loc))
              || (!REG_P (SET_DEST (x))
                  && refers_to_regno_p (regno, endregno, SET_DEST (x), loc))))
        return true;

      if (code == CLOBBER || loc == &SET_SRC (x))
        return false;
      x = SET_SRC (x);
      goto repeat;

    default:
      break;
    }

  /* X does not match, so try its subexpressions.  */

  fmt = GET_RTX_FORMAT (code);
  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    {
      if (fmt[i] == 'e' && loc != &XEXP (x, i))
        {
          if (i == 0)
            {
              x = XEXP (x, 0);
              goto repeat;
            }
          else
            if (refers_to_regno_p (regno, endregno, XEXP (x, i), loc))
              return true;
        }
      else if (fmt[i] == 'E')
        {
          int j;
          for (j = XVECLEN (x, i) - 1; j >= 0; j--)
            if (loc != &XVECEXP (x, i, j)
                && refers_to_regno_p (regno, endregno, XVECEXP (x, i, j), loc))
              return true;
        }
    }
  return false;
}

/* Nonzero if modifying X will affect IN.  If X is a register or a SUBREG,
   we check if any register number in X conflicts with the relevant register
   numbers.  If X is a constant, return 0.  If X is a MEM, return 1 iff IN
   contains a MEM (we don't bother checking for memory addresses that can't
   conflict because we expect this to be a rare case).  */

int
reg_overlap_mentioned_p (const_rtx x, const_rtx in)
{
  unsigned int regno, endregno;

  /* If either argument is a constant, then modifying X can not
     affect IN.  Here we look at IN, we can profitably combine
     CONSTANT_P (x) with the switch statement below.  */
  if (CONSTANT_P (in))
    return 0;

 recurse:
  switch (GET_CODE (x))
    {
    case STRICT_LOW_PART:
    case ZERO_EXTRACT:
    case SIGN_EXTRACT:
      /* Overly conservative.  */
      x = XEXP (x, 0);
      goto recurse;

    case SUBREG:
      regno = REGNO (SUBREG_REG (x));
      if (regno < FIRST_PSEUDO_REGISTER)
        regno = subreg_regno (x);
      endregno = regno + (regno < FIRST_PSEUDO_REGISTER
                          ? subreg_nregs (x) : 1);
      goto do_reg;

    case REG:
      regno = REGNO (x);
      endregno = END_REGNO (x);
    do_reg:
      return refers_to_regno_p (regno, endregno, in, (rtx*) 0);

    case MEM:
      {
        const char *fmt;
        int i;

        if (MEM_P (in))
          return 1;

        fmt = GET_RTX_FORMAT (GET_CODE (in));
        for (i = GET_RTX_LENGTH (GET_CODE (in)) - 1; i >= 0; i--)
          if (fmt[i] == 'e')
            {
              if (reg_overlap_mentioned_p (x, XEXP (in, i)))
                return 1;
            }
          else if (fmt[i] == 'E')
            {
              int j;
              for (j = XVECLEN (in, i) - 1; j >= 0; --j)
                if (reg_overlap_mentioned_p (x, XVECEXP (in, i, j)))
                  return 1;
            }

        return 0;
      }

    case SCRATCH:
    case PC:
    case CC0:
      return reg_mentioned_p (x, in);

    case PARALLEL:
      {
        int i;

        /* If any register in here refers to it we return true.  */
        for (i = XVECLEN (x, 0) - 1; i >= 0; i--)
          if (XEXP (XVECEXP (x, 0, i), 0) != 0
              && reg_overlap_mentioned_p (XEXP (XVECEXP (x, 0, i), 0), in))
            return 1;
        return 0;
      }

    default:
      gcc_assert (CONSTANT_P (x));
      return 0;
    }
}
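
/* For example, on a target where (reg:DI 1) occupies two hard
   registers 1 and 2, reg_overlap_mentioned_p ((reg:SI 2), (reg:DI 1))
   is nonzero even though the two rtxes are not equal, because they
   share hard register 2.  */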
\f
/* Call FUN on each register or MEM that is stored into or clobbered by X.
   (X would be the pattern of an insn).  DATA is an arbitrary pointer,
   ignored by note_stores, but passed to FUN.

   FUN receives three arguments:
   1. the REG, MEM, CC0 or PC being stored in or clobbered,
   2. the SET or CLOBBER rtx that does the store,
   3. the pointer DATA provided to note_stores.

   If the item being stored in or clobbered is a SUBREG of a hard register,
   the SUBREG will be passed.  */

void
note_stores (const_rtx x, void (*fun) (rtx, const_rtx, void *), void *data)
{
  int i;

  if (GET_CODE (x) == COND_EXEC)
    x = COND_EXEC_CODE (x);

  if (GET_CODE (x) == SET || GET_CODE (x) == CLOBBER)
    {
      rtx dest = SET_DEST (x);

      while ((GET_CODE (dest) == SUBREG
              && (!REG_P (SUBREG_REG (dest))
                  || REGNO (SUBREG_REG (dest)) >= FIRST_PSEUDO_REGISTER))
             || GET_CODE (dest) == ZERO_EXTRACT
             || GET_CODE (dest) == STRICT_LOW_PART)
        dest = XEXP (dest, 0);

      /* If we have a PARALLEL, SET_DEST is a list of EXPR_LIST expressions,
         each of whose first operand is a register.  */
      if (GET_CODE (dest) == PARALLEL)
        {
          for (i = XVECLEN (dest, 0) - 1; i >= 0; i--)
            if (XEXP (XVECEXP (dest, 0, i), 0) != 0)
              (*fun) (XEXP (XVECEXP (dest, 0, i), 0), x, data);
        }
      else
        (*fun) (dest, x, data);
    }

  else if (GET_CODE (x) == PARALLEL)
    for (i = XVECLEN (x, 0) - 1; i >= 0; i--)
      note_stores (XVECEXP (x, 0, i), fun, data);
}
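
/* A typical use, as in find_all_hard_reg_sets above: collect every
   hard register stored or clobbered by an insn pattern:

     HARD_REG_SET set;
     CLEAR_HARD_REG_SET (set);
     note_stores (PATTERN (insn), record_hard_reg_sets, &set);  */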
\f
/* Like note_stores, but call FUN for each expression that is being
   referenced in PBODY, a pointer to the PATTERN of an insn.  We only call
   FUN for each expression, not any interior subexpressions.  FUN receives a
   pointer to the expression and the DATA passed to this function.

   Note that this is not quite the same test as that done in reg_referenced_p
   since that considers something as being referenced if it is being
   partially set, while we do not.  */

void
note_uses (rtx *pbody, void (*fun) (rtx *, void *), void *data)
{
  rtx body = *pbody;
  int i;

  switch (GET_CODE (body))
    {
    case COND_EXEC:
      (*fun) (&COND_EXEC_TEST (body), data);
      note_uses (&COND_EXEC_CODE (body), fun, data);
      return;

    case PARALLEL:
      for (i = XVECLEN (body, 0) - 1; i >= 0; i--)
        note_uses (&XVECEXP (body, 0, i), fun, data);
      return;

    case SEQUENCE:
      for (i = XVECLEN (body, 0) - 1; i >= 0; i--)
        note_uses (&PATTERN (XVECEXP (body, 0, i)), fun, data);
      return;

    case USE:
      (*fun) (&XEXP (body, 0), data);
      return;

    case ASM_OPERANDS:
      for (i = ASM_OPERANDS_INPUT_LENGTH (body) - 1; i >= 0; i--)
        (*fun) (&ASM_OPERANDS_INPUT (body, i), data);
      return;

    case TRAP_IF:
      (*fun) (&TRAP_CONDITION (body), data);
      return;

    case PREFETCH:
      (*fun) (&XEXP (body, 0), data);
      return;

    case UNSPEC:
    case UNSPEC_VOLATILE:
      for (i = XVECLEN (body, 0) - 1; i >= 0; i--)
        (*fun) (&XVECEXP (body, 0, i), data);
      return;

    case CLOBBER:
      if (MEM_P (XEXP (body, 0)))
        (*fun) (&XEXP (XEXP (body, 0), 0), data);
      return;

    case SET:
      {
        rtx dest = SET_DEST (body);

        /* For sets we replace everything in source plus registers in memory
           expression in store and operands of a ZERO_EXTRACT.  */
        (*fun) (&SET_SRC (body), data);

        if (GET_CODE (dest) == ZERO_EXTRACT)
          {
            (*fun) (&XEXP (dest, 1), data);
            (*fun) (&XEXP (dest, 2), data);
          }

        while (GET_CODE (dest) == SUBREG || GET_CODE (dest) == STRICT_LOW_PART)
          dest = XEXP (dest, 0);

        if (MEM_P (dest))
          (*fun) (&XEXP (dest, 0), data);
      }
      return;

    default:
      /* All the other possibilities never store.  */
      (*fun) (pbody, data);
      return;
    }
}
\f
/* Return nonzero if X's old contents don't survive after INSN.
   This will be true if X is (cc0) or if X is a register and
   X dies in INSN or because INSN entirely sets X.

   "Entirely set" means set directly and not through a SUBREG, or
   ZERO_EXTRACT, so no trace of the old contents remains.
   Likewise, REG_INC does not count.

   REG may be a hard or pseudo reg.  Renumbering is not taken into account,
   but for this use that makes no difference, since regs don't overlap
   during their lifetimes.  Therefore, this function may be used
   at any time after deaths have been computed.

   If REG is a hard reg that occupies multiple machine registers, this
   function will only return 1 if each of those registers will be replaced
   by INSN.  */

int
dead_or_set_p (const_rtx insn, const_rtx x)
{
  unsigned int regno, end_regno;
  unsigned int i;

  /* Can't use cc0_rtx below since this file is used by genattrtab.c.  */
  if (GET_CODE (x) == CC0)
    return 1;

  gcc_assert (REG_P (x));

  regno = REGNO (x);
  end_regno = END_REGNO (x);
  for (i = regno; i < end_regno; i++)
    if (! dead_or_set_regno_p (insn, i))
      return 0;

  return 1;
}
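
/* For example, if INSN is (set (reg 1) (const_int 0)), then
   dead_or_set_p (insn, (reg 1)) is nonzero: the old contents of reg 1
   do not survive INSN even though reg 1 is not dead.  */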
1781
30c74f6d 1782/* Return TRUE iff DEST is a register or subreg of a register and
1783 doesn't change the number of words of the inner register, and any
1784 part of the register is TEST_REGNO. */
1785
1786static bool
dd9b9fc5 1787covers_regno_no_parallel_p (const_rtx dest, unsigned int test_regno)
30c74f6d 1788{
1789 unsigned int regno, endregno;
1790
1791 if (GET_CODE (dest) == SUBREG
1792 && (((GET_MODE_SIZE (GET_MODE (dest))
1793 + UNITS_PER_WORD - 1) / UNITS_PER_WORD)
1794 == ((GET_MODE_SIZE (GET_MODE (SUBREG_REG (dest)))
1795 + UNITS_PER_WORD - 1) / UNITS_PER_WORD)))
1796 dest = SUBREG_REG (dest);
1797
1798 if (!REG_P (dest))
1799 return false;
1800
1801 regno = REGNO (dest);
a2c6f0b7 1802 endregno = END_REGNO (dest);
30c74f6d 1803 return (test_regno >= regno && test_regno < endregno);
1804}
1805
1806/* Like covers_regno_no_parallel_p, but also handles PARALLELs where
1807 any member matches the covers_regno_no_parallel_p criteria. */
1808
1809static bool
dd9b9fc5 1810covers_regno_p (const_rtx dest, unsigned int test_regno)
30c74f6d 1811{
1812 if (GET_CODE (dest) == PARALLEL)
1813 {
1814 /* Some targets place small structures in registers for return
1815 values of functions, and those registers are wrapped in
1816 PARALLELs that we may see as the destination of a SET. */
1817 int i;
1818
1819 for (i = XVECLEN (dest, 0) - 1; i >= 0; i--)
1820 {
1821 rtx inner = XEXP (XVECEXP (dest, 0, i), 0);
1822 if (inner != NULL_RTX
1823 && covers_regno_no_parallel_p (inner, test_regno))
1824 return true;
1825 }
1826
1827 return false;
1828 }
1829 else
1830 return covers_regno_no_parallel_p (dest, test_regno);
1831}
1832
3072d30e 1833/* Utility function for dead_or_set_p to check an individual register. */
635aff97 1834
1835int
dd9b9fc5 1836dead_or_set_regno_p (const_rtx insn, unsigned int test_regno)
635aff97 1837{
dd9b9fc5 1838 const_rtx pattern;
635aff97 1839
eb1c92fb 1840 /* See if there is a death note for something that includes TEST_REGNO. */
1841 if (find_regno_note (insn, REG_DEAD, test_regno))
1842 return 1;
635aff97 1843
6d7dc5b9 1844 if (CALL_P (insn)
0c7201ca 1845 && find_regno_fusage (insn, CLOBBER, test_regno))
1846 return 1;
1847
406034fa 1848 pattern = PATTERN (insn);
1849
d6e8850f 1850 /* If a COND_EXEC is not executed, the value survives. */
406034fa 1851 if (GET_CODE (pattern) == COND_EXEC)
d6e8850f 1852 return 0;
406034fa 1853
1854 if (GET_CODE (pattern) == SET)
30c74f6d 1855 return covers_regno_p (SET_DEST (pattern), test_regno);
406034fa 1856 else if (GET_CODE (pattern) == PARALLEL)
635aff97 1857 {
19cb6b50 1858 int i;
635aff97 1859
406034fa 1860 for (i = XVECLEN (pattern, 0) - 1; i >= 0; i--)
635aff97 1861 {
406034fa 1862 rtx body = XVECEXP (pattern, 0, i);
1863
1864 if (GET_CODE (body) == COND_EXEC)
1865 body = COND_EXEC_CODE (body);
635aff97 1866
30c74f6d 1867 if ((GET_CODE (body) == SET || GET_CODE (body) == CLOBBER)
1868 && covers_regno_p (SET_DEST (body), test_regno))
1869 return 1;
635aff97 1870 }
1871 }
1872
1873 return 0;
1874}
1875
1876/* Return the reg-note of kind KIND in insn INSN, if there is one.
1877 If DATUM is nonzero, look for one whose datum is DATUM. */
1878
1879rtx
dd9b9fc5 1880find_reg_note (const_rtx insn, enum reg_note kind, const_rtx datum)
635aff97 1881{
19cb6b50 1882 rtx link;
635aff97 1883
0ea2d350 1884 gcc_checking_assert (insn);
62d6a022 1885
49a945b8 1886 /* Ignore anything that is not an INSN, JUMP_INSN or CALL_INSN. */
9204e736 1887 if (! INSN_P (insn))
49a945b8 1888 return 0;
b2a24b6f 1889 if (datum == 0)
1890 {
1891 for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
1892 if (REG_NOTE_KIND (link) == kind)
1893 return link;
1894 return 0;
1895 }
49a945b8 1896
635aff97 1897 for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
b2a24b6f 1898 if (REG_NOTE_KIND (link) == kind && datum == XEXP (link, 0))
635aff97 1899 return link;
1900 return 0;
1901}
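
/* Illustrative sketch (not part of the original file): fetching the
   REG_EQUAL note of an insn, if any.  A null DATUM matches any note of
   the requested kind.  */
#if 0
static rtx
example_get_reg_equal (rtx_insn *insn)
{
  rtx note = find_reg_note (insn, REG_EQUAL, NULL_RTX);
  return note ? XEXP (note, 0) : NULL_RTX;
}
#endif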
1902
1903/* Return the reg-note of kind KIND in insn INSN which applies to register
da5c9e5f 1904 number REGNO, if any. Return 0 if there is no such reg-note. Note that
1905 the REGNO of this NOTE need not be REGNO if REGNO is a hard register;
1906 it might be the case that the note overlaps REGNO. */
635aff97 1907
1908rtx
dd9b9fc5 1909find_regno_note (const_rtx insn, enum reg_note kind, unsigned int regno)
635aff97 1910{
19cb6b50 1911 rtx link;
635aff97 1912
49a945b8 1913 /* Ignore anything that is not an INSN, JUMP_INSN or CALL_INSN. */
9204e736 1914 if (! INSN_P (insn))
49a945b8 1915 return 0;
1916
635aff97 1917 for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
1918 if (REG_NOTE_KIND (link) == kind
1919 /* Verify that it is a register, so that scratch and MEM won't cause a
1920 problem here. */
8ad4c111 1921 && REG_P (XEXP (link, 0))
da5c9e5f 1922 && REGNO (XEXP (link, 0)) <= regno
a2c6f0b7 1923 && END_REGNO (XEXP (link, 0)) > regno)
635aff97 1924 return link;
1925 return 0;
1926}
0c7201ca 1927
53cb61a7 1928/* Return a REG_EQUIV or REG_EQUAL note if insn has only a single set and
1929 has such a note. */
1930
1931rtx
dd9b9fc5 1932find_reg_equal_equiv_note (const_rtx insn)
53cb61a7 1933{
53fffe66 1934 rtx link;
53cb61a7 1935
53fffe66 1936 if (!INSN_P (insn))
53cb61a7 1937 return 0;
e4f51d19 1938
53fffe66 1939 for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
1940 if (REG_NOTE_KIND (link) == REG_EQUAL
1941 || REG_NOTE_KIND (link) == REG_EQUIV)
1942 {
e4f51d19 1943 /* FIXME: We should never have REG_EQUAL/REG_EQUIV notes on
1944 insns that have multiple sets. Checking single_set to
1945 make sure of this is not the proper check, as explained
1946 in the comment in set_unique_reg_note.
1947
1948 This should be changed into an assert. */
1949 if (GET_CODE (PATTERN (insn)) == PARALLEL && multiple_sets (insn))
53fffe66 1950 return 0;
1951 return link;
1952 }
1953 return NULL;
53cb61a7 1954}
1955
3aba99c8 1956/* Check whether INSN is a single_set whose source is known to be
1957 equivalent to a constant. Return that constant if so, otherwise
1958 return null. */
1959
1960rtx
93ee8dfb 1961find_constant_src (const rtx_insn *insn)
3aba99c8 1962{
1963 rtx note, set, x;
1964
1965 set = single_set (insn);
1966 if (set)
1967 {
1968 x = avoid_constant_pool_reference (SET_SRC (set));
1969 if (CONSTANT_P (x))
1970 return x;
1971 }
1972
1973 note = find_reg_equal_equiv_note (insn);
1974 if (note && CONSTANT_P (XEXP (note, 0)))
1975 return XEXP (note, 0);
1976
1977 return NULL_RTX;
1978}
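
/* Illustrative sketch (not part of the original file): testing whether an
   insn amounts to loading a compile-time integer constant, either directly
   or through a REG_EQUAL/REG_EQUIV note.  */
#if 0
static bool
example_loads_const_int_p (rtx_insn *insn)
{
  rtx cst = find_constant_src (insn);
  return cst != NULL_RTX && CONST_INT_P (cst);
}
#endif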
1979
0c7201ca 1980/* Return true if DATUM, or any overlap of DATUM, of kind CODE is found
1981 in the CALL_INSN_FUNCTION_USAGE information of INSN. */
1982
1983int
dd9b9fc5 1984find_reg_fusage (const_rtx insn, enum rtx_code code, const_rtx datum)
0c7201ca 1985{
1986 /* If it's not a CALL_INSN, it can't possibly have a
1987 CALL_INSN_FUNCTION_USAGE field, so don't bother checking. */
6d7dc5b9 1988 if (!CALL_P (insn))
0c7201ca 1989 return 0;
1990
04e579b6 1991 gcc_assert (datum);
0c7201ca 1992
8ad4c111 1993 if (!REG_P (datum))
0c7201ca 1994 {
19cb6b50 1995 rtx link;
0c7201ca 1996
1997 for (link = CALL_INSN_FUNCTION_USAGE (insn);
2617fe26 1998 link;
0c7201ca 1999 link = XEXP (link, 1))
2617fe26 2000 if (GET_CODE (XEXP (link, 0)) == code
ff90a874 2001 && rtx_equal_p (datum, XEXP (XEXP (link, 0), 0)))
2617fe26 2002 return 1;
0c7201ca 2003 }
2004 else
2005 {
02e7a332 2006 unsigned int regno = REGNO (datum);
0c7201ca 2007
2008 /* CALL_INSN_FUNCTION_USAGE information cannot contain references
2009 to pseudo registers, so don't bother checking. */
2010
2011 if (regno < FIRST_PSEUDO_REGISTER)
2617fe26 2012 {
788bed51 2013 unsigned int end_regno = END_REGNO (datum);
02e7a332 2014 unsigned int i;
0c7201ca 2015
2016 for (i = regno; i < end_regno; i++)
2017 if (find_regno_fusage (insn, code, i))
2018 return 1;
2617fe26 2019 }
0c7201ca 2020 }
2021
2022 return 0;
2023}
2024
2025/* Return true if REGNO, or any overlap of REGNO, of kind CODE is found
2026 in the CALL_INSN_FUNCTION_USAGE information of INSN. */
2027
2028int
dd9b9fc5 2029find_regno_fusage (const_rtx insn, enum rtx_code code, unsigned int regno)
0c7201ca 2030{
19cb6b50 2031 rtx link;
0c7201ca 2032
2033 /* CALL_INSN_FUNCTION_USAGE information cannot contain references
2034 to pseudo registers, so don't bother checking. */
2035
2036 if (regno >= FIRST_PSEUDO_REGISTER
6d7dc5b9 2037 || !CALL_P (insn))
0c7201ca 2038 return 0;
2039
2040 for (link = CALL_INSN_FUNCTION_USAGE (insn); link; link = XEXP (link, 1))
005d995b 2041 {
02e7a332 2042 rtx op, reg;
005d995b 2043
2044 if (GET_CODE (op = XEXP (link, 0)) == code
8ad4c111 2045 && REG_P (reg = XEXP (op, 0))
a2c6f0b7 2046 && REGNO (reg) <= regno
788bed51 2047 && END_REGNO (reg) > regno)
005d995b 2048 return 1;
2049 }
0c7201ca 2050
2051 return 0;
2052}
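
/* Illustrative sketch (not part of the original file): checking whether a
   call clobbers a particular hard register through its
   CALL_INSN_FUNCTION_USAGE list, mirroring the test used by
   dead_or_set_regno_p above.  */
#if 0
static bool
example_call_clobbers_reg_p (rtx_insn *call_insn, unsigned int regno)
{
  return CALL_P (call_insn)
	 && find_regno_fusage (call_insn, CLOBBER, regno);
}
#endif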
ef15379a 2053
635aff97 2054\f
9eb946de 2055/* Return true if KIND is an integer REG_NOTE. */
2056
2057static bool
2058int_reg_note_p (enum reg_note kind)
2059{
2060 return kind == REG_BR_PROB;
2061}
2062
5859ee98 2063/* Allocate a register note with kind KIND and datum DATUM. LIST is
2064 stored as the pointer to the next register note. */
a1ddb869 2065
5859ee98 2066rtx
2067alloc_reg_note (enum reg_note kind, rtx datum, rtx list)
a1ddb869 2068{
2069 rtx note;
2070
9eb946de 2071 gcc_checking_assert (!int_reg_note_p (kind));
a1ddb869 2072 switch (kind)
2073 {
2074 case REG_CC_SETTER:
2075 case REG_CC_USER:
2076 case REG_LABEL_TARGET:
2077 case REG_LABEL_OPERAND:
4c0315d0 2078 case REG_TM:
a1ddb869 2079 /* These types of register notes use an INSN_LIST rather than an
2080 EXPR_LIST, so that copying is done right and dumps look
2081 better. */
5859ee98 2082 note = alloc_INSN_LIST (datum, list);
a1ddb869 2083 PUT_REG_NOTE_KIND (note, kind);
2084 break;
2085
2086 default:
5859ee98 2087 note = alloc_EXPR_LIST (kind, datum, list);
a1ddb869 2088 break;
2089 }
2090
5859ee98 2091 return note;
2092}
2093
2094/* Add register note with kind KIND and datum DATUM to INSN. */
2095
2096void
2097add_reg_note (rtx insn, enum reg_note kind, rtx datum)
2098{
2099 REG_NOTES (insn) = alloc_reg_note (kind, datum, REG_NOTES (insn));
a1ddb869 2100}
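
/* Illustrative sketch (not part of the original file): recording that the
   value set by INSN is known to equal constant CST.  alloc_reg_note picks
   the right list type (an EXPR_LIST here) from the note kind.  */
#if 0
static void
example_record_equivalence (rtx_insn *insn, rtx cst)
{
  add_reg_note (insn, REG_EQUAL, cst);
}
#endif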
2101
9eb946de 2102/* Add an integer register note with kind KIND and datum DATUM to INSN. */
2103
2104void
2105add_int_reg_note (rtx insn, enum reg_note kind, int datum)
2106{
2107 gcc_checking_assert (int_reg_note_p (kind));
3754d046 2108 REG_NOTES (insn) = gen_rtx_INT_LIST ((machine_mode) kind,
9eb946de 2109 datum, REG_NOTES (insn));
2110}
2111
2112/* Add a register note like NOTE to INSN. */
2113
2114void
ca336a81 2115add_shallow_copy_of_reg_note (rtx_insn *insn, rtx note)
9eb946de 2116{
2117 if (GET_CODE (note) == INT_LIST)
2118 add_int_reg_note (insn, REG_NOTE_KIND (note), XINT (note, 0));
2119 else
2120 add_reg_note (insn, REG_NOTE_KIND (note), XEXP (note, 0));
2121}
2122
635aff97 2123/* Remove register note NOTE from the REG_NOTES of INSN. */
2124
2125void
dd9b9fc5 2126remove_note (rtx insn, const_rtx note)
635aff97 2127{
19cb6b50 2128 rtx link;
635aff97 2129
def93098 2130 if (note == NULL_RTX)
2131 return;
2132
635aff97 2133 if (REG_NOTES (insn) == note)
3072d30e 2134 REG_NOTES (insn) = XEXP (note, 1);
2135 else
2136 for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
2137 if (XEXP (link, 1) == note)
2138 {
2139 XEXP (link, 1) = XEXP (note, 1);
2140 break;
2141 }
2142
2143 switch (REG_NOTE_KIND (note))
635aff97 2144 {
3072d30e 2145 case REG_EQUAL:
2146 case REG_EQUIV:
e149ca56 2147 df_notes_rescan (as_a <rtx_insn *> (insn));
3072d30e 2148 break;
2149 default:
2150 break;
635aff97 2151 }
635aff97 2152}
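
/* Illustrative sketch (not part of the original file): dropping a stale
   REG_EQUAL note.  Since remove_note tolerates a null NOTE, the lookup
   and the removal compose directly.  */
#if 0
static void
example_drop_reg_equal (rtx_insn *insn)
{
  remove_note (insn, find_reg_note (insn, REG_EQUAL, NULL_RTX));
}
#endif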
13d60e7c 2153
f16feee2 2154/* Remove REG_EQUAL and/or REG_EQUIV notes if INSN has such notes. */
2155
2156void
6dbed5c7 2157remove_reg_equal_equiv_notes (rtx_insn *insn)
f16feee2 2158{
2159 rtx *loc;
2160
2161 loc = &REG_NOTES (insn);
2162 while (*loc)
2163 {
2164 enum reg_note kind = REG_NOTE_KIND (*loc);
2165 if (kind == REG_EQUAL || kind == REG_EQUIV)
2166 *loc = XEXP (*loc, 1);
2167 else
2168 loc = &XEXP (*loc, 1);
2169 }
2170}
09669349 2171
2172/* Remove all REG_EQUAL and REG_EQUIV notes referring to REGNO. */
2173
2174void
2175remove_reg_equal_equiv_notes_for_regno (unsigned int regno)
2176{
2177 df_ref eq_use;
2178
2179 if (!df)
2180 return;
2181
2182 /* This loop is a little tricky. We cannot just go down the chain because
2183 it is being modified by some actions in the loop. So we just iterate
2184 over the head. We plan to drain the list anyway. */
2185 while ((eq_use = DF_REG_EQ_USE_CHAIN (regno)) != NULL)
2186 {
200c2a8f 2187 rtx_insn *insn = DF_REF_INSN (eq_use);
09669349 2188 rtx note = find_reg_equal_equiv_note (insn);
2189
2190 /* This assert is generally triggered when someone deletes a REG_EQUAL
2191 or REG_EQUIV note by hacking the list manually rather than calling
2192 remove_note. */
2193 gcc_assert (note);
2194
2195 remove_note (insn, note);
2196 }
2197}
f16feee2 2198
5cc577b6 2199/* Search LISTP (an EXPR_LIST) for an entry whose first operand is NODE and
2200 return 1 if it is found. A simple equality test is used to determine if
2201 NODE matches. */
2202
7a680a39 2203bool
2204in_insn_list_p (const rtx_insn_list *listp, const rtx_insn *node)
5cc577b6 2205{
dd9b9fc5 2206 const_rtx x;
5cc577b6 2207
2208 for (x = listp; x; x = XEXP (x, 1))
2209 if (node == XEXP (x, 0))
7a680a39 2210 return true;
5cc577b6 2211
7a680a39 2212 return false;
5cc577b6 2213}
2214
badb6da9 2215/* Search LISTP (an EXPR_LIST) for an entry whose first operand is NODE and
2216 remove that entry from the list if it is found.
13d60e7c 2217
badb6da9 2218 A simple equality test is used to determine if NODE matches. */
13d60e7c 2219
2220void
6e16e157 2221remove_node_from_expr_list (const_rtx node, rtx_expr_list **listp)
13d60e7c 2222{
6e16e157 2223 rtx_expr_list *temp = *listp;
9ed997be 2224 rtx_expr_list *prev = NULL;
13d60e7c 2225
2226 while (temp)
2227 {
6e16e157 2228 if (node == temp->element ())
13d60e7c 2229 {
2230 /* Splice the node out of the list. */
2231 if (prev)
6e16e157 2232 XEXP (prev, 1) = temp->next ();
13d60e7c 2233 else
6e16e157 2234 *listp = temp->next ();
13d60e7c 2235
2236 return;
2237 }
badb6da9 2238
2239 prev = temp;
6e16e157 2240 temp = temp->next ();
13d60e7c 2241 }
2242}
a4de1c23 2243
2244/* Search LISTP (an INSN_LIST) for an entry whose first operand is NODE and
2245 remove that entry from the list if it is found.
2246
2247 A simple equality test is used to determine if NODE matches. */
2248
2249void
2250remove_node_from_insn_list (const rtx_insn *node, rtx_insn_list **listp)
2251{
2252 rtx_insn_list *temp = *listp;
9ed997be 2253 rtx_insn_list *prev = NULL;
a4de1c23 2254
2255 while (temp)
2256 {
2257 if (node == temp->insn ())
2258 {
2259 /* Splice the node out of the list. */
2260 if (prev)
2261 XEXP (prev, 1) = temp->next ();
2262 else
2263 *listp = temp->next ();
2264
2265 return;
2266 }
2267
2268 prev = temp;
2269 temp = temp->next ();
2270 }
2271}
635aff97 2272\f
ea275ef9 2273/* Nonzero if X contains any volatile instructions. These are instructions
2274 which may cause unpredictable machine state, and thus no
e12b44a3 2275 instructions or register uses should be moved or combined across them.
2276 This includes only volatile asms and UNSPEC_VOLATILE instructions. */
ea275ef9 2277
2278int
dd9b9fc5 2279volatile_insn_p (const_rtx x)
ea275ef9 2280{
dd9b9fc5 2281 const RTX_CODE code = GET_CODE (x);
ea275ef9 2282 switch (code)
2283 {
2284 case LABEL_REF:
2285 case SYMBOL_REF:
ea275ef9 2286 case CONST:
0349edce 2287 CASE_CONST_ANY:
ea275ef9 2288 case CC0:
2289 case PC:
2290 case REG:
2291 case SCRATCH:
2292 case CLOBBER:
ea275ef9 2293 case ADDR_VEC:
2294 case ADDR_DIFF_VEC:
2295 case CALL:
2296 case MEM:
2297 return 0;
2298
2299 case UNSPEC_VOLATILE:
ea275ef9 2300 return 1;
2301
c52051b7 2302 case ASM_INPUT:
ea275ef9 2303 case ASM_OPERANDS:
2304 if (MEM_VOLATILE_P (x))
2305 return 1;
0dbd1c74 2306
2307 default:
2308 break;
ea275ef9 2309 }
2310
2311 /* Recursively scan the operands of this expression. */
2312
2313 {
dd9b9fc5 2314 const char *const fmt = GET_RTX_FORMAT (code);
19cb6b50 2315 int i;
2617fe26 2316
ea275ef9 2317 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
2318 {
2319 if (fmt[i] == 'e')
2320 {
c1dadb43 2321 if (volatile_insn_p (XEXP (x, i)))
ea275ef9 2322 return 1;
2323 }
1bd8ca86 2324 else if (fmt[i] == 'E')
ea275ef9 2325 {
19cb6b50 2326 int j;
ea275ef9 2327 for (j = 0; j < XVECLEN (x, i); j++)
c1dadb43 2328 if (volatile_insn_p (XVECEXP (x, i, j)))
ea275ef9 2329 return 1;
2330 }
2331 }
2332 }
2333 return 0;
2334}
2335
635aff97 2336/* Nonzero if X contains any volatile memory references
3384a30e 2337 UNSPEC_VOLATILE operations or volatile ASM_OPERANDS expressions. */
635aff97 2338
2339int
dd9b9fc5 2340volatile_refs_p (const_rtx x)
635aff97 2341{
dd9b9fc5 2342 const RTX_CODE code = GET_CODE (x);
635aff97 2343 switch (code)
2344 {
2345 case LABEL_REF:
2346 case SYMBOL_REF:
635aff97 2347 case CONST:
0349edce 2348 CASE_CONST_ANY:
635aff97 2349 case CC0:
2350 case PC:
2351 case REG:
2352 case SCRATCH:
2353 case CLOBBER:
635aff97 2354 case ADDR_VEC:
2355 case ADDR_DIFF_VEC:
2356 return 0;
2357
3384a30e 2358 case UNSPEC_VOLATILE:
635aff97 2359 return 1;
2360
2361 case MEM:
c52051b7 2362 case ASM_INPUT:
635aff97 2363 case ASM_OPERANDS:
2364 if (MEM_VOLATILE_P (x))
2365 return 1;
0dbd1c74 2366
2367 default:
2368 break;
635aff97 2369 }
2370
2371 /* Recursively scan the operands of this expression. */
2372
2373 {
dd9b9fc5 2374 const char *const fmt = GET_RTX_FORMAT (code);
19cb6b50 2375 int i;
2617fe26 2376
635aff97 2377 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
2378 {
2379 if (fmt[i] == 'e')
2380 {
2381 if (volatile_refs_p (XEXP (x, i)))
2382 return 1;
2383 }
1bd8ca86 2384 else if (fmt[i] == 'E')
635aff97 2385 {
19cb6b50 2386 int j;
635aff97 2387 for (j = 0; j < XVECLEN (x, i); j++)
2388 if (volatile_refs_p (XVECEXP (x, i, j)))
2389 return 1;
2390 }
2391 }
2392 }
2393 return 0;
2394}
2395
2396/* Similar to above, except that it also rejects register pre- and post-
2397 incrementing. */
2398
2399int
dd9b9fc5 2400side_effects_p (const_rtx x)
635aff97 2401{
dd9b9fc5 2402 const RTX_CODE code = GET_CODE (x);
635aff97 2403 switch (code)
2404 {
2405 case LABEL_REF:
2406 case SYMBOL_REF:
635aff97 2407 case CONST:
0349edce 2408 CASE_CONST_ANY:
635aff97 2409 case CC0:
2410 case PC:
2411 case REG:
2412 case SCRATCH:
635aff97 2413 case ADDR_VEC:
2414 case ADDR_DIFF_VEC:
9845d120 2415 case VAR_LOCATION:
635aff97 2416 return 0;
2417
2418 case CLOBBER:
2419 /* Reject CLOBBER with a non-VOID mode. These are made by combine.c
2420 when some combination can't be done. If we see one, don't think
2421 that we can simplify the expression. */
2422 return (GET_MODE (x) != VOIDmode);
2423
2424 case PRE_INC:
2425 case PRE_DEC:
2426 case POST_INC:
2427 case POST_DEC:
a3da8215 2428 case PRE_MODIFY:
2429 case POST_MODIFY:
635aff97 2430 case CALL:
3384a30e 2431 case UNSPEC_VOLATILE:
635aff97 2432 return 1;
2433
2434 case MEM:
c52051b7 2435 case ASM_INPUT:
635aff97 2436 case ASM_OPERANDS:
2437 if (MEM_VOLATILE_P (x))
2438 return 1;
0dbd1c74 2439
2440 default:
2441 break;
635aff97 2442 }
2443
2444 /* Recursively scan the operands of this expression. */
2445
2446 {
19cb6b50 2447 const char *fmt = GET_RTX_FORMAT (code);
2448 int i;
2617fe26 2449
635aff97 2450 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
2451 {
2452 if (fmt[i] == 'e')
2453 {
2454 if (side_effects_p (XEXP (x, i)))
2455 return 1;
2456 }
1bd8ca86 2457 else if (fmt[i] == 'E')
635aff97 2458 {
19cb6b50 2459 int j;
635aff97 2460 for (j = 0; j < XVECLEN (x, i); j++)
2461 if (side_effects_p (XVECEXP (x, i, j)))
2462 return 1;
2463 }
2464 }
2465 }
2466 return 0;
2467}
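
/* Illustrative sketch (not part of the original file): the usual guard
   before removing a computation whose result is unused.  volatile_refs_p
   suffices when only volatile memory matters; side_effects_p additionally
   rejects autoincrements, calls and non-VOID clobbers.  */
#if 0
static bool
example_safe_to_discard_p (rtx src)
{
  return !side_effects_p (src);
}
#endif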
2468\f
5ed26a34 2469/* Return nonzero if evaluating rtx X might cause a trap.
0eee494e 2470 FLAGS controls how to consider MEMs. A nonzero means the context
2471 of the access may have changed from the original, such that the
2472 address may have become invalid. */
635aff97 2473
77ad8e5a 2474int
dd9b9fc5 2475may_trap_p_1 (const_rtx x, unsigned flags)
635aff97 2476{
2477 int i;
2478 enum rtx_code code;
d2ca078f 2479 const char *fmt;
0eee494e 2480
2481 /* We make no distinction currently, but this function is part of
2482 the internal target-hooks ABI so we keep the parameter as
2483 "unsigned flags". */
2484 bool code_changed = flags != 0;
635aff97 2485
2486 if (x == 0)
2487 return 0;
2488 code = GET_CODE (x);
2489 switch (code)
2490 {
2491 /* Handle these cases quickly. */
0349edce 2492 CASE_CONST_ANY:
635aff97 2493 case SYMBOL_REF:
2494 case LABEL_REF:
2495 case CONST:
2496 case PC:
2497 case CC0:
2498 case REG:
2499 case SCRATCH:
2500 return 0;
2501
77ad8e5a 2502 case UNSPEC:
77ad8e5a 2503 return targetm.unspec_may_trap_p (x, flags);
2504
bcbfcebe 2505 case UNSPEC_VOLATILE:
77ad8e5a 2506 case ASM_INPUT:
635aff97 2507 case TRAP_IF:
2508 return 1;
2509
d18a3f6d 2510 case ASM_OPERANDS:
2511 return MEM_VOLATILE_P (x);
2512
635aff97 2513 /* Memory ref can trap unless it's a static var or a stack slot. */
2514 case MEM:
42982f3e 2515 /* Recognize specific pattern of stack checking probes. */
2516 if (flag_stack_check
2517 && MEM_VOLATILE_P (x)
2518 && XEXP (x, 0) == stack_pointer_rtx)
2519 return 1;
5ed26a34 2520 if (/* MEM_NOTRAP_P only relates to the actual position of the memory
0eee494e 2521 reference; moving it out of context such as when moving code
2522 when optimizing, might cause its address to become invalid. */
2523 code_changed
2524 || !MEM_NOTRAP_P (x))
2525 {
5b2a69fa 2526 HOST_WIDE_INT size = MEM_SIZE_KNOWN_P (x) ? MEM_SIZE (x) : 0;
0eee494e 2527 return rtx_addr_can_trap_p_1 (XEXP (x, 0), 0, size,
2528 GET_MODE (x), code_changed);
2529 }
2530
2531 return 0;
635aff97 2532
2533 /* Division by a non-constant might trap. */
2534 case DIV:
2535 case MOD:
2536 case UDIV:
2537 case UMOD:
fe994837 2538 if (HONOR_SNANS (x))
0a8176f3 2539 return 1;
cee7491d 2540 if (SCALAR_FLOAT_MODE_P (GET_MODE (x)))
8e97b017 2541 return flag_trapping_math;
2542 if (!CONSTANT_P (XEXP (x, 1)) || (XEXP (x, 1) == const0_rtx))
635aff97 2543 return 1;
0dbd1c74 2544 break;
2545
4c3aec45 2546 case EXPR_LIST:
2547 /* An EXPR_LIST is used to represent a function call. This
2548 certainly may trap. */
2549 return 1;
0dbd1c74 2550
27ea6d28 2551 case GE:
2552 case GT:
2553 case LE:
2554 case LT:
f1278f7e 2555 case LTGT:
51ba663d 2556 case COMPARE:
27ea6d28 2557 /* Some floating point comparisons may trap. */
350b17ef 2558 if (!flag_trapping_math)
2559 break;
27ea6d28 2560 /* ??? There is no machine independent way to check for tests that trap
2561 when COMPARE is used, though many targets do make this distinction.
2562 For instance, sparc uses CCFPE for compares which generate exceptions
2563 and CCFP for compares which do not generate exceptions. */
93633022 2564 if (HONOR_NANS (x))
51ba663d 2565 return 1;
2566 /* But often the compare has some CC mode, so check operand
2567 modes as well. */
93633022 2568 if (HONOR_NANS (XEXP (x, 0))
2569 || HONOR_NANS (XEXP (x, 1)))
0a8176f3 2570 return 1;
2571 break;
2572
2573 case EQ:
2574 case NE:
fe994837 2575 if (HONOR_SNANS (x))
0a8176f3 2576 return 1;
2577 /* Often comparison is CC mode, so check operand modes. */
fe994837 2578 if (HONOR_SNANS (XEXP (x, 0))
2579 || HONOR_SNANS (XEXP (x, 1)))
51ba663d 2580 return 1;
2581 break;
2582
d0a099f8 2583 case FIX:
2584 /* Conversion of floating point might trap. */
93633022 2585 if (flag_trapping_math && HONOR_NANS (XEXP (x, 0)))
d0a099f8 2586 return 1;
2587 break;
2588
4f63c6d1 2589 case NEG:
2590 case ABS:
f0fbc1cd 2591 case SUBREG:
4f63c6d1 2592 /* These operations don't trap even with floating point. */
2593 break;
2594
635aff97 2595 default:
2596 /* Any floating arithmetic may trap. */
bcbfcebe 2597 if (SCALAR_FLOAT_MODE_P (GET_MODE (x)) && flag_trapping_math)
635aff97 2598 return 1;
2599 }
2600
2601 fmt = GET_RTX_FORMAT (code);
2602 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
2603 {
2604 if (fmt[i] == 'e')
2605 {
5ed26a34 2606 if (may_trap_p_1 (XEXP (x, i), flags))
635aff97 2607 return 1;
2608 }
2609 else if (fmt[i] == 'E')
2610 {
19cb6b50 2611 int j;
635aff97 2612 for (j = 0; j < XVECLEN (x, i); j++)
5ed26a34 2613 if (may_trap_p_1 (XVECEXP (x, i, j), flags))
635aff97 2614 return 1;
2615 }
2616 }
2617 return 0;
2618}
1aecae7f 2619
2620/* Return nonzero if evaluating rtx X might cause a trap. */
2621
2622int
dd9b9fc5 2623may_trap_p (const_rtx x)
1aecae7f 2624{
5ed26a34 2625 return may_trap_p_1 (x, 0);
2626}
2627
334ec2d8 2628/* Same as above, but additionally return nonzero if evaluating rtx X might
1aecae7f 2629 cause a fault. We define a fault for the purpose of this function as an
2630 erroneous execution condition that cannot be encountered during the normal
2631 execution of a valid program; the typical example is an unaligned memory
2632 access on a strict alignment machine. The compiler guarantees that it
2633 doesn't generate code that will fault from a valid program, but this
2634 guarantee doesn't mean anything for individual instructions. Consider
2635 the following example:
2636
2637 struct S { int d; union { char *cp; int *ip; }; };
2638
2639 int foo(struct S *s)
2640 {
2641 if (s->d == 1)
2642 return *s->ip;
2643 else
2644 return *s->cp;
2645 }
2646
2647 on a strict alignment machine. In a valid program, foo will never be
2648 invoked on a structure for which d is equal to 1 and the underlying
2649 unique field of the union not aligned on a 4-byte boundary, but the
2650 expression *s->ip might cause a fault if considered individually.
2651
2652 At the RTL level, potentially problematic expressions will almost always
2653 satisfy may_trap_p; for example, the above dereference can be emitted as
2654 (mem:SI (reg:P)) and this expression is may_trap_p for a generic register.
2655 However, suppose that foo is inlined in a caller that causes s->cp to
2656 point to a local character variable and guarantees that s->d is not set
2657 to 1; foo may have been effectively translated into pseudo-RTL as:
2658
2659 if ((reg:SI) == 1)
2660 (set (reg:SI) (mem:SI (%fp - 7)))
2661 else
2662 (set (reg:QI) (mem:QI (%fp - 7)))
2663
2664 Now (mem:SI (%fp - 7)) is considered as not may_trap_p since it is a
2665 memory reference to a stack slot, but it will certainly cause a fault
2666 on a strict alignment machine. */
2667
2668int
dd9b9fc5 2669may_trap_or_fault_p (const_rtx x)
1aecae7f 2670{
0eee494e 2671 return may_trap_p_1 (x, 1);
1aecae7f 2672}
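
/* Illustrative sketch (not part of the original file): when code is moved
   out of its original context, e.g. hoisted above a guarding branch, the
   stronger may_trap_or_fault_p test is the appropriate one, since the
   guarantees described above hold only for code left in place.  */
#if 0
static bool
example_safe_to_hoist_p (rtx x)
{
  return !may_trap_or_fault_p (x) && !side_effects_p (x);
}
#endif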
635aff97 2673\f
2674/* Return nonzero if X contains a comparison that is not either EQ or NE,
2675 i.e., an inequality. */
2676
2677int
dd9b9fc5 2678inequality_comparisons_p (const_rtx x)
635aff97 2679{
19cb6b50 2680 const char *fmt;
2681 int len, i;
dd9b9fc5 2682 const enum rtx_code code = GET_CODE (x);
635aff97 2683
2684 switch (code)
2685 {
2686 case REG:
2687 case SCRATCH:
2688 case PC:
2689 case CC0:
0349edce 2690 CASE_CONST_ANY:
635aff97 2691 case CONST:
2692 case LABEL_REF:
2693 case SYMBOL_REF:
2694 return 0;
2695
2696 case LT:
2697 case LTU:
2698 case GT:
2699 case GTU:
2700 case LE:
2701 case LEU:
2702 case GE:
2703 case GEU:
2704 return 1;
2617fe26 2705
0dbd1c74 2706 default:
2707 break;
635aff97 2708 }
2709
2710 len = GET_RTX_LENGTH (code);
2711 fmt = GET_RTX_FORMAT (code);
2712
2713 for (i = 0; i < len; i++)
2714 {
2715 if (fmt[i] == 'e')
2716 {
2717 if (inequality_comparisons_p (XEXP (x, i)))
2718 return 1;
2719 }
2720 else if (fmt[i] == 'E')
2721 {
19cb6b50 2722 int j;
635aff97 2723 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
2724 if (inequality_comparisons_p (XVECEXP (x, i, j)))
2725 return 1;
2726 }
2727 }
2617fe26 2728
635aff97 2729 return 0;
2730}
2731\f
0a20afb5 2732/* Replace any occurrence of FROM in X with TO. The function does
2733 not descend into CONST_DOUBLE expressions when replacing.
635aff97 2734
2735 Note that copying is not done so X must not be shared unless all copies
2736 are to be modified. */
2737
2738rtx
3ad4992f 2739replace_rtx (rtx x, rtx from, rtx to)
635aff97 2740{
19cb6b50 2741 int i, j;
2742 const char *fmt;
635aff97 2743
2744 if (x == from)
2745 return to;
2746
2747 /* Allow this function to make replacements in EXPR_LISTs. */
2748 if (x == 0)
2749 return 0;
2750
11896b36 2751 if (GET_CODE (x) == SUBREG)
2752 {
47cfb7f4 2753 rtx new_rtx = replace_rtx (SUBREG_REG (x), from, to);
11896b36 2754
971ba038 2755 if (CONST_INT_P (new_rtx))
11896b36 2756 {
47cfb7f4 2757 x = simplify_subreg (GET_MODE (x), new_rtx,
11896b36 2758 GET_MODE (SUBREG_REG (x)),
2759 SUBREG_BYTE (x));
04e579b6 2760 gcc_assert (x);
11896b36 2761 }
2762 else
47cfb7f4 2763 SUBREG_REG (x) = new_rtx;
11896b36 2764
2765 return x;
2766 }
2767 else if (GET_CODE (x) == ZERO_EXTEND)
2768 {
47cfb7f4 2769 rtx new_rtx = replace_rtx (XEXP (x, 0), from, to);
11896b36 2770
971ba038 2771 if (CONST_INT_P (new_rtx))
11896b36 2772 {
2773 x = simplify_unary_operation (ZERO_EXTEND, GET_MODE (x),
47cfb7f4 2774 new_rtx, GET_MODE (XEXP (x, 0)));
04e579b6 2775 gcc_assert (x);
11896b36 2776 }
2777 else
47cfb7f4 2778 XEXP (x, 0) = new_rtx;
11896b36 2779
2780 return x;
2781 }
2782
635aff97 2783 fmt = GET_RTX_FORMAT (GET_CODE (x));
2784 for (i = GET_RTX_LENGTH (GET_CODE (x)) - 1; i >= 0; i--)
2785 {
2786 if (fmt[i] == 'e')
2787 XEXP (x, i) = replace_rtx (XEXP (x, i), from, to);
2788 else if (fmt[i] == 'E')
2789 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
2790 XVECEXP (x, i, j) = replace_rtx (XVECEXP (x, i, j), from, to);
2791 }
2792
2793 return x;
2617fe26 2794}
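
/* Illustrative sketch (not part of the original file): because replace_rtx
   modifies X in place, a caller substituting one register for another in a
   possibly shared pattern should operate on a copy.  */
#if 0
static rtx
example_substitute_reg (rtx pat, rtx old_reg, rtx new_reg)
{
  return replace_rtx (copy_rtx (pat), old_reg, new_reg);
}
#endif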
635aff97 2795\f
956816a2 2796/* Replace occurrences of OLD_LABEL in *LOC with NEW_LABEL. Also track
2797 the change in LABEL_NUSES if UPDATE_LABEL_NUSES. */
f2756aab 2798
956816a2 2799void
2800replace_label (rtx *loc, rtx old_label, rtx new_label, bool update_label_nuses)
f2756aab 2801{
956816a2 2802 /* Handle jump tables specially, since ADDR_{DIFF_,}VECs can be long. */
2803 rtx x = *loc;
2804 if (JUMP_TABLE_DATA_P (x))
cda612f5 2805 {
956816a2 2806 x = PATTERN (x);
2807 rtvec vec = XVEC (x, GET_CODE (x) == ADDR_DIFF_VEC);
2808 int len = GET_NUM_ELEM (vec);
2809 for (int i = 0; i < len; ++i)
cda612f5 2810 {
956816a2 2811 rtx ref = RTVEC_ELT (vec, i);
2812 if (XEXP (ref, 0) == old_label)
2813 {
2814 XEXP (ref, 0) = new_label;
2815 if (update_label_nuses)
2816 {
2817 ++LABEL_NUSES (new_label);
2818 --LABEL_NUSES (old_label);
2819 }
2820 }
cda612f5 2821 }
956816a2 2822 return;
cda612f5 2823 }
2824
f2756aab 2825 /* If this is a JUMP_INSN, then we also need to fix the JUMP_LABEL
956816a2 2826 field. This is not handled by the iterator because it doesn't
f2756aab 2827 handle unprinted ('0') fields. */
956816a2 2828 if (JUMP_P (x) && JUMP_LABEL (x) == old_label)
2829 JUMP_LABEL (x) = new_label;
f2756aab 2830
956816a2 2831 subrtx_ptr_iterator::array_type array;
2832 FOR_EACH_SUBRTX_PTR (iter, array, loc, ALL)
cda612f5 2833 {
956816a2 2834 rtx *loc = *iter;
2835 if (rtx x = *loc)
cda612f5 2836 {
956816a2 2837 if (GET_CODE (x) == SYMBOL_REF
2838 && CONSTANT_POOL_ADDRESS_P (x))
2839 {
2840 rtx c = get_pool_constant (x);
2841 if (rtx_referenced_p (old_label, c))
2842 {
2843 /* Create a copy of constant C; replace the label inside
2844 but do not update LABEL_NUSES because uses in constant pool
2845 are not counted. */
2846 rtx new_c = copy_rtx (c);
2847 replace_label (&new_c, old_label, new_label, false);
2848
2849 /* Add the new constant NEW_C to constant pool and replace
2850 the old reference to constant by new reference. */
2851 rtx new_mem = force_const_mem (get_pool_mode (x), new_c);
2852 *loc = replace_rtx (x, x, XEXP (new_mem, 0));
2853 }
2854 }
2855
2856 if ((GET_CODE (x) == LABEL_REF
2857 || GET_CODE (x) == INSN_LIST)
2858 && XEXP (x, 0) == old_label)
2859 {
2860 XEXP (x, 0) = new_label;
2861 if (update_label_nuses)
2862 {
2863 ++LABEL_NUSES (new_label);
2864 --LABEL_NUSES (old_label);
2865 }
2866 }
cda612f5 2867 }
cda612f5 2868 }
956816a2 2869}
f2756aab 2870
956816a2 2871void
2872replace_label_in_insn (rtx_insn *insn, rtx old_label, rtx new_label,
2873 bool update_label_nuses)
2874{
2875 rtx insn_as_rtx = insn;
2876 replace_label (&insn_as_rtx, old_label, new_label, update_label_nuses);
2877 gcc_checking_assert (insn_as_rtx == insn);
f2756aab 2878}
2879
db184797 2880/* Return true if X is referenced in BODY. */
f2756aab 2881
db184797 2882bool
2883rtx_referenced_p (const_rtx x, const_rtx body)
f2756aab 2884{
db184797 2885 subrtx_iterator::array_type array;
2886 FOR_EACH_SUBRTX (iter, array, body, ALL)
2887 if (const_rtx y = *iter)
2888 {
2889 /* Check if a label_ref Y refers to label X. */
b49f2e4b 2890 if (GET_CODE (y) == LABEL_REF
2891 && LABEL_P (x)
2892 && LABEL_REF_LABEL (y) == x)
db184797 2893 return true;
f2756aab 2894
db184797 2895 if (rtx_equal_p (x, y))
2896 return true;
f2756aab 2897
db184797 2898 /* If Y is a reference to pool constant traverse the constant. */
2899 if (GET_CODE (y) == SYMBOL_REF
2900 && CONSTANT_POOL_ADDRESS_P (y))
2901 iter.substitute (get_pool_constant (y));
2902 }
2903 return false;
f2756aab 2904}
2905
afff715a 2906/* If INSN is a tablejump, return true and store the label (before the
2907 jump table) in *LABELP and the jump table in *TABLEP. LABELP and TABLEP may be NULL. */
f2756aab 2908
2909bool
28fbb2b5 2910tablejump_p (const rtx_insn *insn, rtx *labelp, rtx_jump_table_data **tablep)
f2756aab 2911{
f9a00e9e 2912 rtx label;
2913 rtx_insn *table;
afff715a 2914
4115ac36 2915 if (!JUMP_P (insn))
2916 return false;
2917
2918 label = JUMP_LABEL (insn);
2919 if (label != NULL_RTX && !ANY_RETURN_P (label)
91a55c11 2920 && (table = NEXT_INSN (as_a <rtx_insn *> (label))) != NULL_RTX
971ba038 2921 && JUMP_TABLE_DATA_P (table))
f2756aab 2922 {
afff715a 2923 if (labelp)
2924 *labelp = label;
2925 if (tablep)
c86d86ff 2926 *tablep = as_a <rtx_jump_table_data *> (table);
f2756aab 2927 return true;
2928 }
2929 return false;
2930}
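
/* Illustrative sketch (not part of the original file): visiting the target
   labels of a dispatch table once an insn has been identified as a
   tablejump, using the same PATTERN layout that replace_label handles
   above.  */
#if 0
static void
example_visit_table_labels (rtx_insn *insn)
{
  rtx label;
  rtx_jump_table_data *table;
  if (tablejump_p (insn, &label, &table))
    {
      rtx pat = PATTERN (table);
      rtvec vec = XVEC (pat, GET_CODE (pat) == ADDR_DIFF_VEC);
      for (int i = 0; i < GET_NUM_ELEM (vec); i++)
	{
	  rtx target = XEXP (RTVEC_ELT (vec, i), 0);
	  (void) target;	/* Each TARGET is one case label.  */
	}
    }
}
#endif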
2931
4e44a132 2932/* A subroutine of computed_jump_p: return 1 if X contains a REG or MEM or
2933 constant that is not in the constant pool and not in the condition
2934 of an IF_THEN_ELSE. */
ca6d6e84 2935
2936static int
dd9b9fc5 2937computed_jump_p_1 (const_rtx x)
ca6d6e84 2938{
dd9b9fc5 2939 const enum rtx_code code = GET_CODE (x);
ca6d6e84 2940 int i, j;
d2ca078f 2941 const char *fmt;
ca6d6e84 2942
2943 switch (code)
2944 {
ca6d6e84 2945 case LABEL_REF:
2946 case PC:
2947 return 0;
2948
4e44a132 2949 case CONST:
0349edce 2950 CASE_CONST_ANY:
4e44a132 2951 case SYMBOL_REF:
ca6d6e84 2952 case REG:
2953 return 1;
2954
2955 case MEM:
2956 return ! (GET_CODE (XEXP (x, 0)) == SYMBOL_REF
2957 && CONSTANT_POOL_ADDRESS_P (XEXP (x, 0)));
2958
2959 case IF_THEN_ELSE:
4e44a132 2960 return (computed_jump_p_1 (XEXP (x, 1))
2961 || computed_jump_p_1 (XEXP (x, 2)));
99c14947 2962
2963 default:
2964 break;
ca6d6e84 2965 }
2966
2967 fmt = GET_RTX_FORMAT (code);
2968 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
2969 {
2970 if (fmt[i] == 'e'
4e44a132 2971 && computed_jump_p_1 (XEXP (x, i)))
ca6d6e84 2972 return 1;
2973
1bd8ca86 2974 else if (fmt[i] == 'E')
ca6d6e84 2975 for (j = 0; j < XVECLEN (x, i); j++)
4e44a132 2976 if (computed_jump_p_1 (XVECEXP (x, i, j)))
ca6d6e84 2977 return 1;
2978 }
2979
2980 return 0;
2981}
2982
2983/* Return nonzero if INSN is an indirect jump (aka computed jump).
2984
2985 Tablejumps and casesi insns are not considered indirect jumps;
9588521d 2986 we can recognize them by a (use (label_ref)). */
ca6d6e84 2987
2988int
181908bb 2989computed_jump_p (const rtx_insn *insn)
ca6d6e84 2990{
2991 int i;
6d7dc5b9 2992 if (JUMP_P (insn))
ca6d6e84 2993 {
2994 rtx pat = PATTERN (insn);
ca6d6e84 2995
19d2fe05 2996 /* If we have a JUMP_LABEL set, we're not a computed jump. */
2997 if (JUMP_LABEL (insn) != NULL)
d3ff0f75 2998 return 0;
19d2fe05 2999
3000 if (GET_CODE (pat) == PARALLEL)
ca6d6e84 3001 {
3002 int len = XVECLEN (pat, 0);
3003 int has_use_labelref = 0;
3004
3005 for (i = len - 1; i >= 0; i--)
3006 if (GET_CODE (XVECEXP (pat, 0, i)) == USE
3007 && (GET_CODE (XEXP (XVECEXP (pat, 0, i), 0))
3008 == LABEL_REF))
b766ccbc 3009 {
3010 has_use_labelref = 1;
3011 break;
3012 }
ca6d6e84 3013
3014 if (! has_use_labelref)
3015 for (i = len - 1; i >= 0; i--)
3016 if (GET_CODE (XVECEXP (pat, 0, i)) == SET
3017 && SET_DEST (XVECEXP (pat, 0, i)) == pc_rtx
4e44a132 3018 && computed_jump_p_1 (SET_SRC (XVECEXP (pat, 0, i))))
ca6d6e84 3019 return 1;
3020 }
3021 else if (GET_CODE (pat) == SET
3022 && SET_DEST (pat) == pc_rtx
4e44a132 3023 && computed_jump_p_1 (SET_SRC (pat)))
ca6d6e84 3024 return 1;
3025 }
3026 return 0;
3027}
fb8acade 3028
1f864115 3029\f
3030
623ad592 3031/* MEM has a PRE/POST-INC/DEC/MODIFY address X. Extract the operands of
3032 the equivalent add insn and pass the result to FN, using DATA as the
3033 final argument. */
1f864115 3034
3035static int
623ad592 3036for_each_inc_dec_find_inc_dec (rtx mem, for_each_inc_dec_fn fn, void *data)
1f864115 3037{
623ad592 3038 rtx x = XEXP (mem, 0);
1f864115 3039 switch (GET_CODE (x))
3040 {
3041 case PRE_INC:
3042 case POST_INC:
3043 {
623ad592 3044 int size = GET_MODE_SIZE (GET_MODE (mem));
1f864115 3045 rtx r1 = XEXP (x, 0);
3046 rtx c = gen_int_mode (size, GET_MODE (r1));
623ad592 3047 return fn (mem, x, r1, r1, c, data);
1f864115 3048 }
3049
3050 case PRE_DEC:
3051 case POST_DEC:
3052 {
623ad592 3053 int size = GET_MODE_SIZE (GET_MODE (mem));
1f864115 3054 rtx r1 = XEXP (x, 0);
3055 rtx c = gen_int_mode (-size, GET_MODE (r1));
623ad592 3056 return fn (mem, x, r1, r1, c, data);
1f864115 3057 }
3058
3059 case PRE_MODIFY:
3060 case POST_MODIFY:
3061 {
3062 rtx r1 = XEXP (x, 0);
3063 rtx add = XEXP (x, 1);
623ad592 3064 return fn (mem, x, r1, add, NULL, data);
1f864115 3065 }
3066
3067 default:
623ad592 3068 gcc_unreachable ();
1f864115 3069 }
3070}
3071
623ad592 3072/* Traverse X looking for MEMs that have autoinc addresses.
3073 For each such autoinc operation found, call FN, passing it
1f864115 3074 the innermost enclosing MEM, the operation itself, the RTX modified
3075 by the operation, two RTXs (the second may be NULL) that, once
3076 added, represent the value to be held by the modified RTX
623ad592 3077 afterwards, and DATA. FN is to return 0 to continue the
3078 traversal or any other value to have it returned to the caller of
1f864115 3079 for_each_inc_dec. */
3080
3081int
623ad592 3082for_each_inc_dec (rtx x,
1f864115 3083 for_each_inc_dec_fn fn,
623ad592 3084 void *data)
1f864115 3085{
623ad592 3086 subrtx_var_iterator::array_type array;
3087 FOR_EACH_SUBRTX_VAR (iter, array, x, NONCONST)
3088 {
3089 rtx mem = *iter;
3090 if (mem
3091 && MEM_P (mem)
3092 && GET_RTX_CLASS (GET_CODE (XEXP (mem, 0))) == RTX_AUTOINC)
3093 {
3094 int res = for_each_inc_dec_find_inc_dec (mem, fn, data);
3095 if (res != 0)
3096 return res;
3097 iter.skip_subrtxes ();
3098 }
3099 }
3100 return 0;
1f864115 3101}
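
/* Illustrative sketch (not part of the original file): a callback suitable
   for the FN argument above, counting autoinc addresses in a pattern.  The
   parameter names follow the description above: DEST is the register being
   modified, and SRC plus SRCOFF (null for {PRE,POST}_MODIFY) give its
   value afterwards.  */
#if 0
static int
example_count_autoinc (rtx mem ATTRIBUTE_UNUSED, rtx op ATTRIBUTE_UNUSED,
		       rtx dest ATTRIBUTE_UNUSED, rtx src ATTRIBUTE_UNUSED,
		       rtx srcoff ATTRIBUTE_UNUSED, void *data)
{
  ++*(int *) data;
  return 0;	/* Returning 0 continues the traversal.  */
}

/* Typical call:
     int count = 0;
     for_each_inc_dec (PATTERN (insn), example_count_autoinc, &count);  */
#endif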
3102
3103\f
0919e10f 3104/* Searches X for any reference to REGNO, returning the rtx of the
3105 reference found if any. Otherwise, returns NULL_RTX. */
3106
3107rtx
3ad4992f 3108regno_use_in (unsigned int regno, rtx x)
0919e10f 3109{
19cb6b50 3110 const char *fmt;
0919e10f 3111 int i, j;
3112 rtx tem;
3113
8ad4c111 3114 if (REG_P (x) && REGNO (x) == regno)
0919e10f 3115 return x;
3116
3117 fmt = GET_RTX_FORMAT (GET_CODE (x));
3118 for (i = GET_RTX_LENGTH (GET_CODE (x)) - 1; i >= 0; i--)
3119 {
3120 if (fmt[i] == 'e')
3121 {
3122 if ((tem = regno_use_in (regno, XEXP (x, i))))
3123 return tem;
3124 }
3125 else if (fmt[i] == 'E')
3126 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
3127 if ((tem = regno_use_in (regno , XVECEXP (x, i, j))))
3128 return tem;
3129 }
3130
3131 return NULL_RTX;
3132}
c4f0a530 3133
09f800b9 3134/* Return a value indicating whether OP, an operand of a commutative
3135 operation, is preferred as the first or second operand. The higher
3136 the value, the stronger the preference for being the first operand.
3137 We use negative values to indicate a preference for the second operand
3138 and positive values for the first. */
3139
f4ad60b7 3140int
3ad4992f 3141commutative_operand_precedence (rtx op)
09f800b9 3142{
b147a3b4 3143 enum rtx_code code = GET_CODE (op);
48e1416a 3144
09f800b9 3145 /* Constants always become the second operand. Prefer "nice" constants. */
b147a3b4 3146 if (code == CONST_INT)
1f3b83af 3147 return -8;
e913b5cd 3148 if (code == CONST_WIDE_INT)
3149 return -8;
b147a3b4 3150 if (code == CONST_DOUBLE)
1f3b83af 3151 return -7;
e397ad8e 3152 if (code == CONST_FIXED)
3153 return -7;
efbc8128 3154 op = avoid_constant_pool_reference (op);
57260f80 3155 code = GET_CODE (op);
6720e96c 3156
3157 switch (GET_RTX_CLASS (code))
3158 {
3159 case RTX_CONST_OBJ:
3160 if (code == CONST_INT)
1f3b83af 3161 return -6;
e913b5cd 3162 if (code == CONST_WIDE_INT)
3163 return -6;
6720e96c 3164 if (code == CONST_DOUBLE)
1f3b83af 3165 return -5;
e397ad8e 3166 if (code == CONST_FIXED)
3167 return -5;
1f3b83af 3168 return -4;
6720e96c 3169
3170 case RTX_EXTRA:
3171 /* SUBREGs of objects should come second. */
3172 if (code == SUBREG && OBJECT_P (SUBREG_REG (op)))
1f3b83af 3173 return -3;
3072d30e 3174 return 0;
6720e96c 3175
3176 case RTX_OBJ:
3177 /* Complex expressions should be the first, so decrease priority
1f3b83af 3178 of objects. Prefer pointer objects over non pointer objects. */
3179 if ((REG_P (op) && REG_POINTER (op))
3180 || (MEM_P (op) && MEM_POINTER (op)))
3181 return -1;
3182 return -2;
6720e96c 3183
3184 case RTX_COMM_ARITH:
3185 /* Prefer operands that are themselves commutative to be first.
3186 This helps to make things linear. In particular,
3187 (and (and (reg) (reg)) (not (reg))) is canonical. */
3188 return 4;
3189
3190 case RTX_BIN_ARITH:
3191 /* If only one operand is a binary expression, it will be the first
3192 operand. In particular, (plus (minus (reg) (reg)) (neg (reg)))
3193 is canonical, although it will usually be further simplified. */
3194 return 2;
48e1416a 3195
6720e96c 3196 case RTX_UNARY:
3197 /* Then prefer NEG and NOT. */
3198 if (code == NEG || code == NOT)
3199 return 1;
09f800b9 3200
6720e96c 3201 default:
3202 return 0;
3203 }
09f800b9 3204}
3205
dd5b4b36 3206/* Return 1 iff it is necessary to swap operands of commutative operation
09f800b9 3207 in order to canonicalize expression. */
3208
1f3b83af 3209bool
3ad4992f 3210swap_commutative_operands_p (rtx x, rtx y)
09f800b9 3211{
f4ad60b7 3212 return (commutative_operand_precedence (x)
3213 < commutative_operand_precedence (y));
09f800b9 3214}
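
/* Illustrative sketch (not part of the original file): the canonical use
   of the predicate when constructing a commutative operation.  */
#if 0
static void
example_canonicalize_operands (rtx *op0, rtx *op1)
{
  if (swap_commutative_operands_p (*op0, *op1))
    {
      rtx tem = *op0;
      *op0 = *op1;
      *op1 = tem;
    }
}
#endif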
c4f0a530 3215
3216/* Return 1 if X is an autoincrement side effect and the register is
3217 not the stack pointer. */
3218int
dd9b9fc5 3219auto_inc_p (const_rtx x)
c4f0a530 3220{
3221 switch (GET_CODE (x))
3222 {
3223 case PRE_INC:
3224 case POST_INC:
3225 case PRE_DEC:
3226 case POST_DEC:
3227 case PRE_MODIFY:
3228 case POST_MODIFY:
3229 /* There are no REG_INC notes for SP. */
3230 if (XEXP (x, 0) != stack_pointer_rtx)
3231 return 1;
3232 default:
3233 break;
3234 }
3235 return 0;
3236}
b067e925 3237
2358393e 3238/* Return nonzero if IN contains a piece of rtl that has the address LOC. */
2c663070 3239int
dd9b9fc5 3240loc_mentioned_in_p (rtx *loc, const_rtx in)
2c663070 3241{
42a3a38b 3242 enum rtx_code code;
3243 const char *fmt;
2c663070 3244 int i, j;
3245
42a3a38b 3246 if (!in)
3247 return 0;
3248
3249 code = GET_CODE (in);
3250 fmt = GET_RTX_FORMAT (code);
2c663070 3251 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
3252 {
2c663070 3253 if (fmt[i] == 'e')
3254 {
c8707f08 3255 if (loc == &XEXP (in, i) || loc_mentioned_in_p (loc, XEXP (in, i)))
2c663070 3256 return 1;
3257 }
3258 else if (fmt[i] == 'E')
3259 for (j = XVECLEN (in, i) - 1; j >= 0; j--)
c8707f08 3260 if (loc == &XVECEXP (in, i, j)
3261 || loc_mentioned_in_p (loc, XVECEXP (in, i, j)))
2c663070 3262 return 1;
3263 }
3264 return 0;
3265}
701e46d0 3266
f36eb1e9 3267/* Helper function for subreg_lsb. Given a subreg's OUTER_MODE, INNER_MODE,
3268 and SUBREG_BYTE, return the bit offset where the subreg begins
3269 (counting from the least significant bit of the operand). */
ef4e6755 3270
3271unsigned int
3754d046 3272subreg_lsb_1 (machine_mode outer_mode,
3273 machine_mode inner_mode,
f36eb1e9 3274 unsigned int subreg_byte)
ef4e6755 3275{
ef4e6755 3276 unsigned int bitpos;
3277 unsigned int byte;
3278 unsigned int word;
3279
3280 /* A paradoxical subreg begins at bit position 0. */
ded805e6 3281 if (GET_MODE_PRECISION (outer_mode) > GET_MODE_PRECISION (inner_mode))
ef4e6755 3282 return 0;
3283
3284 if (WORDS_BIG_ENDIAN != BYTES_BIG_ENDIAN)
3285 /* If the subreg crosses a word boundary ensure that
3286 it also begins and ends on a word boundary. */
04e579b6 3287 gcc_assert (!((subreg_byte % UNITS_PER_WORD
3288 + GET_MODE_SIZE (outer_mode)) > UNITS_PER_WORD
3289 && (subreg_byte % UNITS_PER_WORD
3290 || GET_MODE_SIZE (outer_mode) % UNITS_PER_WORD)));
ef4e6755 3291
3292 if (WORDS_BIG_ENDIAN)
3293 word = (GET_MODE_SIZE (inner_mode)
f36eb1e9 3294 - (subreg_byte + GET_MODE_SIZE (outer_mode))) / UNITS_PER_WORD;
ef4e6755 3295 else
f36eb1e9 3296 word = subreg_byte / UNITS_PER_WORD;
ef4e6755 3297 bitpos = word * BITS_PER_WORD;
3298
3299 if (BYTES_BIG_ENDIAN)
3300 byte = (GET_MODE_SIZE (inner_mode)
f36eb1e9 3301 - (subreg_byte + GET_MODE_SIZE (outer_mode))) % UNITS_PER_WORD;
ef4e6755 3302 else
f36eb1e9 3303 byte = subreg_byte % UNITS_PER_WORD;
ef4e6755 3304 bitpos += byte * BITS_PER_UNIT;
3305
3306 return bitpos;
3307}
3308
f36eb1e9 3309/* Given a subreg X, return the bit offset where the subreg begins
3310 (counting from the least significant bit of the reg). */
3311
3312unsigned int
dd9b9fc5 3313subreg_lsb (const_rtx x)
f36eb1e9 3314{
3315 return subreg_lsb_1 (GET_MODE (x), GET_MODE (SUBREG_REG (x)),
3316 SUBREG_BYTE (x));
3317}
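
/* Worked example (illustrative): on a little-endian target,
   (subreg:SI (reg:DI r) 4) selects the high half of R, so subreg_lsb
   returns 32; with big-endian words and bytes the same SUBREG_BYTE
   selects the low half and the result is 0.  */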
3318
fe2ebfc8 3319/* Fill in information about a subreg of a hard register.
701e46d0 3320 xregno - A regno of an inner hard subreg_reg (or what will become one).
3321 xmode - The mode of xregno.
3322 offset - The byte offset.
3323 ymode - The mode of a top level SUBREG (or what may become one).
5ccab9d4 3324 info - Pointer to structure to fill in.
3325
3326 Rather than considering one particular inner register (and thus one
3327 particular "outer" register) in isolation, this function really uses
3328 XREGNO as a model for a sequence of isomorphic hard registers. Thus the
3329 function does not check whether adding INFO->offset to XREGNO gives
3330 a valid hard register; even if INFO->offset + XREGNO is out of range,
3331 there might be another register of the same type that is in range.
3332 Likewise it doesn't check whether HARD_REGNO_MODE_OK accepts the new
3333 register, since that can depend on things like whether the final
3334 register number is even or odd. Callers that want to check whether
3335 this particular subreg can be replaced by a simple (reg ...) should
3336 use simplify_subreg_regno. */
3337
9680c846 3338void
3754d046 3339subreg_get_info (unsigned int xregno, machine_mode xmode,
3340 unsigned int offset, machine_mode ymode,
fe2ebfc8 3341 struct subreg_info *info)
d9b3752c 3342{
695595bc 3343 int nregs_xmode, nregs_ymode;
d9b3752c 3344 int mode_multiple, nregs_multiple;
fe2ebfc8 3345 int offset_adj, y_offset, y_offset_adj;
695595bc 3346 int regsize_xmode, regsize_ymode;
fe2ebfc8 3347 bool rknown;
d9b3752c 3348
04e579b6 3349 gcc_assert (xregno < FIRST_PSEUDO_REGISTER);
d9b3752c 3350
fe2ebfc8 3351 rknown = false;
3352
ed21e7ff 3353 /* If there are holes in a non-scalar mode in registers, we expect
3354 that it is made up of its units concatenated together. */
695595bc 3355 if (HARD_REGNO_NREGS_HAS_PADDING (xregno, xmode))
ed21e7ff 3356 {
3754d046 3357 machine_mode xmode_unit;
695595bc 3358
3359 nregs_xmode = HARD_REGNO_NREGS_WITH_PADDING (xregno, xmode);
3360 if (GET_MODE_INNER (xmode) == VOIDmode)
3361 xmode_unit = xmode;
3362 else
3363 xmode_unit = GET_MODE_INNER (xmode);
3364 gcc_assert (HARD_REGNO_NREGS_HAS_PADDING (xregno, xmode_unit));
3365 gcc_assert (nregs_xmode
3366 == (GET_MODE_NUNITS (xmode)
3367 * HARD_REGNO_NREGS_WITH_PADDING (xregno, xmode_unit)));
3368 gcc_assert (hard_regno_nregs[xregno][xmode]
3369 == (hard_regno_nregs[xregno][xmode_unit]
3370 * GET_MODE_NUNITS (xmode)));
ed21e7ff 3371
3372 /* You can only ask for a SUBREG of a value with holes in the middle
3373 if you don't cross the holes. (Such a SUBREG should be done by
3374 picking a different register class, or doing it in memory if
3375 necessary.) An example of a value with holes is XCmode on 32-bit
3376 x86 with -m128bit-long-double; it's represented in 6 32-bit registers,
48e1416a 3377 3 for each part, but in memory it's two 128-bit parts.
ed21e7ff 3378 Padding is assumed to be at the end (not necessarily the 'high part')
3379 of each unit. */
48e1416a 3380 if ((offset / GET_MODE_SIZE (xmode_unit) + 1
695595bc 3381 < GET_MODE_NUNITS (xmode))
3382 && (offset / GET_MODE_SIZE (xmode_unit)
ed21e7ff 3383 != ((offset + GET_MODE_SIZE (ymode) - 1)
695595bc 3384 / GET_MODE_SIZE (xmode_unit))))
fe2ebfc8 3385 {
3386 info->representable_p = false;
3387 rknown = true;
3388 }
ed21e7ff 3389 }
3390 else
3391 nregs_xmode = hard_regno_nregs[xregno][xmode];
48e1416a 3392
67d6c12b 3393 nregs_ymode = hard_regno_nregs[xregno][ymode];
d9b3752c 3394
ed21e7ff 3395 /* Paradoxical subregs are otherwise valid. */
fe2ebfc8 3396 if (!rknown
3397 && offset == 0
ded805e6 3398 && GET_MODE_PRECISION (ymode) > GET_MODE_PRECISION (xmode))
fe2ebfc8 3399 {
3400 info->representable_p = true;
3401 /* If this is a big endian paradoxical subreg, which uses more
3402 actual hard registers than the original register, we must
3403 return a negative offset so that we find the proper highpart
3404 of the register. */
3405 if (GET_MODE_SIZE (ymode) > UNITS_PER_WORD
76c64076 3406 ? REG_WORDS_BIG_ENDIAN : BYTES_BIG_ENDIAN)
fe2ebfc8 3407 info->offset = nregs_xmode - nregs_ymode;
3408 else
3409 info->offset = 0;
3410 info->nregs = nregs_ymode;
3411 return;
3412 }
d9b3752c 3413
695595bc 3414 /* If registers store different numbers of bits in the different
3415 modes, we cannot generally form this subreg. */
fe2ebfc8 3416 if (!HARD_REGNO_NREGS_HAS_PADDING (xregno, xmode)
a100ece7 3417 && !HARD_REGNO_NREGS_HAS_PADDING (xregno, ymode)
3418 && (GET_MODE_SIZE (xmode) % nregs_xmode) == 0
3419 && (GET_MODE_SIZE (ymode) % nregs_ymode) == 0)
fe2ebfc8 3420 {
3421 regsize_xmode = GET_MODE_SIZE (xmode) / nregs_xmode;
fe2ebfc8 3422 regsize_ymode = GET_MODE_SIZE (ymode) / nregs_ymode;
fe2ebfc8 3423 if (!rknown && regsize_xmode > regsize_ymode && nregs_ymode > 1)
3424 {
3425 info->representable_p = false;
3426 info->nregs
3427 = (GET_MODE_SIZE (ymode) + regsize_xmode - 1) / regsize_xmode;
3428 info->offset = offset / regsize_xmode;
3429 return;
3430 }
3431 if (!rknown && regsize_ymode > regsize_xmode && nregs_xmode > 1)
3432 {
3433 info->representable_p = false;
3434 info->nregs
3435 = (GET_MODE_SIZE (ymode) + regsize_xmode - 1) / regsize_xmode;
3436 info->offset = offset / regsize_xmode;
3437 return;
3438 }
ef8a7a52 3439 /* Quick exit for the simple and common case of extracting whole
3440 subregisters from a multiregister value. */
3441 /* ??? It would be better to integrate this into the code below,
3442 if we can generalize the concept enough and figure out how
3443 odd-sized modes can coexist with the other weird cases we support. */
3444 if (!rknown
3445 && WORDS_BIG_ENDIAN == REG_WORDS_BIG_ENDIAN
3446 && regsize_xmode == regsize_ymode
3447 && (offset % regsize_ymode) == 0)
3448 {
3449 info->representable_p = true;
3450 info->nregs = nregs_ymode;
3451 info->offset = offset / regsize_ymode;
3452 gcc_assert (info->offset + info->nregs <= nregs_xmode);
3453 return;
3454 }
fe2ebfc8 3455 }
695595bc 3456
ed21e7ff 3457 /* Lowpart subregs are otherwise valid. */
fe2ebfc8 3458 if (!rknown && offset == subreg_lowpart_offset (ymode, xmode))
3459 {
3460 info->representable_p = true;
3461 rknown = true;
8ef3d190 3462
3463 if (offset == 0 || nregs_xmode == nregs_ymode)
3464 {
3465 info->offset = 0;
3466 info->nregs = nregs_ymode;
3467 return;
3468 }
fe2ebfc8 3469 }
d9b3752c 3470
ed21e7ff 3471 /* This should always pass, otherwise we don't know how to verify
3472 the constraint. These conditions may be relaxed but
3473 subreg_regno_offset would need to be redesigned. */
04e579b6 3474 gcc_assert ((GET_MODE_SIZE (xmode) % GET_MODE_SIZE (ymode)) == 0);
04e579b6 3475 gcc_assert ((nregs_xmode % nregs_ymode) == 0);
d9b3752c 3476
76c64076 3477 if (WORDS_BIG_ENDIAN != REG_WORDS_BIG_ENDIAN
3478 && GET_MODE_SIZE (xmode) > UNITS_PER_WORD)
3479 {
3480 HOST_WIDE_INT xsize = GET_MODE_SIZE (xmode);
3481 HOST_WIDE_INT ysize = GET_MODE_SIZE (ymode);
3482 HOST_WIDE_INT off_low = offset & (ysize - 1);
3483 HOST_WIDE_INT off_high = offset & ~(ysize - 1);
3484 offset = (xsize - ysize - off_high) | off_low;
3485 }
df07c3ae 3486 /* The XMODE value can be seen as a vector of NREGS_XMODE
845bebef 3487 values. The subreg must represent a lowpart of a given field.
d9b3752c 3488 Compute what field it is. */
fe2ebfc8 3489 offset_adj = offset;
3490 offset_adj -= subreg_lowpart_offset (ymode,
3491 mode_for_size (GET_MODE_BITSIZE (xmode)
3492 / nregs_xmode,
3493 MODE_INT, 0));
d9b3752c 3494
ed21e7ff 3495 /* Size of ymode must not be greater than the size of xmode. */
d9b3752c 3496 mode_multiple = GET_MODE_SIZE (xmode) / GET_MODE_SIZE (ymode);
04e579b6 3497 gcc_assert (mode_multiple != 0);
d9b3752c 3498
3499 y_offset = offset / GET_MODE_SIZE (ymode);
fe2ebfc8 3500 y_offset_adj = offset_adj / GET_MODE_SIZE (ymode);
3501 nregs_multiple = nregs_xmode / nregs_ymode;
04e579b6 3502
fe2ebfc8 3503 gcc_assert ((offset_adj % GET_MODE_SIZE (ymode)) == 0);
04e579b6 3504 gcc_assert ((mode_multiple % nregs_multiple) == 0);
3505
fe2ebfc8 3506 if (!rknown)
3507 {
3508 info->representable_p = (!(y_offset_adj % (mode_multiple / nregs_multiple)));
3509 rknown = true;
3510 }
3511 info->offset = (y_offset / (mode_multiple / nregs_multiple)) * nregs_ymode;
3512 info->nregs = nregs_ymode;
3513}
3514
3515/* This function returns the regno offset of a subreg expression.
3516 xregno - A regno of an inner hard subreg_reg (or what will become one).
3517 xmode - The mode of xregno.
3518 offset - The byte offset.
3519 ymode - The mode of a top level SUBREG (or what may become one).
3520 RETURN - The regno offset which would be used. */
3521unsigned int
3754d046 3522subreg_regno_offset (unsigned int xregno, machine_mode xmode,
3523 unsigned int offset, machine_mode ymode)
fe2ebfc8 3524{
3525 struct subreg_info info;
3526 subreg_get_info (xregno, xmode, offset, ymode, &info);
3527 return info.offset;
3528}
3529
3530/* This function returns true when the offset is representable via
3531 subreg_offset in the given regno.
3532 xregno - A regno of an inner hard subreg_reg (or what will become one).
3533 xmode - The mode of xregno.
3534 offset - The byte offset.
3535 ymode - The mode of a top level SUBREG (or what may become one).
3536 RETURN - Whether the offset is representable. */
3537bool
3754d046 3538subreg_offset_representable_p (unsigned int xregno, machine_mode xmode,
3539 unsigned int offset, machine_mode ymode)
fe2ebfc8 3540{
3541 struct subreg_info info;
3542 subreg_get_info (xregno, xmode, offset, ymode, &info);
949bf6a9 3543 return info.representable_p;
d9b3752c 3544}
3545
5992d16a 3546/* Return the number of a YMODE register to which
3547
3548 (subreg:YMODE (reg:XMODE XREGNO) OFFSET)
3549
3550 can be simplified. Return -1 if the subreg can't be simplified.
3551
3552 XREGNO is a hard register number. */
3553
3554int
3754d046 3555simplify_subreg_regno (unsigned int xregno, machine_mode xmode,
3556 unsigned int offset, machine_mode ymode)
5992d16a 3557{
3558 struct subreg_info info;
3559 unsigned int yregno;
3560
3561#ifdef CANNOT_CHANGE_MODE_CLASS
3562 /* Give the backend a chance to disallow the mode change. */
3563 if (GET_MODE_CLASS (xmode) != MODE_COMPLEX_INT
3564 && GET_MODE_CLASS (xmode) != MODE_COMPLEX_FLOAT
c6a6cdaa 3565 && REG_CANNOT_CHANGE_MODE_P (xregno, xmode, ymode)
3566 /* We can use mode change in LRA for some transformations. */
3567 && ! lra_in_progress)
5992d16a 3568 return -1;
3569#endif
3570
3571 /* We shouldn't simplify stack-related registers. */
3572 if ((!reload_completed || frame_pointer_needed)
c461d390 3573 && xregno == FRAME_POINTER_REGNUM)
5992d16a 3574 return -1;
3575
3576 if (FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
c78ae7d1 3577 && xregno == ARG_POINTER_REGNUM)
5992d16a 3578 return -1;
3579
c6a6cdaa 3580 if (xregno == STACK_POINTER_REGNUM
3581 /* We should convert hard stack register in LRA if it is
3582 possible. */
3583 && ! lra_in_progress)
5992d16a 3584 return -1;
3585
3586 /* Try to get the register offset. */
3587 subreg_get_info (xregno, xmode, offset, ymode, &info);
3588 if (!info.representable_p)
3589 return -1;
3590
3591 /* Make sure that the offsetted register value is in range. */
3592 yregno = xregno + info.offset;
3593 if (!HARD_REGISTER_NUM_P (yregno))
3594 return -1;
3595
3596 /* See whether (reg:YMODE YREGNO) is valid.
3597
3598 ??? We allow invalid registers if (reg:XMODE XREGNO) is also invalid.
7cb63246 3599 This is a kludge to work around how complex FP arguments are passed
3600 on IA-64 and should be fixed. See PR target/49226. */
5992d16a 3601 if (!HARD_REGNO_MODE_OK (yregno, ymode)
3602 && HARD_REGNO_MODE_OK (xregno, xmode))
3603 return -1;
3604
3605 return (int) yregno;
3606}
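
/* Illustrative sketch (not part of the original file): folding
   (subreg:M (reg:N HARDREG) BYTE) to a plain REG when the offset is
   representable.  X is assumed to be such a SUBREG of a hard register.  */
#if 0
static rtx
example_fold_hard_subreg (rtx x)
{
  int regno = simplify_subreg_regno (REGNO (SUBREG_REG (x)),
				     GET_MODE (SUBREG_REG (x)),
				     SUBREG_BYTE (x), GET_MODE (x));
  return regno >= 0 ? gen_rtx_REG (GET_MODE (x), regno) : NULL_RTX;
}
#endif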
3607
aa40f561 3608/* Return the final regno that a subreg expression refers to. */
2617fe26 3609unsigned int
dd9b9fc5 3610subreg_regno (const_rtx x)
701e46d0 3611{
3612 unsigned int ret;
3613 rtx subreg = SUBREG_REG (x);
3614 int regno = REGNO (subreg);
3615
2617fe26 3616 ret = regno + subreg_regno_offset (regno,
3617 GET_MODE (subreg),
701e46d0 3618 SUBREG_BYTE (x),
3619 GET_MODE (x));
3620 return ret;
3621
3622}
fe2ebfc8 3623
3624/* Return the number of registers that a subreg expression refers
3625 to. */
3626unsigned int
dd9b9fc5 3627subreg_nregs (const_rtx x)
dea7b504 3628{
3629 return subreg_nregs_with_regno (REGNO (SUBREG_REG (x)), x);
3630}
3631
3632/* Return the number of registers that a subreg REG with REGNO
3633 expression refers to. This is a copy of the rtlanal.c:subreg_nregs
3634 changed so that the regno can be passed in. */
3635
3636unsigned int
3637subreg_nregs_with_regno (unsigned int regno, const_rtx x)
fe2ebfc8 3638{
3639 struct subreg_info info;
3640 rtx subreg = SUBREG_REG (x);
fe2ebfc8 3641
3642 subreg_get_info (regno, GET_MODE (subreg), SUBREG_BYTE (x), GET_MODE (x),
3643 &info);
3644 return info.nregs;
3645}
3646
dea7b504 3647
7c2cc97e 3648struct parms_set_data
3649{
3650 int nregs;
3651 HARD_REG_SET regs;
3652};
3653
3654/* Helper function for noticing stores to parameter registers. */
3655static void
81a410b1 3656parms_set (rtx x, const_rtx pat ATTRIBUTE_UNUSED, void *data)
7c2cc97e 3657{
f7f3687c 3658 struct parms_set_data *const d = (struct parms_set_data *) data;
7c2cc97e 3659 if (REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER
3660 && TEST_HARD_REG_BIT (d->regs, REGNO (x)))
3661 {
3662 CLEAR_HARD_REG_BIT (d->regs, REGNO (x));
3663 d->nregs--;
3664 }
3665}
3666
2617fe26 3667/* Look backward for the first parameter to be loaded.
a6971e98 3668 Note that loads of all parameters will not necessarily be
3669 found if CSE has eliminated some of them (e.g., an argument
3670 to the outer function is passed down as a parameter).
7c2cc97e 3671 Do not skip BOUNDARY. */
3ccd8550 3672rtx_insn *
35f8d1c2 3673find_first_parameter_load (rtx_insn *call_insn, rtx_insn *boundary)
7c2cc97e 3674{
3675 struct parms_set_data parm;
35f8d1c2 3676 rtx p;
3677 rtx_insn *before, *first_set;
7c2cc97e 3678
3679 /* Since different machines initialize their parameter registers
3680 in different orders, assume nothing. Collect the set of all
3681 parameter registers. */
3682 CLEAR_HARD_REG_SET (parm.regs);
3683 parm.nregs = 0;
3684 for (p = CALL_INSN_FUNCTION_USAGE (call_insn); p; p = XEXP (p, 1))
3685 if (GET_CODE (XEXP (p, 0)) == USE
8ad4c111 3686 && REG_P (XEXP (XEXP (p, 0), 0)))
7c2cc97e 3687 {
04e579b6 3688 gcc_assert (REGNO (XEXP (XEXP (p, 0), 0)) < FIRST_PSEUDO_REGISTER);
7c2cc97e 3689
3690 /* We only care about registers which can hold function
3691 arguments. */
3692 if (!FUNCTION_ARG_REGNO_P (REGNO (XEXP (XEXP (p, 0), 0))))
3693 continue;
3694
3695 SET_HARD_REG_BIT (parm.regs, REGNO (XEXP (XEXP (p, 0), 0)));
3696 parm.nregs++;
3697 }
3698 before = call_insn;
a6971e98 3699 first_set = call_insn;
7c2cc97e 3700
3701 /* Search backward for the first set of a register in this set. */
3702 while (parm.nregs && before != boundary)
3703 {
3704 before = PREV_INSN (before);
3705
3706 /* It is possible that some loads got CSEed from one call to
3707 another. Stop in that case. */
6d7dc5b9 3708 if (CALL_P (before))
7c2cc97e 3709 break;
3710
26551efd 3711      /* Our caller must either ensure that we will find all sets
7c2cc97e 3712	 (in case the code has not been optimized yet), or take care
4a82352a 3713	 of possible labels by setting BOUNDARY to the preceding
7c2cc97e 3714	 CODE_LABEL.  */
6d7dc5b9 3715 if (LABEL_P (before))
26551efd 3716 {
04e579b6 3717 gcc_assert (before == boundary);
26551efd 3718 break;
3719 }
7c2cc97e 3720
bd10a7cd 3721 if (INSN_P (before))
a6971e98 3722 {
3723 int nregs_old = parm.nregs;
3724 note_stores (PATTERN (before), parms_set, &parm);
3725 /* If we found something that did not set a parameter reg,
3726 we're done. Do not keep going, as that might result
3727 in hoisting an insn before the setting of a pseudo
3728 that is used by the hoisted insn. */
3729 if (nregs_old != parm.nregs)
3730 first_set = before;
3731 else
3732 break;
3733 }
7c2cc97e 3734 }
35f8d1c2 3735 return first_set;
7c2cc97e 3736}
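/* For illustration (hypothetical argument registers 4 and 5 recorded
   in CALL_INSN_FUNCTION_USAGE):

       (set (reg:SI 4) ...)    <-- insn returned
       (set (reg:SI 5) ...)
       (call_insn ...)

   The backward walk above notes each argument-register store and
   returns the earliest one once all of them have been seen.  */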
fb20d6fa 3737
de132707 3738/* Return true if we should avoid inserting code between INSN and the
fb20d6fa 3739   preceding call instruction.  */
3740
3741bool
2eb8c261 3742keep_with_call_p (const rtx_insn *insn)
fb20d6fa 3743{
3744 rtx set;
3745
3746 if (INSN_P (insn) && (set = single_set (insn)) != NULL)
3747 {
8ad4c111 3748 if (REG_P (SET_DEST (set))
0c08cb26 3749 && REGNO (SET_DEST (set)) < FIRST_PSEUDO_REGISTER
fb20d6fa 3750 && fixed_regs[REGNO (SET_DEST (set))]
3751 && general_operand (SET_SRC (set), VOIDmode))
3752 return true;
8ad4c111 3753 if (REG_P (SET_SRC (set))
e1ce1485 3754 && targetm.calls.function_value_regno_p (REGNO (SET_SRC (set)))
8ad4c111 3755 && REG_P (SET_DEST (set))
fb20d6fa 3756 && REGNO (SET_DEST (set)) >= FIRST_PSEUDO_REGISTER)
3757 return true;
aee989f5 3758 /* There may be a stack pop just after the call and before the store
3759 of the return register. Search for the actual store when deciding
3760 if we can break or not. */
fb20d6fa 3761 if (SET_DEST (set) == stack_pointer_rtx)
3762 {
ce4469fa 3763 /* This CONST_CAST is okay because next_nonnote_insn just
5ca94202 3764 returns its argument and we assign it to a const_rtx
ce4469fa 3765 variable. */
2eb8c261 3766 const rtx_insn *i2
3767 = next_nonnote_insn (const_cast<rtx_insn *> (insn));
aee989f5 3768 if (i2 && keep_with_call_p (i2))
fb20d6fa 3769 return true;
3770 }
3771 }
3772 return false;
3773}
fa3cb24d 3774
b9de5542 3775/* Return true if LABEL is a target of JUMP_INSN. This applies only
3776 to non-complex jumps. That is, direct unconditional, conditional,
3777 and tablejumps, but not computed jumps or returns. It also does
3778 not apply to the fallthru case of a conditional jump. */
3779
3780bool
28fbb2b5 3781label_is_jump_target_p (const_rtx label, const rtx_insn *jump_insn)
b9de5542 3782{
3783 rtx tmp = JUMP_LABEL (jump_insn);
c86d86ff 3784 rtx_jump_table_data *table;
b9de5542 3785
3786 if (label == tmp)
3787 return true;
3788
c86d86ff 3789 if (tablejump_p (jump_insn, NULL, &table))
b9de5542 3790 {
b77639be 3791 rtvec vec = table->get_labels ();
b9de5542 3792 int i, veclen = GET_NUM_ELEM (vec);
3793
3794 for (i = 0; i < veclen; ++i)
3795 if (XEXP (RTVEC_ELT (vec, i), 0) == label)
3796 return true;
3797 }
3798
a8d1dae0 3799 if (find_reg_note (jump_insn, REG_LABEL_TARGET, label))
3800 return true;
3801
b9de5542 3802 return false;
3803}
3804
26619827 3805\f
3806/* Return an estimate of the cost of computing rtx X.
3807 One use is in cse, to decide which expression to keep in the hash table.
3808 Another is in rtl generation, to pick the cheapest way to multiply.
48e1416a 3809 Other uses like the latter are expected in the future.
f529eb25 3810
20d892d1 3811 X appears as operand OPNO in an expression with code OUTER_CODE.
3812 SPEED specifies whether costs optimized for speed or size should
f529eb25 3813 be returned. */
26619827 3814
3815int
20d892d1 3816rtx_cost (rtx x, enum rtx_code outer_code, int opno, bool speed)
26619827 3817{
3818 int i, j;
3819 enum rtx_code code;
3820 const char *fmt;
3821 int total;
3b3b530a 3822 int factor;
26619827 3823
3824 if (x == 0)
3825 return 0;
3826
3b3b530a 3827 /* A size N times larger than UNITS_PER_WORD likely needs N times as
3828 many insns, taking N times as long. */
3829 factor = GET_MODE_SIZE (GET_MODE (x)) / UNITS_PER_WORD;
3830 if (factor == 0)
3831 factor = 1;
3832
26619827 3833 /* Compute the default costs of certain things.
3834 Note that targetm.rtx_costs can override the defaults. */
3835
3836 code = GET_CODE (x);
3837 switch (code)
3838 {
3839 case MULT:
3b3b530a 3840 /* Multiplication has time-complexity O(N*N), where N is the
3841 number of units (translated from digits) when using
3842 schoolbook long multiplication. */
3843 total = factor * factor * COSTS_N_INSNS (5);
26619827 3844 break;
3845 case DIV:
3846 case UDIV:
3847 case MOD:
3848 case UMOD:
3b3b530a 3849 /* Similarly, complexity for schoolbook long division. */
3850 total = factor * factor * COSTS_N_INSNS (7);
26619827 3851 break;
3852 case USE:
67a5e20a 3853 /* Used in combine.c as a marker. */
26619827 3854 total = 0;
3855 break;
3b3b530a 3856 case SET:
3857 /* A SET doesn't have a mode, so let's look at the SET_DEST to get
3858 the mode for the factor. */
3859 factor = GET_MODE_SIZE (GET_MODE (SET_DEST (x))) / UNITS_PER_WORD;
3860 if (factor == 0)
3861 factor = 1;
 3862      /* Fall through.  */
26619827 3863 default:
3b3b530a 3864 total = factor * COSTS_N_INSNS (1);
26619827 3865 }
3866
3867 switch (code)
3868 {
3869 case REG:
3870 return 0;
3871
3872 case SUBREG:
8eb9bb0e 3873 total = 0;
26619827 3874 /* If we can't tie these modes, make this expensive. The larger
3875 the mode, the more expensive it is. */
3876 if (! MODES_TIEABLE_P (GET_MODE (x), GET_MODE (SUBREG_REG (x))))
3b3b530a 3877 return COSTS_N_INSNS (2 + factor);
26619827 3878 break;
3879
3880 default:
20d892d1 3881 if (targetm.rtx_costs (x, code, outer_code, opno, &total, speed))
26619827 3882 return total;
3883 break;
3884 }
3885
3886 /* Sum the costs of the sub-rtx's, plus cost of this operation,
3887 which is already in total. */
3888
3889 fmt = GET_RTX_FORMAT (code);
3890 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
3891 if (fmt[i] == 'e')
20d892d1 3892 total += rtx_cost (XEXP (x, i), code, i, speed);
26619827 3893 else if (fmt[i] == 'E')
3894 for (j = 0; j < XVECLEN (x, i); j++)
20d892d1 3895 total += rtx_cost (XVECEXP (x, i, j), code, i, speed);
26619827 3896
3897 return total;
3898}
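/* For illustration of the defaults above, absent a target override:
   a single-word (plus:SI (reg) (reg)) costs COSTS_N_INSNS (1), its
   REG operands adding nothing; a double-word multiplication on a
   32-bit target gets FACTOR == 2 and so costs
   2 * 2 * COSTS_N_INSNS (5), reflecting the quadratic complexity of
   schoolbook multiplication.  */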
c9a03487 3899
3900/* Fill in the structure C with information about both speed and size rtx
20d892d1 3901 costs for X, which is operand OPNO in an expression with code OUTER. */
c9a03487 3902
3903void
20d892d1 3904get_full_rtx_cost (rtx x, enum rtx_code outer, int opno,
3905 struct full_rtx_costs *c)
c9a03487 3906{
20d892d1 3907 c->speed = rtx_cost (x, outer, opno, true);
3908 c->size = rtx_cost (x, outer, opno, false);
c9a03487 3909}
3910
26619827 3911\f
 3912/* Return the cost of address expression X.
48e1416a 3913   Expect that X is a properly formed address reference.
f529eb25 3914
 3915   The SPEED parameter specifies whether costs optimized for speed or
 3916   size should be returned.  */
26619827 3917
3918int
3754d046 3919address_cost (rtx x, machine_mode mode, addr_space_t as, bool speed)
26619827 3920{
26619827 3921  /* We may be asked for the cost of various unusual addresses, such as
 3922     operands of a push instruction.  It is not worthwhile to complicate
 3923     the target hook with such cases.  */
3924
bd1a81f7 3925 if (!memory_address_addr_space_p (mode, x, as))
26619827 3926 return 1000;
3927
d9c5e5f4 3928 return targetm.address_cost (x, mode, as, speed);
26619827 3929}
3930
3931/* If the target doesn't override, compute the cost as with arithmetic. */
3932
3933int
3754d046 3934default_address_cost (rtx x, machine_mode, addr_space_t, bool speed)
26619827 3935{
20d892d1 3936 return rtx_cost (x, MEM, 0, speed);
26619827 3937}
d263732c 3938\f
3939
3940unsigned HOST_WIDE_INT
3754d046 3941nonzero_bits (const_rtx x, machine_mode mode)
d263732c 3942{
3943 return cached_nonzero_bits (x, mode, NULL_RTX, VOIDmode, 0);
3944}
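/* For illustration: nonzero_bits ((and:SI x (const_int 255)), SImode)
   is at most 0xff, since the AND case intersects the nonzero bits of
   both operands; (zero_extend:SI y) for a QImode Y is likewise masked
   down to 0xff.  */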
3945
3946unsigned int
3754d046 3947num_sign_bit_copies (const_rtx x, machine_mode mode)
d263732c 3948{
3949 return cached_num_sign_bit_copies (x, mode, NULL_RTX, VOIDmode, 0);
3950}
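/* For illustration: num_sign_bit_copies ((sign_extend:SI (reg:QI r)),
   SImode) is at least 32 - 8 + 1 == 25, since the SIGN_EXTEND case
   credits every newly created bit as a copy of the sign bit.  */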
3951
3952/* The function cached_nonzero_bits is a wrapper around nonzero_bits1.
3953 It avoids exponential behavior in nonzero_bits1 when X has
3954 identical subexpressions on the first or the second level. */
3955
3956static unsigned HOST_WIDE_INT
3754d046 3957cached_nonzero_bits (const_rtx x, machine_mode mode, const_rtx known_x,
3958 machine_mode known_mode,
d263732c 3959 unsigned HOST_WIDE_INT known_ret)
3960{
3961 if (x == known_x && mode == known_mode)
3962 return known_ret;
3963
3964 /* Try to find identical subexpressions. If found call
3965 nonzero_bits1 on X with the subexpressions as KNOWN_X and the
3966 precomputed value for the subexpression as KNOWN_RET. */
3967
3968 if (ARITHMETIC_P (x))
3969 {
3970 rtx x0 = XEXP (x, 0);
3971 rtx x1 = XEXP (x, 1);
3972
3973 /* Check the first level. */
3974 if (x0 == x1)
3975 return nonzero_bits1 (x, mode, x0, mode,
3976 cached_nonzero_bits (x0, mode, known_x,
3977 known_mode, known_ret));
3978
3979 /* Check the second level. */
3980 if (ARITHMETIC_P (x0)
3981 && (x1 == XEXP (x0, 0) || x1 == XEXP (x0, 1)))
3982 return nonzero_bits1 (x, mode, x1, mode,
3983 cached_nonzero_bits (x1, mode, known_x,
3984 known_mode, known_ret));
3985
3986 if (ARITHMETIC_P (x1)
3987 && (x0 == XEXP (x1, 0) || x0 == XEXP (x1, 1)))
3988 return nonzero_bits1 (x, mode, x0, mode,
3989 cached_nonzero_bits (x0, mode, known_x,
3990 known_mode, known_ret));
3991 }
3992
3993 return nonzero_bits1 (x, mode, known_x, known_mode, known_ret);
3994}
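/* For illustration: for (plus:SI (reg:SI r) (reg:SI r)) both operands
   are the same shared rtx, so the first-level check above computes the
   nonzero bits of R only once and hands the result to nonzero_bits1 as
   KNOWN_RET instead of recursing into each operand separately.  */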
3995
3996/* We let num_sign_bit_copies recur into nonzero_bits as that is useful.
3997 We don't let nonzero_bits recur into num_sign_bit_copies, because that
3998 is less useful. We can't allow both, because that results in exponential
3999 run time recursion. There is a nullstone testcase that triggered
4000 this. This macro avoids accidental uses of num_sign_bit_copies. */
4001#define cached_num_sign_bit_copies sorry_i_am_preventing_exponential_behavior
4002
4003/* Given an expression, X, compute which bits in X can be nonzero.
4004 We don't care about bits outside of those defined in MODE.
4005
 4006   For most X this is simply GET_MODE_MASK (MODE), but if X is
 4007   an arithmetic operation, we can do better.  */
4008
4009static unsigned HOST_WIDE_INT
3754d046 4010nonzero_bits1 (const_rtx x, machine_mode mode, const_rtx known_x,
4011 machine_mode known_mode,
d263732c 4012 unsigned HOST_WIDE_INT known_ret)
4013{
4014 unsigned HOST_WIDE_INT nonzero = GET_MODE_MASK (mode);
4015 unsigned HOST_WIDE_INT inner_nz;
4016 enum rtx_code code;
3754d046 4017 machine_mode inner_mode;
ded805e6 4018 unsigned int mode_width = GET_MODE_PRECISION (mode);
d263732c 4019
6d5136ab 4020 /* For floating-point and vector values, assume all bits are needed. */
4021 if (FLOAT_MODE_P (GET_MODE (x)) || FLOAT_MODE_P (mode)
4022 || VECTOR_MODE_P (GET_MODE (x)) || VECTOR_MODE_P (mode))
d263732c 4023 return nonzero;
4024
4025 /* If X is wider than MODE, use its mode instead. */
ded805e6 4026 if (GET_MODE_PRECISION (GET_MODE (x)) > mode_width)
d263732c 4027 {
4028 mode = GET_MODE (x);
4029 nonzero = GET_MODE_MASK (mode);
ded805e6 4030 mode_width = GET_MODE_PRECISION (mode);
d263732c 4031 }
4032
4033 if (mode_width > HOST_BITS_PER_WIDE_INT)
4034 /* Our only callers in this case look for single bit values. So
4035 just return the mode mask. Those tests will then be false. */
4036 return nonzero;
4037
4038#ifndef WORD_REGISTER_OPERATIONS
4039 /* If MODE is wider than X, but both are a single word for both the host
4040 and target machines, we can compute this from which bits of the
4041 object might be nonzero in its own mode, taking into account the fact
4042 that on many CISC machines, accessing an object in a wider mode
4043 causes the high-order bits to become undefined. So they are
4044 not known to be zero. */
4045
4046 if (GET_MODE (x) != VOIDmode && GET_MODE (x) != mode
ded805e6 4047 && GET_MODE_PRECISION (GET_MODE (x)) <= BITS_PER_WORD
4048 && GET_MODE_PRECISION (GET_MODE (x)) <= HOST_BITS_PER_WIDE_INT
4049 && GET_MODE_PRECISION (mode) > GET_MODE_PRECISION (GET_MODE (x)))
d263732c 4050 {
4051 nonzero &= cached_nonzero_bits (x, GET_MODE (x),
4052 known_x, known_mode, known_ret);
4053 nonzero |= GET_MODE_MASK (mode) & ~GET_MODE_MASK (GET_MODE (x));
4054 return nonzero;
4055 }
4056#endif
4057
4058 code = GET_CODE (x);
4059 switch (code)
4060 {
4061 case REG:
4062#if defined(POINTERS_EXTEND_UNSIGNED) && !defined(HAVE_ptr_extend)
4063 /* If pointers extend unsigned and this is a pointer in Pmode, say that
4064 all the bits above ptr_mode are known to be zero. */
04ec15fa 4065 /* As we do not know which address space the pointer is referring to,
98155838 4066 we can do this only if the target does not support different pointer
4067 or address modes depending on the address space. */
4068 if (target_default_pointer_address_modes_p ()
4069 && POINTERS_EXTEND_UNSIGNED && GET_MODE (x) == Pmode
d263732c 4070 && REG_POINTER (x))
4071 nonzero &= GET_MODE_MASK (ptr_mode);
4072#endif
4073
4074 /* Include declared information about alignment of pointers. */
4075 /* ??? We don't properly preserve REG_POINTER changes across
4076 pointer-to-integer casts, so we can't trust it except for
4077 things that we know must be pointers. See execute/960116-1.c. */
4078 if ((x == stack_pointer_rtx
4079 || x == frame_pointer_rtx
4080 || x == arg_pointer_rtx)
4081 && REGNO_POINTER_ALIGN (REGNO (x)))
4082 {
4083 unsigned HOST_WIDE_INT alignment
4084 = REGNO_POINTER_ALIGN (REGNO (x)) / BITS_PER_UNIT;
4085
4086#ifdef PUSH_ROUNDING
4087 /* If PUSH_ROUNDING is defined, it is possible for the
4088 stack to be momentarily aligned only to that amount,
4089 so we pick the least alignment. */
4090 if (x == stack_pointer_rtx && PUSH_ARGS)
4091 alignment = MIN ((unsigned HOST_WIDE_INT) PUSH_ROUNDING (1),
4092 alignment);
4093#endif
4094
4095 nonzero &= ~(alignment - 1);
4096 }
4097
4098 {
4099 unsigned HOST_WIDE_INT nonzero_for_hook = nonzero;
47cfb7f4 4100 rtx new_rtx = rtl_hooks.reg_nonzero_bits (x, mode, known_x,
d263732c 4101 known_mode, known_ret,
4102 &nonzero_for_hook);
4103
47cfb7f4 4104 if (new_rtx)
4105 nonzero_for_hook &= cached_nonzero_bits (new_rtx, mode, known_x,
d263732c 4106 known_mode, known_ret);
4107
4108 return nonzero_for_hook;
4109 }
4110
4111 case CONST_INT:
4112#ifdef SHORT_IMMEDIATES_SIGN_EXTEND
4113 /* If X is negative in MODE, sign-extend the value. */
9d8859f1 4114 if (INTVAL (x) > 0
4115 && mode_width < BITS_PER_WORD
4116 && (UINTVAL (x) & ((unsigned HOST_WIDE_INT) 1 << (mode_width - 1)))
4117 != 0)
561f0ec8 4118 return UINTVAL (x) | (HOST_WIDE_INT_M1U << mode_width);
d263732c 4119#endif
4120
9d8859f1 4121 return UINTVAL (x);
d263732c 4122
4123 case MEM:
4124#ifdef LOAD_EXTEND_OP
 4125      /* On many, if not most, RISC machines, reading a byte from memory
4126 zeros the rest of the register. Noticing that fact saves a lot
4127 of extra zero-extends. */
4128 if (LOAD_EXTEND_OP (GET_MODE (x)) == ZERO_EXTEND)
4129 nonzero &= GET_MODE_MASK (GET_MODE (x));
4130#endif
4131 break;
4132
4133 case EQ: case NE:
4134 case UNEQ: case LTGT:
4135 case GT: case GTU: case UNGT:
4136 case LT: case LTU: case UNLT:
4137 case GE: case GEU: case UNGE:
4138 case LE: case LEU: case UNLE:
4139 case UNORDERED: case ORDERED:
d263732c 4140 /* If this produces an integer result, we know which bits are set.
4141 Code here used to clear bits outside the mode of X, but that is
4142 now done above. */
48e1416a 4143 /* Mind that MODE is the mode the caller wants to look at this
4144 operation in, and not the actual operation mode. We can wind
8850c3db 4145 up with (subreg:DI (gt:V4HI x y)), and we don't have anything
4146 that describes the results of a vector compare. */
4147 if (GET_MODE_CLASS (GET_MODE (x)) == MODE_INT
d263732c 4148 && mode_width <= HOST_BITS_PER_WIDE_INT)
4149 nonzero = STORE_FLAG_VALUE;
4150 break;
4151
4152 case NEG:
4153#if 0
4154 /* Disabled to avoid exponential mutual recursion between nonzero_bits
4155 and num_sign_bit_copies. */
4156 if (num_sign_bit_copies (XEXP (x, 0), GET_MODE (x))
ded805e6 4157 == GET_MODE_PRECISION (GET_MODE (x)))
d263732c 4158 nonzero = 1;
4159#endif
4160
b0676cad 4161 if (GET_MODE_PRECISION (GET_MODE (x)) < mode_width)
d263732c 4162 nonzero |= (GET_MODE_MASK (mode) & ~GET_MODE_MASK (GET_MODE (x)));
4163 break;
4164
4165 case ABS:
4166#if 0
4167 /* Disabled to avoid exponential mutual recursion between nonzero_bits
4168 and num_sign_bit_copies. */
4169 if (num_sign_bit_copies (XEXP (x, 0), GET_MODE (x))
ded805e6 4170 == GET_MODE_PRECISION (GET_MODE (x)))
d263732c 4171 nonzero = 1;
4172#endif
4173 break;
4174
4175 case TRUNCATE:
4176 nonzero &= (cached_nonzero_bits (XEXP (x, 0), mode,
4177 known_x, known_mode, known_ret)
4178 & GET_MODE_MASK (mode));
4179 break;
4180
4181 case ZERO_EXTEND:
4182 nonzero &= cached_nonzero_bits (XEXP (x, 0), mode,
4183 known_x, known_mode, known_ret);
4184 if (GET_MODE (XEXP (x, 0)) != VOIDmode)
4185 nonzero &= GET_MODE_MASK (GET_MODE (XEXP (x, 0)));
4186 break;
4187
4188 case SIGN_EXTEND:
 4189      /* If the sign bit is known clear, this is the same as ZERO_EXTEND.
 4190	 Otherwise, indicate that any of the bits in the outer mode but
 4191	 not in the inner mode may be nonzero.  */
4192 inner_nz = cached_nonzero_bits (XEXP (x, 0), mode,
4193 known_x, known_mode, known_ret);
4194 if (GET_MODE (XEXP (x, 0)) != VOIDmode)
4195 {
4196 inner_nz &= GET_MODE_MASK (GET_MODE (XEXP (x, 0)));
f92430e0 4197 if (val_signbit_known_set_p (GET_MODE (XEXP (x, 0)), inner_nz))
d263732c 4198 inner_nz |= (GET_MODE_MASK (mode)
4199 & ~GET_MODE_MASK (GET_MODE (XEXP (x, 0))));
4200 }
4201
4202 nonzero &= inner_nz;
4203 break;
4204
4205 case AND:
4206 nonzero &= cached_nonzero_bits (XEXP (x, 0), mode,
4207 known_x, known_mode, known_ret)
4208 & cached_nonzero_bits (XEXP (x, 1), mode,
4209 known_x, known_mode, known_ret);
4210 break;
4211
4212 case XOR: case IOR:
4213 case UMIN: case UMAX: case SMIN: case SMAX:
4214 {
9d8859f1 4215 unsigned HOST_WIDE_INT nonzero0
4216 = cached_nonzero_bits (XEXP (x, 0), mode,
4217 known_x, known_mode, known_ret);
d263732c 4218
4219 /* Don't call nonzero_bits for the second time if it cannot change
4220 anything. */
4221 if ((nonzero & nonzero0) != nonzero)
4222 nonzero &= nonzero0
4223 | cached_nonzero_bits (XEXP (x, 1), mode,
4224 known_x, known_mode, known_ret);
4225 }
4226 break;
4227
4228 case PLUS: case MINUS:
4229 case MULT:
4230 case DIV: case UDIV:
4231 case MOD: case UMOD:
4232 /* We can apply the rules of arithmetic to compute the number of
4233 high- and low-order zero bits of these operations. We start by
4234 computing the width (position of the highest-order nonzero bit)
4235 and the number of low-order zero bits for each value. */
4236 {
9d8859f1 4237 unsigned HOST_WIDE_INT nz0
4238 = cached_nonzero_bits (XEXP (x, 0), mode,
4239 known_x, known_mode, known_ret);
4240 unsigned HOST_WIDE_INT nz1
4241 = cached_nonzero_bits (XEXP (x, 1), mode,
4242 known_x, known_mode, known_ret);
ded805e6 4243 int sign_index = GET_MODE_PRECISION (GET_MODE (x)) - 1;
d263732c 4244 int width0 = floor_log2 (nz0) + 1;
4245 int width1 = floor_log2 (nz1) + 1;
4246 int low0 = floor_log2 (nz0 & -nz0);
4247 int low1 = floor_log2 (nz1 & -nz1);
9d8859f1 4248 unsigned HOST_WIDE_INT op0_maybe_minusp
4249 = nz0 & ((unsigned HOST_WIDE_INT) 1 << sign_index);
4250 unsigned HOST_WIDE_INT op1_maybe_minusp
4251 = nz1 & ((unsigned HOST_WIDE_INT) 1 << sign_index);
d263732c 4252 unsigned int result_width = mode_width;
4253 int result_low = 0;
4254
4255 switch (code)
4256 {
4257 case PLUS:
4258 result_width = MAX (width0, width1) + 1;
4259 result_low = MIN (low0, low1);
4260 break;
4261 case MINUS:
4262 result_low = MIN (low0, low1);
4263 break;
4264 case MULT:
4265 result_width = width0 + width1;
4266 result_low = low0 + low1;
4267 break;
4268 case DIV:
4269 if (width1 == 0)
4270 break;
9d8859f1 4271 if (!op0_maybe_minusp && !op1_maybe_minusp)
d263732c 4272 result_width = width0;
4273 break;
4274 case UDIV:
4275 if (width1 == 0)
4276 break;
4277 result_width = width0;
4278 break;
4279 case MOD:
4280 if (width1 == 0)
4281 break;
9d8859f1 4282 if (!op0_maybe_minusp && !op1_maybe_minusp)
d263732c 4283 result_width = MIN (width0, width1);
4284 result_low = MIN (low0, low1);
4285 break;
4286 case UMOD:
4287 if (width1 == 0)
4288 break;
4289 result_width = MIN (width0, width1);
4290 result_low = MIN (low0, low1);
4291 break;
4292 default:
04e579b6 4293 gcc_unreachable ();
d263732c 4294 }
4295
4296 if (result_width < mode_width)
9d8859f1 4297 nonzero &= ((unsigned HOST_WIDE_INT) 1 << result_width) - 1;
d263732c 4298
4299 if (result_low > 0)
9d8859f1 4300 nonzero &= ~(((unsigned HOST_WIDE_INT) 1 << result_low) - 1);
d263732c 4301 }
4302 break;
4303
4304 case ZERO_EXTRACT:
971ba038 4305 if (CONST_INT_P (XEXP (x, 1))
d263732c 4306 && INTVAL (XEXP (x, 1)) < HOST_BITS_PER_WIDE_INT)
9d8859f1 4307 nonzero &= ((unsigned HOST_WIDE_INT) 1 << INTVAL (XEXP (x, 1))) - 1;
d263732c 4308 break;
4309
4310 case SUBREG:
4311 /* If this is a SUBREG formed for a promoted variable that has
4312 been zero-extended, we know that at least the high-order bits
4313 are zero, though others might be too. */
4314
e8629f9e 4315 if (SUBREG_PROMOTED_VAR_P (x) && SUBREG_PROMOTED_UNSIGNED_P (x))
d263732c 4316 nonzero = GET_MODE_MASK (GET_MODE (x))
4317 & cached_nonzero_bits (SUBREG_REG (x), GET_MODE (x),
4318 known_x, known_mode, known_ret);
4319
f92430e0 4320 inner_mode = GET_MODE (SUBREG_REG (x));
d263732c 4321 /* If the inner mode is a single word for both the host and target
4322 machines, we can compute this from which bits of the inner
4323 object might be nonzero. */
ded805e6 4324 if (GET_MODE_PRECISION (inner_mode) <= BITS_PER_WORD
4325 && (GET_MODE_PRECISION (inner_mode) <= HOST_BITS_PER_WIDE_INT))
d263732c 4326 {
4327 nonzero &= cached_nonzero_bits (SUBREG_REG (x), mode,
4328 known_x, known_mode, known_ret);
4329
4330#if defined (WORD_REGISTER_OPERATIONS) && defined (LOAD_EXTEND_OP)
4331 /* If this is a typical RISC machine, we only have to worry
4332 about the way loads are extended. */
f92430e0 4333 if ((LOAD_EXTEND_OP (inner_mode) == SIGN_EXTEND
4334 ? val_signbit_known_set_p (inner_mode, nonzero)
4335 : LOAD_EXTEND_OP (inner_mode) != ZERO_EXTEND)
e16ceb8e 4336 || !MEM_P (SUBREG_REG (x)))
d263732c 4337#endif
4338 {
4339 /* On many CISC machines, accessing an object in a wider mode
4340 causes the high-order bits to become undefined. So they are
4341 not known to be zero. */
ded805e6 4342 if (GET_MODE_PRECISION (GET_MODE (x))
4343 > GET_MODE_PRECISION (inner_mode))
d263732c 4344 nonzero |= (GET_MODE_MASK (GET_MODE (x))
f92430e0 4345 & ~GET_MODE_MASK (inner_mode));
d263732c 4346 }
4347 }
4348 break;
4349
4350 case ASHIFTRT:
4351 case LSHIFTRT:
4352 case ASHIFT:
4353 case ROTATE:
4354 /* The nonzero bits are in two classes: any bits within MODE
4355 that aren't in GET_MODE (x) are always significant. The rest of the
4356 nonzero bits are those that are significant in the operand of
4357 the shift when shifted the appropriate number of bits. This
4358 shows that high-order bits are cleared by the right shift and
4359 low-order bits by left shifts. */
971ba038 4360 if (CONST_INT_P (XEXP (x, 1))
d263732c 4361 && INTVAL (XEXP (x, 1)) >= 0
6026d749 4362 && INTVAL (XEXP (x, 1)) < HOST_BITS_PER_WIDE_INT
ded805e6 4363 && INTVAL (XEXP (x, 1)) < GET_MODE_PRECISION (GET_MODE (x)))
d263732c 4364 {
3754d046 4365 machine_mode inner_mode = GET_MODE (x);
ded805e6 4366 unsigned int width = GET_MODE_PRECISION (inner_mode);
d263732c 4367 int count = INTVAL (XEXP (x, 1));
4368 unsigned HOST_WIDE_INT mode_mask = GET_MODE_MASK (inner_mode);
9d8859f1 4369 unsigned HOST_WIDE_INT op_nonzero
4370 = cached_nonzero_bits (XEXP (x, 0), mode,
4371 known_x, known_mode, known_ret);
d263732c 4372 unsigned HOST_WIDE_INT inner = op_nonzero & mode_mask;
4373 unsigned HOST_WIDE_INT outer = 0;
4374
4375 if (mode_width > width)
4376 outer = (op_nonzero & nonzero & ~mode_mask);
4377
4378 if (code == LSHIFTRT)
4379 inner >>= count;
4380 else if (code == ASHIFTRT)
4381 {
4382 inner >>= count;
4383
4384 /* If the sign bit may have been nonzero before the shift, we
4385 need to mark all the places it could have been copied to
4386 by the shift as possibly nonzero. */
9d8859f1 4387 if (inner & ((unsigned HOST_WIDE_INT) 1 << (width - 1 - count)))
4388 inner |= (((unsigned HOST_WIDE_INT) 1 << count) - 1)
4389 << (width - count);
d263732c 4390 }
4391 else if (code == ASHIFT)
4392 inner <<= count;
4393 else
4394 inner = ((inner << (count % width)
4395 | (inner >> (width - (count % width)))) & mode_mask);
4396
4397 nonzero &= (outer | inner);
4398 }
4399 break;
4400
4401 case FFS:
4402 case POPCOUNT:
4403 /* This is at most the number of bits in the mode. */
9d8859f1 4404 nonzero = ((unsigned HOST_WIDE_INT) 2 << (floor_log2 (mode_width))) - 1;
d263732c 4405 break;
4406
4407 case CLZ:
4408 /* If CLZ has a known value at zero, then the nonzero bits are
4409 that value, plus the number of bits in the mode minus one. */
4410 if (CLZ_DEFINED_VALUE_AT_ZERO (mode, nonzero))
9d8859f1 4411 nonzero
4412 |= ((unsigned HOST_WIDE_INT) 1 << (floor_log2 (mode_width))) - 1;
d263732c 4413 else
4414 nonzero = -1;
4415 break;
4416
4417 case CTZ:
4418 /* If CTZ has a known value at zero, then the nonzero bits are
4419 that value, plus the number of bits in the mode minus one. */
4420 if (CTZ_DEFINED_VALUE_AT_ZERO (mode, nonzero))
9d8859f1 4421 nonzero
4422 |= ((unsigned HOST_WIDE_INT) 1 << (floor_log2 (mode_width))) - 1;
d263732c 4423 else
4424 nonzero = -1;
4425 break;
4426
3b23b4cc 4427 case CLRSB:
4428 /* This is at most the number of bits in the mode minus 1. */
4429 nonzero = ((unsigned HOST_WIDE_INT) 1 << (floor_log2 (mode_width))) - 1;
4430 break;
4431
d263732c 4432 case PARITY:
4433 nonzero = 1;
4434 break;
4435
4436 case IF_THEN_ELSE:
4437 {
9d8859f1 4438 unsigned HOST_WIDE_INT nonzero_true
4439 = cached_nonzero_bits (XEXP (x, 1), mode,
4440 known_x, known_mode, known_ret);
d263732c 4441
4442 /* Don't call nonzero_bits for the second time if it cannot change
4443 anything. */
4444 if ((nonzero & nonzero_true) != nonzero)
4445 nonzero &= nonzero_true
4446 | cached_nonzero_bits (XEXP (x, 2), mode,
4447 known_x, known_mode, known_ret);
4448 }
4449 break;
4450
4451 default:
4452 break;
4453 }
4454
4455 return nonzero;
4456}
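/* For illustration of the PLUS arithmetic above: if NZ0 == 0xff
   (WIDTH0 == 8, LOW0 == 0) and NZ1 == 0xf0 (WIDTH1 == 8, LOW1 == 4),
   then RESULT_WIDTH == 9 and RESULT_LOW == 0, so the sum is known to
   fit in nine bits and NONZERO is masked down to 0x1ff.  */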
4457
4458/* See the macro definition above. */
4459#undef cached_num_sign_bit_copies
4460
4461\f
4462/* The function cached_num_sign_bit_copies is a wrapper around
4463 num_sign_bit_copies1. It avoids exponential behavior in
4464 num_sign_bit_copies1 when X has identical subexpressions on the
4465 first or the second level. */
4466
4467static unsigned int
3754d046 4468cached_num_sign_bit_copies (const_rtx x, machine_mode mode, const_rtx known_x,
4469 machine_mode known_mode,
d263732c 4470 unsigned int known_ret)
4471{
4472 if (x == known_x && mode == known_mode)
4473 return known_ret;
4474
4475 /* Try to find identical subexpressions. If found call
4476 num_sign_bit_copies1 on X with the subexpressions as KNOWN_X and
4477 the precomputed value for the subexpression as KNOWN_RET. */
4478
4479 if (ARITHMETIC_P (x))
4480 {
4481 rtx x0 = XEXP (x, 0);
4482 rtx x1 = XEXP (x, 1);
4483
4484 /* Check the first level. */
4485 if (x0 == x1)
4486 return
4487 num_sign_bit_copies1 (x, mode, x0, mode,
4488 cached_num_sign_bit_copies (x0, mode, known_x,
4489 known_mode,
4490 known_ret));
4491
4492 /* Check the second level. */
4493 if (ARITHMETIC_P (x0)
4494 && (x1 == XEXP (x0, 0) || x1 == XEXP (x0, 1)))
4495 return
4496 num_sign_bit_copies1 (x, mode, x1, mode,
4497 cached_num_sign_bit_copies (x1, mode, known_x,
4498 known_mode,
4499 known_ret));
4500
4501 if (ARITHMETIC_P (x1)
4502 && (x0 == XEXP (x1, 0) || x0 == XEXP (x1, 1)))
4503 return
4504 num_sign_bit_copies1 (x, mode, x0, mode,
4505 cached_num_sign_bit_copies (x0, mode, known_x,
4506 known_mode,
4507 known_ret));
4508 }
4509
4510 return num_sign_bit_copies1 (x, mode, known_x, known_mode, known_ret);
4511}
4512
4513/* Return the number of bits at the high-order end of X that are known to
4514 be equal to the sign bit. X will be used in mode MODE; if MODE is
4515 VOIDmode, X will be used in its own mode. The returned value will always
4516 be between 1 and the number of bits in MODE. */
4517
4518static unsigned int
3754d046 4519num_sign_bit_copies1 (const_rtx x, machine_mode mode, const_rtx known_x,
4520 machine_mode known_mode,
d263732c 4521 unsigned int known_ret)
4522{
4523 enum rtx_code code = GET_CODE (x);
ded805e6 4524 unsigned int bitwidth = GET_MODE_PRECISION (mode);
d263732c 4525 int num0, num1, result;
4526 unsigned HOST_WIDE_INT nonzero;
4527
4528 /* If we weren't given a mode, use the mode of X. If the mode is still
4529 VOIDmode, we don't know anything. Likewise if one of the modes is
4530 floating-point. */
4531
4532 if (mode == VOIDmode)
4533 mode = GET_MODE (x);
4534
6d5136ab 4535 if (mode == VOIDmode || FLOAT_MODE_P (mode) || FLOAT_MODE_P (GET_MODE (x))
4536 || VECTOR_MODE_P (GET_MODE (x)) || VECTOR_MODE_P (mode))
d263732c 4537 return 1;
4538
4539 /* For a smaller object, just ignore the high bits. */
ded805e6 4540 if (bitwidth < GET_MODE_PRECISION (GET_MODE (x)))
d263732c 4541 {
4542 num0 = cached_num_sign_bit_copies (x, GET_MODE (x),
4543 known_x, known_mode, known_ret);
4544 return MAX (1,
ded805e6 4545 num0 - (int) (GET_MODE_PRECISION (GET_MODE (x)) - bitwidth));
d263732c 4546 }
4547
ded805e6 4548 if (GET_MODE (x) != VOIDmode && bitwidth > GET_MODE_PRECISION (GET_MODE (x)))
d263732c 4549 {
4550#ifndef WORD_REGISTER_OPERATIONS
ded805e6 4551 /* If this machine does not do all register operations on the entire
4552 register and MODE is wider than the mode of X, we can say nothing
4553 at all about the high-order bits. */
d263732c 4554 return 1;
4555#else
4556 /* Likewise on machines that do, if the mode of the object is smaller
4557 than a word and loads of that size don't sign extend, we can say
4558 nothing about the high order bits. */
ded805e6 4559 if (GET_MODE_PRECISION (GET_MODE (x)) < BITS_PER_WORD
d263732c 4560#ifdef LOAD_EXTEND_OP
4561 && LOAD_EXTEND_OP (GET_MODE (x)) != SIGN_EXTEND
4562#endif
4563 )
4564 return 1;
4565#endif
4566 }
4567
4568 switch (code)
4569 {
4570 case REG:
4571
4572#if defined(POINTERS_EXTEND_UNSIGNED) && !defined(HAVE_ptr_extend)
4573 /* If pointers extend signed and this is a pointer in Pmode, say that
4574 all the bits above ptr_mode are known to be sign bit copies. */
04ec15fa 4575 /* As we do not know which address space the pointer is referring to,
98155838 4576 we can do this only if the target does not support different pointer
4577 or address modes depending on the address space. */
4578 if (target_default_pointer_address_modes_p ()
4579 && ! POINTERS_EXTEND_UNSIGNED && GET_MODE (x) == Pmode
4580 && mode == Pmode && REG_POINTER (x))
ded805e6 4581 return GET_MODE_PRECISION (Pmode) - GET_MODE_PRECISION (ptr_mode) + 1;
d263732c 4582#endif
4583
4584 {
4585 unsigned int copies_for_hook = 1, copies = 1;
47cfb7f4 4586 rtx new_rtx = rtl_hooks.reg_num_sign_bit_copies (x, mode, known_x,
d263732c 4587 known_mode, known_ret,
4588 &copies_for_hook);
4589
47cfb7f4 4590 if (new_rtx)
4591 copies = cached_num_sign_bit_copies (new_rtx, mode, known_x,
d263732c 4592 known_mode, known_ret);
4593
4594 if (copies > 1 || copies_for_hook > 1)
4595 return MAX (copies, copies_for_hook);
4596
4597 /* Else, use nonzero_bits to guess num_sign_bit_copies (see below). */
4598 }
4599 break;
4600
4601 case MEM:
4602#ifdef LOAD_EXTEND_OP
 4603      /* Some RISC machines sign-extend all loads smaller than a word.  */
4604 if (LOAD_EXTEND_OP (GET_MODE (x)) == SIGN_EXTEND)
4605 return MAX (1, ((int) bitwidth
ded805e6 4606 - (int) GET_MODE_PRECISION (GET_MODE (x)) + 1));
d263732c 4607#endif
4608 break;
4609
4610 case CONST_INT:
4611 /* If the constant is negative, take its 1's complement and remask.
4612 Then see how many zero bits we have. */
9d8859f1 4613 nonzero = UINTVAL (x) & GET_MODE_MASK (mode);
d263732c 4614 if (bitwidth <= HOST_BITS_PER_WIDE_INT
9d8859f1 4615 && (nonzero & ((unsigned HOST_WIDE_INT) 1 << (bitwidth - 1))) != 0)
d263732c 4616 nonzero = (~nonzero) & GET_MODE_MASK (mode);
4617
4618 return (nonzero == 0 ? bitwidth : bitwidth - floor_log2 (nonzero) - 1);
4619
4620 case SUBREG:
4621 /* If this is a SUBREG for a promoted object that is sign-extended
4622 and we are looking at it in a wider mode, we know that at least the
4623 high-order bits are known to be sign bit copies. */
4624
e8629f9e 4625 if (SUBREG_PROMOTED_VAR_P (x) && SUBREG_PROMOTED_SIGNED_P (x))
d263732c 4626 {
4627 num0 = cached_num_sign_bit_copies (SUBREG_REG (x), mode,
4628 known_x, known_mode, known_ret);
4629 return MAX ((int) bitwidth
ded805e6 4630 - (int) GET_MODE_PRECISION (GET_MODE (x)) + 1,
d263732c 4631 num0);
4632 }
4633
4634 /* For a smaller object, just ignore the high bits. */
ded805e6 4635 if (bitwidth <= GET_MODE_PRECISION (GET_MODE (SUBREG_REG (x))))
d263732c 4636 {
4637 num0 = cached_num_sign_bit_copies (SUBREG_REG (x), VOIDmode,
4638 known_x, known_mode, known_ret);
4639 return MAX (1, (num0
ded805e6 4640 - (int) (GET_MODE_PRECISION (GET_MODE (SUBREG_REG (x)))
d263732c 4641 - bitwidth)));
4642 }
4643
4644#ifdef WORD_REGISTER_OPERATIONS
4645#ifdef LOAD_EXTEND_OP
4646 /* For paradoxical SUBREGs on machines where all register operations
4647 affect the entire register, just look inside. Note that we are
4648 passing MODE to the recursive call, so the number of sign bit copies
4649 will remain relative to that mode, not the inner mode. */
4650
4651 /* This works only if loads sign extend. Otherwise, if we get a
4652 reload for the inner part, it may be loaded from the stack, and
4653 then we lose all sign bit copies that existed before the store
4654 to the stack. */
4655
b537bfdb 4656 if (paradoxical_subreg_p (x)
d263732c 4657 && LOAD_EXTEND_OP (GET_MODE (SUBREG_REG (x))) == SIGN_EXTEND
e16ceb8e 4658 && MEM_P (SUBREG_REG (x)))
d263732c 4659 return cached_num_sign_bit_copies (SUBREG_REG (x), mode,
4660 known_x, known_mode, known_ret);
4661#endif
4662#endif
4663 break;
4664
4665 case SIGN_EXTRACT:
971ba038 4666 if (CONST_INT_P (XEXP (x, 1)))
d263732c 4667 return MAX (1, (int) bitwidth - INTVAL (XEXP (x, 1)));
4668 break;
4669
4670 case SIGN_EXTEND:
ded805e6 4671 return (bitwidth - GET_MODE_PRECISION (GET_MODE (XEXP (x, 0)))
d263732c 4672 + cached_num_sign_bit_copies (XEXP (x, 0), VOIDmode,
4673 known_x, known_mode, known_ret));
4674
4675 case TRUNCATE:
4676 /* For a smaller object, just ignore the high bits. */
4677 num0 = cached_num_sign_bit_copies (XEXP (x, 0), VOIDmode,
4678 known_x, known_mode, known_ret);
ded805e6 4679 return MAX (1, (num0 - (int) (GET_MODE_PRECISION (GET_MODE (XEXP (x, 0)))
d263732c 4680 - bitwidth)));
4681
4682 case NOT:
4683 return cached_num_sign_bit_copies (XEXP (x, 0), mode,
4684 known_x, known_mode, known_ret);
4685
4686 case ROTATE: case ROTATERT:
4687 /* If we are rotating left by a number of bits less than the number
4688 of sign bit copies, we can just subtract that amount from the
4689 number. */
971ba038 4690 if (CONST_INT_P (XEXP (x, 1))
d263732c 4691 && INTVAL (XEXP (x, 1)) >= 0
4692 && INTVAL (XEXP (x, 1)) < (int) bitwidth)
4693 {
4694 num0 = cached_num_sign_bit_copies (XEXP (x, 0), mode,
4695 known_x, known_mode, known_ret);
4696 return MAX (1, num0 - (code == ROTATE ? INTVAL (XEXP (x, 1))
4697 : (int) bitwidth - INTVAL (XEXP (x, 1))));
4698 }
4699 break;
4700
4701 case NEG:
4702 /* In general, this subtracts one sign bit copy. But if the value
4703 is known to be positive, the number of sign bit copies is the
4704 same as that of the input. Finally, if the input has just one bit
4705 that might be nonzero, all the bits are copies of the sign bit. */
4706 num0 = cached_num_sign_bit_copies (XEXP (x, 0), mode,
4707 known_x, known_mode, known_ret);
4708 if (bitwidth > HOST_BITS_PER_WIDE_INT)
4709 return num0 > 1 ? num0 - 1 : 1;
4710
4711 nonzero = nonzero_bits (XEXP (x, 0), mode);
4712 if (nonzero == 1)
4713 return bitwidth;
4714
4715 if (num0 > 1
9d8859f1 4716 && (((unsigned HOST_WIDE_INT) 1 << (bitwidth - 1)) & nonzero))
d263732c 4717 num0--;
4718
4719 return num0;
4720
4721 case IOR: case AND: case XOR:
4722 case SMIN: case SMAX: case UMIN: case UMAX:
4723 /* Logical operations will preserve the number of sign-bit copies.
4724 MIN and MAX operations always return one of the operands. */
4725 num0 = cached_num_sign_bit_copies (XEXP (x, 0), mode,
4726 known_x, known_mode, known_ret);
4727 num1 = cached_num_sign_bit_copies (XEXP (x, 1), mode,
4728 known_x, known_mode, known_ret);
c07054e9 4729
4730 /* If num1 is clearing some of the top bits then regardless of
4731 the other term, we are guaranteed to have at least that many
4732 high-order zero bits. */
4733 if (code == AND
4734 && num1 > 1
4735 && bitwidth <= HOST_BITS_PER_WIDE_INT
971ba038 4736 && CONST_INT_P (XEXP (x, 1))
9d8859f1 4737 && (UINTVAL (XEXP (x, 1))
4738 & ((unsigned HOST_WIDE_INT) 1 << (bitwidth - 1))) == 0)
c07054e9 4739 return num1;
4740
4741 /* Similarly for IOR when setting high-order bits. */
4742 if (code == IOR
4743 && num1 > 1
4744 && bitwidth <= HOST_BITS_PER_WIDE_INT
971ba038 4745 && CONST_INT_P (XEXP (x, 1))
9d8859f1 4746 && (UINTVAL (XEXP (x, 1))
4747 & ((unsigned HOST_WIDE_INT) 1 << (bitwidth - 1))) != 0)
c07054e9 4748 return num1;
4749
d263732c 4750 return MIN (num0, num1);
4751
4752 case PLUS: case MINUS:
4753 /* For addition and subtraction, we can have a 1-bit carry. However,
4754 if we are subtracting 1 from a positive number, there will not
4755 be such a carry. Furthermore, if the positive number is known to
4756 be 0 or 1, we know the result is either -1 or 0. */
4757
4758 if (code == PLUS && XEXP (x, 1) == constm1_rtx
4759 && bitwidth <= HOST_BITS_PER_WIDE_INT)
4760 {
4761 nonzero = nonzero_bits (XEXP (x, 0), mode);
9d8859f1 4762 if ((((unsigned HOST_WIDE_INT) 1 << (bitwidth - 1)) & nonzero) == 0)
d263732c 4763 return (nonzero == 1 || nonzero == 0 ? bitwidth
4764 : bitwidth - floor_log2 (nonzero) - 1);
4765 }
4766
4767 num0 = cached_num_sign_bit_copies (XEXP (x, 0), mode,
4768 known_x, known_mode, known_ret);
4769 num1 = cached_num_sign_bit_copies (XEXP (x, 1), mode,
4770 known_x, known_mode, known_ret);
4771 result = MAX (1, MIN (num0, num1) - 1);
4772
d263732c 4773 return result;
4774
4775 case MULT:
4776 /* The number of bits of the product is the sum of the number of
 4777	 bits of both terms.  However, unless one of the terms is known
4778 to be positive, we must allow for an additional bit since negating
4779 a negative number can remove one sign bit copy. */
4780
4781 num0 = cached_num_sign_bit_copies (XEXP (x, 0), mode,
4782 known_x, known_mode, known_ret);
4783 num1 = cached_num_sign_bit_copies (XEXP (x, 1), mode,
4784 known_x, known_mode, known_ret);
4785
4786 result = bitwidth - (bitwidth - num0) - (bitwidth - num1);
4787 if (result > 0
4788 && (bitwidth > HOST_BITS_PER_WIDE_INT
4789 || (((nonzero_bits (XEXP (x, 0), mode)
9d8859f1 4790 & ((unsigned HOST_WIDE_INT) 1 << (bitwidth - 1))) != 0)
d263732c 4791 && ((nonzero_bits (XEXP (x, 1), mode)
9d8859f1 4792 & ((unsigned HOST_WIDE_INT) 1 << (bitwidth - 1)))
4793 != 0))))
d263732c 4794 result--;
4795
4796 return MAX (1, result);
4797
4798 case UDIV:
4799 /* The result must be <= the first operand. If the first operand
4800 has the high bit set, we know nothing about the number of sign
4801 bit copies. */
4802 if (bitwidth > HOST_BITS_PER_WIDE_INT)
4803 return 1;
4804 else if ((nonzero_bits (XEXP (x, 0), mode)
9d8859f1 4805 & ((unsigned HOST_WIDE_INT) 1 << (bitwidth - 1))) != 0)
d263732c 4806 return 1;
4807 else
4808 return cached_num_sign_bit_copies (XEXP (x, 0), mode,
4809 known_x, known_mode, known_ret);
4810
4811 case UMOD:
3c2a960d 4812 /* The result must be <= the second operand. If the second operand
4813 has (or just might have) the high bit set, we know nothing about
4814 the number of sign bit copies. */
4815 if (bitwidth > HOST_BITS_PER_WIDE_INT)
4816 return 1;
4817 else if ((nonzero_bits (XEXP (x, 1), mode)
9d8859f1 4818 & ((unsigned HOST_WIDE_INT) 1 << (bitwidth - 1))) != 0)
3c2a960d 4819 return 1;
4820 else
4821 return cached_num_sign_bit_copies (XEXP (x, 1), mode,
d263732c 4822 known_x, known_mode, known_ret);
4823
4824 case DIV:
4825 /* Similar to unsigned division, except that we have to worry about
4826 the case where the divisor is negative, in which case we have
4827 to add 1. */
4828 result = cached_num_sign_bit_copies (XEXP (x, 0), mode,
4829 known_x, known_mode, known_ret);
4830 if (result > 1
4831 && (bitwidth > HOST_BITS_PER_WIDE_INT
4832 || (nonzero_bits (XEXP (x, 1), mode)
9d8859f1 4833 & ((unsigned HOST_WIDE_INT) 1 << (bitwidth - 1))) != 0))
d263732c 4834 result--;
4835
4836 return result;
4837
4838 case MOD:
4839 result = cached_num_sign_bit_copies (XEXP (x, 1), mode,
4840 known_x, known_mode, known_ret);
4841 if (result > 1
4842 && (bitwidth > HOST_BITS_PER_WIDE_INT
4843 || (nonzero_bits (XEXP (x, 1), mode)
9d8859f1 4844 & ((unsigned HOST_WIDE_INT) 1 << (bitwidth - 1))) != 0))
d263732c 4845 result--;
4846
4847 return result;
4848
4849 case ASHIFTRT:
4850 /* Shifts by a constant add to the number of bits equal to the
4851 sign bit. */
4852 num0 = cached_num_sign_bit_copies (XEXP (x, 0), mode,
4853 known_x, known_mode, known_ret);
971ba038 4854 if (CONST_INT_P (XEXP (x, 1))
6026d749 4855 && INTVAL (XEXP (x, 1)) > 0
ded805e6 4856 && INTVAL (XEXP (x, 1)) < GET_MODE_PRECISION (GET_MODE (x)))
d263732c 4857 num0 = MIN ((int) bitwidth, num0 + INTVAL (XEXP (x, 1)));
4858
4859 return num0;
4860
4861 case ASHIFT:
4862 /* Left shifts destroy copies. */
971ba038 4863 if (!CONST_INT_P (XEXP (x, 1))
d263732c 4864 || INTVAL (XEXP (x, 1)) < 0
6026d749 4865 || INTVAL (XEXP (x, 1)) >= (int) bitwidth
ded805e6 4866 || INTVAL (XEXP (x, 1)) >= GET_MODE_PRECISION (GET_MODE (x)))
d263732c 4867 return 1;
4868
4869 num0 = cached_num_sign_bit_copies (XEXP (x, 0), mode,
4870 known_x, known_mode, known_ret);
4871 return MAX (1, num0 - INTVAL (XEXP (x, 1)));
4872
4873 case IF_THEN_ELSE:
4874 num0 = cached_num_sign_bit_copies (XEXP (x, 1), mode,
4875 known_x, known_mode, known_ret);
4876 num1 = cached_num_sign_bit_copies (XEXP (x, 2), mode,
4877 known_x, known_mode, known_ret);
4878 return MIN (num0, num1);
4879
4880 case EQ: case NE: case GE: case GT: case LE: case LT:
4881 case UNEQ: case LTGT: case UNGE: case UNGT: case UNLE: case UNLT:
4882 case GEU: case GTU: case LEU: case LTU:
4883 case UNORDERED: case ORDERED:
 4884      /* If STORE_FLAG_VALUE is negative, take its 1's complement and
 4885	 remask.  Then see how many zero bits we have.  */
4886 nonzero = STORE_FLAG_VALUE;
4887 if (bitwidth <= HOST_BITS_PER_WIDE_INT
9d8859f1 4888 && (nonzero & ((unsigned HOST_WIDE_INT) 1 << (bitwidth - 1))) != 0)
d263732c 4889 nonzero = (~nonzero) & GET_MODE_MASK (mode);
4890
4891 return (nonzero == 0 ? bitwidth : bitwidth - floor_log2 (nonzero) - 1);
4892
4893 default:
4894 break;
4895 }
4896
4897 /* If we haven't been able to figure it out by one of the above rules,
4898 see if some of the high-order bits are known to be zero. If so,
4899 count those bits and return one less than that amount. If we can't
4900 safely compute the mask for this mode, always return BITWIDTH. */
4901
ded805e6 4902 bitwidth = GET_MODE_PRECISION (mode);
d263732c 4903 if (bitwidth > HOST_BITS_PER_WIDE_INT)
4904 return 1;
4905
4906 nonzero = nonzero_bits (x, mode);
9d8859f1 4907 return nonzero & ((unsigned HOST_WIDE_INT) 1 << (bitwidth - 1))
d263732c 4908 ? 1 : bitwidth - floor_log2 (nonzero) - 1;
4909}
0a8a047c 4910
4911/* Calculate the rtx_cost of a single instruction. A return value of
4912 zero indicates an instruction pattern without a known cost. */
4913
4914int
f529eb25 4915insn_rtx_cost (rtx pat, bool speed)
0a8a047c 4916{
4917 int i, cost;
4918 rtx set;
4919
4920 /* Extract the single set rtx from the instruction pattern.
4921 We can't use single_set since we only have the pattern. */
4922 if (GET_CODE (pat) == SET)
4923 set = pat;
4924 else if (GET_CODE (pat) == PARALLEL)
4925 {
4926 set = NULL_RTX;
4927 for (i = 0; i < XVECLEN (pat, 0); i++)
4928 {
4929 rtx x = XVECEXP (pat, 0, i);
4930 if (GET_CODE (x) == SET)
4931 {
4932 if (set)
4933 return 0;
4934 set = x;
4935 }
4936 }
4937 if (!set)
4938 return 0;
4939 }
4940 else
4941 return 0;
4942
7013e87c 4943 cost = set_src_cost (SET_SRC (set), speed);
0a8a047c 4944 return cost > 0 ? cost : COSTS_N_INSNS (1);
4945}
ea92ba80 4946
9a416363 4947/* Return an estimate of the cost of computing SEQ.  */
4948
4949unsigned
4950seq_cost (const rtx_insn *seq, bool speed)
4951{
4952 unsigned cost = 0;
4953 rtx set;
4954
4955 for (; seq; seq = NEXT_INSN (seq))
4956 {
4957 set = single_set (seq);
4958 if (set)
4959 cost += set_rtx_cost (set, speed);
4960 else
4961 cost++;
4962 }
4963
4964 return cost;
4965}
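/* For illustration, a typical use of seq_cost (a sketch, not code from
   this file) is to compare candidate expansions:

       start_sequence ();
       ... emit candidate insns ...
       rtx_insn *seq = get_insns ();
       end_sequence ();
       if (seq_cost (seq, optimize_insn_for_speed_p ()) < best_cost)
         ...

   Insns that are not single sets are charged a nominal cost of 1.  */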
4966
ea92ba80 4967/* Given an insn INSN and condition COND, return the condition in a
4968 canonical form to simplify testing by callers. Specifically:
4969
4970 (1) The code will always be a comparison operation (EQ, NE, GT, etc.).
4971 (2) Both operands will be machine operands; (cc0) will have been replaced.
4972 (3) If an operand is a constant, it will be the second operand.
4973 (4) (LE x const) will be replaced with (LT x <const+1>) and similarly
4974 for GE, GEU, and LEU.
4975
4976 If the condition cannot be understood, or is an inequality floating-point
4977 comparison which needs to be reversed, 0 will be returned.
4978
 4979   If REVERSE is nonzero, then reverse the condition prior to canonicalizing
 it.
4980
4981 If EARLIEST is nonzero, it is a pointer to a place where the earliest
4982 insn used in locating the condition was found. If a replacement test
4983 of the condition is desired, it should be placed in front of that
4984 insn and we will be sure that the inputs are still valid.
4985
4986 If WANT_REG is nonzero, we wish the condition to be relative to that
4987 register, if possible. Therefore, do not canonicalize the condition
48e1416a 4988 further. If ALLOW_CC_MODE is nonzero, allow the condition returned
ea92ba80 4989   to be a comparison with a CC mode register.
4990
4991 If VALID_AT_INSN_P, the condition must be valid at both *EARLIEST
4992 and at INSN. */
4993
4994rtx
2d650f54 4995canonicalize_condition (rtx_insn *insn, rtx cond, int reverse,
4996 rtx_insn **earliest,
ea92ba80 4997 rtx want_reg, int allow_cc_mode, int valid_at_insn_p)
4998{
4999 enum rtx_code code;
2d650f54 5000 rtx_insn *prev = insn;
dd9b9fc5 5001 const_rtx set;
ea92ba80 5002 rtx tem;
5003 rtx op0, op1;
5004 int reverse_code = 0;
3754d046 5005 machine_mode mode;
64685a89 5006 basic_block bb = BLOCK_FOR_INSN (insn);
ea92ba80 5007
5008 code = GET_CODE (cond);
5009 mode = GET_MODE (cond);
5010 op0 = XEXP (cond, 0);
5011 op1 = XEXP (cond, 1);
5012
5013 if (reverse)
5014 code = reversed_comparison_code (cond, insn);
5015 if (code == UNKNOWN)
5016 return 0;
5017
5018 if (earliest)
5019 *earliest = insn;
5020
5021 /* If we are comparing a register with zero, see if the register is set
5022 in the previous insn to a COMPARE or a comparison operation. Perform
5023 the same tests as a function of STORE_FLAG_VALUE as find_comparison_args
5024 in cse.c */
5025
5026 while ((GET_RTX_CLASS (code) == RTX_COMPARE
5027 || GET_RTX_CLASS (code) == RTX_COMM_COMPARE)
5028 && op1 == CONST0_RTX (GET_MODE (op0))
5029 && op0 != want_reg)
5030 {
5031 /* Set nonzero when we find something of interest. */
5032 rtx x = 0;
5033
ea92ba80 5034 /* If comparison with cc0, import actual comparison from compare
5035 insn. */
5036 if (op0 == cc0_rtx)
5037 {
5038 if ((prev = prev_nonnote_insn (prev)) == 0
5039 || !NONJUMP_INSN_P (prev)
5040 || (set = single_set (prev)) == 0
5041 || SET_DEST (set) != cc0_rtx)
5042 return 0;
5043
5044 op0 = SET_SRC (set);
5045 op1 = CONST0_RTX (GET_MODE (op0));
5046 if (earliest)
5047 *earliest = prev;
5048 }
ea92ba80 5049
5050 /* If this is a COMPARE, pick up the two things being compared. */
5051 if (GET_CODE (op0) == COMPARE)
5052 {
5053 op1 = XEXP (op0, 1);
5054 op0 = XEXP (op0, 0);
5055 continue;
5056 }
5057 else if (!REG_P (op0))
5058 break;
5059
5060 /* Go back to the previous insn. Stop if it is not an INSN. We also
5061 stop if it isn't a single set or if it has a REG_INC note because
5062 we don't want to bother dealing with it. */
5063
5b8537a8 5064 prev = prev_nonnote_nondebug_insn (prev);
9845d120 5065
5066 if (prev == 0
ea92ba80 5067 || !NONJUMP_INSN_P (prev)
64685a89 5068 || FIND_REG_INC_NOTE (prev, NULL_RTX)
5069 /* In cfglayout mode, there do not have to be labels at the
5070 beginning of a block, or jumps at the end, so the previous
5071 conditions would not stop us when we reach bb boundary. */
5072 || BLOCK_FOR_INSN (prev) != bb)
ea92ba80 5073 break;
5074
5075 set = set_of (op0, prev);
5076
5077 if (set
5078 && (GET_CODE (set) != SET
5079 || !rtx_equal_p (SET_DEST (set), op0)))
5080 break;
5081
5082 /* If this is setting OP0, get what it sets it to if it looks
5083 relevant. */
5084 if (set)
5085 {
3754d046 5086 machine_mode inner_mode = GET_MODE (SET_DEST (set));
ea92ba80 5087#ifdef FLOAT_STORE_FLAG_VALUE
5088 REAL_VALUE_TYPE fsfv;
5089#endif
5090
5091 /* ??? We may not combine comparisons done in a CCmode with
5092 comparisons not done in a CCmode. This is to aid targets
5093 like Alpha that have an IEEE compliant EQ instruction, and
5094 a non-IEEE compliant BEQ instruction. The use of CCmode is
5095 actually artificial, simply to prevent the combination, but
5096 should not affect other platforms.
5097
5098 However, we must allow VOIDmode comparisons to match either
5099 CCmode or non-CCmode comparison, because some ports have
5100 modeless comparisons inside branch patterns.
5101
5102 ??? This mode check should perhaps look more like the mode check
5103 in simplify_comparison in combine. */
d7f4ca1d 5104 if (((GET_MODE_CLASS (mode) == MODE_CC)
5105 != (GET_MODE_CLASS (inner_mode) == MODE_CC))
5106 && mode != VOIDmode
5107 && inner_mode != VOIDmode)
5108 break;
5109 if (GET_CODE (SET_SRC (set)) == COMPARE
5110 || (((code == NE
5111 || (code == LT
5112 && val_signbit_known_set_p (inner_mode,
5113 STORE_FLAG_VALUE))
ea92ba80 5114#ifdef FLOAT_STORE_FLAG_VALUE
d7f4ca1d 5115 || (code == LT
5116 && SCALAR_FLOAT_MODE_P (inner_mode)
5117 && (fsfv = FLOAT_STORE_FLAG_VALUE (inner_mode),
5118 REAL_VALUE_NEGATIVE (fsfv)))
ea92ba80 5119#endif
d7f4ca1d 5120 ))
5121 && COMPARISON_P (SET_SRC (set))))
ea92ba80 5122 x = SET_SRC (set);
5123 else if (((code == EQ
5124 || (code == GE
f92430e0 5125 && val_signbit_known_set_p (inner_mode,
5126 STORE_FLAG_VALUE))
ea92ba80 5127#ifdef FLOAT_STORE_FLAG_VALUE
5128 || (code == GE
cee7491d 5129 && SCALAR_FLOAT_MODE_P (inner_mode)
ea92ba80 5130 && (fsfv = FLOAT_STORE_FLAG_VALUE (inner_mode),
5131 REAL_VALUE_NEGATIVE (fsfv)))
5132#endif
5133 ))
d7f4ca1d 5134 && COMPARISON_P (SET_SRC (set)))
ea92ba80 5135 {
5136 reverse_code = 1;
5137 x = SET_SRC (set);
5138 }
d7f4ca1d 5139 else if ((code == EQ || code == NE)
5140 && GET_CODE (SET_SRC (set)) == XOR)
5141 /* Handle sequences like:
5142
5143 (set op0 (xor X Y))
5144 ...(eq|ne op0 (const_int 0))...
5145
5146 in which case:
5147
5148 (eq op0 (const_int 0)) reduces to (eq X Y)
5149 (ne op0 (const_int 0)) reduces to (ne X Y)
5150
5151 This is the form used by MIPS16, for example. */
5152 x = SET_SRC (set);
ea92ba80 5153 else
5154 break;
5155 }
5156
5157 else if (reg_set_p (op0, prev))
5158 /* If this sets OP0, but not directly, we have to give up. */
5159 break;
5160
5161 if (x)
5162 {
5163 /* If the caller is expecting the condition to be valid at INSN,
5164 make sure X doesn't change before INSN. */
5165 if (valid_at_insn_p)
5166 if (modified_in_p (x, prev) || modified_between_p (x, prev, insn))
5167 break;
5168 if (COMPARISON_P (x))
5169 code = GET_CODE (x);
5170 if (reverse_code)
5171 {
5172 code = reversed_comparison_code (x, prev);
5173 if (code == UNKNOWN)
5174 return 0;
5175 reverse_code = 0;
5176 }
5177
5178 op0 = XEXP (x, 0), op1 = XEXP (x, 1);
5179 if (earliest)
5180 *earliest = prev;
5181 }
5182 }
5183
5184 /* If constant is first, put it last. */
5185 if (CONSTANT_P (op0))
5186 code = swap_condition (code), tem = op0, op0 = op1, op1 = tem;
5187
5188 /* If OP0 is the result of a comparison, we weren't able to find what
5189 was really being compared, so fail. */
5190 if (!allow_cc_mode
5191 && GET_MODE_CLASS (GET_MODE (op0)) == MODE_CC)
5192 return 0;
5193
5194 /* Canonicalize any ordered comparison with integers involving equality
5195 if we can do computations in the relevant mode and we do not
5196 overflow. */
5197
5198 if (GET_MODE_CLASS (GET_MODE (op0)) != MODE_CC
971ba038 5199 && CONST_INT_P (op1)
ea92ba80 5200 && GET_MODE (op0) != VOIDmode
ded805e6 5201 && GET_MODE_PRECISION (GET_MODE (op0)) <= HOST_BITS_PER_WIDE_INT)
ea92ba80 5202 {
5203 HOST_WIDE_INT const_val = INTVAL (op1);
5204 unsigned HOST_WIDE_INT uconst_val = const_val;
5205 unsigned HOST_WIDE_INT max_val
5206 = (unsigned HOST_WIDE_INT) GET_MODE_MASK (GET_MODE (op0));
5207
5208 switch (code)
5209 {
5210 case LE:
5211 if ((unsigned HOST_WIDE_INT) const_val != max_val >> 1)
5212 code = LT, op1 = gen_int_mode (const_val + 1, GET_MODE (op0));
5213 break;
5214
 5215	  /* When cross-compiling, const_val might be sign-extended from
 5216	     BITS_PER_WORD to HOST_BITS_PER_WIDE_INT.  */
5217 case GE:
9d8859f1 5218 if ((const_val & max_val)
5219 != ((unsigned HOST_WIDE_INT) 1
ded805e6 5220 << (GET_MODE_PRECISION (GET_MODE (op0)) - 1)))
ea92ba80 5221 code = GT, op1 = gen_int_mode (const_val - 1, GET_MODE (op0));
5222 break;
5223
5224 case LEU:
5225 if (uconst_val < max_val)
5226 code = LTU, op1 = gen_int_mode (uconst_val + 1, GET_MODE (op0));
5227 break;
5228
5229 case GEU:
5230 if (uconst_val != 0)
5231 code = GTU, op1 = gen_int_mode (uconst_val - 1, GET_MODE (op0));
5232 break;
5233
5234 default:
5235 break;
5236 }
5237 }
5238
5239 /* Never return CC0; return zero instead. */
5240 if (CC0_P (op0))
5241 return 0;
5242
5243 return gen_rtx_fmt_ee (code, VOIDmode, op0, op1);
5244}
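/* For illustration of the canonicalizations above: rule (4) rewrites
   (le (reg:SI x) (const_int 4)) as (lt (reg:SI x) (const_int 5)),
   while (gt (const_int 4) (reg:SI x)) first has its constant moved
   last, yielding (lt (reg:SI x) (const_int 4)).  */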
5245
5246/* Given a jump insn JUMP, return the condition that will cause it to branch
5247 to its JUMP_LABEL. If the condition cannot be understood, or is an
5248 inequality floating-point comparison which needs to be reversed, 0 will
5249 be returned.
5250
5251 If EARLIEST is nonzero, it is a pointer to a place where the earliest
5252 insn used in locating the condition was found. If a replacement test
5253 of the condition is desired, it should be placed in front of that
5254 insn and we will be sure that the inputs are still valid. If EARLIEST
5255 is null, the returned condition will be valid at INSN.
5256
5257 If ALLOW_CC_MODE is nonzero, allow the condition returned to be a
 5258   comparison with a CC mode register.
5259
5260 VALID_AT_INSN_P is the same as for canonicalize_condition. */
5261
5262rtx
2d650f54 5263get_condition (rtx_insn *jump, rtx_insn **earliest, int allow_cc_mode,
5264 int valid_at_insn_p)
ea92ba80 5265{
5266 rtx cond;
5267 int reverse;
5268 rtx set;
5269
5270 /* If this is not a standard conditional jump, we can't parse it. */
5271 if (!JUMP_P (jump)
5272 || ! any_condjump_p (jump))
5273 return 0;
5274 set = pc_set (jump);
5275
5276 cond = XEXP (SET_SRC (set), 0);
5277
5278 /* If this branches to JUMP_LABEL when the condition is false, reverse
5279 the condition. */
5280 reverse
5281 = GET_CODE (XEXP (SET_SRC (set), 2)) == LABEL_REF
b49f2e4b 5282 && LABEL_REF_LABEL (XEXP (SET_SRC (set), 2)) == JUMP_LABEL (jump);
ea92ba80 5283
5284 return canonicalize_condition (jump, cond, reverse, earliest, NULL_RTX,
5285 allow_cc_mode, valid_at_insn_p);
5286}
5287
4956440a 5288/* Initialize the table NUM_SIGN_BIT_COPIES_IN_REP based on
5289 TARGET_MODE_REP_EXTENDED.
5290
 5291   Note that we assume that the property of
 5292   TARGET_MODE_REP_EXTENDED(B, C) carries over to the integral modes
 5293   narrower than mode B.  I.e., if A is a mode narrower than B, then in
 5294   order to be able to operate on it in mode B, mode A needs to
 5295   satisfy the requirements set by the representation of mode B.  */
5296
5297static void
5298init_num_sign_bit_copies_in_rep (void)
5299{
3754d046 5300 machine_mode mode, in_mode;
4956440a 5301
5302 for (in_mode = GET_CLASS_NARROWEST_MODE (MODE_INT); in_mode != VOIDmode;
5303 in_mode = GET_MODE_WIDER_MODE (mode))
5304 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != in_mode;
5305 mode = GET_MODE_WIDER_MODE (mode))
5306 {
3754d046 5307 machine_mode i;
4956440a 5308
5309 /* Currently, it is assumed that TARGET_MODE_REP_EXTENDED
5310 extends to the next widest mode. */
5311 gcc_assert (targetm.mode_rep_extended (mode, in_mode) == UNKNOWN
5312 || GET_MODE_WIDER_MODE (mode) == in_mode);
5313
5314 /* We are in in_mode. Count how many bits outside of mode
5315 have to be copies of the sign-bit. */
5316 for (i = mode; i != in_mode; i = GET_MODE_WIDER_MODE (i))
5317 {
3754d046 5318 machine_mode wider = GET_MODE_WIDER_MODE (i);
4956440a 5319
5320 if (targetm.mode_rep_extended (i, wider) == SIGN_EXTEND
5321 /* We can only check sign-bit copies starting from the
5322 top-bit. In order to be able to check the bits we
5323 have already seen we pretend that subsequent bits
5324 have to be sign-bit copies too. */
5325 || num_sign_bit_copies_in_rep [in_mode][mode])
5326 num_sign_bit_copies_in_rep [in_mode][mode]
ded805e6 5327 += GET_MODE_PRECISION (wider) - GET_MODE_PRECISION (i);
4956440a 5328 }
5329 }
5330}
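
/* For example (illustrative): on a target where
   TARGET_MODE_REP_EXTENDED (SImode, DImode) == SIGN_EXTEND, with
   SImode 32 bits wide and DImode 64, the loops above record
   num_sign_bit_copies_in_rep[DImode][SImode] == 32: a DImode value
   standing for an SImode quantity must have its top 32 bits equal to
   the sign bit.  */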

/* Suppose that truncation from the machine mode of X to MODE is not a
   no-op.  See if there is anything special about X so that we can
   assume it already contains a truncated value of MODE.  */

bool
truncated_to_mode (machine_mode mode, const_rtx x)
{
  /* This register has already been used in MODE without explicit
     truncation.  */
  if (REG_P (x) && rtl_hooks.reg_truncated_to_mode (mode, x))
    return true;

  /* See if we already satisfy the requirements of MODE.  If yes we
     can just switch to MODE.  */
  if (num_sign_bit_copies_in_rep[GET_MODE (x)][mode]
      && (num_sign_bit_copies (x, GET_MODE (x))
          >= num_sign_bit_copies_in_rep[GET_MODE (x)][mode] + 1))
    return true;

  return false;
}
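
/* Continuing the example above: truncated_to_mode (SImode, x) accepts
   a DImode X with at least 33 sign-bit copies, since such a value
   already satisfies SImode's representation requirement.  */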
\f
/* Return true if RTX code CODE has a single sequence of zero or more
   "e" operands and no rtvec operands.  Initialize its rtx_all_subrtx_bounds
   entry in that case.  */

static bool
setup_reg_subrtx_bounds (unsigned int code)
{
  const char *format = GET_RTX_FORMAT ((enum rtx_code) code);
  unsigned int i = 0;
  for (; format[i] != 'e'; ++i)
    {
      if (!format[i])
        /* No subrtxes.  Leave start and count as 0.  */
        return true;
      if (format[i] == 'E' || format[i] == 'V')
        return false;
    }

  /* Record the sequence of 'e's.  */
  rtx_all_subrtx_bounds[code].start = i;
  do
    ++i;
  while (format[i] == 'e');
  rtx_all_subrtx_bounds[code].count = i - rtx_all_subrtx_bounds[code].start;
  /* rtl-iter.h relies on this.  */
  gcc_checking_assert (rtx_all_subrtx_bounds[code].count <= 3);

  for (; format[i]; ++i)
    if (format[i] == 'E' || format[i] == 'V' || format[i] == 'e')
      return false;

  return true;
}
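
/* For example (illustrative): PLUS has format "ee", so its bounds
   become { start = 0, count = 2 }, and SIGN_EXTRACT ("eee") gets
   { start = 0, count = 3 }; codes with rtvec operands are rejected
   here and given a count of UCHAR_MAX by init_rtlanal below.  */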

/* Initialize rtx_all_subrtx_bounds.  */
void
init_rtlanal (void)
{
  int i;
  for (i = 0; i < NUM_RTX_CODE; i++)
    {
      if (!setup_reg_subrtx_bounds (i))
        rtx_all_subrtx_bounds[i].count = UCHAR_MAX;
      if (GET_RTX_CLASS (i) != RTX_CONST_OBJ)
        rtx_nonconst_subrtx_bounds[i] = rtx_all_subrtx_bounds[i];
    }

  init_num_sign_bit_copies_in_rep ();
}
\f
/* Check whether this is a constant pool constant.  */
bool
constant_pool_constant_p (rtx x)
{
  x = avoid_constant_pool_reference (x);
  return CONST_DOUBLE_P (x);
}
\f
/* If M is a bitmask that selects a field of low-order bits within an item but
   not the entire word, return the length of the field.  Return -1 otherwise.
   M is used in machine mode MODE.  */

int
low_bitmask_len (machine_mode mode, unsigned HOST_WIDE_INT m)
{
  if (mode != VOIDmode)
    {
      if (GET_MODE_PRECISION (mode) > HOST_BITS_PER_WIDE_INT)
        return -1;
      m &= GET_MODE_MASK (mode);
    }

  return exact_log2 (m + 1);
}
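
/* For example: low_bitmask_len (SImode, 0x3f) == 6, because 0x3f
   selects the six low-order bits, whereas low_bitmask_len (SImode, 0x18)
   == -1 because that field does not start at bit 0.  */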

/* Return the mode of MEM's address.  */

machine_mode
get_address_mode (rtx mem)
{
  machine_mode mode;

  gcc_assert (MEM_P (mem));
  mode = GET_MODE (XEXP (mem, 0));
  if (mode != VOIDmode)
    return mode;
  return targetm.addr_space.address_mode (MEM_ADDR_SPACE (mem));
}
\f
/* Split up a CONST_DOUBLE or integer constant rtx
   into two rtx's for single words,
   storing in *FIRST the word that comes first in memory in the target
   and in *SECOND the other.

   TODO: This function needs to be rewritten to work on any size
   integer.  */

void
split_double (rtx value, rtx *first, rtx *second)
{
  if (CONST_INT_P (value))
    {
      if (HOST_BITS_PER_WIDE_INT >= (2 * BITS_PER_WORD))
        {
          /* In this case the CONST_INT holds both target words.
             Extract the bits from it into two word-sized pieces.
             Sign extend each half to HOST_WIDE_INT.  */
          unsigned HOST_WIDE_INT low, high;
          unsigned HOST_WIDE_INT mask, sign_bit, sign_extend;
          unsigned bits_per_word = BITS_PER_WORD;

          /* Set sign_bit to the most significant bit of a word.  */
          sign_bit = 1;
          sign_bit <<= bits_per_word - 1;

          /* Set mask so that all bits of the word are set.  We could
             have used 1 << BITS_PER_WORD instead of basing the
             calculation on sign_bit.  However, on machines where
             HOST_BITS_PER_WIDE_INT == BITS_PER_WORD, it could cause a
             compiler warning, even though the code would never be
             executed.  */
          mask = sign_bit << 1;
          mask--;

          /* Set sign_extend as any remaining bits.  */
          sign_extend = ~mask;

          /* Pick the lower word and sign-extend it.  */
          low = INTVAL (value);
          low &= mask;
          if (low & sign_bit)
            low |= sign_extend;

          /* Pick the higher word, shifted to the least significant
             bits, and sign-extend it.  */
          high = INTVAL (value);
          high >>= bits_per_word - 1;
          high >>= 1;
          high &= mask;
          if (high & sign_bit)
            high |= sign_extend;

          /* Store the words in the target machine order.  */
          if (WORDS_BIG_ENDIAN)
            {
              *first = GEN_INT (high);
              *second = GEN_INT (low);
            }
          else
            {
              *first = GEN_INT (low);
              *second = GEN_INT (high);
            }
        }
      else
        {
          /* The rule for using CONST_INT for a wider mode
             is that we regard the value as signed.
             So sign-extend it.  */
          rtx high = (INTVAL (value) < 0 ? constm1_rtx : const0_rtx);
          if (WORDS_BIG_ENDIAN)
            {
              *first = high;
              *second = value;
            }
          else
            {
              *first = value;
              *second = high;
            }
        }
    }
  else if (GET_CODE (value) == CONST_WIDE_INT)
    {
      /* All of this is scary code and needs to be converted to
         properly work with any size integer.  */
      gcc_assert (CONST_WIDE_INT_NUNITS (value) == 2);
      if (WORDS_BIG_ENDIAN)
        {
          *first = GEN_INT (CONST_WIDE_INT_ELT (value, 1));
          *second = GEN_INT (CONST_WIDE_INT_ELT (value, 0));
        }
      else
        {
          *first = GEN_INT (CONST_WIDE_INT_ELT (value, 0));
          *second = GEN_INT (CONST_WIDE_INT_ELT (value, 1));
        }
    }
  else if (!CONST_DOUBLE_P (value))
    {
      if (WORDS_BIG_ENDIAN)
        {
          *first = const0_rtx;
          *second = value;
        }
      else
        {
          *first = value;
          *second = const0_rtx;
        }
    }
  else if (GET_MODE (value) == VOIDmode
           /* This is the old way we did CONST_DOUBLE integers.  */
           || GET_MODE_CLASS (GET_MODE (value)) == MODE_INT)
    {
      /* In an integer, the words are defined as most and least significant.
         So order them by the target's convention.  */
      if (WORDS_BIG_ENDIAN)
        {
          *first = GEN_INT (CONST_DOUBLE_HIGH (value));
          *second = GEN_INT (CONST_DOUBLE_LOW (value));
        }
      else
        {
          *first = GEN_INT (CONST_DOUBLE_LOW (value));
          *second = GEN_INT (CONST_DOUBLE_HIGH (value));
        }
    }
  else
    {
      REAL_VALUE_TYPE r;
      long l[2];
      REAL_VALUE_FROM_CONST_DOUBLE (r, value);

      /* Note, this converts the REAL_VALUE_TYPE to the target's
         format, splits up the floating point double and outputs
         exactly 32 bits of it into each of l[0] and l[1] --
         not necessarily BITS_PER_WORD bits.  */
      REAL_VALUE_TO_TARGET_DOUBLE (r, l);

      /* If 32 bits is an entire word for the target, but not for the host,
         then sign-extend on the host so that the number will look the same
         way on the host that it would on the target.  See for instance
         simplify_unary_operation.  The #if is needed to avoid compiler
         warnings.  */

#if HOST_BITS_PER_LONG > 32
      if (BITS_PER_WORD < HOST_BITS_PER_LONG && BITS_PER_WORD == 32)
        {
          if (l[0] & ((long) 1 << 31))
            l[0] |= ((long) (-1) << 32);
          if (l[1] & ((long) 1 << 31))
            l[1] |= ((long) (-1) << 32);
        }
#endif

      *first = GEN_INT (l[0]);
      *second = GEN_INT (l[1]);
    }
}
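
/* For example (illustrative): on a 64-bit host targeting a 32-bit
   little-endian machine, splitting (const_int 0x100000002) yields
   *FIRST = (const_int 2) and *SECOND = (const_int 1).  */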

/* Return true if X is a sign_extract or zero_extract from the least
   significant bit.  */

static bool
lsb_bitfield_op_p (rtx x)
{
  if (GET_RTX_CLASS (GET_CODE (x)) == RTX_BITFIELD_OPS)
    {
      machine_mode mode = GET_MODE (XEXP (x, 0));
      HOST_WIDE_INT len = INTVAL (XEXP (x, 1));
      HOST_WIDE_INT pos = INTVAL (XEXP (x, 2));

      return (pos == (BITS_BIG_ENDIAN ? GET_MODE_PRECISION (mode) - len : 0));
    }
  return false;
}
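
/* For example: with !BITS_BIG_ENDIAN,
   (zero_extract:SI (reg:SI x) (const_int 8) (const_int 0)) extracts
   the eight least significant bits and satisfies the predicate; on a
   BITS_BIG_ENDIAN target the same field would be at position 24.  */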

/* Strip outer address "mutations" from LOC and return a pointer to the
   inner value.  If OUTER_CODE is nonnull, store the code of the innermost
   stripped expression there.

   "Mutations" either convert between modes or apply some kind of
   extension, truncation or alignment.  */

rtx *
strip_address_mutations (rtx *loc, enum rtx_code *outer_code)
{
  for (;;)
    {
      enum rtx_code code = GET_CODE (*loc);
      if (GET_RTX_CLASS (code) == RTX_UNARY)
        /* Things like SIGN_EXTEND, ZERO_EXTEND and TRUNCATE can be
           used to convert between pointer sizes.  */
        loc = &XEXP (*loc, 0);
      else if (lsb_bitfield_op_p (*loc))
        /* A [SIGN|ZERO]_EXTRACT from the least significant bit effectively
           acts as a combined truncation and extension.  */
        loc = &XEXP (*loc, 0);
      else if (code == AND && CONST_INT_P (XEXP (*loc, 1)))
        /* (and ... (const_int -X)) is used to align to X bytes.  */
        loc = &XEXP (*loc, 0);
      else if (code == SUBREG
               && !OBJECT_P (SUBREG_REG (*loc))
               && subreg_lowpart_p (*loc))
        /* A lowpart (subreg (operator ...) ...) is used for mode
           conversion too.  */
        loc = &SUBREG_REG (*loc);
      else
        return loc;
      if (outer_code)
        *outer_code = code;
    }
}
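
/* For example (illustrative): given
   (and:DI (zero_extend:DI (plus:SI (reg:SI b) (reg:SI i)))
           (const_int -4))
   the function returns a pointer to the (plus ...) and, if OUTER_CODE
   is nonnull, stores ZERO_EXTEND there, the innermost code stripped.  */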

/* Return true if CODE applies some kind of scale.  The scaled value is
   the first operand and the scale is the second.  */

static bool
binary_scale_code_p (enum rtx_code code)
{
  return (code == MULT
          || code == ASHIFT
          /* Needed by ARM targets.  */
          || code == ASHIFTRT
          || code == LSHIFTRT
          || code == ROTATE
          || code == ROTATERT);
}

/* If *INNER can be interpreted as a base, return a pointer to the inner term
   (see address_info).  Return null otherwise.  */

static rtx *
get_base_term (rtx *inner)
{
  if (GET_CODE (*inner) == LO_SUM)
    inner = strip_address_mutations (&XEXP (*inner, 0));
  if (REG_P (*inner)
      || MEM_P (*inner)
      || GET_CODE (*inner) == SUBREG
      || GET_CODE (*inner) == SCRATCH)
    return inner;
  return 0;
}

/* If *INNER can be interpreted as an index, return a pointer to the inner term
   (see address_info).  Return null otherwise.  */

static rtx *
get_index_term (rtx *inner)
{
  /* At present, only constant scales are allowed.  */
  if (binary_scale_code_p (GET_CODE (*inner)) && CONSTANT_P (XEXP (*inner, 1)))
    inner = strip_address_mutations (&XEXP (*inner, 0));
  if (REG_P (*inner)
      || MEM_P (*inner)
      || GET_CODE (*inner) == SUBREG
      || GET_CODE (*inner) == SCRATCH)
    return inner;
  return 0;
}
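
/* For example: for (mult (reg i) (const_int 4)), get_index_term
   returns a pointer to (reg i), while get_base_term returns null
   because a multiplication is not a valid base shape.  */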

/* Set the segment part of address INFO to LOC, given that INNER is the
   unmutated value.  */

static void
set_address_segment (struct address_info *info, rtx *loc, rtx *inner)
{
  gcc_assert (!info->segment);
  info->segment = loc;
  info->segment_term = inner;
}

/* Set the base part of address INFO to LOC, given that INNER is the
   unmutated value.  */

static void
set_address_base (struct address_info *info, rtx *loc, rtx *inner)
{
  gcc_assert (!info->base);
  info->base = loc;
  info->base_term = inner;
}

/* Set the index part of address INFO to LOC, given that INNER is the
   unmutated value.  */

static void
set_address_index (struct address_info *info, rtx *loc, rtx *inner)
{
  gcc_assert (!info->index);
  info->index = loc;
  info->index_term = inner;
}

/* Set the displacement part of address INFO to LOC, given that INNER
   is the constant term.  */

static void
set_address_disp (struct address_info *info, rtx *loc, rtx *inner)
{
  gcc_assert (!info->disp);
  info->disp = loc;
  info->disp_term = inner;
}

/* INFO->INNER describes a {PRE,POST}_{INC,DEC} address.  Set up the
   rest of INFO accordingly.  */

static void
decompose_incdec_address (struct address_info *info)
{
  info->autoinc_p = true;

  rtx *base = &XEXP (*info->inner, 0);
  set_address_base (info, base, base);
  gcc_checking_assert (info->base == info->base_term);

  /* These addresses are only valid when the size of the addressed
     value is known.  */
  gcc_checking_assert (info->mode != VOIDmode);
}

/* INFO->INNER describes a {PRE,POST}_MODIFY address.  Set up the rest
   of INFO accordingly.  */

static void
decompose_automod_address (struct address_info *info)
{
  info->autoinc_p = true;

  rtx *base = &XEXP (*info->inner, 0);
  set_address_base (info, base, base);
  gcc_checking_assert (info->base == info->base_term);

  rtx plus = XEXP (*info->inner, 1);
  gcc_assert (GET_CODE (plus) == PLUS);

  info->base_term2 = &XEXP (plus, 0);
  gcc_checking_assert (rtx_equal_p (*info->base_term, *info->base_term2));

  rtx *step = &XEXP (plus, 1);
  rtx *inner_step = strip_address_mutations (step);
  if (CONSTANT_P (*inner_step))
    set_address_disp (info, step, inner_step);
  else
    set_address_index (info, step, inner_step);
}
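
/* For example (illustrative): for
   (pre_modify (reg sp) (plus (reg sp) (const_int -16)))
   both base terms point at a (reg sp) and the displacement is
   (const_int -16); a register step would instead have been recorded
   as the index.  */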

/* Treat *LOC as a tree of PLUS operands and store pointers to the summed
   values in [PTR, END).  Return a pointer to the end of the used array.  */

static rtx **
extract_plus_operands (rtx *loc, rtx **ptr, rtx **end)
{
  rtx x = *loc;
  if (GET_CODE (x) == PLUS)
    {
      ptr = extract_plus_operands (&XEXP (x, 0), ptr, end);
      ptr = extract_plus_operands (&XEXP (x, 1), ptr, end);
    }
  else
    {
      gcc_assert (ptr != end);
      *ptr++ = loc;
    }
  return ptr;
}
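
/* For example: applied to
   (plus (plus (reg b) (mult (reg i) (const_int 4))) (const_int 8)),
   the recursion collects pointers to (reg b), (mult ...) and
   (const_int 8), in that order.  */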

/* Evaluate the likelihood of X being a base or index value, returning
   positive if it is likely to be a base, negative if it is likely to be
   an index, and 0 if we can't tell.  Make the magnitude of the return
   value reflect the amount of confidence we have in the answer.

   MODE, AS, OUTER_CODE and INDEX_CODE are as for ok_for_base_p_1.  */

static int
baseness (rtx x, machine_mode mode, addr_space_t as,
          enum rtx_code outer_code, enum rtx_code index_code)
{
  /* Believe *_POINTER unless the address shape requires otherwise.  */
  if (REG_P (x) && REG_POINTER (x))
    return 2;
  if (MEM_P (x) && MEM_POINTER (x))
    return 2;

  if (REG_P (x) && HARD_REGISTER_P (x))
    {
      /* X is a hard register.  If it only fits one of the base
         or index classes, choose that interpretation.  */
      int regno = REGNO (x);
      bool base_p = ok_for_base_p_1 (regno, mode, as, outer_code, index_code);
      bool index_p = REGNO_OK_FOR_INDEX_P (regno);
      if (base_p != index_p)
        return base_p ? 1 : -1;
    }
  return 0;
}
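
/* For example: a register known to hold a pointer (REG_POINTER) scores
   2, a hard register that is valid only as a base scores 1, one valid
   only as an index scores -1, and anything else scores 0.  */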

/* INFO->INNER describes a normal, non-automodified address.
   Fill in the rest of INFO accordingly.  */

static void
decompose_normal_address (struct address_info *info)
{
  /* Treat the address as the sum of up to four values.  */
  rtx *ops[4];
  size_t n_ops = extract_plus_operands (info->inner, ops,
                                        ops + ARRAY_SIZE (ops)) - ops;

  /* If there is more than one component, any base component is in a PLUS.  */
  if (n_ops > 1)
    info->base_outer_code = PLUS;

  /* Try to classify each sum operand now.  Leave those that could be
     either a base or an index in OPS.  */
  rtx *inner_ops[4];
  size_t out = 0;
  for (size_t in = 0; in < n_ops; ++in)
    {
      rtx *loc = ops[in];
      rtx *inner = strip_address_mutations (loc);
      if (CONSTANT_P (*inner))
        set_address_disp (info, loc, inner);
      else if (GET_CODE (*inner) == UNSPEC)
        set_address_segment (info, loc, inner);
      else
        {
          /* The only other possibilities are a base or an index.  */
          rtx *base_term = get_base_term (inner);
          rtx *index_term = get_index_term (inner);
          gcc_assert (base_term || index_term);
          if (!base_term)
            set_address_index (info, loc, index_term);
          else if (!index_term)
            set_address_base (info, loc, base_term);
          else
            {
              gcc_assert (base_term == index_term);
              ops[out] = loc;
              inner_ops[out] = base_term;
              ++out;
            }
        }
    }

  /* Classify the remaining OPS members as bases and indexes.  */
  if (out == 1)
    {
      /* If we haven't seen a base or an index yet, assume that this is
         the base.  If we were confident that another term was the base
         or index, treat the remaining operand as the other kind.  */
      if (!info->base)
        set_address_base (info, ops[0], inner_ops[0]);
      else
        set_address_index (info, ops[0], inner_ops[0]);
    }
  else if (out == 2)
    {
      /* In the event of a tie, assume the base comes first.  */
      if (baseness (*inner_ops[0], info->mode, info->as, PLUS,
                    GET_CODE (*ops[1]))
          >= baseness (*inner_ops[1], info->mode, info->as, PLUS,
                       GET_CODE (*ops[0])))
        {
          set_address_base (info, ops[0], inner_ops[0]);
          set_address_index (info, ops[1], inner_ops[1]);
        }
      else
        {
          set_address_base (info, ops[1], inner_ops[1]);
          set_address_index (info, ops[0], inner_ops[0]);
        }
    }
  else
    gcc_assert (out == 0);
}
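
/* For example (illustrative): for the address
   (plus (plus (reg b) (mult (reg i) (const_int 4))) (const_int 8)),
   (const_int 8) becomes the displacement, the (mult ...) can only be
   an index, and (reg b) is then classified as the base.  */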

/* Describe address *LOC in *INFO.  MODE is the mode of the addressed value,
   or VOIDmode if not known.  AS is the address space associated with LOC.
   OUTER_CODE is MEM if *LOC is a MEM address and ADDRESS otherwise.  */

void
decompose_address (struct address_info *info, rtx *loc, machine_mode mode,
                   addr_space_t as, enum rtx_code outer_code)
{
  memset (info, 0, sizeof (*info));
  info->mode = mode;
  info->as = as;
  info->addr_outer_code = outer_code;
  info->outer = loc;
  info->inner = strip_address_mutations (loc, &outer_code);
  info->base_outer_code = outer_code;
  switch (GET_CODE (*info->inner))
    {
    case PRE_DEC:
    case PRE_INC:
    case POST_DEC:
    case POST_INC:
      decompose_incdec_address (info);
      break;

    case PRE_MODIFY:
    case POST_MODIFY:
      decompose_automod_address (info);
      break;

    default:
      decompose_normal_address (info);
      break;
    }
}

/* Describe address operand LOC in INFO.  */

void
decompose_lea_address (struct address_info *info, rtx *loc)
{
  decompose_address (info, loc, VOIDmode, ADDR_SPACE_GENERIC, ADDRESS);
}

/* Describe the address of MEM X in INFO.  */

void
decompose_mem_address (struct address_info *info, rtx x)
{
  gcc_assert (MEM_P (x));
  decompose_address (info, &XEXP (x, 0), GET_MODE (x),
                     MEM_ADDR_SPACE (x), MEM);
}

/* Update INFO after a change to the address it describes.  */

void
update_address (struct address_info *info)
{
  decompose_address (info, info->outer, info->mode, info->as,
                     info->addr_outer_code);
}

/* Return the scale applied to *INFO->INDEX_TERM, or 0 if the index is
   more complicated than that.  */

HOST_WIDE_INT
get_index_scale (const struct address_info *info)
{
  rtx index = *info->index;
  if (GET_CODE (index) == MULT
      && CONST_INT_P (XEXP (index, 1))
      && info->index_term == &XEXP (index, 0))
    return INTVAL (XEXP (index, 1));

  if (GET_CODE (index) == ASHIFT
      && CONST_INT_P (XEXP (index, 1))
      && info->index_term == &XEXP (index, 0))
    return (HOST_WIDE_INT) 1 << INTVAL (XEXP (index, 1));

  if (info->index == info->index_term)
    return 1;

  return 0;
}
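
/* For example: get_index_scale returns 4 for both
   (mult (reg i) (const_int 4)) and (ashift (reg i) (const_int 2)),
   and 1 when the index term is the whole index expression.  */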

/* Return the "index code" of INFO, in the form required by
   ok_for_base_p_1.  */

enum rtx_code
get_index_code (const struct address_info *info)
{
  if (info->index)
    return GET_CODE (*info->index);

  if (info->disp)
    return GET_CODE (*info->disp);

  return SCRATCH;
}

/* Return true if X contains a thread-local symbol.  */

bool
tls_referenced_p (const_rtx x)
{
  if (!targetm.have_tls)
    return false;

  subrtx_iterator::array_type array;
  FOR_EACH_SUBRTX (iter, array, x, ALL)
    if (GET_CODE (*iter) == SYMBOL_REF && SYMBOL_REF_TLS_MODEL (*iter) != 0)
      return true;
  return false;
}